# Copyright 2012-2013 Ravello Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import tempfile
import shutil
import argparse
from nose import SkipTest
from testmill.ravello import RavelloClient
from testmill.state import env, _Environment
testenv = _Environment()
from testmill.test import networkblocker, sudo
from testmill.test.fileops import *
if sys.version_info[0] == 3:
import configparser
else:
import ConfigParser as configparser
__all__ = ('testenv', 'require_sudo', 'require_network_blocking',
'TestSuite', 'unittest', 'integrationtest', 'systemtest',
'tempdir', 'get_common_args', 'parse_ps_output', 'environ')
testdir, _ = os.path.split(os.path.abspath(__file__))
parent = testdir
for i in range(3):
parent, _ = os.path.split(parent)
topdir = parent
def setup_package():
# Have nose capture stderr too.
testenv._nose_saved_stderr = sys.stderr
sys.stderr = sys.stdout
def teardown_package():
sys.stderr = testenv._nose_saved_stderr
testenv._nose_saved_stderr = None
# Create an environment based on ~/test.cfg.
def create_test_environment():
"""Create at test environment based on $topdir/test.cfg."""
fname = os.path.join(topdir, 'test.cfg')
if not os.access(fname, os.R_OK):
m = 'Tests need to be run from a checked out source repository.'
raise RuntimeError(m)
config = configparser.ConfigParser()
config.read([fname])
def config_var(name, default=None):
try:
return config.get('test', name)
except (configparser.NoSectionError, configparser.NoOptionError):
return default
testenv.username = config_var('username')
testenv.password = config_var('password')
testenv.service_url = config_var('service_url') or RavelloClient.default_url
testenv.network_blocking = config_var('network_blocking')
testenv.sudo_password = config_var('sudo_password')
testenv.topdir = topdir
testenv.testdir = testdir
# Create the test environment here. A single copy is shared by all tests.
create_test_environment()
# method decorators
def require_sudo(func):
def wrapped(*args, **kwds):
if not sudo.have_sudo():
raise SkipTest('sudo is not available')
if testenv.sudo_password:
if not sudo.check_sudo_password(testenv.sudo_password):
raise SkipTest('incorrect sudo_password in test.cfg')
elif not sudo.check_passwordless_sudo():
raise SkipTest('sudo_password not set in test.cfg')
return func(*args, **kwds)
wrapped.__name__ = func.__name__
return wrapped
def require_network_blocking(func):
def wrapped(*args, **kwds):
if not networkblocker.have_blocker():
raise SkipTest('network blocking is not available')
if not testenv.network_blocking:
raise SkipTest('network blocking disabled in test.cfg')
return require_sudo(func)(*args, **kwds)
wrapped.__name__ = func.__name__
return wrapped
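# A minimal usage sketch (hypothetical test methods, not part of this module):
# the decorators wrap individual test methods and raise SkipTest before the
# body runs when the prerequisite is unavailable or not configured in test.cfg.
#
#     @require_sudo
#     def test_something_privileged(self):
#         ...  # only runs when sudo works (passwordless or via sudo_password)
#
#     @require_network_blocking
#     def test_with_blocked_network(self):
#         ...  # additionally requires network_blocking to be set in test.cfg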
# utilities
def tempdir():
dname = tempfile.mkdtemp()
testenv._tempdirs.append(dname)
return dname
def rmtree(dname):
def paranoia_ok(dname):
if '/..' in dname or '\\..' in dname:
return False
return '/tmp/' in dname or '\\temp\\' in dname
# Refuse to remove directories that are not in a common temp
# directory. This check is just for peace of mind; it should
# never fail. On platforms with uncommon temp directories this
# check may result in a temp directory not being cleaned up.
if paranoia_ok(dname):
try:
shutil.rmtree(dname)
except OSError:
# On Windows a WindowsError is raised when files are
# still open
pass
def parse_ps_output(output):
"""Parse the output of `ravtest ps` and return a list of
(project, application, running) tuples.
"""
result = []
project = app = info = None
for line in output.splitlines():
line = line.strip()
if not line and app:
result.append((project, app, info))
app = None
continue
if line.startswith('== '):
project = line[line.find(':')+3:-1]
elif line.startswith('=== '):
app = line[line.find(':')+3:-1]
info = {}
if not app:
continue
if 'VMs' in line:
info['vms'] = int(line.split()[0])
return result
def get_common_args():
"""Return a list with the common command-line options."""
args = ['-u', testenv.username, '-p', testenv.password,
'-s', testenv.service_url]
return args
class environ(object):
"""Context manager to manage os.environ."""
def __init__(self, **env):
self.env = env
self.restore = {}
def __enter__(self):
for key in self.env:
self.restore[key] = os.environ[key]
os.environ[key] = self.env[key]
def __exit__(self, *exc_info):
for key in self.restore:
os.environ[key] = self.restore[key]
self.restore.clear()
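# A minimal usage sketch: temporarily override an environment variable for the
# duration of a block. Note that the variable must already be present in
# os.environ, because __enter__ saves the current value before replacing it.
#
#     with environ(HOME=tempdir()):
#         ...  # code under test sees the temporary value of $HOME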
# This is a copy of main.create_environment(). This allows us
# to run unittests and some integration tests already on Py3k.
# (Fabric doesn't yet support Py3k)
def create_environment(args):
"""Set up the global environment."""
env.username = args.user
env.password = args.password
env.service_url = args.service_url
env.quiet = args.quiet
env.verbose = args.verbose
env.manifest = args.manifest
env.debug = args.debug
env.always_confirm = args.yes
env.args = args
env.api = RavelloClient(env.username, env.password, env.service_url)
class TestSuite(object):
"""Base for test suites."""
@classmethod
def setup_class(cls):
os.chdir(testenv.testdir)
def setup(self):
unittest = getattr(self, 'unittest', False)
integrationtest = getattr(self, 'integrationtest', False)
systemtest = getattr(self, 'systemtest', False)
if integrationtest or systemtest:
if not testenv.username or not testenv.password:
raise SkipTest('This test requires API credentials.')
testenv._tempdirs = []
testenv.tempdir = tempdir()
testenv._saved_stderr = sys.stderr
sys.stderr = sys.stdout # Have nose capture stderr too
testenv.context = env.new()
testenv.context.__enter__()
if integrationtest:
args = argparse.Namespace(
user = testenv.username,
password = testenv.password,
service_url = testenv.service_url,
manifest = None,
quiet = False, verbose=True,
debug = True, yes = False)
create_environment(args)
env.api._login()
def teardown(self):
testenv.context.__exit__()
sys.stderr = testenv._saved_stderr
for dname in testenv._tempdirs:
rmtree(dname)
del testenv._tempdirs[:]
def unittest(obj):
"""A suite of unit tests.
Each unit test gets a new empty env.
Run all unit tests with "nosetests -a unittest".
"""
obj.unittest = True
return obj
def integrationtest(obj):
"""A suite of integration tests.
Each test gets a new, fully configured env.
Run all integration tests with "nosetests -a integrationtest".
"""
obj.integrationtest = True
return obj
def systemtest(obj):
"""A suite of system tests.
Each test gets a new empty env. The test will bootstrap the env
through its command-line arguments.
"""
obj.systemtest = True
return obj
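# A minimal sketch of how the pieces above compose (hypothetical suite, not
# part of this module): mark a TestSuite subclass with one of the decorators
# so that setup() knows which kind of env to build, then select it with the
# matching nose attribute.
#
#     @integrationtest
#     class ExampleApiSuite(TestSuite):
#
#         def test_login(self):
#             # setup() has already called create_environment() and logged in,
#             # so env.api is a connected RavelloClient here.
#             assert env.api is not None
#
# Run with: nosetests -a integrationtest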
|
|
#!/usr/bin/env python
""" """
###############################################################################
# Copyright 2016 Hendrix Demers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import csv
import math
import os
# Third party modules.
# Local modules.
# Globals and constants variables.
g_AvogadroNumber_atom_mol = 6.02205E23
g_elementSymbol = [
"H" , "He" , "Li" , "Be" , "B" , "C" , "N" , "O"
, "F" , "Ne" , "Na" , "Mg" , "Al" , "Si" , "P" , "S"
, "Cl" , "Ar" , "K" , "Ca" , "Sc" , "Ti" , "V" , "Cr"
, "Mn" , "Fe" , "Co" , "Ni" , "Cu" , "Zn" , "Ga" , "Ge"
, "As" , "Se" , "Br" , "Kr" , "Rb" , "Sr" , "Y" , "Zr"
, "Nb" , "Mo" , "Tc" , "Ru" , "Rh" , "Pd" , "Ag" , "Cd"
, "In" , "Sn" , "Sb" , "Te" , "I" , "Xe" , "Cs" , "Ba"
, "La" , "Ce" , "Pr" , "Nd" , "Pm" , "Sm" , "Eu" , "Gd"
, "Tb" , "Dy" , "Ho" , "Er" , "Tm" , "Yb" , "Lu" , "Hf"
, "Ta" , "W" , "Re" , "Os" , "Ir" , "Pt" , "Au" , "Hg"
, "Tl" , "Pb" , "Bi" , "Po" , "At" , "Rn" , "Fr" , "Ra"
, "Ac" , "Th" , "Pa" , "U" , "Np" , "Pu" , "Am" , "Cm"
, "Bk" , "Cf" , "Es" , "Fm" , "Md" , "No" , "Lr" , "Unq"
, "Unp" , "Unh"]
g_elementName = [
"Hydrogen"
, "Helium"
, "Lithium"
, "Beryllium"
, "Boron"
, "Carbon"
, "Nitrogen"
, "Oxygen"
, "Fluorine"
, "Neon"
, "Sodium"
, "Magnesium"
, "Aluminium"
, "Silicon"
, "Phosphorus"
, "Sulfur"
, "Chlorine"
, "Argon"
, "Potassium"
, "Calcium"
, "Scandium"
, "Titanium"
, "Vanadium"
, "Chromium"
, "Manganese"
, "Iron"
, "Cobalt"
, "Nickel"
, "Copper"
, "Zinc"
, "Gallium"
, "Germanium"
, "Arsenic"
, "Selenium"
, "Bromine"
, "Krypton"
, "Rubidium"
, "Strontium"
, "Yttrium"
, "Zirconium"
, "Niobium"
, "Molybdenum"
, "Technetium"
, "Ruthenium"
, "Rhodium"
, "Palladium"
, "Silver"
, "Cadmium"
, "Indium"
, "Tin"
, "Antimony"
, "Tellurium"
, "Iodine"
, "Xenon"
, "Cesium"
, "Barium"
, "Lanthanum"
, "Cerium"
, "Praseodymium"
, "Neodymium"
, "Promethium"
, "Samarium"
, "Europium"
, "Gadolinium"
, "Terbium"
, "Dysprosium"
, "Holmium"
, "Erbium"
, "Thulium"
, "Ytterbium"
, "Lutetium"
, "Hafnium"
, "Tantalum"
, "Tungsten"
, "Rhenium"
, "Osmium"
, "Iridium"
, "Platinum"
, "Gold"
, "Mercury"
, "Thallium"
, "Lead"
, "Bismuth"
, "Polonium"
, "Astatine"
, "Radon"
, "Francium"
, "Radium"
, "Actinium"
, "Thorium"
, "Protactinium"
, "Uranium"
, "Neptunium"
, "Plutonium"
, "Americium"
, "Curium"
, "Berkelium"
, "Californium"
, "Einsteinium"
, "Fermium"
, "Mendelevium"
, "Nobelium"
, "Lawrencium"
, "Unnilquadium"
, "Unnilpentium"
, "Unnilhexium"]
"""
* Mass density of element in atomic number order.
*
* For element H to Cm (1--96).
*
* In \f$ \gram\per\centi\meter^{3} \f$.
*
* From: Tableau periodique des elements, Sargent-Welch scientifique Canada
* Limitee.
*
* @note Element Z = 85 and 87 set to 1 for the calculation.
"""
g_massDensity_g_cm3 = [
0.0899, 0.1787, 0.5300, 1.8500, 2.3400, 2.6200, 1.2510, 1.4290,
1.6960, 0.9010, 0.9700, 1.7400, 2.7000, 2.3300, 1.8200, 2.0700,
3.1700, 1.7840, 0.8600, 1.5500, 3.0000, 4.5000, 5.8000, 7.1900,
7.4300, 7.8600, 8.9000, 8.9000, 8.9600, 7.1400, 5.9100, 5.3200,
5.7200, 4.8000, 3.1200, 3.7400, 1.5300, 2.6000, 4.5000, 6.4900,
8.5500, 10.200, 11.500, 12.200, 12.400, 12.000, 10.500, 8.6500,
7.3100, 7.3000, 6.6800, 6.2400, 4.9200, 5.8900, 1.8700, 3.5000,
6.7000, 6.7800, 6.7700, 7.0000, 6.4750, 7.5400, 5.2600, 7.8900,
8.2700, 8.5400, 8.8000, 9.0500, 9.3300, 6.9800, 9.8400, 13.100,
16.600, 19.300, 21.000, 22.400, 22.500, 21.400, 19.300, 13.530,
11.850, 11.400, 9.8000, 9.4000, 1.0000, 9.9100, 1.0000, 5.0000,
10.070, 11.700, 15.400, 18.900, 20.400, 19.800, 13.600, 13.511
]
"""
* Atomic weight of element in atomic number order.
*
* For element H to Sg (1--106).
*
* Unit \f$ \gram\per\mole \f$.
*
* From: Tableau periodique des elements, Sargent-Welch scientifique Canada
* Limitee.
"""
g_atomicMass_g_mol = [
1.0079000, 4.0026000, 6.9410000, 9.0121800, 10.810000, 12.011000,
14.006700, 15.999400, 18.998403, 20.179000, 22.989770, 24.305000,
26.981540, 28.085500, 30.973760, 32.060000, 35.453000, 39.948000,
39.098300, 40.080000, 44.955900, 47.900000, 50.941500, 51.996000,
54.938000, 55.847000, 58.933200, 58.700000, 63.546000, 65.380000,
69.720000, 72.590000, 74.921600, 78.960000, 79.904000, 83.800000,
85.467800, 87.620000, 88.905600, 91.220000, 92.906400, 95.940000,
98.000000, 101.07000, 102.90550, 106.40000, 107.86800, 112.41000,
114.82000, 118.69000, 121.75000, 127.60000, 126.90450, 131.30000,
132.90540, 137.33000, 138.90550, 140.12000, 140.90770, 144.24000,
145.00000, 150.40000, 151.96000, 157.25000, 158.92540, 162.50000,
164.93040, 167.26000, 168.93420, 173.04000, 174.96700, 178.49000,
180.94790, 183.85000, 186.20700, 190.20000, 192.22000, 195.09000,
196.96650, 200.59000, 204.37000, 207.20000, 208.98040, 209.00000,
210.00000, 222.00000, 223.00000, 226.02540, 227.02780, 232.03810,
231.03590, 238.02900, 237.04820, 244.00000, 243.00000, 247.00000,
247.00000, 251.00000, 252.00000, 257.00000, 258.00000, 259.00000,
260.00000, 261.00000, 262.00000, 263.00000
]
"""
* Fermi energy of element in atomic number order.
*
* For element H to Lr (1--103).
* From: CASINO source code, DOS version.
*
* @todo Add units.
"""
g_FermiEnergy = [
1.000, 1.000, 4.700, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 3.100, 1.000, 1.000, 0.555, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 7.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 5.500, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 5.500, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000,
1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 0.000,
1.000, 1.000, 1.000, 1.000
]
"""
* Fermi wavelength of element in atomic number order.
*
* For element H to Lr (1--103).
* From: CASINO source code, DOS version.
*
* @todo Add units.
"""
g_kFermi = [
7.00E7, 7.00E7, 1.10E8, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 9.00E7, 7.00E7, 7.00E7, 4.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 1.35E8, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 1.19E8, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 1.19E8, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7, 7.00E7,
7.00E7, 7.00E7, 0.00E0, 7.00E7, 7.00E7, 7.00E7, 7.00E7
]
"""
* Plasmon energy of element in atomic number order.
*
* For element H to Lr (1--103).
* From: CASINO source code, DOS version.
*
* @todo Add units.
"""
g_plasmonEnergy = [
15.0, 15.0, 7.10, 18.7, 22.7, 15.0, 15.0, 15.0, 15.0, 15.0, 5.70,
10.3, 15.0, 16.7, 15.0, 15.0, 15.0, 15.0, 3.70, 8.80, 14.0, 17.9,
21.8, 24.9, 21.6, 23.0, 20.9, 20.7, 19.3, 17.2, 13.8, 16.2, 15.0,
15.0, 15.0, 15.0, 3.41, 8.00, 12.5, 15.0, 15.0, 15.0, 15.0, 15.0,
15.0, 15.0, 15.0, 19.2, 15.0, 13.4, 15.2, 17.0, 11.4, 15.0, 2.90,
7.20, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 13.3, 15.0,
15.0, 14.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,
35.0, 15.0, 15.0, 15.0, 13.0, 14.2, 15.0, 15.0, 15.0, 15.0, 15.0,
25.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,
15.0, 15.0, 15.0, 15.0
]
def getMassDensity_g_cm3(atomicNumber):
index = int(atomicNumber)-1
return g_massDensity_g_cm3[index]
def getAtomicMass_g_mol(atomicNumber):
index = int(atomicNumber)-1
return g_atomicMass_g_mol[index]
def getFermiEnergy_eV(atomicNumber):
index = int(atomicNumber)-1
return g_FermiEnergy[index]
def getKFermi_eV(atomicNumber):
index = int(atomicNumber)-1
return g_kFermi[index]
def getPlasmonEnergy_eV(atomicNumber):
index = int(atomicNumber)-1
return g_plasmonEnergy[index]
def getMeanIonizationEnergy_eV(atomicNumber):
"""
* Get the mean ionization potential from the atomic number.
*
* In \f$ \electronvolt \f$.
*
* @param[in] atomicNumber Atomic number.
"""
if atomicNumber <= 13.0:
Value = 11.5*atomicNumber
else:
if math.pow(atomicNumber, 0.19) > 0.0:
Value = 9.76*atomicNumber + 58.8/math.pow(atomicNumber, 0.19)
else:
Value = 0.0
return Value
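# Worked example (values computed from the expressions above, rounded): for
# copper, Z = 29, J = 9.76*29 + 58.8/29**0.19, which is about 283.0 + 31.0,
# i.e. roughly 314 eV; for aluminium, Z = 13, the low-Z branch gives
# J = 11.5*13 = 149.5 eV.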
def getKRatioCorrection(atomicNumber):
"""
* Get the constant k ratio correction needed by the mean ionization potential
* from the atomic number.
*
* @param[in] atomicNumber Atomic number.
"""
Value = 0.734*math.pow(atomicNumber, 0.037)
return Value
def getKRatioCorrectionMonsel(atomicNumber, workFunction_keV):
"""
/// K value as defined by Monsel.
/// Used in DE/DS calculation. Casino uses K Gauvin,but for low energy,
/// JR Lowney says that this one is more appropriate (and by experience,
/// it is effectively better for the secondary yield).
/// <p> NOTE : Depends on J (ionisation potential). So it must already be calculated before.
/// @param element Element for whom we want to calculate the K value.
/// @return The K value of the element passed in argument
"""
value = (0.8576 - (workFunction_keV + 1.0e-3)/getMeanIonizationEnergy_eV(atomicNumber))
return value
def computeAtomicDensity_atom_cm3(massDensity_g_cm3, atomicMass_g_mol):
"""
* Compute the atomic density.
*
* \f[
* n_{i} = \frac{N_{A} \rho_{i}}{A_{i}}
* \f]
* where
* - \f$ n_{i} \f$ is the atomic density in \f$ \mathrm{atoms}\per\centi\meter^{3} \f$
* - \f$ N_{A} \f$ is the Avogadro number in \f$ \mathrm{atoms}\per\mole \f$
* - \f$ \rho_{i} \f$ is the mass density in \f$ \gram\per\centi\meter^{3} \f$
* - \f$ A_{i} \f$ is the atomic mass in \f$ \gram\per\mole \f$
*
* \param[in] massDensity_g_cm3
* \param[in] atomicMass_g_mol
"""
return g_AvogadroNumber_atom_mol*massDensity_g_cm3/atomicMass_g_mol
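# Worked example (input values taken from the tables above, result rounded):
# for copper, massDensity_g_cm3 = 8.96 and atomicMass_g_mol = 63.546, so
# n = 6.02205e23 * 8.96 / 63.546, i.e. roughly 8.49e22 atoms/cm3.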
def getSymbol(atomicNumber):
index = int(atomicNumber-1)
return g_elementSymbol[index]
def getName(atomicNumber):
index = int(atomicNumber)-1
return g_elementName[index]
def getAtomicNumberBySymbol(symbol):
try:
return g_elementSymbol.index(symbol.capitalize())+1
except ValueError:
print(symbol)
def getAtomicNumberByName(name):
try:
return g_elementName.index(name.capitalize())+1
except ValueError:
print(name)
def getAtomicNumber(atomicNumber=None, name=None, symbol=None):
if atomicNumber is not None:
return int(atomicNumber)
elif name is not None:
return getAtomicNumberByName(name)
elif symbol is not None:
return getAtomicNumberBySymbol(symbol)
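# A minimal usage sketch: the three lookup paths are equivalent, e.g. for
# copper each of the following returns 29.
#
#     getAtomicNumber(atomicNumber=29)
#     getAtomicNumber(symbol='Cu')
#     getAtomicNumber(name='Copper')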
def run():
print(getMassDensity_g_cm3(24))
print(7.19*0.054)
def runAtomicNumberSymbol():
atomicNumbers = range(1, 92+1, 1)
for atomicNumber in atomicNumbers:
symbol = getSymbol(atomicNumber)
print("%s" % (symbol))
def create_csv_file(output_path):
print(output_path)
file_name = "element_properties.csv"
file_path = os.path.join(output_path, file_name)
print(file_path)
print(len(g_elementName))
with open(file_path, 'w', newline='\n') as output_file:
writer = csv.writer(output_file)
row = ["atomic number"]
row.append("symbol")
row.append("name")
row.append("mass density (g/cm3)")
row.append("atomic mass (g/mol)")
row.append("Fermi energy (eV)")
row.append("k Fermi (eV)")
row.append("plasmon energy (eV)")
writer.writerow(row)
atomic_numbers = range(1, 106+1)
for atomic_number in atomic_numbers:
row = [atomic_number]
row.append(getSymbol(atomic_number))
row.append(getName(atomic_number))
try:
row.append(getMassDensity_g_cm3(atomic_number))
except IndexError:
row.append("")
try:
row.append(getAtomicMass_g_mol(atomic_number))
except IndexError:
row.append("")
try:
row.append(getFermiEnergy_eV(atomic_number))
except IndexError:
row.append("")
try:
row.append(getKFermi_eV(atomic_number))
except IndexError:
row.append("")
try:
row.append(getPlasmonEnergy_eV(atomic_number))
except IndexError:
row.append("")
writer.writerow(row)
if __name__ == '__main__': #pragma: no cover
import sys
import os.path
import csv
#runAtomicNumberSymbol()
if len(sys.argv) > 1:
create_csv_file(sys.argv[1])
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the artifacts file filter functions."""
import unittest
from artifacts import reader as artifacts_reader
from artifacts import registry as artifacts_registry
from dfvfs.helpers import file_system_searcher
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver as path_spec_resolver
from dfwinreg import regf as dfwinreg_regf
from dfwinreg import registry as dfwinreg_registry
from dfwinreg import registry_searcher as dfwinreg_registry_searcher
from plaso.containers import artifacts
from plaso.engine import artifact_filters
from plaso.engine import knowledge_base as knowledge_base_engine
from tests import test_lib as shared_test_lib
class ArtifactDefinitionsFiltersHelperTest(shared_test_lib.BaseTestCase):
"""Tests for artifact definitions filters helper."""
# pylint: disable=protected-access
def _CreateTestArtifactDefinitionsFiltersHelper(self, knowledge_base):
"""Creates an artifact definitions filters helper for testing.
Args:
knowledge_base (KnowledgeBase): contains information from the source
data needed for filtering.
Returns:
ArtifactDefinitionsFiltersHelper: artifact definitions filters helper.
Raises:
SkipTest: if the path inside the test data directory does not exist and
the test should be skipped.
"""
registry = artifacts_registry.ArtifactDefinitionsRegistry()
reader = artifacts_reader.YamlArtifactsReader()
test_artifacts_path = self._GetTestFilePath(['artifacts'])
self._SkipIfPathNotExists(test_artifacts_path)
registry.ReadFromDirectory(reader, test_artifacts_path)
return artifact_filters.ArtifactDefinitionsFiltersHelper(
registry, knowledge_base)
def _CreateTestKnowledgeBaseWindows(self):
"""Creates a knowledge base for testing Windows paths.
Creates a knowledge base with 2 user accounts.
Returns:
KnowledgeBase: knowledge base.
"""
knowledge_base = knowledge_base_engine.KnowledgeBase()
test_user1 = artifacts.UserAccountArtifact(
identifier='1000', path_separator='\\',
user_directory='C:\\Users\\testuser1',
username='testuser1')
knowledge_base.AddUserAccount(test_user1)
test_user2 = artifacts.UserAccountArtifact(
identifier='1001', path_separator='\\',
user_directory='%SystemDrive%\\Users\\testuser2',
username='testuser2')
knowledge_base.AddUserAccount(test_user2)
return knowledge_base
def testBuildFindSpecsWithFileSystem(self):
"""Tests the BuildFindSpecs function for file type artifacts."""
test_file_path = self._GetTestFilePath(['System.evtx'])
self._SkipIfPathNotExists(test_file_path)
test_file_path = self._GetTestFilePath(['testdir', 'filter_1.txt'])
self._SkipIfPathNotExists(test_file_path)
test_file_path = self._GetTestFilePath(['testdir', 'filter_3.txt'])
self._SkipIfPathNotExists(test_file_path)
knowledge_base = self._CreateTestKnowledgeBaseWindows()
artifact_filter_names = ['TestFiles', 'TestFiles2']
test_filters_helper = self._CreateTestArtifactDefinitionsFiltersHelper(
knowledge_base)
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemDrive', value='C:')
test_filters_helper.BuildFindSpecs(
artifact_filter_names, environment_variables=[environment_variable])
self.assertEqual(
len(test_filters_helper.included_file_system_find_specs), 16)
self.assertEqual(len(test_filters_helper.registry_find_specs), 0)
# Last find_spec should contain the testuser2 profile path.
location_segments = sorted([
find_spec._location_segments
for find_spec in test_filters_helper.included_file_system_find_specs])
path_segments = [
'Users', 'testuser2', 'Documents', 'WindowsPowerShell', 'profile\\.ps1']
self.assertEqual(location_segments[2], path_segments)
path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location='.')
file_system = path_spec_resolver.Resolver.OpenFileSystem(path_spec)
searcher = file_system_searcher.FileSystemSearcher(
file_system, path_spec)
path_spec_generator = searcher.Find(
find_specs=test_filters_helper.included_file_system_find_specs)
self.assertIsNotNone(path_spec_generator)
path_specs = list(path_spec_generator)
# Two evtx, one symbolic link to evtx, one AUTHORS, two filter_*.txt files,
# total 6 path specifications.
self.assertEqual(len(path_specs), 6)
def testBuildFindSpecsWithFileSystemAndGroup(self):
"""Tests the BuildFindSpecs function for file type artifacts."""
test_file_path = self._GetTestFilePath(['System.evtx'])
self._SkipIfPathNotExists(test_file_path)
test_file_path = self._GetTestFilePath(['testdir', 'filter_1.txt'])
self._SkipIfPathNotExists(test_file_path)
test_file_path = self._GetTestFilePath(['testdir', 'filter_3.txt'])
self._SkipIfPathNotExists(test_file_path)
knowledge_base = self._CreateTestKnowledgeBaseWindows()
artifact_filter_names = ['TestGroupExtract']
test_filters_helper = self._CreateTestArtifactDefinitionsFiltersHelper(
knowledge_base)
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemDrive', value='C:')
test_filters_helper.BuildFindSpecs(
artifact_filter_names, environment_variables=[environment_variable])
self.assertEqual(
len(test_filters_helper.included_file_system_find_specs), 16)
self.assertEqual(len(test_filters_helper.registry_find_specs), 0)
path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location='.')
file_system = path_spec_resolver.Resolver.OpenFileSystem(path_spec)
searcher = file_system_searcher.FileSystemSearcher(
file_system, path_spec)
path_spec_generator = searcher.Find(
find_specs=test_filters_helper.included_file_system_find_specs)
self.assertIsNotNone(path_spec_generator)
path_specs = list(path_spec_generator)
# Two evtx, one symbolic link to evtx, one AUTHORS, two filter_*.txt files,
# total 6 path specifications.
self.assertEqual(len(path_specs), 6)
def testBuildFindSpecsWithRegistry(self):
"""Tests the BuildFindSpecs function on Windows Registry sources."""
knowledge_base = knowledge_base_engine.KnowledgeBase()
artifact_filter_names = ['TestRegistry', 'TestRegistryValue']
test_filters_helper = self._CreateTestArtifactDefinitionsFiltersHelper(
knowledge_base)
test_filters_helper.BuildFindSpecs(artifact_filter_names)
# There should be 3 Windows Registry find specifications.
self.assertEqual(
len(test_filters_helper.included_file_system_find_specs), 0)
self.assertEqual(len(test_filters_helper.registry_find_specs), 3)
file_entry = self._GetTestFileEntry(['SYSTEM'])
file_object = file_entry.GetFileObject()
registry_file = dfwinreg_regf.REGFWinRegistryFile(
ascii_codepage='cp1252', emulate_virtual_keys=False)
registry_file.Open(file_object)
win_registry = dfwinreg_registry.WinRegistry()
key_path_prefix = win_registry.GetRegistryFileMapping(registry_file)
registry_file.SetKeyPathPrefix(key_path_prefix)
win_registry.MapFile(key_path_prefix, registry_file)
searcher = dfwinreg_registry_searcher.WinRegistrySearcher(win_registry)
key_paths = list(searcher.Find(
find_specs=test_filters_helper.registry_find_specs))
self.assertIsNotNone(key_paths)
self.assertEqual(len(key_paths), 8)
def testCheckKeyCompatibility(self):
"""Tests the CheckKeyCompatibility function"""
knowledge_base = knowledge_base_engine.KnowledgeBase()
test_filter_file = self._CreateTestArtifactDefinitionsFiltersHelper(
knowledge_base)
# Compatible Key.
key_path = 'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control'
compatible_key = test_filter_file.CheckKeyCompatibility(key_path)
self.assertTrue(compatible_key)
# NOT a Compatible Key.
key_path = 'HKEY_USERS\\S-1-5-18'
compatible_key = test_filter_file.CheckKeyCompatibility(key_path)
self.assertTrue(compatible_key)
# TODO: add tests for _BuildFindSpecsFromArtifact
# TODO: add tests for _BuildFindSpecsFromGroupName
def testBuildFindSpecsFromFileSourcePath(self):
"""Tests the _BuildFindSpecsFromFileSourcePath function on file sources."""
knowledge_base = knowledge_base_engine.KnowledgeBase()
test_filter_file = self._CreateTestArtifactDefinitionsFiltersHelper(
knowledge_base)
separator = '\\'
test_user_accounts = []
# Test expansion of environment variables.
path_entry = '%%environ_systemroot%%\\test_data\\*.evtx'
environment_variable = [artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemRoot', value='C:\\Windows')]
find_specs = test_filter_file._BuildFindSpecsFromFileSourcePath(
path_entry, separator, environment_variable, test_user_accounts)
# Should build 1 find_spec.
self.assertEqual(len(find_specs), 1)
# Location segments should be equivalent to \Windows\test_data\*.evtx.
# Underscores are not escaped in regular expressions in supported versions
# of Python 3. See https://bugs.python.org/issue2650.
expected_location_segments = ['Windows', 'test_data', '.*\\.evtx']
self.assertEqual(
find_specs[0]._location_segments, expected_location_segments)
# Test expansion of globs.
path_entry = '\\test_data\\**'
find_specs = test_filter_file._BuildFindSpecsFromFileSourcePath(
path_entry, separator, environment_variable, test_user_accounts)
# Glob expansion should by default recurse ten levels.
self.assertEqual(len(find_specs), 10)
# Last entry in find_specs list should be 10 levels of depth.
# Underscores are not escaped in regular expressions in supported versions
# of Python 3. See https://bugs.python.org/issue2650
expected_location_segments = ['test_data']
expected_location_segments.extend([
'.*', '.*', '.*', '.*', '.*', '.*', '.*', '.*', '.*', '.*'])
self.assertEqual(
find_specs[9]._location_segments, expected_location_segments)
# Test expansion of user home directories
separator = '/'
test_user1 = artifacts.UserAccountArtifact(
user_directory='/homes/testuser1', username='testuser1')
test_user2 = artifacts.UserAccountArtifact(
user_directory='/home/testuser2', username='testuser2')
test_user_accounts = [test_user1, test_user2]
path_entry = '%%users.homedir%%/.thumbnails/**3'
find_specs = test_filter_file._BuildFindSpecsFromFileSourcePath(
path_entry, separator, environment_variable, test_user_accounts)
# 6 find specs should be created for testuser1 and testuser2.
self.assertEqual(len(find_specs), 6)
# Last entry in find_specs list should be testuser2 with a depth of 3
expected_location_segments = [
'home', 'testuser2', '\\.thumbnails', '.*', '.*', '.*']
self.assertEqual(
find_specs[5]._location_segments, expected_location_segments)
# Test Windows path with profile directories and globs with a depth of 4.
separator = '\\'
test_user1 = artifacts.UserAccountArtifact(
path_separator='\\', user_directory='C:\\Users\\testuser1',
username='testuser1')
test_user2 = artifacts.UserAccountArtifact(
path_separator='\\', user_directory='%SystemDrive%\\Users\\testuser2',
username='testuser2')
test_user_accounts = [test_user1, test_user2]
path_entry = '%%users.userprofile%%\\AppData\\**4'
find_specs = test_filter_file._BuildFindSpecsFromFileSourcePath(
path_entry, separator, environment_variable, test_user_accounts)
# 8 find specs should be created for testuser1 and testuser2.
self.assertEqual(len(find_specs), 8)
# Last entry in find_specs list should be testuser2, with a depth of 4.
expected_location_segments = [
'Users', 'testuser2', 'AppData', '.*', '.*', '.*', '.*']
self.assertEqual(
find_specs[7]._location_segments, expected_location_segments)
path_entry = '%%users.localappdata%%\\Microsoft\\**4'
find_specs = test_filter_file._BuildFindSpecsFromFileSourcePath(
path_entry, separator, environment_variable, test_user_accounts)
# 16 find specs should be created for testuser1 and testuser2.
self.assertEqual(len(find_specs), 16)
# Last entry in find_specs list should be testuser2, with a depth of 4.
expected_location_segments = [
'Users', 'testuser2', 'Local\\ Settings', 'Application\\ Data',
'Microsoft', '.*', '.*', '.*', '.*']
self.assertEqual(
find_specs[15]._location_segments, expected_location_segments)
# TODO: add tests for _BuildFindSpecsFromRegistrySourceKey
if __name__ == '__main__':
unittest.main()
|
|
from __future__ import unicode_literals
import re
import unicodedata
from gzip import GzipFile
from io import BytesIO
from django.utils.encoding import force_text
from django.utils.functional import allow_lazy, SimpleLazyObject
from django.utils import six
from django.utils.six.moves import html_entities
from django.utils.translation import ugettext_lazy, ugettext as _, pgettext
from django.utils.safestring import mark_safe
if not six.PY3:
# Import force_unicode even though this module doesn't use it, because some
# people rely on it being here.
from django.utils.encoding import force_unicode
# Capitalizes the first letter of a string.
capfirst = lambda x: x and force_text(x)[0].upper() + force_text(x)[1:]
capfirst = allow_lazy(capfirst, six.text_type)
# Set up regular expressions
re_words = re.compile(r'&.*?;|<.*?>|(\w[\w-]*)', re.U|re.S)
re_tag = re.compile(r'<(/)?([^ ]+?)(?:(\s*/)| .*?)?>', re.S)
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks and most spaces in
the text. Expects that existing line breaks are posix newlines.
"""
text = force_text(text)
def _generator():
it = iter(text.split(' '))
word = next(it)
yield word
pos = len(word) - word.rfind('\n') - 1
for word in it:
if "\n" in word:
lines = word.split('\n')
else:
lines = (word,)
pos += len(lines[0]) + 1
if pos > width:
yield '\n'
pos = len(lines[-1])
else:
yield ' '
if len(lines) > 1:
pos = len(lines[-1])
yield word
return ''.join(_generator())
wrap = allow_lazy(wrap, six.text_type)
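# A minimal usage sketch (output derived from the algorithm above): a new line
# is started once the running position exceeds the requested width, and
# existing newlines are preserved.
#
#     >>> wrap('django is a web framework', 10)
#     'django is\na web\nframework'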
class Truncator(SimpleLazyObject):
"""
An object used to truncate text, either by characters or words.
"""
def __init__(self, text):
super(Truncator, self).__init__(lambda: force_text(text))
def add_truncation_text(self, text, truncate=None):
if truncate is None:
truncate = pgettext(
'String to return when truncating text',
'%(truncated_text)s...')
truncate = force_text(truncate)
if '%(truncated_text)s' in truncate:
return truncate % {'truncated_text': text}
# The truncation text didn't contain the %(truncated_text)s string
# replacement argument so just append it to the text.
if text.endswith(truncate):
# But don't append the truncation text if the current text already
# ends in this.
return text
return '%s%s' % (text, truncate)
def chars(self, num, truncate=None):
"""
Returns the text truncated to be no longer than the specified number
of characters.
Takes an optional argument of what should be used to notify that the
string has been truncated, defaulting to a translatable string of an
ellipsis (...).
"""
length = int(num)
text = unicodedata.normalize('NFC', self._wrapped)
# Calculate the length to truncate to (max length - end_text length)
truncate_len = length
for char in self.add_truncation_text('', truncate):
if not unicodedata.combining(char):
truncate_len -= 1
if truncate_len == 0:
break
s_len = 0
end_index = None
for i, char in enumerate(text):
if unicodedata.combining(char):
# Don't consider combining characters
# as adding to the string length
continue
s_len += 1
if end_index is None and s_len > truncate_len:
end_index = i
if s_len > length:
# Return the truncated string
return self.add_truncation_text(text[:end_index or 0],
truncate)
# Return the original string since no truncation was necessary
return text
chars = allow_lazy(chars)
def words(self, num, truncate=None, html=False):
"""
Truncates a string after a certain number of words. Takes an optional
argument of what should be used to notify that the string has been
truncated, defaulting to ellipsis (...).
"""
length = int(num)
if html:
return self._html_words(length, truncate)
return self._text_words(length, truncate)
words = allow_lazy(words)
def _text_words(self, length, truncate):
"""
Truncates a string after a certain number of words.
Newlines in the string will be stripped.
"""
words = self._wrapped.split()
if len(words) > length:
words = words[:length]
return self.add_truncation_text(' '.join(words), truncate)
return ' '.join(words)
def _html_words(self, length, truncate):
"""
Truncates HTML to a certain number of words (not counting tags and
comments). Closes opened tags if they were correctly closed in the
given HTML.
Newlines in the HTML are preserved.
"""
if length <= 0:
return ''
html4_singlets = (
'br', 'col', 'link', 'base', 'img',
'param', 'area', 'hr', 'input'
)
# Count non-HTML words and keep note of open tags
pos = 0
end_text_pos = 0
words = 0
open_tags = []
while words <= length:
m = re_words.search(self._wrapped, pos)
if not m:
# Checked through whole string
break
pos = m.end(0)
if m.group(1):
# It's an actual non-HTML word
words += 1
if words == length:
end_text_pos = pos
continue
# Check for tag
tag = re_tag.match(m.group(0))
if not tag or end_text_pos:
# Don't worry about non tags or tags after our truncate point
continue
closing_tag, tagname, self_closing = tag.groups()
# Element names are always case-insensitive
tagname = tagname.lower()
if self_closing or tagname in html4_singlets:
pass
elif closing_tag:
# Check for match in open tags list
try:
i = open_tags.index(tagname)
except ValueError:
pass
else:
# SGML: An end tag closes, back to the matching start tag,
# all unclosed intervening start tags with omitted end tags
open_tags = open_tags[i + 1:]
else:
# Add it to the start of the open tags list
open_tags.insert(0, tagname)
if words <= length:
# Don't try to close tags if we don't need to truncate
return self._wrapped
out = self._wrapped[:end_text_pos]
truncate_text = self.add_truncation_text('', truncate)
if truncate_text:
out += truncate_text
# Close any tags still open
for tag in open_tags:
out += '</%s>' % tag
# Return string
return out
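# A minimal usage sketch for Truncator (result derived from _text_words above,
# using the default "..." truncation text):
#
#     >>> Truncator('The quick brown fox jumped over the lazy dog.').words(3)
#     'The quick brown...'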
def get_valid_filename(s):
"""
Returns the given string converted to a string that can be used for a clean
filename. Specifically, leading and trailing spaces are removed; other
spaces are converted to underscores; and anything that is not a unicode
alphanumeric, dash, underscore, or dot, is removed.
>>> get_valid_filename("john's portrait in 2004.jpg")
'johns_portrait_in_2004.jpg'
"""
s = force_text(s).strip().replace(' ', '_')
return re.sub(r'(?u)[^-\w.]', '', s)
get_valid_filename = allow_lazy(get_valid_filename, six.text_type)
def get_text_list(list_, last_word=ugettext_lazy('or')):
"""
>>> get_text_list(['a', 'b', 'c', 'd'])
'a, b, c or d'
>>> get_text_list(['a', 'b', 'c'], 'and')
'a, b and c'
>>> get_text_list(['a', 'b'], 'and')
'a and b'
>>> get_text_list(['a'])
'a'
>>> get_text_list([])
''
"""
if len(list_) == 0: return ''
if len(list_) == 1: return force_text(list_[0])
return '%s %s %s' % (
# Translators: This string is used as a separator between list elements
_(', ').join([force_text(i) for i in list_][:-1]),
force_text(last_word), force_text(list_[-1]))
get_text_list = allow_lazy(get_text_list, six.text_type)
def normalize_newlines(text):
return force_text(re.sub(r'\r\n|\r|\n', '\n', text))
normalize_newlines = allow_lazy(normalize_newlines, six.text_type)
def recapitalize(text):
"Recapitalizes text, placing caps after end-of-sentence punctuation."
text = force_text(text).lower()
capsRE = re.compile(r'(?:^|(?<=[\.\?\!] ))([a-z])')
text = capsRE.sub(lambda x: x.group(1).upper(), text)
return text
recapitalize = allow_lazy(recapitalize)
def phone2numeric(phone):
"Converts a phone number with letters into its numeric equivalent."
char2number = {'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3',
'g': '4', 'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6',
'n': '6', 'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8',
'u': '8', 'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9',
}
return ''.join(char2number.get(c, c) for c in phone.lower())
phone2numeric = allow_lazy(phone2numeric)
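# A minimal example (the mapping is applied per character, non-letters pass
# through unchanged):
#     phone2numeric('0800-flowers') returns '0800-3569377'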
# From http://www.xhaus.com/alan/python/httpcomp.html#gzip
# Used with permission.
def compress_string(s):
zbuf = BytesIO()
zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
zfile.write(s)
zfile.close()
return zbuf.getvalue()
class StreamingBuffer(object):
def __init__(self):
self.vals = []
def write(self, val):
self.vals.append(val)
def read(self):
ret = b''.join(self.vals)
self.vals = []
return ret
def flush(self):
return
def close(self):
return
# Like compress_string, but for iterators of strings.
def compress_sequence(sequence):
buf = StreamingBuffer()
zfile = GzipFile(mode='wb', compresslevel=6, fileobj=buf)
# Output headers...
yield buf.read()
for item in sequence:
zfile.write(item)
zfile.flush()
yield buf.read()
zfile.close()
yield buf.read()
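# A minimal usage sketch: compress_sequence() gzips an iterator of byte strings
# lazily, yielding compressed chunks as they become available (the first yield
# is just the gzip header). Decompressing the joined chunks gives back the
# concatenated input.
#
#     chunks = compress_sequence([b'hello ', b'world'])
#     gzipped = b''.join(chunks)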
ustring_re = re.compile("([\u0080-\uffff])")
def javascript_quote(s, quote_double_quotes=False):
def fix(match):
return "\\u%04x" % ord(match.group(1))
if type(s) == bytes:
s = s.decode('utf-8')
elif type(s) != six.text_type:
raise TypeError(s)
s = s.replace('\\', '\\\\')
s = s.replace('\r', '\\r')
s = s.replace('\n', '\\n')
s = s.replace('\t', '\\t')
s = s.replace("'", "\\'")
if quote_double_quotes:
s = s.replace('"', '"')
return str(ustring_re.sub(fix, s))
javascript_quote = allow_lazy(javascript_quote, six.text_type)
# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings).
smart_split_re = re.compile(r"""
((?:
[^\s'"]*
(?:
(?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
[^\s'"]*
)+
) | \S+)
""", re.VERBOSE)
def smart_split(text):
r"""
Generator that splits a string by spaces, leaving quoted phrases together.
Supports both single and double quotes, and supports escaping quotes with
backslashes. In the output, strings will keep their initial and trailing
quote marks and escaped quotes will remain escaped (the results can then
be further processed with unescape_string_literal()).
>>> list(smart_split(r'This is "a person\'s" test.'))
['This', 'is', '"a person\\\'s"', 'test.']
>>> list(smart_split(r"Another 'person\'s' test."))
['Another', "'person\\'s'", 'test.']
>>> list(smart_split(r'A "\"funky\" style" test.'))
['A', '"\\"funky\\" style"', 'test.']
"""
text = force_text(text)
for bit in smart_split_re.finditer(text):
yield bit.group(0)
def _replace_entity(match):
text = match.group(1)
if text[0] == '#':
text = text[1:]
try:
if text[0] in 'xX':
c = int(text[1:], 16)
else:
c = int(text)
return six.unichr(c)
except ValueError:
return match.group(0)
else:
try:
return six.unichr(html_entities.name2codepoint[text])
except (ValueError, KeyError):
return match.group(0)
_entity_re = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")
def unescape_entities(text):
return _entity_re.sub(_replace_entity, text)
unescape_entities = allow_lazy(unescape_entities, six.text_type)
def unescape_string_literal(s):
r"""
Convert quoted string literals to unquoted strings with escaped quotes and
backslashes unquoted::
>>> unescape_string_literal('"abc"')
'abc'
>>> unescape_string_literal("'abc'")
'abc'
>>> unescape_string_literal('"a \"bc\""')
'a "bc"'
>>> unescape_string_literal("'\'ab\' c'")
"'ab' c"
"""
if s[0] not in "\"'" or s[-1] != s[0]:
raise ValueError("Not a string literal: %r" % s)
quote = s[0]
return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\')
unescape_string_literal = allow_lazy(unescape_string_literal)
def slugify(value):
"""
Converts to lowercase, removes non-word characters (alphanumerics and
underscores) and converts spaces to hyphens. Also strips leading and
trailing whitespace.
"""
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
value = re.sub('[^\w\s-]', '', value).strip().lower()
return mark_safe(re.sub('[-\s]+', '-', value))
slugify = allow_lazy(slugify, six.text_type)
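# A minimal usage sketch (value computed from the rules above):
#     slugify(' Jack & Jill like numbers 1,2,3 and 4 ')
#     returns 'jack-jill-like-numbers-123-and-4'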
|
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Route.agency'
db.alter_column('gtfs_route', 'agency_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['gtfs.Agency'], null=True))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'Route.agency'
raise RuntimeError("Cannot reverse this migration. 'Route.agency' and its values cannot be restored.")
models = {
'gtfs.agency': {
'Meta': {'unique_together': "(('source', 'agency_id'),)", 'object_name': 'Agency'},
'agency_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.TextField', [], {}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'gtfs.arrangement': {
'Meta': {'object_name': 'Arrangement'},
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'gtfs.block': {
'Meta': {'unique_together': "(('source', 'block_id'),)", 'object_name': 'Block'},
'block_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.calendar': {
'Meta': {'object_name': 'Calendar'},
'end_date': ('django.db.models.fields.DateField', [], {}),
'friday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'saturday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'service': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['gtfs.Service']", 'unique': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'sunday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'thursday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tuesday': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wednesday': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'gtfs.calendardate': {
'Meta': {'object_name': 'CalendarDate'},
'date': ('django.db.models.fields.DateField', [], {}),
'exception_type': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Service']"})
},
'gtfs.containsfarerule': {
'Meta': {'object_name': 'ContainsFareRule'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.FareRule']"}),
'zone': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Zone']"})
},
'gtfs.destinationfarerule': {
'Meta': {'object_name': 'DestinationFareRule'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.FareRule']"}),
'zone': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Zone']"})
},
'gtfs.farerule': {
'Meta': {'object_name': 'FareRule'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Agency']"}),
'currency_type': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'farerule_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payment_method': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.PaymentMethod']"}),
'price': ('django.db.models.fields.FloatField', [], {}),
'transfer_duration': ('django.db.models.fields.IntegerField', [], {}),
'transfer_permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.TransferPermission']"})
},
'gtfs.frequency': {
'Meta': {'object_name': 'Frequency'},
'end_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'end_time_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'headway_secs': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'start_time_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Trip']"})
},
'gtfs.originfarerule': {
'Meta': {'object_name': 'OriginFareRule'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.FareRule']"}),
'zone': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Zone']"})
},
'gtfs.paymentmethod': {
'Meta': {'object_name': 'PaymentMethod'},
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'gtfs.route': {
'Meta': {'unique_together': "(('agency', 'route_id'),)", 'object_name': 'Route'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Agency']", 'null': 'True'}),
'color': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'desc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'long_name': ('django.db.models.fields.TextField', [], {}),
'route_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'route_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.RouteType']"}),
'short_name': ('django.db.models.fields.TextField', [], {}),
'text_color': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'blank': 'True'})
},
'gtfs.routefarerule': {
'Meta': {'object_name': 'RouteFareRule'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'route': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Route']"}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.FareRule']"})
},
'gtfs.routetype': {
'Meta': {'object_name': 'RouteType'},
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {})
},
'gtfs.service': {
'Meta': {'unique_together': "(('source', 'service_id'),)", 'object_name': 'Service'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.shape': {
'Meta': {'object_name': 'Shape'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_string': ('django.contrib.gis.db.models.fields.LineStringField', [], {'null': 'True'}),
'shape_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'})
},
'gtfs.source': {
'Meta': {'object_name': 'Source'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'gtfs.stop': {
'Meta': {'unique_together': "(('source', 'stop_id'),)", 'object_name': 'Stop'},
'code': ('django.db.models.fields.TextField', [], {}),
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_station': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'name': ('django.db.models.fields.TextField', [], {}),
'parent_station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Stop']", 'null': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Source']", 'null': 'True'}),
'stop_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zone': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Zone']", 'null': 'True'})
},
'gtfs.stoptime': {
'Meta': {'object_name': 'StopTime'},
'arrival_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'arrival_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'departure_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'departure_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'drop_off_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dropoff'", 'null': 'True', 'to': "orm['gtfs.Arrangement']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pickup_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pickup'", 'null': 'True', 'to': "orm['gtfs.Arrangement']"}),
'shape_dist_travelled': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Stop']"}),
'stop_headsign': ('django.db.models.fields.TextField', [], {}),
'stop_sequence': ('django.db.models.fields.IntegerField', [], {}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Trip']"})
},
'gtfs.transfer': {
'Meta': {'object_name': 'Transfer'},
'from_stop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transfer_from_stop'", 'to': "orm['gtfs.Stop']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_transfer_time': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'to_stop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transfer_to_stop'", 'to': "orm['gtfs.Stop']"}),
'transfer_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.TransferType']"})
},
'gtfs.transferpermission': {
'Meta': {'object_name': 'TransferPermission'},
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'limited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ntransfers': ('django.db.models.fields.IntegerField', [], {})
},
'gtfs.transfertype': {
'Meta': {'object_name': 'TransferType'},
'desc': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'gtfs.trip': {
'Meta': {'unique_together': "(('service', 'trip_id'), ('route', 'trip_id'))", 'object_name': 'Trip'},
'block': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Block']", 'null': 'True'}),
'headsign': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inbound': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'outbound': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'route': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Route']"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Service']"}),
'shape': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['gtfs.Shape']", 'null': 'True'}),
'short_name': ('django.db.models.fields.TextField', [], {}),
'trip_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'})
},
'gtfs.zone': {
'Meta': {'object_name': 'Zone'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'zone_id': ('django.db.models.fields.TextField', [], {'max_length': '20', 'db_index': 'True'})
}
}
complete_apps = ['gtfs']
|
|
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import contextlib
import mock
from oslo.config import cfg
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.openstack.common import log
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.agent import ovs_neutron_agent
from neutron.plugins.openvswitch.common import constants
from neutron.tests import base
# Useful global dummy variables.
NET_UUID = '3faeebfe-5d37-11e1-a64b-000c29d5f0a7'
LS_ID = 42
LV_ID = 42
LV_IDS = [42, 43]
VIF_ID = '404deaec-5d37-11e1-a64b-000c29d5f0a8'
VIF_MAC = '3c:09:24:1e:78:23'
OFPORT_NUM = 1
VIF_PORT = ovs_lib.VifPort('port', OFPORT_NUM,
VIF_ID, VIF_MAC, 'switch')
VIF_PORTS = {VIF_ID: VIF_PORT}
LVM = ovs_neutron_agent.LocalVLANMapping(LV_ID, 'gre', None, LS_ID, VIF_PORTS)
LVM_FLAT = ovs_neutron_agent.LocalVLANMapping(
LV_ID, 'flat', 'net1', LS_ID, VIF_PORTS)
LVM_VLAN = ovs_neutron_agent.LocalVLANMapping(
LV_ID, 'vlan', 'net1', LS_ID, VIF_PORTS)
FIXED_IPS = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
VM_DEVICE_OWNER = "compute:None"
TUN_OFPORTS = {p_const.TYPE_GRE: {'ip1': '11', 'ip2': '12'}}
BCAST_MAC = "01:00:00:00:00:00/01:00:00:00:00:00"
UCAST_MAC = "00:00:00:00:00:00/01:00:00:00:00:00"
class DummyPort:
def __init__(self, interface_id):
self.interface_id = interface_id
class DummyVlanBinding:
def __init__(self, network_id, vlan_id):
self.network_id = network_id
self.vlan_id = vlan_id
class TunnelTest(base.BaseTestCase):
USE_VETH_INTERCONNECTION = False
VETH_MTU = None
def setUp(self):
super(TunnelTest, self).setUp()
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
cfg.CONF.set_override('rpc_backend',
'neutron.openstack.common.rpc.impl_fake')
cfg.CONF.set_override('report_interval', 0, 'AGENT')
check_arp_responder_str = ('neutron.plugins.openvswitch.agent.'
'ovs_neutron_agent.OVSNeutronAgent.'
'_check_arp_responder_support')
self.mock_check_arp_resp = mock.patch(check_arp_responder_str).start()
self.mock_check_arp_resp.return_value = True
self.INT_BRIDGE = 'integration_bridge'
self.TUN_BRIDGE = 'tunnel_bridge'
self.MAP_TUN_BRIDGE = 'tun_br_map'
self.NET_MAPPING = {'net1': self.MAP_TUN_BRIDGE}
self.INT_OFPORT = 11111
self.TUN_OFPORT = 22222
self.MAP_TUN_INT_OFPORT = 33333
self.MAP_TUN_PHY_OFPORT = 44444
self.inta = mock.Mock()
self.intb = mock.Mock()
self.ovs_bridges = {self.INT_BRIDGE: mock.Mock(),
self.TUN_BRIDGE: mock.Mock(),
self.MAP_TUN_BRIDGE: mock.Mock(),
}
self.ovs_int_ofports = {
'patch-tun': self.TUN_OFPORT,
'int-%s' % self.MAP_TUN_BRIDGE: self.MAP_TUN_INT_OFPORT
}
self.mock_bridge = mock.patch.object(ovs_lib, 'OVSBridge').start()
self.mock_bridge.side_effect = (lambda br_name, root_helper:
self.ovs_bridges[br_name])
self.mock_int_bridge = self.ovs_bridges[self.INT_BRIDGE]
self.mock_int_bridge.add_port.return_value = self.MAP_TUN_INT_OFPORT
self.mock_int_bridge.add_patch_port.side_effect = (
lambda tap, peer: self.ovs_int_ofports[tap])
self.mock_map_tun_bridge = self.ovs_bridges[self.MAP_TUN_BRIDGE]
self.mock_map_tun_bridge.br_name = self.MAP_TUN_BRIDGE
self.mock_map_tun_bridge.add_port.return_value = (
self.MAP_TUN_PHY_OFPORT)
self.mock_map_tun_bridge.add_patch_port.return_value = (
self.MAP_TUN_PHY_OFPORT)
self.mock_tun_bridge = self.ovs_bridges[self.TUN_BRIDGE]
self.mock_tun_bridge.add_port.return_value = self.INT_OFPORT
self.mock_tun_bridge.add_patch_port.return_value = self.INT_OFPORT
self.device_exists = mock.patch.object(ip_lib, 'device_exists').start()
self.device_exists.return_value = True
self.ipdevice = mock.patch.object(ip_lib, 'IPDevice').start()
self.ipwrapper = mock.patch.object(ip_lib, 'IPWrapper').start()
add_veth = self.ipwrapper.return_value.add_veth
add_veth.return_value = [self.inta, self.intb]
self.get_bridges = mock.patch.object(ovs_lib, 'get_bridges').start()
self.get_bridges.return_value = [self.INT_BRIDGE,
self.TUN_BRIDGE,
self.MAP_TUN_BRIDGE]
self.execute = mock.patch('neutron.agent.linux.utils.execute').start()
self._define_expected_calls()
def _define_expected_calls(self):
self.mock_bridge_expected = [
mock.call(self.INT_BRIDGE, 'sudo'),
mock.call(self.MAP_TUN_BRIDGE, 'sudo'),
mock.call(self.TUN_BRIDGE, 'sudo'),
]
self.mock_int_bridge = self.ovs_bridges[self.INT_BRIDGE]
self.mock_int_bridge_expected = [
mock.call.create(),
mock.call.set_secure_mode(),
mock.call.delete_port('patch-tun'),
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1, actions='normal'),
mock.call.add_flow(priority=0, table=constants.CANARY_TABLE,
actions='drop'),
]
self.mock_map_tun_bridge_expected = [
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1, actions='normal'),
mock.call.delete_port('phy-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_patch_port('phy-%s' % self.MAP_TUN_BRIDGE,
constants.NONEXISTENT_PEER),
]
self.mock_int_bridge_expected += [
mock.call.delete_port('int-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_patch_port('int-%s' % self.MAP_TUN_BRIDGE,
constants.NONEXISTENT_PEER),
]
self.mock_int_bridge_expected += [
mock.call.add_flow(priority=2,
in_port=self.MAP_TUN_INT_OFPORT,
actions='drop'),
mock.call.set_db_attribute(
'Interface', 'int-%s' % self.MAP_TUN_BRIDGE,
'options:peer', 'phy-%s' % self.MAP_TUN_BRIDGE),
]
self.mock_map_tun_bridge_expected += [
mock.call.add_flow(priority=2,
in_port=self.MAP_TUN_PHY_OFPORT,
actions='drop'),
mock.call.set_db_attribute(
'Interface', 'phy-%s' % self.MAP_TUN_BRIDGE,
'options:peer', 'int-%s' % self.MAP_TUN_BRIDGE),
]
self.mock_tun_bridge_expected = [
mock.call.reset_bridge(),
mock.call.add_patch_port('patch-int', 'patch-tun'),
]
self.mock_int_bridge_expected += [
mock.call.add_patch_port('patch-tun', 'patch-int')
]
self.mock_tun_bridge_expected += [
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1,
actions="resubmit(,%s)" %
constants.PATCH_LV_TO_TUN,
in_port=self.INT_OFPORT),
mock.call.add_flow(priority=0, actions="drop"),
mock.call.add_flow(priority=0, table=constants.PATCH_LV_TO_TUN,
dl_dst=UCAST_MAC,
actions="resubmit(,%s)" %
constants.UCAST_TO_TUN),
mock.call.add_flow(priority=0, table=constants.PATCH_LV_TO_TUN,
dl_dst=BCAST_MAC,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN),
]
for tunnel_type in constants.TUNNEL_NETWORK_TYPES:
self.mock_tun_bridge_expected.append(
mock.call.add_flow(
table=constants.TUN_TABLE[tunnel_type],
priority=0,
actions="drop"))
learned_flow = ("table=%s,"
"priority=1,"
"hard_timeout=300,"
"NXM_OF_VLAN_TCI[0..11],"
"NXM_OF_ETH_DST[]=NXM_OF_ETH_SRC[],"
"load:0->NXM_OF_VLAN_TCI[],"
"load:NXM_NX_TUN_ID[]->NXM_NX_TUN_ID[],"
"output:NXM_OF_IN_PORT[]" %
constants.UCAST_TO_TUN)
self.mock_tun_bridge_expected += [
mock.call.add_flow(table=constants.LEARN_FROM_TUN,
priority=1,
actions="learn(%s),output:%s" %
(learned_flow, self.INT_OFPORT)),
mock.call.add_flow(table=constants.UCAST_TO_TUN,
priority=0,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN),
mock.call.add_flow(table=constants.FLOOD_TO_TUN,
priority=0,
actions="drop")
]
self.device_exists_expected = []
self.ipdevice_expected = []
self.ipwrapper_expected = [mock.call('sudo')]
self.get_bridges_expected = [mock.call('sudo'), mock.call('sudo')]
self.inta_expected = []
self.intb_expected = []
self.execute_expected = []
def _build_agent(self, **kwargs):
kwargs.setdefault('integ_br', self.INT_BRIDGE)
kwargs.setdefault('tun_br', self.TUN_BRIDGE)
kwargs.setdefault('local_ip', '10.0.0.1')
kwargs.setdefault('bridge_mappings', self.NET_MAPPING)
kwargs.setdefault('root_helper', 'sudo')
kwargs.setdefault('polling_interval', 2)
kwargs.setdefault('tunnel_types', ['gre'])
kwargs.setdefault('veth_mtu', self.VETH_MTU)
kwargs.setdefault('use_veth_interconnection',
self.USE_VETH_INTERCONNECTION)
return ovs_neutron_agent.OVSNeutronAgent(**kwargs)
def _verify_mock_call(self, mock_obj, expected):
mock_obj.assert_has_calls(expected)
self.assertEqual(len(mock_obj.mock_calls), len(expected))
def _verify_mock_calls(self):
self._verify_mock_call(self.mock_bridge, self.mock_bridge_expected)
self._verify_mock_call(self.mock_int_bridge,
self.mock_int_bridge_expected)
self._verify_mock_call(self.mock_map_tun_bridge,
self.mock_map_tun_bridge_expected)
self._verify_mock_call(self.mock_tun_bridge,
self.mock_tun_bridge_expected)
self._verify_mock_call(self.device_exists, self.device_exists_expected)
self._verify_mock_call(self.ipdevice, self.ipdevice_expected)
self._verify_mock_call(self.ipwrapper, self.ipwrapper_expected)
self._verify_mock_call(self.get_bridges, self.get_bridges_expected)
self._verify_mock_call(self.inta, self.inta_expected)
self._verify_mock_call(self.intb, self.intb_expected)
self._verify_mock_call(self.execute, self.execute_expected)
def test_construct(self):
agent = self._build_agent()
self.assertEqual(agent.agent_id, 'ovs-agent-%s' % cfg.CONF.host)
self._verify_mock_calls()
# TODO(ethuleau): Initially, the local ARP responder is dependent on the
# ML2 l2 population mechanism driver.
# The next two tests use the l2_pop flag to test the ARP responder
def test_construct_with_arp_responder(self):
self._build_agent(l2_population=True, arp_responder=True)
self.mock_tun_bridge_expected.insert(
5, mock.call.add_flow(table=constants.PATCH_LV_TO_TUN,
priority=1,
proto="arp",
dl_dst="ff:ff:ff:ff:ff:ff",
actions="resubmit(,%s)" %
constants.ARP_RESPONDER)
)
self.mock_tun_bridge_expected.insert(
12, mock.call.add_flow(table=constants.ARP_RESPONDER,
priority=0,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN)
)
self._verify_mock_calls()
def test_construct_without_arp_responder(self):
self._build_agent(l2_population=False, arp_responder=True)
self._verify_mock_calls()
def test_construct_vxlan(self):
self._build_agent(tunnel_types=['vxlan'])
self._verify_mock_calls()
def test_provision_local_vlan(self):
ofports = ','.join(TUN_OFPORTS[p_const.TYPE_GRE].values())
self.mock_tun_bridge_expected += [
mock.call.mod_flow(table=constants.FLOOD_TO_TUN,
dl_vlan=LV_ID,
actions="strip_vlan,"
"set_tunnel:%s,output:%s" %
(LS_ID, ofports)),
mock.call.add_flow(table=constants.TUN_TABLE['gre'],
priority=1,
tun_id=LS_ID,
actions="mod_vlan_vid:%s,resubmit(,%s)" %
(LV_ID, constants.LEARN_FROM_TUN)),
]
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.tun_br_ofports = TUN_OFPORTS
a.provision_local_vlan(NET_UUID, p_const.TYPE_GRE, None, LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_flat(self):
action_string = 'strip_vlan,normal'
self.mock_map_tun_bridge_expected.append(
mock.call.add_flow(priority=4, in_port=self.MAP_TUN_PHY_OFPORT,
dl_vlan=LV_ID, actions=action_string))
action_string = 'mod_vlan_vid:%s,normal' % LV_ID
self.mock_int_bridge_expected.append(
mock.call.add_flow(priority=3, in_port=self.INT_OFPORT,
dl_vlan=65535, actions=action_string))
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.provision_local_vlan(NET_UUID, p_const.TYPE_FLAT, 'net1', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_flat_fail(self):
a = self._build_agent()
a.provision_local_vlan(NET_UUID, p_const.TYPE_FLAT, 'net2', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_vlan(self):
action_string = 'mod_vlan_vid:%s,normal' % LS_ID
self.mock_map_tun_bridge_expected.append(
mock.call.add_flow(priority=4, in_port=self.MAP_TUN_PHY_OFPORT,
dl_vlan=LV_ID, actions=action_string))
action_string = 'mod_vlan_vid:%s,normal' % LS_ID
self.mock_int_bridge_expected.append(
mock.call.add_flow(priority=3, in_port=self.INT_OFPORT,
dl_vlan=LV_ID, actions=action_string))
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.provision_local_vlan(NET_UUID, p_const.TYPE_VLAN, 'net1', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_vlan_fail(self):
a = self._build_agent()
a.provision_local_vlan(NET_UUID, p_const.TYPE_VLAN, 'net2', LS_ID)
self._verify_mock_calls()
def test_reclaim_local_vlan(self):
self.mock_tun_bridge_expected += [
mock.call.delete_flows(
table=constants.TUN_TABLE['gre'], tun_id=LS_ID),
mock.call.delete_flows(dl_vlan=LVM.vlan)
]
a = self._build_agent()
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = LVM
a.reclaim_local_vlan(NET_UUID)
self.assertIn(LVM.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_reclaim_local_vlan_flat(self):
self.mock_map_tun_bridge_expected.append(
mock.call.delete_flows(
in_port=self.MAP_TUN_PHY_OFPORT, dl_vlan=LVM_FLAT.vlan))
self.mock_int_bridge_expected.append(
mock.call.delete_flows(
dl_vlan=65535, in_port=self.INT_OFPORT))
a = self._build_agent()
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = LVM_FLAT
a.reclaim_local_vlan(NET_UUID)
self.assertIn(LVM_FLAT.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_reclaim_local_vlan_vlan(self):
self.mock_map_tun_bridge_expected.append(
mock.call.delete_flows(
in_port=self.MAP_TUN_PHY_OFPORT, dl_vlan=LVM_VLAN.vlan))
self.mock_int_bridge_expected.append(
mock.call.delete_flows(
dl_vlan=LV_ID, in_port=self.INT_OFPORT))
a = self._build_agent()
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = LVM_VLAN
a.reclaim_local_vlan(NET_UUID)
self.assertIn(LVM_VLAN.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_port_bound(self):
self.mock_int_bridge_expected += [
mock.call.db_get_val('Port', VIF_PORT.port_name, 'tag'),
mock.call.set_db_attribute('Port', VIF_PORT.port_name,
'tag', str(LVM.vlan)),
mock.call.delete_flows(in_port=VIF_PORT.ofport)
]
a = self._build_agent()
a.local_vlan_map[NET_UUID] = LVM
a.local_dvr_map = {}
a.port_bound(VIF_PORT, NET_UUID, 'gre', None, LS_ID,
FIXED_IPS, VM_DEVICE_OWNER, False)
self._verify_mock_calls()
def test_port_unbound(self):
with mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'reclaim_local_vlan') as reclaim_local_vlan:
a = self._build_agent()
a.local_vlan_map[NET_UUID] = LVM
a.port_unbound(VIF_ID, NET_UUID)
reclaim_local_vlan.assert_called_once_with(NET_UUID)
self._verify_mock_calls()
def test_port_dead(self):
self.mock_int_bridge_expected += [
mock.call.db_get_val('Port', VIF_PORT.port_name, 'tag'),
mock.call.set_db_attribute(
'Port', VIF_PORT.port_name,
'tag', ovs_neutron_agent.DEAD_VLAN_TAG),
mock.call.add_flow(priority=2, in_port=VIF_PORT.ofport,
actions='drop')
]
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.local_vlan_map[NET_UUID] = LVM
a.port_dead(VIF_PORT)
self._verify_mock_calls()
def test_tunnel_update(self):
tunnel_port = '9999'
self.mock_tun_bridge.add_tunnel_port.return_value = tunnel_port
self.mock_tun_bridge_expected += [
mock.call.add_tunnel_port('gre-1', '10.0.10.1', '10.0.0.1',
'gre', 4789, True),
mock.call.add_flow(priority=1, in_port=tunnel_port,
actions='resubmit(,3)')
]
a = self._build_agent()
a.tunnel_update(
mock.sentinel.ctx, tunnel_id='1', tunnel_ip='10.0.10.1',
tunnel_type=p_const.TYPE_GRE)
self._verify_mock_calls()
def test_tunnel_update_self(self):
a = self._build_agent()
a.tunnel_update(
mock.sentinel.ctx, tunnel_id='1', tunnel_ip='10.0.0.1')
self._verify_mock_calls()
def test_daemon_loop(self):
reply2 = {'current': set(['tap0']),
'added': set(['tap2']),
'removed': set([])}
reply3 = {'current': set(['tap2']),
'added': set([]),
'removed': set(['tap0'])}
self.mock_int_bridge_expected += [
mock.call.dump_flows_for_table(constants.CANARY_TABLE),
mock.call.dump_flows_for_table(constants.CANARY_TABLE)
]
with contextlib.nested(
mock.patch.object(log.ContextAdapter, 'exception'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'scan_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'process_network_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'tunnel_sync')
) as (log_exception, scan_ports, process_network_ports, ts):
log_exception.side_effect = Exception(
'Fake exception to get out of the loop')
scan_ports.side_effect = [reply2, reply3]
process_network_ports.side_effect = [
False, Exception('Fake exception to get out of the loop')]
q_agent = self._build_agent()
# Hack to test the loop:
# we start the method and expect it to raise after the 2nd iteration.
# If something goes wrong, assert_has_calls below will catch it.
try:
q_agent.daemon_loop()
except Exception:
pass
# FIXME(salv-orlando): There should not be assertions on log messages
log_exception.assert_called_once_with(
"Error while processing VIF ports")
scan_ports.assert_has_calls([
mock.call(set(), set()),
mock.call(set(['tap0']), set())
])
process_network_ports.assert_has_calls([
mock.call({'current': set(['tap0']),
'removed': set([]),
'added': set(['tap2'])}, False),
mock.call({'current': set(['tap2']),
'removed': set(['tap0']),
'added': set([])}, False)
])
self._verify_mock_calls()
class TunnelTestUseVethInterco(TunnelTest):
USE_VETH_INTERCONNECTION = True
def _define_expected_calls(self):
self.mock_bridge_expected = [
mock.call(self.INT_BRIDGE, 'sudo'),
mock.call(self.MAP_TUN_BRIDGE, 'sudo'),
mock.call(self.TUN_BRIDGE, 'sudo'),
]
self.mock_int_bridge_expected = [
mock.call.create(),
mock.call.set_secure_mode(),
mock.call.delete_port('patch-tun'),
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1, actions='normal'),
mock.call.add_flow(table=constants.CANARY_TABLE, priority=0,
actions="drop")
]
self.mock_map_tun_bridge_expected = [
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1, actions='normal'),
mock.call.delete_port('phy-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_port(self.intb),
]
self.mock_int_bridge_expected += [
mock.call.delete_port('int-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_port(self.inta)
]
self.mock_int_bridge_expected += [
mock.call.add_flow(priority=2,
in_port=self.MAP_TUN_INT_OFPORT,
actions='drop')
]
self.mock_map_tun_bridge_expected += [
mock.call.add_flow(priority=2,
in_port=self.MAP_TUN_PHY_OFPORT,
actions='drop')
]
self.mock_tun_bridge_expected = [
mock.call.reset_bridge(),
mock.call.add_patch_port('patch-int', 'patch-tun'),
]
self.mock_int_bridge_expected += [
mock.call.add_patch_port('patch-tun', 'patch-int')
]
self.mock_tun_bridge_expected += [
mock.call.remove_all_flows(),
mock.call.add_flow(priority=1,
in_port=self.INT_OFPORT,
actions="resubmit(,%s)" %
constants.PATCH_LV_TO_TUN),
mock.call.add_flow(priority=0, actions='drop'),
mock.call.add_flow(priority=0,
table=constants.PATCH_LV_TO_TUN,
dl_dst=UCAST_MAC,
actions="resubmit(,%s)" %
constants.UCAST_TO_TUN),
mock.call.add_flow(priority=0,
table=constants.PATCH_LV_TO_TUN,
dl_dst=BCAST_MAC,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN),
]
for tunnel_type in constants.TUNNEL_NETWORK_TYPES:
self.mock_tun_bridge_expected.append(
mock.call.add_flow(
table=constants.TUN_TABLE[tunnel_type],
priority=0,
actions="drop"))
learned_flow = ("table=%s,"
"priority=1,"
"hard_timeout=300,"
"NXM_OF_VLAN_TCI[0..11],"
"NXM_OF_ETH_DST[]=NXM_OF_ETH_SRC[],"
"load:0->NXM_OF_VLAN_TCI[],"
"load:NXM_NX_TUN_ID[]->NXM_NX_TUN_ID[],"
"output:NXM_OF_IN_PORT[]" %
constants.UCAST_TO_TUN)
self.mock_tun_bridge_expected += [
mock.call.add_flow(table=constants.LEARN_FROM_TUN,
priority=1,
actions="learn(%s),output:%s" %
(learned_flow, self.INT_OFPORT)),
mock.call.add_flow(table=constants.UCAST_TO_TUN,
priority=0,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN),
mock.call.add_flow(table=constants.FLOOD_TO_TUN,
priority=0,
actions="drop")
]
self.device_exists_expected = [
mock.call('int-%s' % self.MAP_TUN_BRIDGE, 'sudo'),
]
self.ipdevice_expected = [
mock.call('int-%s' % self.MAP_TUN_BRIDGE, 'sudo'),
mock.call().link.delete()
]
self.ipwrapper_expected = [
mock.call('sudo'),
mock.call().add_veth('int-%s' % self.MAP_TUN_BRIDGE,
'phy-%s' % self.MAP_TUN_BRIDGE)
]
self.get_bridges_expected = [mock.call('sudo'), mock.call('sudo')]
self.inta_expected = [mock.call.link.set_up()]
self.intb_expected = [mock.call.link.set_up()]
self.execute_expected = [mock.call(['/sbin/udevadm', 'settle',
'--timeout=10'])]
class TunnelTestWithMTU(TunnelTestUseVethInterco):
VETH_MTU = 1500
def _define_expected_calls(self):
super(TunnelTestWithMTU, self)._define_expected_calls()
self.inta_expected.append(mock.call.link.set_mtu(self.VETH_MTU))
self.intb_expected.append(mock.call.link.set_mtu(self.VETH_MTU))
|
|
#!/usr/bin/env python
#
# Copyright 2011-2015 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1
from splunklib.searchcommands.decorators import Configuration, Option
from splunklib.searchcommands.validators import Boolean
from splunklib.searchcommands.search_command import SearchCommand
from contextlib import closing
from cStringIO import StringIO
from itertools import izip
from unittest import main, TestCase
import os
class TestInternals(TestCase):
def setUp(self):
TestCase.setUp(self)
def test_command_line_parser(self):
@Configuration()
class TestCommandLineParserCommand(SearchCommand):
required_option = Option(validate=Boolean(), require=True)
unnecessary_option = Option(validate=Boolean(), default=True, require=False)
class ConfigurationSettings(SearchCommand.ConfigurationSettings):
@classmethod
def fix_up(cls, command_class): pass
# Command line without fieldnames
options = ['required_option=true', 'unnecessary_option=false']
command = TestCommandLineParserCommand()
CommandLineParser.parse(command, options)
for option in command.options.itervalues():
if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']:
self.assertFalse(option.is_set)
continue
self.assertTrue(option.is_set)
expected = 'testcommandlineparser required_option="t" unnecessary_option="f"'
self.assertEqual(expected, str(command))
self.assertEqual(command.fieldnames, [])
# Command line with fieldnames
fieldnames = ['field_1', 'field_2', 'field_3']
command = TestCommandLineParserCommand()
CommandLineParser.parse(command, options + fieldnames)
for option in command.options.itervalues():
if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']:
self.assertFalse(option.is_set)
continue
self.assertTrue(option.is_set)
expected = 'testcommandlineparser required_option="t" unnecessary_option="f" field_1 field_2 field_3'
self.assertEqual(expected, str(command))
self.assertEquals(command.fieldnames, fieldnames)
# Command line without any unnecessary options
command = TestCommandLineParserCommand()
CommandLineParser.parse(command, ['required_option=true'] + fieldnames)
for option in command.options.itervalues():
if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', 'show_configuration']:
self.assertFalse(option.is_set)
continue
self.assertTrue(option.is_set)
expected = 'testcommandlineparser required_option="t" field_1 field_2 field_3'
self.assertEqual(expected, str(command))
self.assertEquals(command.fieldnames, fieldnames)
# Command line with missing required options, with or without fieldnames or unnecessary options
options = ['unnecessary_option=true']
self.assertRaises(ValueError, CommandLineParser.parse, command, options + fieldnames)
self.assertRaises(ValueError, CommandLineParser.parse, command, options)
self.assertRaises(ValueError, CommandLineParser.parse, command, [])
# Command line with unrecognized options
self.assertRaises(ValueError, CommandLineParser.parse, command, ['unrecognized_option_1=foo', 'unrecognized_option_2=bar'])
# Command line with a variety of quoted/escaped text options
@Configuration()
class TestCommandLineParserCommand(SearchCommand):
text = Option()
class ConfigurationSettings(SearchCommand.ConfigurationSettings):
@classmethod
def fix_up(cls, command_class): pass
strings = [
r'"foo bar"',
r'"foo/bar"',
r'"foo\\bar"',
r'"""foo bar"""',
r'"\"foo bar\""',
r'Hello\ World!',
r'\"Hello\ World!\"']
expected_values = [
r'foo bar',
r'foo/bar',
r'foo\bar',
r'"foo bar"',
r'"foo bar"',
r'Hello World!',
r'"Hello World!"'
]
for string, expected_value in izip(strings, expected_values):
command = TestCommandLineParserCommand()
argv = ['text', '=', string]
CommandLineParser.parse(command, argv)
self.assertEqual(command.text, expected_value)
for string, expected_value in izip(strings, expected_values):
command = TestCommandLineParserCommand()
argv = [string]
CommandLineParser.parse(command, argv)
self.assertEqual(command.fieldnames[0], expected_value)
for string, expected_value in izip(strings, expected_values):
command = TestCommandLineParserCommand()
argv = ['text', '=', string] + strings
CommandLineParser.parse(command, argv)
self.assertEqual(command.text, expected_value)
self.assertEqual(command.fieldnames, expected_values)
strings = [
'some\\ string\\',
r'some\ string"',
r'"some string',
r'some"string'
]
for string in strings:
command = TestCommandLineParserCommand()
argv = [string]
self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv)
return
def test_command_line_parser_unquote(self):
parser = CommandLineParser
options = [
r'foo', # unquoted string with no escaped characters
r'fo\o\ b\"a\\r', # unquoted string with some escaped characters
r'"foo"', # quoted string with no special characters
r'"""foobar1"""', # quoted string with quotes escaped like this: ""
r'"\"foobar2\""', # quoted string with quotes escaped like this: \"
r'"foo ""x"" bar"', # quoted string with quotes escaped like this: ""
r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \"
r'"\\foobar"', # quoted string with an escaped backslash
r'"foo \\ bar"', # quoted string with an escaped backslash
r'"foobar\\"', # quoted string with an escaped backslash
r'foo\\\bar', # unquoted string with an escaped backslash and an escaped 'b'
r'""', # pair of quotes
r''] # empty string
expected = [
r'foo',
r'foo b"a\r',
r'foo',
r'"foobar1"',
r'"foobar2"',
r'foo "x" bar',
r'foo "x" bar',
'\\foobar',
r'foo \ bar',
'foobar\\',
r'foo\bar',
r'',
r'']
# Command line with an assortment of string values
self.assertEqual(expected[-4], parser.unquote(options[-4]))
for i in range(0, len(options)):
self.assertEqual(expected[i], parser.unquote(options[i]))
self.assertRaises(SyntaxError, parser.unquote, '"')
self.assertRaises(SyntaxError, parser.unquote, '"foo')
self.assertRaises(SyntaxError, parser.unquote, 'foo"')
self.assertRaises(SyntaxError, parser.unquote, 'foo\\')
def test_input_header(self):
# No items
input_header = InputHeader()
with closing(StringIO('\r\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 0)
# One unnamed single-line item (same as no items)
input_header = InputHeader()
with closing(StringIO('this%20is%20an%20unnamed%20single-line%20item\n\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 0)
input_header = InputHeader()
with closing(StringIO('this%20is%20an%20unnamed\nmulti-\nline%20item\n\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 0)
# One named single-line item
input_header = InputHeader()
with closing(StringIO('Foo:this%20is%20a%20single-line%20item\n\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 1)
self.assertEquals(input_header['Foo'], 'this is a single-line item')
input_header = InputHeader()
with closing(StringIO('Bar:this is a\nmulti-\nline item\n\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 1)
self.assertEquals(input_header['Bar'], 'this is a\nmulti-\nline item')
# The infoPath item (which is the path to a file that we open for reads)
input_header = InputHeader()
with closing(StringIO('infoPath:non-existent.csv\n\n'.encode())) as input_file:
input_header.read(input_file)
self.assertEquals(len(input_header), 1)
self.assertEqual(input_header['infoPath'], 'non-existent.csv')
# Set of named items
collection = {
'word_list': 'hello\nworld\n!',
'word_1': 'hello',
'word_2': 'world',
'word_3': '!',
'sentence': 'hello world!'}
input_header = InputHeader()
text = reduce(lambda value, item: value + '{}:{}\n'.format(item[0], item[1]), collection.iteritems(), '') + '\n'
with closing(StringIO(text.encode())) as input_file:
input_header.read(input_file)
self.assertDictEqual(input_header, collection)
# Set of named items with an unnamed item at the beginning (the only place that an unnamed item can appear)
with closing(StringIO(('unnamed item\n' + text).encode())) as input_file:
input_header.read(input_file)
self.assertDictEqual(input_header, collection)
# Test iterators, indirectly through items, keys, and values
self.assertEqual(sorted(input_header.items()), sorted(collection.items()))
self.assertEqual(sorted(input_header.keys()), sorted(collection.keys()))
self.assertEqual(sorted(input_header.values()), sorted(collection.values()))
return
def test_messages_header(self):
@Configuration()
class TestMessagesHeaderCommand(SearchCommand):
class ConfigurationSettings(SearchCommand.ConfigurationSettings):
@classmethod
def fix_up(cls, command_class): pass
command = TestMessagesHeaderCommand()
command._protocol_version = 1
output_buffer = StringIO()
command._record_writer = RecordWriterV1(output_buffer)
messages = [
(command.write_debug, 'debug_message'),
(command.write_error, 'error_message'),
(command.write_fatal, 'fatal_message'),
(command.write_info, 'info_message'),
(command.write_warning, 'warning_message')]
for write, message in messages:
write(message)
command.finish()
expected = (
'debug_message=debug_message\r\n'
'error_message=error_message\r\n'
'error_message=fatal_message\r\n'
'info_message=info_message\r\n'
'warn_message=warning_message\r\n'
'\r\n')
self.assertEquals(output_buffer.getvalue(), expected)
return
_package_path = os.path.dirname(__file__)
if __name__ == "__main__":
main()
|
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.manager import Manager
from django.test.testcases import TestCase
from moderation.register import ModerationManager, RegistrationError
from moderation.moderator import GenericModerator
from moderation.managers import ModerationObjectsManager
from moderation.models import ModeratedObject, MODERATION_STATUS_APPROVED
from moderation.signals import pre_moderation, post_moderation
from tests.models import UserProfile, \
ModelWithSlugField, ModelWithSlugField2, ModelWithMultipleManagers, \
CustomModel
from tests.utils import setup_moderation
from tests.utils import teardown_moderation
from moderation.helpers import import_moderator
from tests.models import Book
# reload is builtin in Python 2.x. Needs to be imported for Py3k
try:
from importlib import reload
except ImportError:
try:
# Python 3.2
from imp import reload
except:
pass
from django.db import IntegrityError, transaction
class MyModelModerator(GenericModerator):
pass
class RegistrationTestCase(TestCase):
fixtures = ['test_users.json', 'test_moderation.json']
def setUp(self):
self.moderation = setup_moderation([UserProfile])
self.user = User.objects.get(username='moderator')
def tearDown(self):
teardown_moderation()
def test_get_moderator(self):
moderator = self.moderation.get_moderator(UserProfile)
self.assertTrue(isinstance(moderator, GenericModerator))
def test_get_of_new_object_should_raise_exception(self):
"""Tests that, after a model class is registered with moderation,
getting a newly created object raises ObjectDoesNotExist"""
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
self.assertRaises(ObjectDoesNotExist, UserProfile.objects.get,
pk=profile.pk)
def test_creation_of_moderated_object(self):
"""
Test that creating a new object also creates a corresponding moderated object
"""
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
self.assertEqual(str(moderated_object),
"user1 - http://www.yahoo.com")
def test_get_of_existing_object_should_return_old_version_of_object(self):
"""Tests that, after a model class is registered with moderation,
saving an existing object and then getting it returns the
old version of the object"""
profile = UserProfile.objects.get(user__username='moderator')
moderated_object = ModeratedObject(content_object=profile)
moderated_object.save()
moderated_object.approve(moderated_by=self.user)
profile.description = "New description"
profile.save()
old_profile = UserProfile.objects.get(pk=profile.pk)
self.assertEqual(old_profile.description, 'Old description')
def test_can_use_object_without_moderated_object(self):
"""
For backwards compatibility, when django-moderation is added to an
existing project, records with no ModeratedObject should be visible
as before.
"""
profile = UserProfile.objects.get(user__username='moderator')
# Pretend that it was created before django-moderation was installed,
# by deleting the ModeratedObject.
ModeratedObject.objects.filter(object_pk=profile.pk).delete()
# We should be able to load it
profile = UserProfile.objects.get(user__username='moderator')
# And save changes to it
profile.description = "New description"
profile.save()
# And now it should be invisible, because it's pending
self.assertEqual(
[], list(UserProfile.objects.all()),
"The previously unmoderated object should now be invisible, "
"because it has never been accepted.")
def test_register(self):
"""Tests that creating a new instance of a registered model
class also creates a new moderation object"""
UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1')).save()
self.assertEqual(ModeratedObject.objects.all().count(), 1,
"New moderation object was not created"
" after creation of new model instance "
"from model class that is registered with moderation")
def test_exception_is_raised_when_class_is_registered(self):
self.assertRaises(RegistrationError, self.moderation.register,
UserProfile)
def test_custom_moderator_should_be_registered_with_moderation(self):
self.moderation.register(CustomModel, MyModelModerator)
moderator_instance = self.moderation._registered_models[CustomModel]
self.assertTrue(isinstance(moderator_instance, MyModelModerator))
class AutoDiscoverTestCase(TestCase):
urls = 'tests.urls.auto_register'
def setUp(self):
self.moderation = setup_moderation()
def tearDown(self):
teardown_moderation()
def test_models_should_be_registered_if_moderator_in_module(self):
module = import_moderator('tests')
try: # force module reload
reload(module)
except:
pass
self.assertTrue(Book in self.moderation._registered_models)
self.assertEqual(module.__name__,
'tests.moderator')
class RegisterMultipleManagersTestCase(TestCase):
def setUp(self):
self.moderation = ModerationManager()
class ModelWithMultipleManagersModerator(GenericModerator):
manager_names = ['objects', 'men', 'women']
setup_moderation([(ModelWithMultipleManagers,
ModelWithMultipleManagersModerator)])
def tearDown(self):
teardown_moderation()
def test_multiple_managers(self):
obj = ModelWithMultipleManagers(gender=0)
obj.save()
obj2 = ModelWithMultipleManagers(gender=1)
obj2.save()
men = ModelWithMultipleManagers.men.all()
women = ModelWithMultipleManagers.women.all()
self.assertEqual(men.count(), 0)
self.assertEqual(women.count(), 0)
class IntegrityErrorTestCase(TestCase):
def setUp(self):
self.moderation = setup_moderation([ModelWithSlugField])
def tearDown(self):
teardown_moderation()
def test_raise_integrity_error_model_registered_with_moderation(self):
m1 = ModelWithSlugField(slug='test')
m1.save()
self.assertRaises(ObjectDoesNotExist, ModelWithSlugField.objects.get,
slug='test')
m2 = ModelWithSlugField(slug='test')
if hasattr(transaction, 'atomic'):
with transaction.atomic():
self.assertRaises(IntegrityError, m2.save)
else:
self.assertRaises(IntegrityError, m2.save)
self.assertEqual(ModeratedObject.objects.all().count(), 1)
def test_raise_integrity_error_model_not_registered_with_moderation(self):
m1 = ModelWithSlugField2(slug='test')
m1.save()
m1 = ModelWithSlugField2.objects.get(slug='test')
m2 = ModelWithSlugField2(slug='test')
if hasattr(transaction, 'atomic'):
with transaction.atomic():
self.assertRaises(IntegrityError, m2.save)
else:
self.assertRaises(IntegrityError, m2.save)
self.assertEqual(ModeratedObject.objects.all().count(), 0)
class IntegrityErrorRegressionTestCase(TestCase):
def setUp(self):
self.moderation = ModerationManager()
self.moderation.register(ModelWithSlugField)
self.filter_moderated_objects = ModelWithSlugField.objects.\
filter_moderated_objects
def filter_moderated_objects(query_set):
from moderation.models import MODERATION_STATUS_PENDING,\
MODERATION_STATUS_REJECTED
exclude_pks = []
for obj in query_set:
try:
if obj.moderated_object.moderation_status\
in [MODERATION_STATUS_PENDING,
MODERATION_STATUS_REJECTED]\
and obj.__dict__ == \
obj.moderated_object.changed_object.__dict__:
exclude_pks.append(obj.pk)
except ObjectDoesNotExist:
pass
return query_set.exclude(pk__in=exclude_pks)
setattr(ModelWithSlugField.objects,
'filter_moderated_objects',
filter_moderated_objects)
def tearDown(self):
self.moderation.unregister(ModelWithSlugField)
def test_old_version_of_filter_moderated_objects_method(self):
m1 = ModelWithSlugField(slug='test')
m1.save()
m2 = ModelWithSlugField(slug='test')
if hasattr(transaction, 'atomic'):
with transaction.atomic():
self.assertRaises(IntegrityError, m2.save)
else:
self.assertRaises(IntegrityError, m2.save)
self.assertEqual(ModeratedObject.objects.all().count(), 1)
class ModerationManagerTestCase(TestCase):
fixtures = ['test_users.json', 'test_moderation.json']
def setUp(self):
self.moderation = setup_moderation()
self.user = User.objects.get(username='moderator')
def tearDown(self):
teardown_moderation()
def test_unregister(self):
"""Tests if model class is successfully unregistered from moderation"""
from django.db.models import signals
old_pre_save_receivers = [r for r in signals.pre_save.receivers]
old_post_save_receivers = [r for r in signals.post_save.receivers]
signals.pre_save.receivers = []
signals.post_save.receivers = []
self.moderation.register(UserProfile)
self.assertNotEqual(signals.pre_save.receivers, [])
self.assertNotEqual(signals.post_save.receivers, [])
UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1')).save()
self.moderation.unregister(UserProfile)
self.assertEqual(signals.pre_save.receivers, [])
self.assertEqual(signals.post_save.receivers, [])
self.assertEqual(UserProfile.objects.__class__, Manager)
self.assertEqual(hasattr(UserProfile, 'moderated_object'), False)
signals.pre_save.receivers = old_pre_save_receivers
signals.post_save.receivers = old_post_save_receivers
UserProfile.objects.get(user__username='user1')
User.objects.get(username='moderator')
management.call_command('loaddata', 'test_moderation.json',
verbosity=0)
def test_moderation_manager(self):
moderation = ModerationManager()
self.assertEqual(moderation._registered_models, {})
def test_save_new_instance_after_add_and_remove_fields_from_class(self):
"""Test that a new model instance can be created after
moderation is removed from the model class"""
class CustomManager(Manager):
pass
moderator = GenericModerator(UserProfile)
self.moderation._and_fields_to_model_class(moderator)
self.moderation._remove_fields(moderator)
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
up = UserProfile._default_manager.filter(url='http://www.yahoo.com')
self.assertEqual(up.count(), 1)
def test_and_fields_to_model_class(self):
class CustomManager(Manager):
pass
moderator = GenericModerator(UserProfile)
self.moderation._and_fields_to_model_class(moderator)
manager = ModerationObjectsManager()(CustomManager)()
self.assertEqual(repr(UserProfile.objects.__class__),
repr(manager.__class__))
self.assertEqual(hasattr(UserProfile, 'moderated_object'), True)
# clean up
self.moderation._remove_fields(moderator)
def test_get_or_create_moderated_object_exist(self):
self.moderation.register(UserProfile)
profile = UserProfile.objects.get(user__username='moderator')
moderator = self.moderation.get_moderator(UserProfile)
ModeratedObject(content_object=profile).save()
profile.description = "New description"
unchanged_obj = self.moderation._get_unchanged_object(profile)
object = self.moderation._get_or_create_moderated_object(profile,
unchanged_obj,
moderator)
self.assertNotEqual(object.pk, None)
self.assertEqual(object.changed_object.description,
'Old description')
self.moderation.unregister(UserProfile)
def test_get_or_create_moderated_object_does_not_exist(self):
profile = UserProfile.objects.get(user__username='moderator')
profile.description = "New description"
self.moderation.register(UserProfile)
moderator = self.moderation.get_moderator(UserProfile)
unchanged_obj = self.moderation._get_unchanged_object(profile)
object = self.moderation._get_or_create_moderated_object(profile,
unchanged_obj,
moderator)
self.assertEqual(object.pk, None)
self.assertEqual(object.changed_object.description,
'Old description')
self.moderation.unregister(UserProfile)
def test_get_or_create_moderated_object_keep_history(self):
profile = UserProfile.objects.get(user__username='moderator')
profile.description = "New description"
self.moderation.register(UserProfile)
moderator = self.moderation.get_moderator(UserProfile)
moderator.keep_history = True
unchanged_obj = self.moderation._get_unchanged_object(profile)
moderated_object = self.moderation._get_or_create_moderated_object(
profile, unchanged_obj, moderator)
self.assertEqual(moderated_object.pk, None)
self.assertEqual(moderated_object.changed_object.description,
'Old description')
moderated_object.save()
# moderated_object should have a pk now, and since it's the first one
# it should be 1
self.assertEqual(1, moderated_object.pk)
# If we call it again, we should get a new moderated_object, evidenced
# by having no pk
moderated_object_2 = self.moderation._get_or_create_moderated_object(
profile, unchanged_obj, moderator)
self.assertEqual(moderated_object_2.pk, None)
self.assertEqual(moderated_object_2.changed_object.description,
'Old description')
def test_get_unchanged_object(self):
profile = UserProfile.objects.get(user__username='moderator')
profile.description = "New description"
object = self.moderation._get_unchanged_object(profile)
self.assertEqual(object.description,
'Old description')
class LoadingFixturesTestCase(TestCase):
fixtures = ['test_users.json']
def setUp(self):
self.new_moderation = setup_moderation([UserProfile])
self.user = User.objects.get(username='moderator')
def tearDown(self):
teardown_moderation()
def test_loading_fixture_for_moderated_model(self):
management.call_command('loaddata', 'test_moderation.json',
verbosity=0)
self.assertEqual(UserProfile.objects.all().count(), 1)
def test_loading_objs_from_fixture_should_not_create_moderated_obj(self):
management.call_command('loaddata', 'test_moderation.json',
verbosity=0)
profile = UserProfile.objects.get(user__username='moderator')
self.assertRaises(ObjectDoesNotExist,
ModeratedObject.objects.get, object_pk=profile.pk)
def test_moderated_object_is_created_when_not_loaded_from_fixture(self):
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
moderated_objs = ModeratedObject.objects.filter(object_pk=profile.pk)
self.assertEqual(moderated_objs.count(), 1)
class ModerationSignalsTestCase(TestCase):
fixtures = ['test_users.json', 'test_moderation.json']
def setUp(self):
class UserProfileModerator(GenericModerator):
notify_moderator = False
self.moderation = setup_moderation(
[(UserProfile, UserProfileModerator)])
self.moderation._disconnect_signals(UserProfile)
self.user = User.objects.get(username='moderator')
self.profile = UserProfile.objects.get(user__username='moderator')
def tearDown(self):
teardown_moderation()
def test_send_pre_moderation_signal(self):
"""Check that the custom pre-moderation handler is called when
the pre_moderation signal is sent"""
def custom_pre_moderation_handler(sender, instance, status, **kwargs):
# do some stuff with approved instance
instance.description = 'Change description'
instance.save()
pre_moderation.connect(custom_pre_moderation_handler,
sender=UserProfile)
pre_moderation.send(sender=UserProfile, instance=self.profile,
status=MODERATION_STATUS_APPROVED)
self.assertEqual(self.profile.description, 'Change description')
def test_send_post_moderation_signal(self):
"""Check that the custom post-moderation handler is called when
the post_moderation signal is sent"""
def custom_post_moderation_handler(sender, instance, status, **kwargs):
# do some stuff with approved instance
instance.description = 'Change description'
instance.save()
post_moderation.connect(custom_post_moderation_handler,
sender=UserProfile)
post_moderation.send(sender=UserProfile, instance=self.profile,
status=MODERATION_STATUS_APPROVED)
self.assertEqual(self.profile.description, 'Change description')
def test_connect_and_disconnect_signals(self):
from django.db.models import signals
old_pre_save_receivers = [r for r in signals.pre_save.receivers]
old_post_save_receivers = [r for r in signals.post_save.receivers]
signals.pre_save.receivers = []
signals.post_save.receivers = []
self.moderation._connect_signals(UserProfile)
self.assertNotEqual(signals.pre_save.receivers, [])
self.assertNotEqual(signals.post_save.receivers, [])
self.moderation._disconnect_signals(UserProfile)
self.assertEqual(signals.pre_save.receivers, [])
self.assertEqual(signals.post_save.receivers, [])
signals.pre_save.receivers = old_pre_save_receivers
signals.post_save.receivers = old_post_save_receivers
def test_after_disconnecting_signals_moderation_object(self):
self.moderation._connect_signals(UserProfile)
self.moderation._disconnect_signals(UserProfile)
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
self.assertRaises(ObjectDoesNotExist, ModeratedObject.objects.get,
object_pk=profile.pk)
def test_post_save_handler_for_existing_object(self):
from django.db.models import signals
signals.pre_save.connect(self.moderation.pre_save_handler,
sender=UserProfile)
signals.post_save.connect(self.moderation.post_save_handler,
sender=UserProfile)
profile = UserProfile.objects.get(user__username='moderator')
moderated_object = ModeratedObject(content_object=profile)
moderated_object.save()
moderated_object.approve(moderated_by=self.user)
profile.description = 'New description of user profile'
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
original_object = moderated_object.changed_object
self.assertEqual(original_object.description,
'New description of user profile')
self.assertEqual(UserProfile.objects.get(pk=profile.pk).description,
'Old description')
signals.pre_save.disconnect(self.moderation.pre_save_handler,
UserProfile)
signals.post_save.disconnect(self.moderation.post_save_handler,
UserProfile)
def test_pre_save_handler_for_existing_object(self):
from django.db.models import signals
signals.pre_save.connect(self.moderation.pre_save_handler,
sender=UserProfile)
profile = UserProfile.objects.get(user__username='moderator')
profile.description = 'New description of user profile'
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
original_object = moderated_object.changed_object
content_object = moderated_object.content_object
self.assertEqual(original_object.description,
'Old description')
self.assertEqual(content_object.description,
'New description of user profile')
signals.pre_save.disconnect(self.moderation.pre_save_handler,
UserProfile)
def test_post_save_handler_for_new_object(self):
from django.db.models import signals
signals.pre_save.connect(self.moderation.pre_save_handler,
sender=UserProfile)
signals.post_save.connect(self.moderation.post_save_handler,
sender=UserProfile)
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
self.assertEqual(moderated_object.content_object, profile)
signals.pre_save.disconnect(self.moderation.pre_save_handler,
UserProfile)
signals.post_save.disconnect(self.moderation.post_save_handler,
UserProfile)
def test_save_handler_keep_history(self):
# de-register current Moderator and replace it with one that
# has keep_history set to True
from moderation import moderation
class KeepHistoryModerator(GenericModerator):
keep_history = True
notify_moderator = False
moderation.unregister(UserProfile)
moderation.register(UserProfile, KeepHistoryModerator)
from django.db.models import signals
signals.pre_save.connect(self.moderation.pre_save_handler,
sender=UserProfile)
signals.post_save.connect(self.moderation.post_save_handler,
sender=UserProfile)
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
self.assertEqual(moderated_object.content_object, profile)
# Now update it and make sure it gets the right history object...
profile.url = 'http://www.google.com'
profile.save()
moderated_object = ModeratedObject.objects.get_for_instance(profile)
self.assertEqual(moderated_object.content_object, profile)
# There should only be two moderated objects
self.assertEqual(2, ModeratedObject.objects.count())
# Approve the change
moderated_object.approve(moderated_by=self.user,
reason='Testing post save handlers')
# There should *still* only be two moderated objects
self.assertEqual(2, ModeratedObject.objects.count())
signals.pre_save.disconnect(self.moderation.pre_save_handler,
UserProfile)
signals.post_save.disconnect(self.moderation.post_save_handler,
UserProfile)
self.moderation = False
def test_pre_save_handler_for_new_object(self):
from django.db.models import signals
signals.pre_save.connect(self.moderation.pre_save_handler,
sender=UserProfile)
profile = UserProfile(description='Profile for new user',
url='http://www.yahoo.com',
user=User.objects.get(username='user1'))
profile.save()
self.assertRaises(ObjectDoesNotExist,
ModeratedObject.objects.get_for_instance,
profile)
signals.pre_save.disconnect(self.moderation.pre_save_handler,
UserProfile)
|
|
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Collection of SA360 reporting operators.
Set of operators that allow developers to manage Queries and Reports within
Search Ads 360.
"""
import json
import os
import tempfile
from pathlib import Path
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from orchestra.google.marketing_platform.hooks.search_ads_360 import (
GoogleSearchAds360Hook
)
class GoogleSearchAds360InsertReportOperator(BaseOperator):
"""Creates and runs a new Search Ads 360 report.
Attributes:
report: The report body to create the report from. (templated)
Can receive a JSON string representing the report or a reference to a
template file. Template references are recognized by a string ending in
'.json'.
gcp_conn_id: The connection ID to use when fetching connection info.
delegate_to: The account to impersonate, if any.
XComs:
report_id: The ID for the report created.
"""
template_fields = ['params', 'report']
template_ext = ['.json']
def __init__(self,
report,
gcp_conn_id='google_cloud_default',
delegate_to=None,
*args,
**kwargs):
super(GoogleSearchAds360InsertReportOperator, self).__init__(*args, **kwargs)
self.report = report
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
self.hook = None
def execute(self, context):
if self.hook is None:
self.hook = GoogleSearchAds360Hook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to)
report_body = json.loads(self.report)
request = self.hook.get_service().reports().request(body=report_body)
response = request.execute()
context['task_instance'].xcom_push('report_id', response['id'])
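# Hypothetical usage sketch (not part of the original module): shows how this
# operator might be wired into a DAG. The task_id, the dag argument, and the
# report body fields below are illustrative assumptions, not values taken
# from this file.
def _example_insert_report_task(dag):
    """Returns an illustrative insert-report task for the given DAG."""
    report_body = {
        'reportScope': {'agencyId': '12345'},  # assumed placeholder ID
        'reportType': 'keyword',
        'columns': [{'columnName': 'date'}, {'columnName': 'clicks'}],
        'downloadFormat': 'csv',
        'statisticsCurrency': 'agency',
        'maxRowsPerFile': 1000000,
    }
    return GoogleSearchAds360InsertReportOperator(
        task_id='insert_sa360_report',
        report=json.dumps(report_body),
        dag=dag,
    )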
class GoogleSearchAds360DownloadReportOperator(BaseOperator):
"""Downloads a Search Ads 360 report into Google Cloud Storage.
Attributes:
report_id: The ID of the report to download. (templated)
destination_bucket: The destination Google cloud storage bucket where the
report should be written to. (templated)
destination_object: The destination name of the object in the destination
Google cloud storage bucket. (templated)
If the destination points to an existing folder, the report will be
written under the specified folder.
gcp_conn_id: The connection ID to use when fetching connection info.
delegate_to: The account to impersonate, if any.
XComs:
destination_bucket: The Google cloud storage bucket the report was written
to.
destination_object: The Google cloud storage URI for the report.
"""
template_fields = ['report_id', 'destination_bucket', 'destination_object']
def __init__(self,
report_id,
destination_bucket,
destination_object=None,
gcp_conn_id='google_cloud_default',
delegate_to=None,
*args,
**kwargs):
super(GoogleSearchAds360DownloadReportOperator, self).__init__(*args, **kwargs)
self.report_id = report_id
self.destination_bucket = destination_bucket
self.destination_object = destination_object
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
self.sa360_hook = None
self.gcs_hook = None
def _download_report(self, report_id, destination_file, fragment_count):
for i in range(fragment_count):
request = self.sa360_hook.get_service().reports().getFile(
reportId=report_id, reportFragment=i)
fragment = request.execute()
if i > 0:
fragment_records = fragment.split('\n', 1)
if len(fragment_records) > 1:
fragment = fragment_records[1]
else:
fragment = ''
destination_file.write(fragment)
def _get_destination_uri(self, destination_object, destination_file):
report_file_name = destination_file.name
if destination_object is None:
return report_file_name
if destination_object.endswith('/'):
return destination_object + report_file_name
return destination_object
def execute(self, context):
if self.sa360_hook is None:
self.sa360_hook = GoogleSearchAds360Hook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to)
if self.gcs_hook is None:
self.gcs_hook = GoogleCloudStorageHook(
google_cloud_storage_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to)
request = self.sa360_hook.get_service().reports().get(
reportId=self.report_id)
response = request.execute()
temp_file = tempfile.NamedTemporaryFile(delete=False)
try:
self._download_report(self.report_id, temp_file, len(response['files']))
destination_object_name = self._get_destination_uri(
self.destination_object, temp_file)
self.gcs_hook.upload(
bucket=self.destination_bucket,
object=destination_object_name,
filename=temp_file.name,
multipart=True)
context['task_instance'].xcom_push(
'destination_bucket', self.destination_bucket)
context['task_instance'].xcom_push(
'destination_object', destination_object_name)
finally:
temp_file.close()
os.unlink(temp_file.name)
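# Hypothetical usage sketch (not part of the original module): downloads the
# report created by an upstream insert task into GCS. The task_id, bucket and
# object names, and the XCom template below are illustrative assumptions.
def _example_download_report_task(dag):
    """Returns an illustrative download-report task for the given DAG."""
    return GoogleSearchAds360DownloadReportOperator(
        task_id='download_sa360_report',
        report_id=(
            "{{ task_instance.xcom_pull("
            "task_ids='insert_sa360_report', key='report_id') }}"
        ),
        destination_bucket='example-report-bucket',
        destination_object='sa360/keyword_report.csv',
        dag=dag,
    )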
class GoogleSearchAds360InsertConversionOperator(BaseOperator):
"""Inserts conversions into Search Ads 360.
Attributes:
conversions_file: Path to a JSON file with the conversions to be
inserted. (templated)
gcp_conn_id: The connection ID to use when fetching connection info.
delegate_to: The account to impersonate, if any.
"""
template_fields = ['conversions_file']
hook = None
@apply_defaults
def __init__(self,
*args,
conversions_file,
gcp_conn_id='google_cloud_default',
delegate_to=None,
**kwargs):
super(GoogleSearchAds360InsertConversionOperator, self).__init__(*args, **kwargs)
self.conversions_file = conversions_file
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
def execute(self, context):
file = Path(self.conversions_file)
if not file.is_file():
raise AirflowException(
f'conversions_file {self.conversions_file} not found'
)
if self.hook is None:
self.hook = GoogleSearchAds360Hook(
gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to
)
conversions = json.loads(file.read_text())
if not conversions:
self.log.info('No conversions to insert')
return
request = (
self.hook.get_service()
.conversion()
.insert(body={'conversion': conversions})
)
request.execute()
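# Illustrative usage sketch (not part of the original module): one way the two
# operators above might be wired into a DAG. It assumes the standard Airflow
# DAG class and the days_ago helper are available; the DAG id, report id,
# bucket, object prefix and file path are placeholders, not values taken from
# the original source.
def _example_sa360_dag():
    from airflow import DAG
    from airflow.utils.dates import days_ago

    with DAG('example_sa360_report',
             start_date=days_ago(1),
             schedule_interval=None) as dag:
        download_report = GoogleSearchAds360DownloadReportOperator(
            task_id='download_report',
            report_id='1234567',                       # placeholder report id
            destination_bucket='example-bucket',       # placeholder bucket
            destination_object='sa360/reports/')       # trailing '/' => folder
        insert_conversions = GoogleSearchAds360InsertConversionOperator(
            task_id='insert_conversions',
            conversions_file='/tmp/conversions.json')  # placeholder path
        download_report >> insert_conversions
    return dag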
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import unittest
from unittest import mock
from unittest.mock import ANY
import pytest
from botocore.exceptions import ClientError
from airflow.models import DAG, DagRun, TaskInstance
from airflow.operators.dummy import DummyOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from tests.test_utils.config import conf_vars
try:
import boto3
import moto
from moto import mock_s3
except ImportError:
mock_s3 = None
@unittest.skipIf(mock_s3 is None, "Skipping test because moto.mock_s3 is not available")
@mock_s3
class TestS3TaskHandler(unittest.TestCase):
@conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
def setUp(self):
super().setUp()
self.remote_log_base = 's3://bucket/remote/log/location'
self.remote_log_location = 's3://bucket/remote/log/location/1.log'
self.remote_log_key = 'remote/log/location/1.log'
self.local_log_location = 'local/log/location'
self.filename_template = '{try_number}.log'
self.s3_task_handler = S3TaskHandler(
self.local_log_location, self.remote_log_base, self.filename_template
)
# Verify the hook now with the config override
assert self.s3_task_handler.hook is not None
date = datetime(2016, 1, 1)
self.dag = DAG('dag_for_testing_s3_task_handler', start_date=date)
task = DummyOperator(task_id='task_for_testing_s3_log_handler', dag=self.dag)
dag_run = DagRun(dag_id=self.dag.dag_id, execution_date=date, run_id="test")
self.ti = TaskInstance(task=task)
self.ti.dag_run = dag_run
self.ti.try_number = 1
self.ti.state = State.RUNNING
self.addCleanup(self.dag.clear)
self.conn = boto3.client('s3')
# We need to create the bucket since this is all in Moto's 'virtual'
# AWS account
moto.core.moto_api_backend.reset()
self.conn.create_bucket(Bucket="bucket")
def tearDown(self):
if self.s3_task_handler.handler:
try:
os.remove(self.s3_task_handler.handler.baseFilename)
except Exception:
pass
def test_hook(self):
assert isinstance(self.s3_task_handler.hook, S3Hook)
assert self.s3_task_handler.hook.transfer_config.use_threads is False
@conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
def test_hook_raises(self):
handler = S3TaskHandler(self.local_log_location, self.remote_log_base, self.filename_template)
with mock.patch.object(handler.log, 'error') as mock_error:
with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook:
mock_hook.side_effect = Exception('Failed to connect')
# Initialize the hook
handler.hook
mock_error.assert_called_once_with(
'Could not create an S3Hook with connection id "%s". Please make '
'sure that apache-airflow[aws] is installed and the S3 connection exists. Exception : "%s"',
'aws_default',
ANY,
exc_info=True,
)
def test_log_exists(self):
self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'')
assert self.s3_task_handler.s3_log_exists(self.remote_log_location)
def test_log_exists_none(self):
assert not self.s3_task_handler.s3_log_exists(self.remote_log_location)
def test_log_exists_raises(self):
assert not self.s3_task_handler.s3_log_exists('s3://nonexistentbucket/foo')
def test_log_exists_no_hook(self):
with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook:
mock_hook.side_effect = Exception('Failed to connect')
with pytest.raises(Exception):
self.s3_task_handler.s3_log_exists(self.remote_log_location)
def test_set_context_raw(self):
self.ti.raw = True
mock_open = mock.mock_open()
with mock.patch('airflow.providers.amazon.aws.log.s3_task_handler.open', mock_open):
self.s3_task_handler.set_context(self.ti)
assert not self.s3_task_handler.upload_on_close
mock_open.assert_not_called()
def test_set_context_not_raw(self):
mock_open = mock.mock_open()
with mock.patch('airflow.providers.amazon.aws.log.s3_task_handler.open', mock_open):
self.s3_task_handler.set_context(self.ti)
assert self.s3_task_handler.upload_on_close
mock_open.assert_called_once_with(os.path.abspath('local/log/location/1.log'), 'w')
mock_open().write.assert_not_called()
def test_read(self):
self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'Log line\n')
log, metadata = self.s3_task_handler.read(self.ti)
assert (
log[0][0][-1]
== '*** Reading remote log from s3://bucket/remote/log/location/1.log.\nLog line\n\n'
)
assert metadata == [{'end_of_log': True}]
def test_read_when_s3_log_missing(self):
log, metadata = self.s3_task_handler.read(self.ti)
assert 1 == len(log)
assert len(log) == len(metadata)
assert '*** Log file does not exist:' in log[0][0][-1]
assert {'end_of_log': True} == metadata[0]
def test_s3_read_when_log_missing(self):
handler = self.s3_task_handler
url = 's3://bucket/foo'
with mock.patch.object(handler.log, 'error') as mock_error:
result = handler.s3_read(url, return_error=True)
msg = (
f'Could not read logs from {url} with error: An error occurred (404) when calling the '
f'HeadObject operation: Not Found'
)
assert result == msg
mock_error.assert_called_once_with(msg, exc_info=True)
def test_read_raises_return_error(self):
handler = self.s3_task_handler
url = 's3://nonexistentbucket/foo'
with mock.patch.object(handler.log, 'error') as mock_error:
result = handler.s3_read(url, return_error=True)
msg = (
f'Could not read logs from {url} with error: An error occurred (NoSuchBucket) when '
f'calling the HeadObject operation: The specified bucket does not exist'
)
assert result == msg
mock_error.assert_called_once_with(msg, exc_info=True)
def test_write(self):
with mock.patch.object(self.s3_task_handler.log, 'error') as mock_error:
self.s3_task_handler.s3_write('text', self.remote_log_location)
# We shouldn't expect any error logs in the default working case.
mock_error.assert_not_called()
body = boto3.resource('s3').Object('bucket', self.remote_log_key).get()['Body'].read()
assert body == b'text'
def test_write_existing(self):
self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'previous ')
self.s3_task_handler.s3_write('text', self.remote_log_location)
body = boto3.resource('s3').Object('bucket', self.remote_log_key).get()['Body'].read()
assert body == b'previous \ntext'
def test_write_raises(self):
handler = self.s3_task_handler
url = 's3://nonexistentbucket/foo'
with mock.patch.object(handler.log, 'error') as mock_error:
handler.s3_write('text', url)
mock_error.assert_called_once_with('Could not write logs to %s', url, exc_info=True)
def test_close(self):
self.s3_task_handler.set_context(self.ti)
assert self.s3_task_handler.upload_on_close
self.s3_task_handler.close()
# Should not raise
boto3.resource('s3').Object('bucket', self.remote_log_key).get()
def test_close_no_upload(self):
self.ti.raw = True
self.s3_task_handler.set_context(self.ti)
assert not self.s3_task_handler.upload_on_close
self.s3_task_handler.close()
with pytest.raises(ClientError):
boto3.resource('s3').Object('bucket', self.remote_log_key).get()
|
|
import sys
import time
from django.conf import settings
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
"""
This class encapsulates all backend-specific differences that pertain to
database *creation*, such as the column types to use for particular Django
Fields, the SQL used to create and destroy tables, and the creation and
destruction of test databases.
"""
data_types = {}
def __init__(self, connection):
self.connection = connection
def _digest(self, *args):
"""
Generates a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
return '%x' % (abs(hash(args)) % 4294967296L) # 2**32
def sql_create_model(self, model, style, known_models=set()):
"""
Returns the SQL required to create a single model, as a tuple of:
(list_of_sql, pending_references_dict)
"""
opts = model._meta
if not opts.managed or opts.proxy:
return [], {}
final_output = []
table_output = []
pending_references = {}
qn = self.connection.ops.quote_name
for f in opts.local_fields:
col_type = f.db_type(connection=self.connection)
tablespace = f.db_tablespace or opts.db_tablespace
if col_type is None:
# Skip ManyToManyFields, because they're not represented as
# database columns in this table.
continue
# Make the definition (e.g. 'foo VARCHAR(30)') for this field.
field_output = [style.SQL_FIELD(qn(f.column)),
style.SQL_COLTYPE(col_type)]
if not f.null:
field_output.append(style.SQL_KEYWORD('NOT NULL'))
if f.primary_key:
field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
elif f.unique:
field_output.append(style.SQL_KEYWORD('UNIQUE'))
if tablespace and f.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
field_output.append(self.connection.ops.tablespace_sql(tablespace, inline=True))
if f.rel:
ref_output, pending = self.sql_for_inline_foreign_key_references(f, known_models, style)
if pending:
pr = pending_references.setdefault(f.rel.to, []).append((model, f))
else:
field_output.extend(ref_output)
table_output.append(' '.join(field_output))
for field_constraints in opts.unique_together:
table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' % \
", ".join([style.SQL_FIELD(qn(opts.get_field(f).column)) for f in field_constraints]))
full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(qn(opts.db_table)) + ' (']
for i, line in enumerate(table_output): # Combine and add commas.
full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
full_statement.append(')')
if opts.db_tablespace:
full_statement.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
full_statement.append(';')
final_output.append('\n'.join(full_statement))
if opts.has_auto_field:
# Add any extra SQL needed to support auto-incrementing primary keys.
auto_column = opts.auto_field.db_column or opts.auto_field.name
autoinc_sql = self.connection.ops.autoinc_sql(opts.db_table, auto_column)
if autoinc_sql:
for stmt in autoinc_sql:
final_output.append(stmt)
return final_output, pending_references
def sql_for_inline_foreign_key_references(self, field, known_models, style):
"Return the SQL snippet defining the foreign key reference for a field"
qn = self.connection.ops.quote_name
if field.rel.to in known_models:
output = [style.SQL_KEYWORD('REFERENCES') + ' ' + \
style.SQL_TABLE(qn(field.rel.to._meta.db_table)) + ' (' + \
style.SQL_FIELD(qn(field.rel.to._meta.get_field(field.rel.field_name).column)) + ')' +
self.connection.ops.deferrable_sql()
]
pending = False
else:
# We haven't yet created the table to which this field
# is related, so save it for later.
output = []
pending = True
return output, pending
def sql_for_pending_references(self, model, style, pending_references):
"Returns any ALTER TABLE statements to add constraints after the fact."
from django.db.backends.util import truncate_name
if not model._meta.managed or model._meta.proxy:
return []
qn = self.connection.ops.quote_name
final_output = []
opts = model._meta
if model in pending_references:
for rel_class, f in pending_references[model]:
rel_opts = rel_class._meta
r_table = rel_opts.db_table
r_col = f.column
table = opts.db_table
col = opts.get_field(f.rel.field_name).column
# For MySQL, r_name must be unique in the first 64 characters.
# So we are careful with character usage here.
r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
(qn(r_table), qn(truncate_name(r_name, self.connection.ops.max_name_length())),
qn(r_col), qn(table), qn(col),
self.connection.ops.deferrable_sql()))
del pending_references[model]
return final_output
def sql_for_many_to_many(self, model, style):
"Return the CREATE TABLE statments for all the many-to-many tables defined on a model"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
DeprecationWarning
)
output = []
for f in model._meta.local_many_to_many:
if model._meta.managed or f.rel.to._meta.managed:
output.extend(self.sql_for_many_to_many_field(model, f, style))
return output
def sql_for_many_to_many_field(self, model, f, style):
"Return the CREATE TABLE statements for a single m2m field"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
DeprecationWarning
)
from django.db import models
from django.db.backends.util import truncate_name
output = []
if f.auto_created:
opts = model._meta
qn = self.connection.ops.quote_name
tablespace = f.db_tablespace or opts.db_tablespace
if tablespace:
sql = self.connection.ops.tablespace_sql(tablespace, inline=True)
if sql:
tablespace_sql = ' ' + sql
else:
tablespace_sql = ''
else:
tablespace_sql = ''
table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
style.SQL_TABLE(qn(f.m2m_db_table())) + ' (']
table_output.append(' %s %s %s%s,' %
(style.SQL_FIELD(qn('id')),
style.SQL_COLTYPE(models.AutoField(primary_key=True).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL PRIMARY KEY'),
tablespace_sql))
deferred = []
inline_output, deferred = self.sql_for_inline_many_to_many_references(model, f, style)
table_output.extend(inline_output)
table_output.append(' %s (%s, %s)%s' %
(style.SQL_KEYWORD('UNIQUE'),
style.SQL_FIELD(qn(f.m2m_column_name())),
style.SQL_FIELD(qn(f.m2m_reverse_name())),
tablespace_sql))
table_output.append(')')
if opts.db_tablespace:
# f.db_tablespace is only for indices, so ignore its value here.
table_output.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
table_output.append(';')
output.append('\n'.join(table_output))
for r_table, r_col, table, col in deferred:
r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' %
(qn(r_table),
qn(truncate_name(r_name, self.connection.ops.max_name_length())),
qn(r_col), qn(table), qn(col),
self.connection.ops.deferrable_sql()))
# Add any extra SQL needed to support auto-incrementing PKs
autoinc_sql = self.connection.ops.autoinc_sql(f.m2m_db_table(), 'id')
if autoinc_sql:
for stmt in autoinc_sql:
output.append(stmt)
return output
def sql_for_inline_many_to_many_references(self, model, field, style):
"Create the references to other tables required by a many-to-many table"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
DeprecationWarning
)
from django.db import models
opts = model._meta
qn = self.connection.ops.quote_name
table_output = [
' %s %s %s %s (%s)%s,' %
(style.SQL_FIELD(qn(field.m2m_column_name())),
style.SQL_COLTYPE(models.ForeignKey(model).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL REFERENCES'),
style.SQL_TABLE(qn(opts.db_table)),
style.SQL_FIELD(qn(opts.pk.column)),
self.connection.ops.deferrable_sql()),
' %s %s %s %s (%s)%s,' %
(style.SQL_FIELD(qn(field.m2m_reverse_name())),
style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL REFERENCES'),
style.SQL_TABLE(qn(field.rel.to._meta.db_table)),
style.SQL_FIELD(qn(field.rel.to._meta.pk.column)),
self.connection.ops.deferrable_sql())
]
deferred = []
return table_output, deferred
def sql_indexes_for_model(self, model, style):
"Returns the CREATE INDEX SQL statements for a single model"
if not model._meta.managed or model._meta.proxy:
return []
output = []
for f in model._meta.local_fields:
output.extend(self.sql_indexes_for_field(model, f, style))
return output
def sql_indexes_for_field(self, model, f, style):
"Return the CREATE INDEX SQL statements for a single model field"
from django.db.backends.util import truncate_name
if f.db_index and not f.unique:
qn = self.connection.ops.quote_name
tablespace = f.db_tablespace or model._meta.db_tablespace
if tablespace:
sql = self.connection.ops.tablespace_sql(tablespace)
if sql:
tablespace_sql = ' ' + sql
else:
tablespace_sql = ''
else:
tablespace_sql = ''
i_name = '%s_%s' % (model._meta.db_table, self._digest(f.column))
output = [style.SQL_KEYWORD('CREATE INDEX') + ' ' +
style.SQL_TABLE(qn(truncate_name(i_name, self.connection.ops.max_name_length()))) + ' ' +
style.SQL_KEYWORD('ON') + ' ' +
style.SQL_TABLE(qn(model._meta.db_table)) + ' ' +
"(%s)" % style.SQL_FIELD(qn(f.column)) +
"%s;" % tablespace_sql]
else:
output = []
return output
def sql_destroy_model(self, model, references_to_delete, style):
"Return the DROP TABLE and restraint dropping statements for a single model"
if not model._meta.managed or model._meta.proxy:
return []
# Drop the table now
qn = self.connection.ops.quote_name
output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
style.SQL_TABLE(qn(model._meta.db_table)))]
if model in references_to_delete:
output.extend(self.sql_remove_table_constraints(model, references_to_delete, style))
if model._meta.has_auto_field:
ds = self.connection.ops.drop_sequence_sql(model._meta.db_table)
if ds:
output.append(ds)
return output
def sql_remove_table_constraints(self, model, references_to_delete, style):
from django.db.backends.util import truncate_name
if not model._meta.managed or model._meta.proxy:
return []
output = []
qn = self.connection.ops.quote_name
for rel_class, f in references_to_delete[model]:
table = rel_class._meta.db_table
col = f.column
r_table = model._meta.db_table
r_col = model._meta.get_field(f.rel.field_name).column
r_name = '%s_refs_%s_%s' % (col, r_col, self._digest(table, r_table))
output.append('%s %s %s %s;' % \
(style.SQL_KEYWORD('ALTER TABLE'),
style.SQL_TABLE(qn(table)),
style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()),
style.SQL_FIELD(qn(truncate_name(r_name, self.connection.ops.max_name_length())))))
del references_to_delete[model]
return output
def sql_destroy_many_to_many(self, model, f, style):
"Returns the DROP TABLE statements for a single m2m field"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
DeprecationWarning
)
qn = self.connection.ops.quote_name
output = []
if f.auto_created:
output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
style.SQL_TABLE(qn(f.m2m_db_table()))))
ds = self.connection.ops.drop_sequence_sql("%s_%s" % (model._meta.db_table, f.column))
if ds:
output.append(ds)
return output
def create_test_db(self, verbosity=1, autoclobber=False):
"""
Creates a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
# Don't import django.core.management if it isn't needed.
from django.core.management import call_command
test_database_name = self._get_test_db_name()
if verbosity >= 1:
test_db_repr = ''
if verbosity >= 2:
test_db_repr = " ('%s')" % test_database_name
print "Creating test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)
self._create_test_db(verbosity, autoclobber)
self.connection.close()
self.connection.settings_dict["NAME"] = test_database_name
# Confirm the feature set of the test database
self.connection.features.confirm()
# Report syncdb messages at one level lower than that requested.
# This ensures we don't get flooded with messages during testing
# (unless you really ask to be flooded)
call_command('syncdb', verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias)
from django.core.cache import get_cache
from django.core.cache.backends.db import BaseDatabaseCache
for cache_alias in settings.CACHES:
cache = get_cache(cache_alias)
if isinstance(cache, BaseDatabaseCache):
from django.db import router
if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
call_command('createcachetable', cache._table, database=self.connection.alias)
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database.
cursor = self.connection.cursor()
return test_database_name
def _get_test_db_name(self):
"""
Internal implementation - returns the name of the test DB that will be
created. Only useful when called from create_test_db() and
_create_test_db() and when no external munging is done with the 'NAME'
or 'TEST_NAME' settings.
"""
if self.connection.settings_dict['TEST_NAME']:
return self.connection.settings_dict['TEST_NAME']
return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
def _create_test_db(self, verbosity, autoclobber):
"Internal implementation - creates the test db tables."
suffix = self.sql_table_creation_suffix()
test_database_name = self._get_test_db_name()
qn = self.connection.ops.quote_name
# Create the test database and connect to it. We need to autocommit
# if the database supports it because PostgreSQL doesn't allow
# CREATE/DROP DATABASE statements within transactions.
cursor = self.connection.cursor()
self.set_autocommit()
try:
cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("Type 'yes' if you would like to try deleting the test database '%s', or 'no' to cancel: " % test_database_name)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database '%s'..." % self.connection.alias
cursor.execute("DROP DATABASE %s" % qn(test_database_name))
cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
return test_database_name
def destroy_test_db(self, old_database_name, verbosity=1):
"""
Destroy a test database, restoring the connection settings to the original
database name when done.
"""
self.connection.close()
test_database_name = self.connection.settings_dict['NAME']
if verbosity >= 1:
test_db_repr = ''
if verbosity >= 2:
test_db_repr = " ('%s')" % test_database_name
print "Destroying test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)
self.connection.settings_dict['NAME'] = old_database_name
self._destroy_test_db(test_database_name, verbosity)
def _destroy_test_db(self, test_database_name, verbosity):
"Internal implementation - remove the test db tables."
# Remove the test database to clean up after
# ourselves. Connect to the previous database (not the test database)
# to do so, because it's not allowed to delete a database while being
# connected to it.
cursor = self.connection.cursor()
self.set_autocommit()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name))
self.connection.close()
def set_autocommit(self):
"Make sure a connection is in autocommit mode."
if hasattr(self.connection.connection, "autocommit"):
if callable(self.connection.connection.autocommit):
self.connection.connection.autocommit(True)
else:
self.connection.connection.autocommit = True
elif hasattr(self.connection.connection, "set_isolation_level"):
self.connection.connection.set_isolation_level(0)
def sql_table_creation_suffix(self):
"SQL to append to the end of the test table creation statements"
return ''
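# Illustrative usage sketch (not part of the original module): the typical
# create/run/destroy cycle a test runner performs with the creation API above.
# The 'connection' argument is assumed to be a DatabaseWrapper whose
# 'creation' attribute is a BaseDatabaseCreation subclass; 'run_tests' is any
# callable that runs the suite.
def _example_test_db_cycle(connection, run_tests):
    creation = connection.creation
    old_name = connection.settings_dict['NAME']
    # create_test_db() switches the connection over to the freshly created
    # test database and returns its name.
    test_name = creation.create_test_db(verbosity=1, autoclobber=False)
    try:
        run_tests()  # run the suite against the test database here
    finally:
        # destroy_test_db() drops the test database and restores the original
        # database name in the connection settings.
        creation.destroy_test_db(old_name, verbosity=1)
    return test_name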
|
|
import binascii
import io
import json
import os
import sys
import time
import struct
import zipfile
import config
from ..apk import *
from .. import appstore
from .. import firmware
from .. import installer
from ..io import *
from ..marketserver.server import *
from ..usb import *
from ..usb.driver import *
from ..usb.sensershell import *
from ..usb.sony import *
from ..usb.usbshell import *
from ..util import http
scriptRoot = getattr(sys, '_MEIPASS', os.path.dirname(__file__) + '/../..')
def printStatus(status):
"""Print progress"""
print('%s %d%%' % (status.message, status.percent))
def switchToAppInstaller(dev):
"""Switches a camera in MTP mode to app installation mode"""
print('Switching to app install mode')
SonyExtCmdCamera(dev).switchToAppInstaller()
appListCache = None
def listApps(enableCache=False):
global appListCache
remoteAppStore = RemoteAppStore(config.appengineServer)
appStoreRepo = appstore.GithubApi(config.githubAppListUser, config.githubAppListRepo)
if not appListCache or not enableCache:
print('Loading app list')
try:
apps = remoteAppStore.listApps()
except:
print('Cannot connect to remote server, falling back to appstore repository')
apps = appstore.AppStore(appStoreRepo).apps
print('Found %d apps' % len(apps))
appListCache = apps
return appListCache
def installApp(dev, apkFile=None, appPackage=None, outFile=None, local=False):
"""Installs an app on the specified device."""
certFile = scriptRoot + '/certs/localtest.me.pem'
with ServerContext(LocalMarketServer(certFile, config.officialServer)) as server:
apkData = None
if apkFile:
apkData = apkFile.read()
elif appPackage:
print('Downloading apk')
apps = listApps(True)
if appPackage not in apps:
raise Exception('Unknown app: %s' % appPackage)
apkData = apps[appPackage].release.asset
if apkData:
print('Analyzing apk')
print('')
checkApk(io.BytesIO(apkData))
print('')
server.setApk(apkData)
print('Starting task')
xpdData = server.getXpd()
print('Starting communication')
# Point the camera to the web api
result = installer.install(SonyAppInstallCamera(dev), server.host, server.port, xpdData, printStatus)
if result.code != 0:
raise Exception('Communication error %d: %s' % (result.code, result.message))
result = server.getResult()
if not local:
try:
RemoteAppStore(config.appengineServer).sendStats(result)
except:
pass
print('Task completed successfully')
if outFile:
print('Writing to output file')
json.dump(result, outFile, indent=2)
return result
def checkApk(apkFile):
try:
apk = ApkParser(apkFile)
props = [
('Package', apk.getPackageName()),
('Version', apk.getVersionName()),
]
apk.getVersionCode()
for k, v in props:
print('%-9s%s' % (k + ': ', v))
sdk = apk.getMinSdkVersion()
if sdk > 10:
print('Warning: This app might not be compatible with the device (minSdkVersion = %d)' % sdk)
try:
apk.getCert()
except:
print('Warning: Cannot read apk certificate')
except:
print('Warning: Invalid apk file')
class UsbDriverList:
def __init__(self, *contexts):
self._contexts = contexts
self._drivers = []
def __enter__(self):
self._drivers = [context.__enter__() for context in self._contexts]
return self
def __exit__(self, *ex):
for context in self._contexts:
context.__exit__(*ex)
self._drivers = []
def listDevices(self, vendor):
for driver in self._drivers:
for dev in driver.listDevices(vendor):
yield dev, driver.classType, driver.openDevice(dev)
def importDriver(driverName=None):
"""Imports the usb driver. Use in a with statement"""
MscContext = None
MtpContext = None
VendorSpecificContext = None
# Load native drivers
if driverName == 'native' or driverName is None:
if sys.platform == 'win32':
from ..usb.driver.windows.msc import MscContext
from ..usb.driver.windows.wpd import MtpContext
elif sys.platform == 'darwin':
from ..usb.driver.osx import MscContext
else:
print('No native drivers available')
elif driverName == 'qemu':
from ..usb.driver.generic.qemu import MscContext
from ..usb.driver.generic.qemu import MtpContext
elif driverName == 'libusb':
from ..usb.driver.generic.libusb import VendorSpecificContext
else:
raise Exception('Unknown driver')
# Fallback to libusb
if MscContext is None:
from ..usb.driver.generic.libusb import MscContext
if MtpContext is None:
from ..usb.driver.generic.libusb import MtpContext
drivers = [context() for context in [MscContext, MtpContext, VendorSpecificContext] if context]
print('Using drivers %s' % ', '.join(d.name for d in drivers))
return UsbDriverList(*drivers)
def listDevices(driverList, quiet=False):
"""List all Sony usb devices"""
if not quiet:
print('Looking for Sony devices')
for dev, type, drv in driverList.listDevices(SONY_ID_VENDOR):
if type == USB_CLASS_MSC:
if not quiet:
print('\nQuerying mass storage device')
# Get device info
info = MscDevice(drv).getDeviceInfo()
if isSonyMscCamera(info):
if isSonyMscUpdaterCamera(dev):
if not quiet:
print('%s %s is a camera in updater mode' % (info.manufacturer, info.model))
yield SonyMscUpdaterDevice(drv)
else:
if not quiet:
print('%s %s is a camera in mass storage mode' % (info.manufacturer, info.model))
yield SonyMscExtCmdDevice(drv)
elif type == USB_CLASS_PTP:
if not quiet:
print('\nQuerying MTP device')
# Get device info
info = MtpDevice(drv).getDeviceInfo()
if isSonyMtpCamera(info):
if not quiet:
print('%s %s is a camera in MTP mode' % (info.manufacturer, info.model))
yield SonyMtpExtCmdDevice(drv)
elif isSonyMtpAppInstallCamera(info):
if not quiet:
print('%s %s is a camera in app install mode' % (info.manufacturer, info.model))
yield SonyMtpAppInstallDevice(drv)
elif type == USB_CLASS_VENDOR_SPECIFIC:
if isSonySenserCamera(dev):
print('Found a camera in senser mode')
yield SonySenserDevice(drv)
if not quiet:
print('')
def getDevice(driver):
"""Check for exactly one Sony usb device"""
devices = list(listDevices(driver))
if not devices:
print('No devices found. Ensure your camera is connected.')
elif len(devices) != 1:
print('Too many devices found. Only one camera is supported')
else:
return devices[0]
def infoCommand(driverName=None):
"""Display information about the camera connected via usb"""
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if isinstance(device, SonyAppInstallDevice):
info = installApp(device)
print('')
props = [
('Model', info['deviceinfo']['name']),
('Product code', info['deviceinfo']['productcode']),
('Serial number', info['deviceinfo']['deviceid']),
('Firmware version', info['deviceinfo']['fwversion']),
]
elif isinstance(device, SonyExtCmdDevice):
dev = SonyExtCmdCamera(device)
info = dev.getCameraInfo()
updater = SonyUpdaterCamera(device)
updater.init()
firmwareOld, firmwareNew = updater.getFirmwareVersion()
props = [
('Model', info.modelName),
('Product code', info.modelCode),
('Serial number', info.serial),
('Firmware version', firmwareOld),
]
try:
lensInfo = dev.getLensInfo()
if lensInfo.model != 0:
props.append(('Lens', 'Model 0x%x (Firmware %s)' % (lensInfo.model, lensInfo.version)))
except (InvalidCommandException, UnknownMscException):
pass
try:
gpsInfo = dev.getGpsData()
props.append(('GPS Data', '%s - %s' % gpsInfo))
except (InvalidCommandException, UnknownMscException):
pass
else:
print('Error: Cannot use camera in this mode.')
return
for k, v in props:
print('%-20s%s' % (k + ': ', v))
def installCommand(driverName=None, apkFile=None, appPackage=None, outFile=None, local=False):
"""Install the given apk on the camera"""
with importDriver(driverName) as driver:
device = getDevice(driver)
if device and isinstance(device, SonyExtCmdDevice):
switchToAppInstaller(device)
device = None
print('Waiting for camera to switch...')
for i in range(10):
time.sleep(.5)
try:
devices = list(listDevices(driver, True))
if len(devices) == 1 and isinstance(devices[0], SonyAppInstallDevice):
device = devices[0]
break
except:
pass
else:
print('Operation timed out. Please run this command again when your camera has connected.')
if device and isinstance(device, SonyAppInstallDevice):
installApp(device, apkFile, appPackage, outFile, local)
elif device:
print('Error: Cannot use camera in this mode.')
def appSelectionCommand():
apps = list(listApps().values())
for i, app in enumerate(apps):
print(' [%2d] %s' % (i+1, app.package))
i = int(input('Enter number of app to install (0 to abort): '))
if i != 0:
pkg = apps[i - 1].package
print('')
print('Installing %s' % pkg)
return pkg
def getFdats():
fdatDir = scriptRoot + '/updatershell/fdat/'
for dir in os.listdir(fdatDir):
if os.path.isdir(fdatDir + dir):
payloadFile = fdatDir + dir + '.dat'
if os.path.isfile(payloadFile):
for model in os.listdir(fdatDir + dir):
hdrFile = fdatDir + dir + '/' + model
if os.path.isfile(hdrFile) and hdrFile.endswith('.hdr'):
yield model[:-4], (hdrFile, payloadFile)
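# On-disk layout expected by getFdats()/getFdat() (derived from the directory
# walk above; the group and model names below are placeholders):
#
#   updatershell/fdat/
#       gen3.dat            shared firmware payload for the 'gen3' group
#       gen3/
#           ILCE-6000.hdr   per-model header; yields the key 'ILCE-6000'
#
# getFdat() concatenates the matching .hdr file with the group's .dat payload.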
def getFdat(device):
fdats = dict(getFdats())
while device != '' and not device[-1:].isdigit() and device not in fdats:
device = device[:-1]
if device in fdats:
hdrFile, payloadFile = fdats[device]
with open(hdrFile, 'rb') as hdr, open(payloadFile, 'rb') as payload:
return hdr.read() + payload.read()
def firmwareUpdateCommand(file, driverName=None):
offset, size = firmware.readDat(file)
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
firmwareUpdateCommandInternal(driver, device, file, offset, size)
def updaterShellCommand(model=None, fdatFile=None, driverName=None, complete=None):
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if fdatFile:
fdat = fdatFile.read()
else:
if not model:
if not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot determine camera model in this mode.')
return
print('Getting device info')
model = SonyExtCmdCamera(device).getCameraInfo().modelName
print('Using firmware for model %s' % model)
print('')
fdat = getFdat(model)
if not fdat:
print('Unknown device: %s' % model)
return
if not complete:
def complete(device):
print('Starting updater shell...')
print('')
UpdaterShell(device).run()
firmwareUpdateCommandInternal(driver, device, io.BytesIO(fdat), 0, len(fdat), complete)
def firmwareUpdateCommandInternal(driver, device, file, offset, size, complete=None):
if not isinstance(device, SonyUpdaterDevice) and not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot use camera in this mode.')
return
dev = SonyUpdaterCamera(device)
print('Initializing firmware update')
dev.init()
file.seek(offset)
dev.checkGuard(file, size)
versions = dev.getFirmwareVersion()
if versions[1] != '9.99':
print('Updating from version %s to version %s' % versions)
if not isinstance(device, SonyUpdaterDevice):
print('Switching to updater mode')
dev.switchMode()
device = None
print('')
print('Waiting for camera to switch...')
print('Please follow the instructions on the camera screen.')
for i in range(60):
time.sleep(.5)
try:
devices = list(listDevices(driver, True))
if len(devices) == 1 and isinstance(devices[0], SonyUpdaterDevice):
device = devices[0]
break
except:
pass
else:
print('Operation timed out. Please run this command again when your camera has connected.')
if device:
firmwareUpdateCommandInternal(None, device, file, offset, size, complete)
else:
print('Writing firmware')
file.seek(offset)
dev.writeFirmware(ProgressFile(file, size), size, complete)
dev.complete()
print('Done')
def guessFirmwareCommand(file, driverName=None):
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot use camera in this mode.')
return
print('Getting device info')
model = SonyExtCmdCamera(device).getCameraInfo().modelName
print('Model name: %s' % model)
print('')
dev = SonyUpdaterCamera(device)
with zipfile.ZipFile(file) as zip:
infos = zip.infolist()
print('Trying %d firmware images' % len(infos))
for info in infos:
data = zip.read(info)
try:
dev.init()
dev.checkGuard(io.BytesIO(data), len(data))
break
except Exception as e:
if 'Invalid model' not in str(e):
print(e)
break
else:
print('Fail: No matching file found')
return
print('Success: Found matching file: %s' % info.filename)
def gpsUpdateCommand(file=None, driverName=None):
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot use camera in this mode.')
return
if not file:
print('Downloading GPS data')
file = io.BytesIO(http.get('https://control.d-imaging.sony.co.jp/GPS/assistme.dat').raw_data)
print('Writing GPS data')
SonyExtCmdCamera(device).writeGpsData(file)
print('Done')
def streamingCommand(write=None, file=None, driverName=None):
"""Read/Write Streaming information for the camera connected via usb"""
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot use camera in this mode.')
else:
dev = SonyExtCmdCamera(device)
if write:
incoming = json.load(write)
# assemble Social (first 9 items in file)
mydict = {}
for key in incoming[:9]:
if key[0] in ['twitterEnabled', 'facebookEnabled']:
mydict[key[0]] = key[1] # Integer
else:
mydict[key[0]] = key[1].encode('ascii')
data = SonyExtCmdCamera.LiveStreamingSNSInfo.pack(
twitterEnabled = mydict['twitterEnabled'],
twitterConsumerKey = mydict['twitterConsumerKey'].ljust(1025, b'\x00'),
twitterConsumerSecret = mydict['twitterConsumerSecret'].ljust(1025, b'\x00'),
twitterAccessToken1 = mydict['twitterAccessToken1'].ljust(1025, b'\x00'),
twitterAccessTokenSecret = mydict['twitterAccessTokenSecret'].ljust(1025, b'\x00'),
twitterMessage = mydict['twitterMessage'].ljust(401, b'\x00'),
facebookEnabled = mydict['facebookEnabled'],
facebookAccessToken = mydict['facebookAccessToken'].ljust(1025, b'\x00'),
facebookMessage = mydict['facebookMessage'].ljust(401, b'\x00'),
)
dev.setLiveStreamingSocialInfo(data)
# assemble Streaming, file may contain multiple sets (of 14 items)
data = b'\x01\x00\x00\x00'
data += struct.pack('<i', int((len(incoming)-9)/14))
mydict = {}
count = 1
for key in incoming[9:]:
if key[0] in ['service', 'enabled', 'videoFormat', 'unknown', \
'enableRecordMode', 'channels', 'supportedFormats']:
mydict[key[0]] = key[1]
elif key[0] == 'macIssueTime':
mydict[key[0]] = binascii.a2b_hex(key[1])
else:
mydict[key[0]] = key[1].encode('ascii')
if count == 14:
# reassemble Structs
data += SonyExtCmdCamera.LiveStreamingServiceInfo1.pack(
service = mydict['service'],
enabled = mydict['enabled'],
macId = mydict['macId'].ljust(41, b'\x00'),
macSecret = mydict['macSecret'].ljust(41, b'\x00'),
macIssueTime = mydict['macIssueTime'],
unknown = 0, # mydict['unknown'],
)
data += struct.pack('<i', len(mydict['channels']))
for j in range(len(mydict['channels'])):
data += struct.pack('<i', mydict['channels'][j])
data += SonyExtCmdCamera.LiveStreamingServiceInfo2.pack(
shortURL = mydict['shortURL'].ljust(101, b'\x00'),
videoFormat = mydict['videoFormat'],
)
data += struct.pack('<i', len(mydict['supportedFormats']))
for j in range(len(mydict['supportedFormats'])):
data += struct.pack('<i', mydict['supportedFormats'][j])
data += SonyExtCmdCamera.LiveStreamingServiceInfo3.pack(
enableRecordMode = mydict['enableRecordMode'],
videoTitle = mydict['videoTitle'].ljust(401, b'\x00'),
videoDescription = mydict['videoDescription'].ljust(401, b'\x00'),
videoTag = mydict['videoTag'].ljust(401, b'\x00'),
)
count = 1
else:
count += 1
dev.setLiveStreamingServiceInfo(data)
return
# Read settings from camera (do this first so we know channels/supportedFormats)
settings = dev.getLiveStreamingServiceInfo()
social = dev.getLiveStreamingSocialInfo()
data = []
# Social settings
for key in (social._asdict()).items():
if key[0] in ['twitterEnabled', 'facebookEnabled']:
data.append([key[0], key[1]])
else:
data.append([key[0], key[1].decode('ascii').split('\x00')[0]])
# Streaming settings, file may contain multiple sets of data
try:
for key in next(settings).items():
if key[0] in ['service', 'enabled', 'videoFormat', 'enableRecordMode', \
'unknown', 'channels', 'supportedFormats']:
data.append([key[0], key[1]])
elif key[0] == 'macIssueTime':
data.append([key[0], binascii.b2a_hex(key[1]).decode('ascii')])
else:
data.append([key[0], key[1].decode('ascii').split('\x00')[0]])
except StopIteration:
pass
if file:
file.write(json.dumps(data, indent=4))
else:
for k, v in data:
print('%-20s%s' % (k + ': ', v))
def wifiCommand(write=None, file=None, multi=False, driverName=None):
"""Read/Write WiFi information for the camera connected via usb"""
with importDriver(driverName) as driver:
device = getDevice(driver)
if device:
if not isinstance(device, SonyExtCmdDevice):
print('Error: Cannot use camera in this mode.')
else:
dev = SonyExtCmdCamera(device)
if write:
incoming = json.load(write)
data = struct.pack('<i', int(len(incoming)/3))
mydict = {}
count = 1
for key in incoming:
if key[0] == 'keyType':
mydict[key[0]] = key[1] # Integer
else:
mydict[key[0]] = key[1].encode('ascii')
if count == 3:
# reassemble Struct
apinfo = SonyExtCmdCamera.APInfo.pack(
keyType = mydict['keyType'],
sid = mydict['sid'].ljust(33, b'\x00'),
key = mydict['key'].ljust(65, b'\x00'),
)
data += apinfo
count = 1
else:
count += 1
if multi:
dev.setMultiWifiAPInfo(data)
else:
dev.setWifiAPInfo(data)
return
# Read settings from camera
if multi:
settings = dev.getMultiWifiAPInfo()
else:
settings = dev.getWifiAPInfo()
data = []
try:
for key in next(settings)._asdict().items():
if key[0] == 'keyType':
data.append([key[0], key[1]]) # Integer
else:
data.append([key[0],key[1].decode('ascii').split('\x00')[0]])
except StopIteration:
pass
if file:
file.write(json.dumps(data, indent=4))
else:
for k, v in data:
print('%-20s%s' % (k + ': ', v))
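# Illustrative sketch (not taken from any shipped file): the JSON layout that
# wifiCommand expects when writing settings, matching the parsing loop above --
# a flat list of [key, value] pairs in groups of three (keyType, sid, key) per
# access point. All values below are placeholders.
_EXAMPLE_WIFI_SETTINGS = [
    ['keyType', 2],              # integer, as read back from the camera
    ['sid', 'MyAccessPoint'],    # SSID, will be ASCII-encoded and padded
    ['key', 'secretpassword'],   # passphrase, will be ASCII-encoded and padded
]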
def senserShellCommand(driverName='libusb'):
with importDriver(driverName) as driver:
device = getDevice(driver)
if device and isinstance(device, SonyMscExtCmdDevice):
print('Switching to senser mode')
dev = SonySenserAuthDevice(device.driver)
dev.start()
dev.authenticate()
device = None
print('')
print('Waiting for camera to switch...')
for i in range(10):
time.sleep(.5)
try:
devices = list(listDevices(driver, True))
if len(devices) == 1 and isinstance(devices[0], SonySenserDevice):
device = devices[0]
break
except:
pass
else:
print('Operation timed out. Please run this command again when your camera has connected.')
if device and isinstance(device, SonySenserDevice):
print('Authenticating')
dev = SonySenserAuthDevice(device.driver)
dev.start()
dev.authenticate()
try:
SenserShell(SonySenserCamera(device)).run()
finally:
dev.stop()
print('Done')
elif device:
print('Error: Cannot use camera in this mode.')
|
|
# -*- coding: utf-8 -*-
"""
Part of the pyosf package
https://github.com/psychopy/pyosf/
Released under MIT license
@author: Jon Peirce
"""
from __future__ import absolute_import, print_function
from pyosf import remote, project, constants, tools
import time
import os
from os.path import join
import gc
import shutil
import copy
def do_sync(proj, print_all=False):
changes = proj.get_changes()
print(changes)
if print_all:
print_all_changes(changes)
changes.apply(threaded=True)
# threaded so wait until threads have finished
while changes.progress != -1:
time.sleep(0.1)
# even when the status reports finished we still need to do a final round-up
changes.finish_sync()
proj.save()
def print_all_changes(changes):
for change_type in changes._change_types:
this_dict = getattr(changes, change_type)
if len(this_dict):
print("{}:".format(change_type))
for path in this_dict:
print(" - {}".format(path))
class TestProjectChanges():
def setup(self):
# this is done individually for every test
self.proj = project.Project(project_file=self.proj_file)
def teardown(self):
if self.proj is not None:
self.proj.osf.rebuild_index()
print("Project state:")
for asset in self.proj.index:
print(" - {}".format(asset['path']))
self.proj = None
def teardown_class(self):
self.proj = None
# take a copy of the remote project for reference
if os.path.isdir('EndOfLastTest'):
shutil.rmtree('EndOfLastTest') # start with no project root
shutil.copytree(self.proj_root, 'EndOfLastTest')
# revert the local project to original state
if os.path.isdir(self.proj_root):
shutil.rmtree(self.proj_root) # start with no project root
shutil.copytree(self.files_orig, self.proj_root)
# perform a sync with remote to reset all the files there
proj = project.Project(project_file=self.proj_file)
do_sync(proj)
def setup_class(self):
self.proj_id = 'gbw2t'
self.this_dir, filename = os.path.split(__file__)
self.files_orig = join(self.this_dir, "files_orig")
self.tmp_folder = join(self.this_dir, "tmp")
self.proj_file = join(self.this_dir, "tmp", "test.proj")
self.proj_root = join(self.this_dir, "tmp", "files")
if os.path.isfile(self.proj_file):
os.remove(self.proj_file) # start with no project file
if os.path.isdir(self.proj_root):
shutil.rmtree(self.proj_root) # start with no project root
# start with what we know
shutil.copytree(self.files_orig, self.proj_root)
# first time around we need to supply username/password
session = remote.Session(username='[email protected]',
password='aTestPassword') # to get a token
self.osf_proj = session.open_project(self.proj_id)
# in future we just give the proj_file and the rest can be recreated
proj = project.Project(project_file=self.proj_file,
root_path=self.proj_root, osf=self.osf_proj)
# test the saving of the file
print("Getting initial state of project")
t0 = time.time()
changes = proj.get_changes()
t1 = time.time()
print("Indexing and finding diffs took {:.3f}s".format(t1-t0))
print(changes) # prints a prettified table
print_all_changes(changes)
t2 = time.time()
print("Applying changes")
changes.apply(threaded=False) # do_sync test will be threaded
t3 = time.time()
print("Applying changes took {:.3f}s".format(t3-t2))
proj.save()
# having saved it we can test that it reloads without user/password
print("Re-running get_changes(). Should be None")
proj = project.Project(project_file=self.proj_file)
t0 = time.time()
changes = proj.get_changes()
t1 = time.time()
print("\nRedoing - indexing and finding diffs took {:.3f}s"
.format(t1-t0))
print(changes) # prints a prettified table
print_all_changes(changes)
assert len(changes) == 0
def test_save_load_proj(self):
def namestr(obj, namespace): # return string of gc.referrers
return [name for name in namespace if namespace[name] is obj]
# check that nothing else has created a ref to changes (no circular)
changes = self.proj.get_changes()
assert len(gc.get_referrers(changes)) == 1 # just one ref (ours!)
def test_add_and_remove_local(self):
# add a folder and some files locally to propagate to remote
print("Creating new files locally")
orig = join(self.proj_root, 'visual')
new = join(self.proj_root, 'visual2')
if os.path.isdir(new):
shutil.rmtree(new)
shutil.copytree(orig, new)
# sync with the new files to test upload and folder creation
do_sync(self.proj)
self.proj.save()
print("Removing files locally")
# then remove the folder and do the sync again
shutil.rmtree(new)
do_sync(self.proj)
self.proj.save()
def test_add_and_remove_remote(self):
test_path = 'newFolder/someTextFile.txt'
# add a folder and file remotely to propagate to local
# take an arbitrary file from local give a new path and push to remote
asset = tools.find_by_key(self.proj.local.index, 'path', 'README.txt')
new_asset = copy.copy(asset)
# change 'path' for upload but 'full_path' points to orig
new_asset['path'] = test_path
self.proj.osf.add_file(new_asset)
self.proj = None # discard and recreate
# now create proj and do sync
self.proj = project.Project(project_file=self.proj_file)
do_sync(self.proj)
self.proj.save()
print("Removing a file and folder remotely")
# remove folder and file remotely and propagate to local
asset = tools.find_by_key(self.proj.osf.index, 'path', test_path)
self.proj.osf.del_file(asset)
container, name = os.path.split(test_path)
asset = tools.find_by_key(self.proj.osf.index, 'path', container)
self.proj.osf.del_file(asset)
do_sync(self.proj)
def test_conflict(self):
filename = 'visual/text_in_visual.txt'
# make changes to both and test sync
self._make_changes(self.proj, filename,
local_change=True, remote_change=True)
print("Doing conflicted sync")
do_sync(self.proj)
def test_local_updated(self):
filename = 'lowerLevel.txt'
# make changes to both and test sync
self._make_changes(self.proj, filename,
local_change=True, remote_change=False)
print("Sync with a local update")
do_sync(self.proj)
def test_remote_updated(self):
filename = 'README.txt'
# make changes to both and test sync
self._make_changes(self.proj, filename,
local_change=False, remote_change=True)
print("Sync with a remote update")
do_sync(self.proj)
def test_folder_in_folder(self):
folder_path = "folderLevel1/folderLevel2"
self.proj.osf.add_container(folder_path, kind='folder')
print("Test sync with a folder in folder")
do_sync(self.proj)
assert os.path.isdir(join(self.proj_root, folder_path))
def _make_changes(self, proj, filename,
local_change=True, remote_change=True):
"""Function to apply changes to local file, remote or both
"""
# create a conflict by changing a file in both locations
last_index = proj.index
# find a text file
asset = None
for thisAsset in last_index:
if thisAsset['path'].endswith(filename):
asset = thisAsset
break
# found an asset
if not asset:
full_path = join(proj.root_path, filename)
path = filename
elif 'full_path' in asset:
full_path = asset['full_path']
path = asset['path']
else:
raise ValueError("full_path key not found in asset: {}".format(asset))
if constants.PY3:
mode = 'at'
else:
mode = 'ab'
if remote_change:
# modify it with no change to local date_modified
# (create a copy, modify, upload and delete copy)
shutil.copy(full_path, 'tmp.txt')
osf_asset = copy.copy(proj.osf.find_asset(path))
osf_asset['full_path'] = 'tmp.txt'
with open('tmp.txt', mode) as f:
f.write("A bit of text added remotely. ")
proj.osf.add_file(osf_asset, update=True)
os.remove('tmp.txt') # delete the copy used for secret edit
if local_change:
# change again locally
with open(full_path, mode) as f:
f.write("A bit of text added locally. ")
if __name__ == "__main__":
try:
from psychopy import logging
console = logging.console
except ImportError:
import logging
console = logging.getLogger()
console.setLevel(logging.INFO)
import pytest
# pytest.main(args=[__file__+"::TestProjectChanges::some_test", '-s'])
pytest.main(args=[__file__, '-s'])
|
|
# -*- coding: utf-8 -*-
'''
Connection module for Amazon CloudWatch
.. versionadded:: 2014.7.0
:configuration: This module accepts explicit credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More Information available at:
.. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify credentials either in a pillar
or in the minion's config file:
.. code-block:: yaml
cloudwatch.keyid: GKTADJGHEIQSXMKKRBJ08H
cloudwatch.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
cloudwatch.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto
'''
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602
from __future__ import absolute_import
# Import Python libs
import logging
import json
import yaml
import salt.utils.odict as odict
log = logging.getLogger(__name__)
# Import third party libs
try:
import boto
import boto.ec2.cloudwatch
import boto.ec2.cloudwatch.listelement
import boto.ec2.cloudwatch.dimension
logging.getLogger('boto').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from salt.ext.six import string_types
def __virtual__():
'''
Only load if boto libraries exist.
'''
if not HAS_BOTO:
return (False, 'The boto_cloudwatch module cannot be loaded: boto libraries are unavailable.')
__utils__['boto.assign_funcs'](__name__, 'cloudwatch',
module='ec2.cloudwatch',
pack=__salt__)
return True
def get_alarm(name, region=None, key=None, keyid=None, profile=None):
'''
Get alarm details. Also can be used to check to see if an alarm exists.
CLI example::
salt myminion boto_cloudwatch.get_alarm myalarm region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
alarms = conn.describe_alarms(alarm_names=[name])
if len(alarms) == 0:
return None
if len(alarms) > 1:
log.error("multiple alarms matched name '{0}'".format(name))
return _metric_alarm_to_dict(alarms[0])
def _safe_dump(data):
###########################################
# this presenter magic makes yaml.safe_dump
# work with the objects returned from
# boto.describe_alarms()
###########################################
def ordered_dict_presenter(dumper, data):
return dumper.represent_dict(list(data.items()))
yaml.add_representer(odict.OrderedDict, ordered_dict_presenter,
Dumper=yaml.dumper.SafeDumper)
def boto_listelement_presenter(dumper, data):
return dumper.represent_list(list(data))
yaml.add_representer(boto.ec2.cloudwatch.listelement.ListElement,
boto_listelement_presenter,
Dumper=yaml.dumper.SafeDumper)
def dimension_presenter(dumper, data):
return dumper.represent_dict(dict(data))
yaml.add_representer(boto.ec2.cloudwatch.dimension.Dimension,
dimension_presenter, Dumper=yaml.dumper.SafeDumper)
return yaml.safe_dump(data)
def get_all_alarms(region=None, prefix=None, key=None, keyid=None,
profile=None):
'''
Get all alarm details. Produces results that can be used to create an sls
file.
If prefix parameter is given, alarm names in the output will be prepended
with the prefix; alarms that have the prefix will be skipped. This can be
used to convert existing alarms to be managed by salt, as follows:
1. Make a "backup" of all existing alarms
$ salt-call boto_cloudwatch.get_all_alarms --out=txt | sed "s/local: //" > legacy_alarms.sls
2. Get all alarms with new prefixed names
$ salt-call boto_cloudwatch.get_all_alarms "prefix=**MANAGED BY SALT** " --out=txt | sed "s/local: //" > managed_alarms.sls
3. Insert the managed alarms into cloudwatch
$ salt-call state.template managed_alarms.sls
4. Manually verify that the new alarms look right
5. Delete the original alarms
$ sed s/present/absent/ legacy_alarms.sls > remove_legacy_alarms.sls
$ salt-call state.template remove_legacy_alarms.sls
6. Get all alarms again, verify no changes
$ salt-call boto_cloudwatch.get_all_alarms --out=txt | sed "s/local: //" > final_alarms.sls
$ diff final_alarms.sls managed_alarms.sls
CLI example::
salt myminion boto_cloudwatch.get_all_alarms region=us-east-1 --out=txt
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
alarms = conn.describe_alarms()
results = odict.OrderedDict()
for alarm in alarms:
alarm = _metric_alarm_to_dict(alarm)
name = alarm["name"]
if prefix:
if name.startswith(prefix):
continue
name = prefix + alarm["name"]
del alarm["name"]
alarm_sls = []
alarm_sls.append({"name": name})
alarm_sls.append({"attributes": alarm})
results["manage alarm " + name] = {"boto_cloudwatch_alarm.present":
alarm_sls}
return _safe_dump(results)
def create_or_update_alarm(
connection=None, name=None, metric=None, namespace=None,
statistic=None, comparison=None, threshold=None, period=None,
evaluation_periods=None, unit=None, description='',
dimensions=None, alarm_actions=None,
insufficient_data_actions=None, ok_actions=None,
region=None, key=None, keyid=None, profile=None):
'''
Create or update a cloudwatch alarm.
Params are the same as:
http://boto.readthedocs.org/en/latest/ref/cloudwatch.html#boto.ec2.cloudwatch.alarm.MetricAlarm.
Dimensions must be a dict. If the value of Dimensions is a string, it will
be JSON-decoded to produce a dict. alarm_actions, insufficient_data_actions,
and ok_actions must be lists of strings. If the passed-in value is a string,
it will be split on "," to produce a list. The strings themselves for
alarm_actions, insufficient_data_actions, and ok_actions must be Amazon
Resource Names (ARNs); however, this method also supports an ARN lookup
notation, as follows:
arn:aws:.... ARN as per http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
scaling_policy:<as_name>:<scaling_policy_name> The named autoscale group scaling policy, for the named group (e.g. scaling_policy:my-asg:ScaleDown)
This is convenient for setting up autoscaling as follows. First specify a
boto_asg.present state for an ASG with scaling_policies, and then set up
boto_cloudwatch_alarm.present states which have alarm_actions that
reference the scaling_policy.
    CLI example:
        salt myminion boto_cloudwatch.create_or_update_alarm name=myalarm ... region=us-east-1
'''
# clean up argument types, so that CLI works
if threshold:
threshold = float(threshold)
if period:
period = int(period)
if evaluation_periods:
evaluation_periods = int(evaluation_periods)
if isinstance(dimensions, string_types):
dimensions = json.loads(dimensions)
if not isinstance(dimensions, dict):
log.error("could not parse dimensions argument: must be json encoding of a dict: '{0}'".format(dimensions))
return False
if isinstance(alarm_actions, string_types):
alarm_actions = alarm_actions.split(",")
if isinstance(insufficient_data_actions, string_types):
insufficient_data_actions = insufficient_data_actions.split(",")
if isinstance(ok_actions, string_types):
ok_actions = ok_actions.split(",")
# convert provided action names into ARN's
if alarm_actions:
alarm_actions = convert_to_arn(alarm_actions,
region=region,
key=key,
keyid=keyid,
profile=profile)
if insufficient_data_actions:
insufficient_data_actions = convert_to_arn(insufficient_data_actions,
region=region,
key=key,
keyid=keyid,
profile=profile)
if ok_actions:
ok_actions = convert_to_arn(ok_actions,
region=region,
key=key,
keyid=keyid,
profile=profile)
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
alarm = boto.ec2.cloudwatch.alarm.MetricAlarm(
connection=connection,
name=name,
metric=metric,
namespace=namespace,
statistic=statistic,
comparison=comparison,
threshold=threshold,
period=period,
evaluation_periods=evaluation_periods,
unit=unit,
description=description,
dimensions=dimensions,
alarm_actions=alarm_actions,
insufficient_data_actions=insufficient_data_actions,
ok_actions=ok_actions
)
conn.create_alarm(alarm)
log.info('Created/updated alarm {0}'.format(name))
return True
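# Hedged usage sketch (not part of the upstream module): calling the function
# above with the ARN lookup notation documented in its docstring. The group,
# policy, topic and alarm names are hypothetical.
#
#     __salt__['boto_cloudwatch.create_or_update_alarm'](
#         name='**MANAGED BY SALT** cpu-high',
#         metric='CPUUtilization', namespace='AWS/EC2',
#         statistic='Average', comparison='>=', threshold='90.0',
#         period='300', evaluation_periods='2',
#         alarm_actions='scaling_policy:my-asg:ScaleDown,'
#                       'arn:aws:sns:us-east-1:111111111111:alerts',
#         region='us-east-1')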
def convert_to_arn(arns, region=None, key=None, keyid=None, profile=None):
'''
    Convert a list of strings into actual ARNs. Converts convenience names
    such as 'scaling_policy:...'.
CLI Example::
salt '*' convert_to_arn 'scaling_policy:'
'''
results = []
for arn in arns:
if arn.startswith("scaling_policy:"):
_, as_group, scaling_policy_name = arn.split(":")
policy_arn = __salt__["boto_asg.get_scaling_policy_arn"](
as_group, scaling_policy_name, region, key, keyid, profile
)
if policy_arn:
results.append(policy_arn)
else:
log.error('Could not convert: {0}'.format(arn))
else:
results.append(arn)
return results
def delete_alarm(name, region=None, key=None, keyid=None, profile=None):
'''
Delete a cloudwatch alarm
    CLI example to delete an alarm::
salt myminion boto_cloudwatch.delete_alarm myalarm region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.delete_alarms([name])
log.info('Deleted alarm {0}'.format(name))
return True
def _metric_alarm_to_dict(alarm):
'''
Convert a boto.ec2.cloudwatch.alarm.MetricAlarm into a dict. Convenience
for pretty printing.
'''
d = odict.OrderedDict()
fields = ['name', 'metric', 'namespace', 'statistic', 'comparison',
'threshold', 'period', 'evaluation_periods', 'unit',
'description', 'dimensions', 'alarm_actions',
'insufficient_data_actions', 'ok_actions']
for f in fields:
if hasattr(alarm, f):
d[f] = getattr(alarm, f)
return d
|
|
# Copyright 2012 Nicira, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import os
from lxml import etree
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
import six
from nova import exception
from nova.network import linux_net
from nova.network import model as network_model
from nova import objects
from nova.pci import utils as pci_utils
from nova import test
from nova import utils
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt import host
from nova.virt.libvirt import vif
CONF = cfg.CONF
class LibvirtVifTestCase(test.NoDBTestCase):
gateway_bridge_4 = network_model.IP(address='101.168.1.1', type='gateway')
dns_bridge_4 = network_model.IP(address='8.8.8.8', type=None)
ips_bridge_4 = [network_model.IP(address='101.168.1.9', type=None)]
subnet_bridge_4 = network_model.Subnet(cidr='101.168.1.0/24',
dns=[dns_bridge_4],
gateway=gateway_bridge_4,
routes=None,
dhcp_server='191.168.1.1')
gateway_bridge_6 = network_model.IP(address='101:1db9::1', type='gateway')
subnet_bridge_6 = network_model.Subnet(cidr='101:1db9::/64',
dns=None,
gateway=gateway_bridge_6,
ips=None,
routes=None)
network_bridge = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99)
vif_bridge = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_BRIDGE,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_bridge_neutron = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99)
vif_bridge_neutron = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge_neutron,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
network_ovs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99)
network_ivs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99)
vif_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'ovs_hybrid_plug': True,
'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_filter_cap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid=None)
vif_ivs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid='aaa')
vif_ivs_filter_direct = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_filter_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={
'port_filter': True,
'ovs_hybrid_plug': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_none = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_8021 = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0',
vlan=99)
vif_8021qbh = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBH,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_PROFILEID:
'MyPortProfile'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb_macvtap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_MACVTAP,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_8021qbg = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBG,
ovs_interfaceid=None,
qbg_params=network_model.VIF8021QbgParams(
managerid="xxx-yyy-zzz",
typeid="aaa-bbb-ccc",
typeidversion="1",
instanceid="ddd-eee-fff"))
network_mlnx = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0')
network_midonet = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4],
interface='eth0')
network_vrouter = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0')
vif_vrouter = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_vrouter,
type=network_model.VIF_TYPE_VROUTER,
devname='tap-xxx-yyy-zzz')
vif_mlnx = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_mlnx,
type=network_model.VIF_TYPE_MLNX_DIRECT,
devname='tap-xxx-yyy-zzz')
vif_ib_hostdev = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_IB_HOSTDEV,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_mlnx_net = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_mlnx,
type=network_model.VIF_TYPE_MLNX_DIRECT,
details={'physical_network':
'fake_phy_network'},
devname='tap-xxx-yyy-zzz')
vif_midonet = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_midonet,
type=network_model.VIF_TYPE_MIDONET,
devname='tap-xxx-yyy-zzz')
vif_tap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
type=network_model.VIF_TYPE_TAP,
devname='tap-xxx-yyy-zzz')
vif_iovisor = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_IOVISOR,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
vif_vhostuser = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/vif-xxx-yyy-zzz'}
)
vif_vhostuser_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True},
ovs_interfaceid='aaa-bbb-ccc'
)
vif_vhostuser_no_path = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client'})
vif_macvtap_vlan = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_VLAN: '1',
network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0.1',
network_model.VIF_DETAILS_MACVTAP_MODE: 'vepa'})
vif_macvtap_flat = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_MODE: 'bridge'})
vif_macvtap_exception = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP)
instance = objects.Instance(id=1, uuid='instance-uuid')
bandwidth = {
'quota:vif_inbound_peak': '200',
'quota:vif_outbound_peak': '20',
'quota:vif_inbound_average': '100',
'quota:vif_outbound_average': '10',
'quota:vif_inbound_burst': '300',
'quota:vif_outbound_burst': '30'
}
def setUp(self):
super(LibvirtVifTestCase, self).setUp()
self.flags(allow_same_net_traffic=True)
self.executes = []
def fake_execute(*cmd, **kwargs):
self.executes.append(cmd)
return None, None
self.stubs.Set(utils, 'execute', fake_execute)
def _get_node(self, xml):
doc = etree.fromstring(xml)
ret = doc.findall('./devices/interface')
self.assertEqual(len(ret), 1)
return ret[0]
def _assertMacEquals(self, node, vif):
mac = node.find("mac").get("address")
self.assertEqual(mac, vif['address'])
def _assertTypeEquals(self, node, type, attr, source, br_want,
prefix=None):
self.assertEqual(node.get("type"), type)
br_name = node.find(attr).get(source)
if prefix is None:
self.assertEqual(br_name, br_want)
else:
self.assertTrue(br_name.startswith(prefix))
def _assertTypeAndMacEquals(self, node, type, attr, source, vif,
br_want=None, size=0, prefix=None):
ret = node.findall("filterref")
self.assertEqual(len(ret), size)
self._assertTypeEquals(node, type, attr, source, br_want,
prefix)
self._assertMacEquals(node, vif)
def _assertModel(self, xml, model_want=None, driver_want=None):
node = self._get_node(xml)
if model_want is None:
ret = node.findall("model")
self.assertEqual(len(ret), 0)
else:
model = node.find("model").get("type")
self.assertEqual(model, model_want)
if driver_want is None:
ret = node.findall("driver")
self.assertEqual(len(ret), 0)
else:
driver = node.find("driver").get("name")
self.assertEqual(driver, driver_want)
def _assertTypeAndPciEquals(self, node, type, vif):
self.assertEqual(node.get("type"), type)
self._assertPciEqual(node, vif, type="pci")
def _assertPciEqual(self, node, vif, type=None):
address = node.find("source").find("address")
if type:
addr_type = address.get("type")
self.assertEqual(type, addr_type)
pci_slot = "%(domain)s:%(bus)s:%(slot)s.%(func)s" % {
'domain': address.get("domain")[2:],
'bus': address.get("bus")[2:],
'slot': address.get("slot")[2:],
'func': address.get("function")[2:]}
pci_slot_want = vif['profile']['pci_slot']
self.assertEqual(pci_slot, pci_slot_want)
def _get_conf(self):
conf = vconfig.LibvirtConfigGuest()
conf.virt_type = "qemu"
conf.name = "fake-name"
conf.uuid = "fake-uuid"
conf.memory = 100 * 1024
conf.vcpus = 4
return conf
def _get_instance_xml(self, driver, vif, image_meta=None, flavor=None):
if flavor is None:
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=1,
root_gb=0,
ephemeral_gb=0,
swap=0,
extra_specs=dict(self.bandwidth),
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
conf = self._get_conf()
hostimpl = host.Host("qemu:///system")
nic = driver.get_config(self.instance, vif, image_meta,
flavor, CONF.libvirt.virt_type,
hostimpl)
conf.add_device(nic)
return conf.to_xml()
def test_virtio_multiqueue(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=4,
root_gb=0,
ephemeral_gb=0,
swap=0,
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
d = vif.LibvirtGenericVIFDriver()
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}})
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta, flavor)
node = self._get_node(xml)
driver = node.find("driver").get("name")
self.assertEqual(driver, 'vhost')
queues = node.find("driver").get("queues")
self.assertEqual(queues, '4')
def test_multiple_nics(self):
conf = self._get_conf()
# Tests multiple nic configuration and that target_dev is
# set for each
nics = [{'net_type': 'bridge',
'mac_addr': '00:00:00:00:00:0b',
'source_dev': 'b_source_dev',
'target_dev': 'b_target_dev'},
{'net_type': 'ethernet',
'mac_addr': '00:00:00:00:00:0e',
'source_dev': 'e_source_dev',
'target_dev': 'e_target_dev'},
{'net_type': 'direct',
'mac_addr': '00:00:00:00:00:0d',
'source_dev': 'd_source_dev',
'target_dev': 'd_target_dev'}]
for nic in nics:
nic_conf = vconfig.LibvirtConfigGuestInterface()
nic_conf.net_type = nic['net_type']
nic_conf.target_dev = nic['target_dev']
nic_conf.mac_addr = nic['mac_addr']
nic_conf.source_dev = nic['source_dev']
conf.add_device(nic_conf)
xml = conf.to_xml()
doc = etree.fromstring(xml)
for nic in nics:
path = "./devices/interface/[@type='%s']" % nic['net_type']
node = doc.find(path)
self.assertEqual(nic['net_type'], node.get("type"))
self.assertEqual(nic['mac_addr'],
node.find("mac").get("address"))
self.assertEqual(nic['target_dev'],
node.find("target").get("dev"))
def test_model_novirtio(self):
self.flags(use_virtio_for_bridges=False,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_model_kvm(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_model_kvm_qemu_custom(self):
for virt in ('kvm', 'qemu'):
self.flags(use_virtio_for_bridges=True,
virt_type=virt,
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
supported = (network_model.VIF_MODEL_NE2K_PCI,
network_model.VIF_MODEL_PCNET,
network_model.VIF_MODEL_RTL8139,
network_model.VIF_MODEL_E1000,
network_model.VIF_MODEL_SPAPR_VLAN)
for model in supported:
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': model}})
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta)
self._assertModel(xml, model)
def _test_model_qemu(self, *vif_objs, **kw):
libvirt_version = kw.get('libvirt_version')
self.flags(use_virtio_for_bridges=True,
virt_type='qemu',
group='libvirt')
for vif_obj in vif_objs:
d = vif.LibvirtGenericVIFDriver()
if libvirt_version is not None:
d.libvirt_version = libvirt_version
xml = self._get_instance_xml(d, vif_obj)
doc = etree.fromstring(xml)
bandwidth = doc.find('./devices/interface/bandwidth')
self.assertNotEqual(bandwidth, None)
inbound = bandwidth.find('inbound')
self.assertEqual(inbound.get("average"),
self.bandwidth['quota:vif_inbound_average'])
self.assertEqual(inbound.get("peak"),
self.bandwidth['quota:vif_inbound_peak'])
self.assertEqual(inbound.get("burst"),
self.bandwidth['quota:vif_inbound_burst'])
outbound = bandwidth.find('outbound')
self.assertEqual(outbound.get("average"),
self.bandwidth['quota:vif_outbound_average'])
self.assertEqual(outbound.get("peak"),
self.bandwidth['quota:vif_outbound_peak'])
self.assertEqual(outbound.get("burst"),
self.bandwidth['quota:vif_outbound_burst'])
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO, "qemu")
def test_model_qemu_no_firewall(self):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_8021qbg,
self.vif_iovisor,
self.vif_mlnx,
self.vif_ovs,
)
def test_model_qemu_iptables(self):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_ovs,
self.vif_ivs,
self.vif_8021qbg,
self.vif_iovisor,
self.vif_mlnx,
)
def test_model_xen(self):
self.flags(use_virtio_for_bridges=True,
virt_type='xen',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_generic_driver_none(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.NovaException,
self._get_instance_xml,
d,
self.vif_none)
def _check_bridge_driver(self, d, vif, br_want):
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_bridge, br_want, 1)
def test_generic_driver_bridge(self):
d = vif.LibvirtGenericVIFDriver()
self._check_bridge_driver(d,
self.vif_bridge,
self.vif_bridge['network']['bridge'])
def _check_ivs_ethernet_driver(self, d, vif, dev_prefix):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, prefix=dev_prefix)
script = node.find("script").get("path")
self.assertEqual(script, "")
def test_unplug_ivs_ethernet(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(linux_net, 'delete_ivs_vif_port') as delete:
delete.side_effect = processutils.ProcessExecutionError
d.unplug_ivs_ethernet(None, self.vif_ovs)
def _test_plug_ovs_hybrid(self, ipv6_exists):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy')],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1])],
'create_ovs_vif_port': [mock.call('br0',
'qvovif-xxx-yyy', 'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')]
}
# The disable_ipv6 call needs to be added in the middle, if required
if ipv6_exists:
calls['execute'].extend([
mock.call('tee', ('/proc/sys/net/ipv6/conf'
'/qbrvif-xxx-yyy/disable_ipv6'),
process_input='1', run_as_root=True,
check_exit_code=[0, 1])])
calls['execute'].extend([
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True)])
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ovs_vif_port'),
mock.patch.object(os.path, 'exists', return_value=ipv6_exists)
) as (device_exists, execute, _create_veth_pair, create_ovs_vif_port,
path_exists):
d = vif.LibvirtGenericVIFDriver()
d.plug_ovs_hybrid(self.instance, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
def test_plug_ovs_hybrid_ipv6(self):
self._test_plug_ovs_hybrid(ipv6_exists=True)
def test_plug_ovs_hybrid_no_ipv6(self):
self._test_plug_ovs_hybrid(ipv6_exists=False)
def test_unplug_ovs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy')],
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ovs_vif_port': [mock.call('br0', 'qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=True),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ovs_vif_port')
) as (device_exists, execute, delete_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ovs_hybrid(None, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
execute.assert_has_calls(calls['execute'])
delete_ovs_vif_port.assert_has_calls(calls['delete_ovs_vif_port'])
@mock.patch.object(utils, 'execute')
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address')
@mock.patch.object(pci_utils, 'get_vf_num_by_pci_address', return_value=1)
def _test_hw_veb_op(self, op, vlan, mock_get_vf_num, mock_get_ifname,
mock_execute):
mock_get_ifname.side_effect = ['eth1', 'eth13']
exit_code = [0, 2, 254]
port_state = 'up' if vlan > 0 else 'down'
calls = {
'get_ifname':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'],
pf_interface=True),
mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'get_vf_num':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'execute': [mock.call('ip', 'link', 'set', 'eth1',
'vf', 1, 'mac',
self.vif_hw_veb_macvtap['address'],
'vlan', vlan,
run_as_root=True,
check_exit_code=exit_code),
mock.call('ip', 'link', 'set',
'eth13', port_state,
run_as_root=True,
check_exit_code=exit_code)]
}
op(None, self.vif_hw_veb_macvtap)
mock_get_ifname.assert_has_calls(calls['get_ifname'])
mock_get_vf_num.assert_has_calls(calls['get_vf_num'])
mock_execute.assert_has_calls(calls['execute'])
def test_plug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(
d.plug_hw_veb,
self.vif_hw_veb_macvtap['details'][network_model.VIF_DETAILS_VLAN])
def test_unplug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(d.unplug_hw_veb, 0)
def test_unplug_ovs_hybrid_bridge_does_not_exist(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy')],
'delete_ovs_vif_port': [mock.call('br0', 'qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(linux_net, 'delete_ovs_vif_port')
) as (device_exists, delete_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ovs_hybrid(None, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
delete_ovs_vif_port.assert_has_calls(calls['delete_ovs_vif_port'])
def test_plug_ivs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy')],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('tee', ('/proc/sys/net/ipv6/conf'
'/qbrvif-xxx-yyy/disable_ipv6'),
process_input='1', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True)],
'create_ivs_vif_port': [mock.call('qvovif-xxx-yyy', 'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ivs_vif_port'),
mock.patch.object(os.path, 'exists', return_value=True)
) as (device_exists, execute, _create_veth_pair, create_ivs_vif_port,
path_exists):
d = vif.LibvirtGenericVIFDriver()
d.plug_ivs_hybrid(self.instance, self.vif_ivs)
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ivs_vif_port.assert_has_calls(calls['create_ivs_vif_port'])
def test_unplug_ivs_hybrid(self):
calls = {
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ivs_vif_port': [mock.call('qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ivs_vif_port')
) as (execute, delete_ivs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ivs_hybrid(None, self.vif_ivs)
execute.assert_has_calls(calls['execute'])
delete_ivs_vif_port.assert_has_calls(calls['delete_ivs_vif_port'])
def test_unplug_ivs_hybrid_bridge_does_not_exist(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug_ivs_hybrid(None, self.vif_ivs)
def test_unplug_iovisor(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
mynetwork = network_model.Network(id='network-id-xxx-yyy-zzz',
label='mylabel')
myvif = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=mynetwork)
d.unplug_iovisor(None, myvif)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_iovisor(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
instance = objects.Instance(id=1,
uuid='instance-uuid',
project_id='myproject')
d.plug_iovisor(instance, self.vif_ivs)
def test_unplug_mlnx_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug_mlnx_direct(None, self.vif_mlnx_net)
execute.assert_called_once_with('ebrctl', 'del-port',
'fake_phy_network',
'ca:fe:de:ad:be:ef',
run_as_root=True)
def test_plug_mlnx_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.plug_mlnx_direct(self.instance, self.vif_mlnx_net)
execute.assert_called_once_with('ebrctl', 'add-port',
'ca:fe:de:ad:be:ef',
'instance-uuid',
'fake_phy_network',
'mlnx_direct',
'eth-xxx-yyy-zzz',
run_as_root=True)
def test_plug_mlnx_no_physical_network(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
self.assertRaises(exception.NovaException,
d.plug_mlnx_direct,
self.instance,
self.vif_mlnx)
self.assertEqual(0, execute.call_count)
def test_unplug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.unplug_vrouter(None, self.vif_vrouter)
execute.assert_called_once_with(
'vrouter-port-control',
'--oper=delete --uuid=vif-xxx-yyy-zzz',
run_as_root=True)
def test_plug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
instance = mock.Mock()
instance.name = 'instance-name'
instance.uuid = '46a4308b-e75a-4f90-a34a-650c86ca18b2'
instance.project_id = 'b168ea26fa0c49c1a84e1566d9565fa5'
instance.display_name = 'instance1'
with mock.patch.object(utils, 'execute') as execute:
d.plug_vrouter(instance, self.vif_vrouter)
execute.assert_has_calls([
mock.call('ip', 'tuntap', 'add', 'tap-xxx-yyy-zzz', 'mode',
'tap', run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('ip', 'link', 'set', 'tap-xxx-yyy-zzz', 'up',
run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('vrouter-port-control',
'--oper=add --uuid=vif-xxx-yyy-zzz '
'--instance_uuid=46a4308b-e75a-4f90-a34a-650c86ca18b2 '
'--vn_uuid=network-id-xxx-yyy-zzz '
'--vm_project_uuid=b168ea26fa0c49c1a84e1566d9565fa5 '
'--ip_address=0.0.0.0 '
'--ipv6_address=None '
'--vm_name=instance1 '
'--mac=ca:fe:de:ad:be:ef '
'--tap_name=tap-xxx-yyy-zzz '
'--port_type=NovaVMPort '
'--tx_vlan_id=-1 '
'--rx_vlan_id=-1', run_as_root=True)])
def test_ivs_ethernet_driver(self):
d = vif.LibvirtGenericVIFDriver()
self._check_ivs_ethernet_driver(d,
self.vif_ivs,
"tap")
def _check_ivs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
vif, vif['devname'])
def _check_ovs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, "br0")
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "openvswitch")
iface_id_found = False
for p_elem in vp.findall("parameters"):
iface_id = p_elem.get("interfaceid", None)
if iface_id:
self.assertEqual(iface_id, want_iface_id)
iface_id_found = True
self.assertTrue(iface_id_found)
def test_generic_ovs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ovs['ovs_interfaceid']
self._check_ovs_virtualport_driver(d,
self.vif_ovs,
want_iface_id)
def test_generic_ivs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ivs['ovs_interfaceid']
self._check_ivs_virtualport_driver(d,
self.vif_ivs,
want_iface_id)
def test_ivs_plug_with_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs, br_want, 1)
def test_ivs_plug_with_port_filter_direct_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs_filter_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs_filter_hybrid, br_want, 0)
def test_ivs_plug_with_port_filter_hybrid_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_ivs_filter_direct['devname']
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_direct)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs_filter_direct, br_want, 0)
def test_hybrid_plug_without_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ovs_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ovs_hybrid, br_want, 0)
def test_direct_plug_with_port_filter_cap_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_ovs_filter_cap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "target", "dev",
self.vif_ovs_filter_cap, br_want)
def _check_neutron_hybrid_driver(self, d, vif, br_want):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, br_want, 1)
def test_generic_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ovs,
br_want)
def test_ivs_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ivs,
br_want)
def test_mlnx_direct_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d,
self.vif_mlnx)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth-xxx-yyy-zzz")
self._assertTypeEquals(node, "direct", "source",
"mode", "passthrough")
self._assertMacEquals(node, self.vif_mlnx)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_ib_hostdev_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_ib_hostdev)
doc = etree.fromstring(xml)
node = doc.findall('./devices/hostdev')[0]
self.assertEqual(1, len(node))
self._assertPciEqual(node, self.vif_ib_hostdev)
def test_midonet_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_midonet)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_midonet, br_want)
def test_tap_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_tap['devname']
xml = self._get_instance_xml(d, self.vif_tap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_tap, br_want)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_tap(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
d.plug_tap(None, self.vif_tap)
def test_unplug_tap(self):
d = vif.LibvirtGenericVIFDriver()
d.unplug_tap(None, self.vif_tap)
def test_generic_8021qbh_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbh)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_8021qbh)
self._assertMacEquals(node, self.vif_8021qbh)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbh")
profile_id_found = False
for p_elem in vp.findall("parameters"):
details = self.vif_8021qbh["details"]
profile_id = p_elem.get("profileid", None)
if profile_id:
self.assertEqual(profile_id,
details[network_model.VIF_DETAILS_PROFILEID])
profile_id_found = True
self.assertTrue(profile_id_found)
def test_hw_veb_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_hw_veb)
self._assertMacEquals(node, self.vif_hw_veb)
vlan = node.find("vlan").find("tag").get("id")
vlan_want = self.vif_hw_veb["details"]["vlan"]
self.assertEqual(vlan, vlan_want)
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address',
return_value='eth1')
def test_hw_veb_driver_macvtap(self, mock_get_ifname):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb_macvtap)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth1")
self._assertTypeEquals(node, "direct", "source",
"mode", "passthrough")
self._assertMacEquals(node, self.vif_hw_veb_macvtap)
vlan = node.find("vlan")
self.assertIsNone(vlan)
def test_driver_macvtap_vlan(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_vlan)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0.1")
self._assertTypeEquals(node, "direct", "source",
"mode", "vepa")
self._assertMacEquals(node, self.vif_macvtap_vlan)
def test_driver_macvtap_flat(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_flat)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0")
self._assertTypeEquals(node, "direct", "source",
"mode", "bridge")
self._assertMacEquals(node, self.vif_macvtap_flat)
def test_driver_macvtap_exception(self):
d = vif.LibvirtGenericVIFDriver()
e = self.assertRaises(exception.VifDetailsMissingMacvtapParameters,
self._get_instance_xml,
d,
self.vif_macvtap_exception)
self.assertIn('macvtap_source', six.text_type(e))
self.assertIn('macvtap_mode', six.text_type(e))
self.assertIn('physical_interface', six.text_type(e))
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_vlan(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug_macvtap(self.instance, self.vif_macvtap_vlan)
ensure_vlan_mock.assert_called_once_with('1', 'eth0',
interface='eth0.1')
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_flat(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug_macvtap(self.instance, self.vif_macvtap_flat)
self.assertFalse(ensure_vlan_mock.called)
def test_generic_iovisor_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_ivs['devname']
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, br_want)
def test_generic_8021qbg_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbg)
node = self._get_node(xml)
self._assertTypeEquals(node, "direct", "source", "dev", "eth0")
self._assertMacEquals(node, self.vif_8021qbg)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbg")
manager_id_found = False
type_id_found = False
typeversion_id_found = False
instance_id_found = False
for p_elem in vp.findall("parameters"):
wantparams = self.vif_8021qbg['qbg_params']
manager_id = p_elem.get("managerid", None)
type_id = p_elem.get("typeid", None)
typeversion_id = p_elem.get("typeidversion", None)
instance_id = p_elem.get("instanceid", None)
if manager_id:
self.assertEqual(manager_id,
wantparams['managerid'])
manager_id_found = True
if type_id:
self.assertEqual(type_id,
wantparams['typeid'])
type_id_found = True
if typeversion_id:
self.assertEqual(typeversion_id,
wantparams['typeidversion'])
typeversion_id_found = True
if instance_id:
self.assertEqual(instance_id,
wantparams['instanceid'])
instance_id_found = True
self.assertTrue(manager_id_found)
self.assertTrue(type_id_found)
self.assertTrue(typeversion_id_found)
self.assertTrue(instance_id_found)
def test_vhostuser_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_vhostuser)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/vif-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_vhostuser_no_queues(self):
d = vif.LibvirtGenericVIFDriver()
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}})
xml = self._get_instance_xml(d, self.vif_vhostuser, image_meta)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertMacEquals(node, self.vif_vhostuser)
driver = node.find("driver")
        self.assertIsNone(driver)
def test_vhostuser_driver_no_path(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.VifDetailsMissingVhostuserSockPath,
self._get_instance_xml,
d,
self.vif_vhostuser_no_path)
def test_vhostuser_driver_ovs(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d,
self.vif_vhostuser_ovs)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/usv-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser_ovs)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_vhostuser_ovs_plug(self):
calls = {
'create_ovs_vif_port': [mock.call('br0',
'usv-xxx-yyy-zzz',
'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')],
'ovs_set_vhostuser_port_type': [mock.call('usv-xxx-yyy-zzz')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'create_ovs_vif_port'),
mock.patch.object(linux_net, 'ovs_set_vhostuser_port_type')
) as (create_ovs_vif_port, ovs_set_vhostuser_port_type):
d = vif.LibvirtGenericVIFDriver()
d.plug_vhostuser(self.instance, self.vif_vhostuser_ovs)
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
ovs_set_vhostuser_port_type.assert_has_calls(
calls['ovs_set_vhostuser_port_type'])
def test_vhostuser_ovs_unplug(self):
calls = {
'delete_ovs_vif_port': [mock.call('br0', 'usv-xxx-yyy-zzz')]
}
with mock.patch.object(linux_net,
'delete_ovs_vif_port') as delete_port:
d = vif.LibvirtGenericVIFDriver()
d.unplug_vhostuser(None, self.vif_vhostuser_ovs)
delete_port.assert_has_calls(calls['delete_ovs_vif_port'])
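# Hedged illustration, not part of the upstream test suite: the assertion
# helpers in LibvirtVifTestCase walk libvirt domain XML shaped roughly like
# the hand-written sample below. Element and attribute values here are
# hypothetical; the real XML comes from LibvirtGenericVIFDriver.get_config().
def _example_interface_xml_walkthrough():
    sample = ("<domain><devices>"
              "<interface type='bridge'>"
              "<mac address='ca:fe:de:ad:be:ef'/>"
              "<source bridge='br0'/>"
              "<target dev='tap-xxx-yyy-zzz'/>"
              "<model type='virtio'/>"
              "</interface>"
              "</devices></domain>")
    doc = etree.fromstring(sample)
    iface = doc.findall('./devices/interface')[0]
    assert iface.get('type') == 'bridge'
    assert iface.find('mac').get('address') == 'ca:fe:de:ad:be:ef'
    assert iface.find('source').get('bridge') == 'br0'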
|
|
import argparse
from os.path import join
import re
from subprocess import CalledProcessError, check_output, STDOUT
import sys
from packaging.version import Version as V
try:
import colorama
def bright(text): return "%s%s%s" % (colorama.Style.BRIGHT, text, colorama.Style.RESET_ALL)
def dim(text): return "%s%s%s" % (colorama.Style.DIM, text, colorama.Style.RESET_ALL)
def white(text): return "%s%s%s" % (colorama.Fore.WHITE, text, colorama.Style.RESET_ALL)
def blue(text): return "%s%s%s" % (colorama.Fore.BLUE, text, colorama.Style.RESET_ALL)
def red(text): return "%s%s%s" % (colorama.Fore.RED, text, colorama.Style.RESET_ALL)
def green(text): return "%s%s%s" % (colorama.Fore.GREEN, text, colorama.Style.RESET_ALL)
def yellow(text): return "%s%s%s" % (colorama.Fore.YELLOW, text, colorama.Style.RESET_ALL)
    if sys.platform == "win32":
        colorama.init()
except ImportError:
def bright(text): return text
def dim(text): return text
def white(text): return text
def blue(text): return text
def red(text): return text
def green(text): return text
def yellow(text): return text
class config(object):
ANY_VERSION = re.compile(r"^(\d+\.\d+\.\d+)((?:dev|rc)\d+)?$")
    FULL_VERSION = re.compile(r"^(\d+\.\d+\.\d+)$")
def __init__(self):
self._new_version = None
self._last_any_version = None
self._last_full_version = None
self._problems = []
@property
def new_version(self): return self._new_version
@new_version.setter
def new_version(self, v):
m = self.ANY_VERSION.match(v)
if not m: raise ValueError("Invalid Bokeh version %r" % v)
self._new_version = v
@property
def last_any_version(self): return self._last_any_version
@last_any_version.setter
def last_any_version(self, v):
m = self.ANY_VERSION.match(v)
if not m: raise ValueError("Invalid Bokeh version %r" % v)
self._last_any_version = v
@property
def last_full_version(self): return self._last_full_version
@last_full_version.setter
def last_full_version(self, v):
m = self.FULL_VERSION.match(v)
if not m: raise ValueError("Invalid Bokeh version %r" % v)
self._last_full_version = v
@property
def version_type(self):
if "rc" in self._new_version: return "RELEASE CANDIDATE"
elif "dev" in self._new_version: return "DEV BUILD"
else: return "FULL RELEASE"
@property
def release_branch(self):
return "release_%s" % self.new_version
@property
def problems(self):
return self._problems
@property
def top_dir(self):
return run("git rev-parse --show-toplevel")
CONFIG = config()
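# Hedged usage sketch (not part of the deploy flow and never called by it):
# shows how the config class above validates and classifies version strings.
def _demo_version_classification():
    cfg = config()
    cfg.new_version = "0.12.4rc1"        # accepted by ANY_VERSION
    assert cfg.version_type == "RELEASE CANDIDATE"
    cfg.new_version = "0.12.4dev2"
    assert cfg.version_type == "DEV BUILD"
    cfg.new_version = "0.12.4"
    assert cfg.version_type == "FULL RELEASE"
    try:
        cfg.new_version = "v0.12.4"      # rejected: does not match ANY_VERSION
    except ValueError:
        pass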
#--------------------------------------
#
# Utility functions
#
#--------------------------------------
def run(cmd):
if isinstance(cmd, str):
cmd = cmd.split()
return check_output(cmd, stderr=STDOUT).decode('utf-8').strip()
#--------------------------------------
#
# UI functions
#
#--------------------------------------
def banner(color, msg):
print()
print(color('='*80))
print(color("{:^80}".format(msg)))
print(color('='*80 + "\n"))
def passed(msg):
print(dim(green("[PASS] ")) + msg)
def failed(msg, details=None):
print((red("[FAIL] ")) + msg)
if details:
print()
for line in details:
print(" " + dim(red(line)))
print()
CONFIG.problems.append(msg)
def abort(checkout_master=True):
print()
print(bright(red("!!! The deploy has been aborted.")))
print()
print(bright(red("!!! NO REMOTE ACTIONS have been taken --- local checkout may be dirty")))
print()
run("git checkout master")
sys.exit(1)
def confirm(msg):
resp = "foo"
while resp not in "yn" or resp=='':
resp = input(bright(yellow(msg)) + bright(" (y/n): "))
if resp == "n":
run("git checkout master")
abort()
#--------------------------------------
#
# Check functions
#
#--------------------------------------
def check_py3():
if sys.version_info.major == 3:
passed("Running Python 3.x")
else:
failed("This script requires Python 3.x")
def check_git():
try:
run("which git")
passed("Command 'git' is available")
except CalledProcessError:
failed("Command 'git' is missing")
abort(checkout_master=False)
def check_maintainers():
try:
email = run("git config --get user.email")
except CalledProcessError:
failed("Could not determine Git config user.email")
abort()
filename = join(CONFIG.top_dir, "MAINTAINERS")
if any(email == line.strip() for line in open(filename)):
passed("Git config user.email %r found in MAINTAINERS file" % email)
else:
failed("User config user.email %r NOT found in MAINTAINERS file" % email)
print()
print(bright(yellow(" This probably means you should not try to run this script")))
abort()
def check_repo():
try:
run("git status")
except CalledProcessError:
failed("Executing outside of a git repository")
abort()
try:
remote = run("git config --get remote.origin.url")
if "bokeh/bokeh" in remote:
passed("Executing inside the the bokeh/bokeh repository")
else:
failed("Executing OUTSIDE the bokeh/bokeh repository")
abort()
except CalledProcessError:
failed("Could not determine Git config remote.origin.url")
abort()
def check_checkout():
try:
branch = run("git rev-parse --abbrev-ref HEAD")
if branch == "master":
passed("Working on master branch")
else:
failed("NOT working on master branch %r" % branch)
abort()
extras = run("git status --porcelain").split("\n")
extras = [x for x in extras if x != '']
if extras:
failed("Local checkout is NOT clean", extras)
else:
passed("Local checkout is clean")
try:
run("git remote update")
local = run("git rev-parse @")
remote = run("git rev-parse @{u}")
base = run("git merge-base @ @{u}")
if local == remote:
passed("Checkout is up to date with GitHub")
else:
if local == base: status = "NEED TO PULL"
elif remote == base: status = "NEED TO PUSH"
else: status = "DIVERGED"
failed("Checkout is NOT up to date with GitHub (%s)" % status)
except CalledProcessError:
failed("Could not check whether local and GitHub are up to date")
abort()
except CalledProcessError:
failed("Could not check the checkout state")
abort()
def check_last_versions():
try:
out = run("git for-each-ref --sort=-taggerdate --format '%(tag)' refs/tags")
tags = [x.strip("'\"") for x in out.split("\n")]
try:
CONFIG.last_any_version = tags[0]
passed("Detected valid last dev/rc/full version %r" % CONFIG.last_any_version)
except ValueError:
failed("Last dev/rc/full version %r is not a valid Bokeh version!" % CONFIG.last_any_version)
abort()
try:
CONFIG.last_full_version = [tag for tag in tags if ('rc' not in tag and 'dev' not in tag)][0]
passed("Detected valid last full release version %r" % CONFIG.last_full_version)
except ValueError:
failed("Last full release version %r is not a valid Bokeh version!" % CONFIG.last_full_version)
abort()
except CalledProcessError:
failed("Could not detect last version tags")
abort()
def check_version_order():
if V(CONFIG.new_version) > V(CONFIG.last_any_version):
passed("New version %r is newer than last version %r" % (CONFIG.new_version, CONFIG.last_any_version))
else:
failed("New version %r is NOT newer than last version %r" % (CONFIG.new_version, CONFIG.last_any_version))
def check_release_branch():
out = run("git branch --list %s" % CONFIG.release_branch)
if out:
failed("Release branch %r ALREADY exists" % CONFIG.release_branch)
else:
passed("Release branch %r does not already exist" % CONFIG.release_branch)
def check_issues():
try:
out = run("python issues.py -c -p %s" % CONFIG.last_full_version)
passed("Issue labels are BEP-1 compliant")
except CalledProcessError as e:
out = e.output.decode('utf-8')
if "HTTP Error 403: Forbidden" in out:
failed("Issues cannot be checked right now due to GitHub rate limiting")
else:
failed("Issue labels are NOT BEP-1 compliant", out.split("\n"))
#--------------------------------------
#
# Update functions
#
#--------------------------------------
def commit(filename, version):
path = join(CONFIG.top_dir, filename)
try:
run("git add %s" % path)
except CalledProcessError as e:
failed("Could not git add %r" % filename, str(e).split("/n"))
return
try:
run(["git", "commit", "-m", "'Updating for version %s'" % version])
except CalledProcessError as e:
failed("Could not git commit %r" % filename, str(e).split("/n"))
return
passed("Committed file %r" % filename)
def update_bokehjs_versions():
filenames = [
'bokehjs/src/coffee/version.coffee',
'bokehjs/package.json',
]
pat = r"(release|version)([\" ][:=] [\"\'])" + CONFIG.last_any_version + "([\"\'])"
for filename in filenames:
path = join(CONFIG.top_dir, filename)
with open(path) as f:
text = f.read()
match = re.search(pat, text)
if not match:
failed("Unable to find version string for %r in file %r" % (CONFIG.last_any_version, filename))
continue
text = re.sub(pat, r'\g<1>\g<2>%s\g<3>' % CONFIG.new_version, text)
try:
with open(path, 'w') as f:
f.write(text)
except Exception as e:
failed("Unable to write new version to file %r" % filename, str(e).split("\n"))
else:
passed("Updated version from %r to %r in file %r" % (CONFIG.last_any_version, CONFIG.new_version, filename))
commit(filename, CONFIG.new_version)
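# Illustration (hypothetical file contents): the ``pat`` regex above is meant
# to match both of the following version declarations and rewrite only the
# version number between the quotes:
#
#     bokehjs/package.json:              "version": "0.12.3"
#     bokehjs/src/coffee/version.coffee: version = '0.12.3'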
def update_docs_versions():
# Update all_versions.txt
filename = 'sphinx/source/all_versions.txt'
path = join(CONFIG.top_dir, filename)
try:
with open(path, 'a') as f:
f.write("{version}\n".format(version=CONFIG.new_version))
except Exception as e:
failed("Could not write new version to file %r" % filename, str(e).split("\n"))
else:
passed("Appended version %r to %r" % (CONFIG.new_version, filename))
        commit(filename, CONFIG.new_version)
# Update Sphinx toctree
filename = 'sphinx/source/index.rst'
path = join(CONFIG.top_dir, filename)
new_line = " docs/releases/{version}\n".format(version=CONFIG.new_version)
with open(path, "r") as f:
lines = f.readlines()
if any(line==new_line for line in lines):
print(blue("[SKIP] ") + "Sphinx toctree already has entry for version %r" % CONFIG.new_version)
else:
ii = None
for i, line in enumerate(lines):
if line.startswith(" docs/releases"):
ii = i
break
        if ii is None:
            failed("Error updating toctree")
            return
        lines.insert(ii, new_line)
try:
with open(path, 'w') as f:
f.writelines(lines)
except Exception as e:
failed("Could not write new toctree to file %r" % filename, str(e).split("\n"))
else:
passed("Appended version %r to %r" % (CONFIG.new_version, filename))
commit(filename, new_version)
def update_changelog():
try:
out = run("python issues.py -p %s -r %s" % (CONFIG.last_full_version, CONFIG.new_version))
passed("Updated CHANGELOG with new closed issues")
except CalledProcessError as e:
out = e.output.decode('utf-8')
if "HTTP Error 403: Forbidden" in out:
failed("CHANGELOG cannot be updated right now due to GitHub rate limiting")
else:
failed("CHANGELOG update failed", out.split("\n"))
def merge_and_push():
try:
run("git checkout master")
passed("Checked out master branch")
except Exception as e:
failed("[FATAL] COULD NOT CHECK OUT MASTER BRANCH: %s" % e)
return False
try:
run(["git", "merge", "--no-ff", CONFIG.release_branch, "-m", "'Merge branch %s'" % CONFIG.release_branch])
passed("Merged release branch into master branch")
except Exception as e:
failed("[FATAL] COULD NOT MERGE RELEASE BRANCH TO MASTER: %s" % e)
return False
try:
# use --no-verify to prevent git hook that might ask for confirmation
run("git push --no-verify origin master")
passed("Pushed master branch to GitHub")
except Exception as e:
failed("[FATAL] COULD NOT PUSH MASTER TO ORIGIN: %s" % e)
return False
try:
out = run(["git", "branch", "-d", CONFIG.release_branch])
passed("Deleted release branch")
except Exception:
failed("[NON-FATAL] Could not delete release branch", out.split("\n"))
try:
run(["git", "tag", "-a", CONFIG.new_version, "-m", "Release %s" % CONFIG.new_version])
passed("Tagged release %r" % CONFIG.new_version)
except Exception as e:
failed("[FATAL] COULD NOT TAG RELEASE: %s" % e)
return False
try:
# use --no-verify to prevent git hook that might ask for confirmation
run(["git", "push", "--no-verify", "origin", CONFIG.new_version])
passed("Pushed tag %r to GitHub" % CONFIG.new_version)
except Exception as e:
failed("[FATAL] COULD NOT PUSH MASTER TO ORIGIN: %s" % e)
return False
try:
out = run("git checkout master")
passed("Returned to master branch")
except Exception as e:
failed("[NON-FATAL] Could not return to master branch", out.split("\n"))
return True
def show_updates():
print()
print("!!! Here is a diff of the changes made on the release branch:")
print()
diff = run("git diff --minimal master").split("\n")
for line in diff:
print(blue(" %s" % line))
print()
#--------------------------------------
#
# Main
#
#--------------------------------------
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Deploy a Bokeh release.')
parser.add_argument('version',
type=str,
nargs=1,
help='The new version number for this release')
args = parser.parse_args()
new_version = args.version[0]
banner(blue, "{:^80}".format("You are starting a Bokeh release deployment for %r" % new_version))
# pre-checks ------------------------------------------------------------
print("!!! Running pre-checks for release deploy\n")
check_py3()
check_git()
check_maintainers()
check_repo()
check_checkout()
try:
CONFIG.new_version = args.version[0]
passed("New version %r is a valid Bokeh version (%s)" % (CONFIG.new_version, bright(CONFIG.version_type)))
except ValueError:
failed("Version %r is NOT a valid Bokeh version" % CONFIG.new_version)
abort()
check_last_versions()
check_version_order()
check_release_branch()
if V(CONFIG.new_version).is_prerelease:
print(blue("[SKIP] ") + "Not checking issues for BEP-1 compliance for pre-releases")
else:
check_issues()
if CONFIG.problems:
print(red("\n!!! Some pre-checks have failed:\n"))
for p in CONFIG.problems:
print(" - " + yellow(p))
abort()
print(green("\n!!! All pre-checks have passed\n"))
confirm("Would you like to continue to file modifications?")
print(blue("\n" + '-'*80 + "\n"))
# modifications ---------------------------------------------------------
try:
run("git checkout -b %s" % CONFIG.release_branch)
passed("Checked out release branch %r" % CONFIG.release_branch)
except CalledProcessError as e:
failed("Could not check out release branch %r" % CONFIG.release_branch, str(e).split("/n"))
abort()
update_bokehjs_versions()
if V(CONFIG.new_version).is_prerelease:
print(blue("[SKIP] ") + "Not updating docs version or change log for pre-releases")
else:
update_docs_versions()
update_changelog()
if CONFIG.problems:
print(red("\n!!! Some updates have failed:\n"))
for p in CONFIG.problems:
print(" - " + yellow(p))
abort()
# confirmation ----------------------------------------------------------
show_updates()
confirm("Merge release branch and push these changes? [LAST CHANCE TO ABORT]")
success = merge_and_push()
if success:
if CONFIG.problems:
print(blue("\n!!! Some NON-FATAL problems occurred:\n"))
for p in CONFIG.problems:
print(" - " + yellow(p))
print()
banner(blue, "{:^80}".format("Bokeh %r release deployment: SUCCESS" % CONFIG.new_version))
else:
if CONFIG.problems:
print(red("\n!!! Some FATAL problems occurred:\n"))
for p in CONFIG.problems:
print(" - " + yellow(p))
print()
print(bright(red("!!! REMOTE ACTIONS MAY HAVE BEEN TAKEN --- local AND remote branches may be dirty")))
print()
banner(red, "{:^80}".format("Bokeh %r release deployment: FAILURE" % CONFIG.new_version))
sys.exit(1)
|
|
from django.contrib.auth.models import Permission
from django.test import TestCase
from django.urls import reverse
from wagtail.core.models import Page, Site
from wagtail.tests.utils import WagtailTestUtils
class TestSiteIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
self.home_page = Page.objects.get(id=2)
def get(self, params={}):
return self.client.get(reverse('wagtailsites:index'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/index.html')
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
class TestSiteCreateView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
self.home_page = Page.objects.get(id=2)
self.localhost = Site.objects.all()[0]
def get(self, params={}):
return self.client.get(reverse('wagtailsites:add'), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailsites:add'), post_data)
def create_site(self, hostname='testsite', port=80, is_default_site=False, root_page=None):
root_page = root_page or self.home_page
Site.objects.create(
hostname=hostname,
port=port,
is_default_site=is_default_site,
root_page=root_page)
def test_default_fixtures_present(self):
# we should have loaded with a single site
self.assertEqual(self.localhost.hostname, 'localhost')
self.assertEqual(self.localhost.port, 80)
self.assertEqual(self.localhost.is_default_site, True)
self.assertEqual(self.localhost.root_page, self.home_page)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/create.html')
def test_create(self):
response = self.post({
'hostname': "testsite",
'port': "80",
'root_page': str(self.home_page.id),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site was created
self.assertEqual(Site.objects.filter(hostname='testsite').count(), 1)
def test_duplicate_defaults_not_allowed(self):
response = self.post({
'hostname': "also_default",
'port': "80",
'is_default_site': "on",
'root_page': str(self.home_page.id),
})
# Should return the form with errors
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.context['form'].errors), True)
# Check that the site was not created
sites = Site.objects.filter(hostname='also_default')
self.assertEqual(sites.count(), 0)
def test_duplicate_hostnames_on_different_ports_allowed(self):
response = self.post({
'hostname': "localhost",
'port': "8000",
'root_page': str(self.home_page.id),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site was created
self.assertEqual(Site.objects.filter(hostname='localhost').count(), 2)
def test_duplicate_hostnames_on_same_port_not_allowed(self):
# Confirm there's one localhost already
self.assertEqual(Site.objects.filter(hostname='localhost').count(), 1)
response = self.post({
'hostname': "localhost",
'port': "80",
'root_page': str(self.home_page.id),
})
# Should return the form with errors
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.context['form'].errors), True)
# Check that the site was not created, still only one localhost entry
self.assertEqual(Site.objects.filter(hostname='localhost').count(), 1)
class TestSiteEditView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
self.home_page = Page.objects.get(id=2)
self.localhost = Site.objects.all()[0]
def get(self, params={}, site_id=None):
return self.client.get(reverse('wagtailsites:edit', args=(site_id or self.localhost.id, )), params)
def post(self, post_data={}, site_id=None):
site_id = site_id or self.localhost.id
site = Site.objects.get(id=site_id)
post_defaults = {
'hostname': site.hostname,
'port': site.port,
'root_page': site.root_page.id,
}
for k, v in post_defaults.items():
post_data[k] = post_data.get(k, v)
if 'default' in post_data:
if post_data['default']: # only include the is_default_site key if we want to set it
post_data['is_default_site'] = 'on'
elif site.is_default_site:
post_data['is_default_site'] = 'on'
return self.client.post(reverse('wagtailsites:edit', args=(site_id,)), post_data)
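    # Illustrative usage of the helper above (assumed behaviour, not an
    # original comment): self.post({'default': True}, site_id=some_site.id)
    # fills in the site's current hostname/port/root_page and adds
    # 'is_default_site': 'on', while self.post({'default': False}, ...)
    # omits the checkbox value so the form treats the site as non-default.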
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/edit.html')
    def test_nonexistent_returns_404(self):
self.assertEqual(self.get(site_id=100000).status_code, 404)
def test_edit(self):
edited_hostname = 'edited'
response = self.post({
'hostname': edited_hostname,
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site was edited
self.assertEqual(Site.objects.get(id=self.localhost.id).hostname, edited_hostname)
def test_changing_the_default_site_workflow(self):
# First create a second, non-default, site
second_site = Site.objects.create(
hostname="not_yet_default",
port=80,
is_default_site=False,
root_page=self.home_page)
# Make the original default no longer default
response = self.post(
{
'default': False,
},
site_id=self.localhost.id
)
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site is no longer default
self.assertEqual(Site.objects.get(id=self.localhost.id).is_default_site, False)
# Now make the second site default
response = self.post(
{
'default': True,
},
site_id=second_site.id
)
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the second site is now set as default
self.assertEqual(Site.objects.get(id=second_site.id).is_default_site, True)
def test_making_a_second_site_the_default_not_allowed(self):
second_site = Site.objects.create(
hostname="also_default",
port=80,
is_default_site=False,
root_page=self.home_page)
response = self.post(
{
'default': True,
},
site_id=second_site.id
)
# Should return the form with errors
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.context['form'].errors), True)
        # Check that the site was not edited
self.assertEqual(Site.objects.get(id=second_site.id).is_default_site, False)
class TestSiteDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
self.home_page = Page.objects.get(id=2)
self.localhost = Site.objects.all()[0]
def get(self, params={}, site_id=None):
return self.client.get(reverse('wagtailsites:delete', args=(site_id or self.localhost.id, )), params)
def post(self, post_data={}, site_id=None):
return self.client.post(reverse('wagtailsites:delete', args=(site_id or self.localhost.id, )), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/generic/confirm_delete.html')
    def test_nonexistent_returns_404(self):
self.assertEqual(self.get(site_id=100000).status_code, 404)
def test_posting_deletes_site(self):
response = self.post()
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
        # Check that the site was deleted
with self.assertRaises(Site.DoesNotExist):
Site.objects.get(id=self.localhost.id)
class TestLimitedPermissions(TestCase, WagtailTestUtils):
def setUp(self):
# Create a user
user = self.create_user(username='test', password='password')
user.user_permissions.add(
Permission.objects.get(codename='access_admin'),
Permission.objects.get(codename='add_site'),
Permission.objects.get(codename='change_site'),
Permission.objects.get(codename='delete_site')
)
# Login
self.login(username='test', password='password')
self.home_page = Page.objects.get(id=2)
self.localhost = Site.objects.all()[0]
def test_get_index(self):
response = self.client.get(reverse('wagtailsites:index'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/index.html')
def test_get_create_view(self):
response = self.client.get(reverse('wagtailsites:add'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/create.html')
def test_create(self):
response = self.client.post(reverse('wagtailsites:add'), {
'hostname': "testsite",
'port': "80",
'root_page': str(self.home_page.id),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site was created
self.assertEqual(Site.objects.filter(hostname='testsite').count(), 1)
def test_get_edit_view(self):
edit_url = reverse('wagtailsites:edit', args=(self.localhost.id,))
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailsites/edit.html')
def test_edit(self):
edit_url = reverse('wagtailsites:edit', args=(self.localhost.id,))
edited_hostname = 'edited'
response = self.client.post(edit_url, {
'hostname': edited_hostname,
'port': 80,
'root_page': self.home_page.id,
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
# Check that the site was edited
self.assertEqual(Site.objects.get(id=self.localhost.id).hostname, edited_hostname)
def test_get_delete_view(self):
delete_url = reverse('wagtailsites:delete', args=(self.localhost.id,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailadmin/generic/confirm_delete.html')
def test_delete(self):
delete_url = reverse('wagtailsites:delete', args=(self.localhost.id,))
response = self.client.post(delete_url)
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailsites:index'))
        # Check that the site was deleted
with self.assertRaises(Site.DoesNotExist):
Site.objects.get(id=self.localhost.id)
|
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import or_
from sqlalchemy.orm import exc
from oslo.db import exception as db_exc
from neutron.common import constants as n_const
from neutron.db import api as db_api
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron.db import trunk_port_db as trunk_port
from neutron.extensions import portbindings
from neutron import manager
from neutron.openstack.common import log
from neutron.openstack.common import uuidutils
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2 import models
LOG = log.getLogger(__name__)
# limit the number of port OR LIKE statements in one query
MAX_PORTS_PER_QUERY = 500
def _make_segment_dict(record):
"""Make a segment dictionary out of a DB record."""
return {api.ID: record.id,
api.NETWORK_TYPE: record.network_type,
api.PHYSICAL_NETWORK: record.physical_network,
api.SEGMENTATION_ID: record.segmentation_id}
def add_network_segment(session, network_id, segment, is_dynamic=False):
with session.begin(subtransactions=True):
record = models.NetworkSegment(
id=uuidutils.generate_uuid(),
network_id=network_id,
network_type=segment.get(api.NETWORK_TYPE),
physical_network=segment.get(api.PHYSICAL_NETWORK),
segmentation_id=segment.get(api.SEGMENTATION_ID),
is_dynamic=is_dynamic
)
session.add(record)
segment[api.ID] = record.id
LOG.info(_("Added segment %(id)s of type %(network_type)s for network"
" %(network_id)s"),
{'id': record.id,
'network_type': record.network_type,
'network_id': record.network_id})
def get_network_segments(session, network_id, filter_dynamic=False):
with session.begin(subtransactions=True):
query = (session.query(models.NetworkSegment).
filter_by(network_id=network_id))
if filter_dynamic is not None:
query = query.filter_by(is_dynamic=filter_dynamic)
records = query.all()
return [_make_segment_dict(record) for record in records]
def get_segment_by_id(session, segment_id):
with session.begin(subtransactions=True):
try:
record = (session.query(models.NetworkSegment).
filter_by(id=segment_id).
one())
return _make_segment_dict(record)
except exc.NoResultFound:
return
def get_dynamic_segment(session, network_id, physical_network=None,
segmentation_id=None):
"""Return a dynamic segment for the filters provided if one exists."""
with session.begin(subtransactions=True):
query = (session.query(models.NetworkSegment).
filter_by(network_id=network_id, is_dynamic=True))
if physical_network:
query = query.filter_by(physical_network=physical_network)
if segmentation_id:
query = query.filter_by(segmentation_id=segmentation_id)
record = query.first()
if record:
return _make_segment_dict(record)
else:
LOG.debug("No dynamic segment %s found for "
"Network:%(network_id)s, "
"Physical network:%(physnet)s, "
"segmentation_id:%(segmentation_id)s",
{'network_id': network_id,
'physnet': physical_network,
'segmentation_id': segmentation_id})
return None
def delete_network_segment(session, segment_id):
"""Release a dynamic segment for the params provided if one exists."""
with session.begin(subtransactions=True):
(session.query(models.NetworkSegment).
filter_by(id=segment_id).delete())
def add_port_binding(session, port_id):
with session.begin(subtransactions=True):
record = models.PortBinding(
port_id=port_id,
vif_type=portbindings.VIF_TYPE_UNBOUND)
session.add(record)
return record
def get_locked_port_and_binding(session, port_id):
"""Get port and port binding records for update within transaction."""
try:
# REVISIT(rkukura): We need the Port and PortBinding records
# to both be added to the session and locked for update. A
# single joined query should work, but the combination of left
# outer joins and postgresql doesn't seem to work.
port = (session.query(models_v2.Port).
enable_eagerloads(False).
filter_by(id=port_id).
with_lockmode('update').
one())
binding = (session.query(models.PortBinding).
enable_eagerloads(False).
filter_by(port_id=port_id).
with_lockmode('update').
one())
return port, binding
except exc.NoResultFound:
return None, None
def ensure_dvr_port_binding(session, port_id, host, router_id=None):
record = (session.query(models.DVRPortBinding).
filter_by(port_id=port_id, host=host).first())
if record:
return record
try:
with session.begin(subtransactions=True):
record = models.DVRPortBinding(
port_id=port_id,
host=host,
router_id=router_id,
vif_type=portbindings.VIF_TYPE_UNBOUND,
vnic_type=portbindings.VNIC_NORMAL,
cap_port_filter=False,
status=n_const.PORT_STATUS_DOWN)
session.add(record)
return record
except db_exc.DBDuplicateEntry:
LOG.debug("DVR Port %s already bound", port_id)
return (session.query(models.DVRPortBinding).
filter_by(port_id=port_id, host=host).one())
def delete_dvr_port_binding(session, port_id, host):
with session.begin(subtransactions=True):
(session.query(models.DVRPortBinding).
filter_by(port_id=port_id, host=host).
delete(synchronize_session=False))
def delete_dvr_port_binding_if_stale(session, binding):
if not binding.router_id and binding.status == n_const.PORT_STATUS_DOWN:
with session.begin(subtransactions=True):
LOG.debug("DVR: Deleting binding %s", binding)
session.delete(binding)
def get_port(session, port_id):
"""Get port record for update within transcation."""
with session.begin(subtransactions=True):
try:
record = (session.query(models_v2.Port).
filter(models_v2.Port.id.startswith(port_id)).
one())
return record
except exc.NoResultFound:
return
except exc.MultipleResultsFound:
LOG.error(_("Multiple ports have port_id starting with %s"),
port_id)
return
def get_vlan_mappings(rpc_context, port):
"""Get VLAN mappings for a trunk network."""
session = rpc_context.session
remote_nets = {}
    if port.get('trunkport:type') != 'trunk':
        return remote_nets
with session.begin(subtransactions=True):
try:
vlan_mappings = (
session.query(trunk_port.TrunkPort, models_v2.Port,
models.NetworkSegment).
enable_eagerloads(False).
filter(trunk_port.TrunkPort.parent_id == port['id']).
filter(trunk_port.TrunkPort.port_id == models_v2.Port.id).
filter(models.NetworkSegment.network_id ==
models_v2.Port.network_id).all())
if not vlan_mappings:
return remote_nets
for mapping in vlan_mappings:
subport_id = mapping[0].port_id
vid = mapping[0].vid
net = mapping[1].network_id
ntype = mapping[2].network_type
pnet = mapping[2].physical_network
seg_id = mapping[2].segmentation_id
remote_nets[subport_id] = {'net_id': net, 'vid': vid,
'network_type': ntype,
'physical_network': pnet,
'segmentation_id': seg_id}
except exc.NoResultFound:
pass
return remote_nets
def get_port_from_device_mac(device_mac):
LOG.debug(_("get_port_from_device_mac() called for mac %s"), device_mac)
session = db_api.get_session()
qry = session.query(models_v2.Port).filter_by(mac_address=device_mac)
return qry.first()
def get_ports_and_sgs(port_ids):
"""Get ports from database with security group info."""
# break large queries into smaller parts
if len(port_ids) > MAX_PORTS_PER_QUERY:
LOG.debug("Number of ports %(pcount)s exceeds the maximum per "
"query %(maxp)s. Partitioning queries.",
{'pcount': len(port_ids), 'maxp': MAX_PORTS_PER_QUERY})
return (get_ports_and_sgs(port_ids[:MAX_PORTS_PER_QUERY]) +
get_ports_and_sgs(port_ids[MAX_PORTS_PER_QUERY:]))
LOG.debug("get_ports_and_sgs() called for port_ids %s", port_ids)
if not port_ids:
# if port_ids is empty, avoid querying to DB to ask it for nothing
return []
ports_to_sg_ids = get_sg_ids_grouped_by_port(port_ids)
return [make_port_dict_with_security_groups(port, sec_groups)
for port, sec_groups in ports_to_sg_ids.iteritems()]
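# Rough example of the partitioning above (illustrative, not part of the
# original module): with MAX_PORTS_PER_QUERY = 500, a call with 1200 port ids
# recurses into three calls handling 500, 500 and 200 ids respectively, and
# the per-port dictionaries from each chunk are concatenated into one list.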
def get_sg_ids_grouped_by_port(port_ids):
sg_ids_grouped_by_port = {}
session = db_api.get_session()
sg_binding_port = sg_db.SecurityGroupPortBinding.port_id
with session.begin(subtransactions=True):
# partial UUIDs must be individually matched with startswith.
# full UUIDs may be matched directly in an IN statement
partial_uuids = set(port_id for port_id in port_ids
if not uuidutils.is_uuid_like(port_id))
full_uuids = set(port_ids) - partial_uuids
or_criteria = [models_v2.Port.id.startswith(port_id)
for port_id in partial_uuids]
if full_uuids:
or_criteria.append(models_v2.Port.id.in_(full_uuids))
query = session.query(models_v2.Port,
sg_db.SecurityGroupPortBinding.security_group_id)
query = query.outerjoin(sg_db.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(or_(*or_criteria))
for port, sg_id in query:
if port not in sg_ids_grouped_by_port:
sg_ids_grouped_by_port[port] = []
if sg_id:
sg_ids_grouped_by_port[port].append(sg_id)
return sg_ids_grouped_by_port
def make_port_dict_with_security_groups(port, sec_groups):
plugin = manager.NeutronManager.get_plugin()
port_dict = plugin._make_port_dict(port)
port_dict['security_groups'] = sec_groups
port_dict['security_group_rules'] = []
port_dict['security_group_source_groups'] = []
port_dict['fixed_ips'] = [ip['ip_address']
for ip in port['fixed_ips']]
return port_dict
def get_port_binding_host(port_id):
session = db_api.get_session()
with session.begin(subtransactions=True):
try:
query = (session.query(models.PortBinding).
filter(models.PortBinding.port_id.startswith(port_id)).
one())
except exc.NoResultFound:
LOG.debug(_("No binding found for port %(port_id)s"),
{'port_id': port_id})
return
except exc.MultipleResultsFound:
LOG.error(_("Multiple ports have port_id starting with %s"),
port_id)
return
return query.host
def generate_dvr_port_status(session, port_id):
# an OR'ed value of status assigned to parent port from the
# dvrportbinding bucket
query = session.query(models.DVRPortBinding)
final_status = n_const.PORT_STATUS_BUILD
for bind in query.filter(models.DVRPortBinding.port_id == port_id):
if bind.status == n_const.PORT_STATUS_ACTIVE:
return bind.status
elif bind.status == n_const.PORT_STATUS_DOWN:
final_status = bind.status
return final_status
def get_dvr_port_binding_by_host(session, port_id, host):
with session.begin(subtransactions=True):
binding = (session.query(models.DVRPortBinding).
filter(models.DVRPortBinding.port_id.startswith(port_id),
models.DVRPortBinding.host == host).first())
if not binding:
LOG.debug("No binding for DVR port %(port_id)s with host "
"%(host)s", {'port_id': port_id, 'host': host})
return binding
def get_dvr_port_bindings(session, port_id):
with session.begin(subtransactions=True):
bindings = (session.query(models.DVRPortBinding).
filter(models.DVRPortBinding.port_id.startswith(port_id)).
all())
if not bindings:
LOG.debug("No bindings for DVR port %s", port_id)
return bindings
|
|
"""SCons.Defaults
Builders and other things for the local site. Here's where we'll
duplicate the functionality of autoconf until we move it into the
installation procedure or use something like qmconf.
The code that reads the registry to find MSVC components was borrowed
from distutils.msvccompiler.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import division
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
import errno
import shutil
import stat
import time
import sys
import SCons.Action
import SCons.Builder
import SCons.CacheDir
import SCons.Environment
import SCons.PathList
import SCons.Subst
import SCons.Tool
import SCons.Util
import SCons.Errors
import SCons.Node.FS
# A placeholder for a default Environment (for fetching source files
# from source code management systems and the like). This must be
# initialized later, after the top-level directory is set by the calling
# interface.
_default_env = None
# Lazily instantiate the default environment so the overhead of creating
# it doesn't apply when it's not needed.
def _fetch_DefaultEnvironment(*args, **kw):
"""
Returns the already-created default construction environment.
"""
global _default_env
return _default_env
def DefaultEnvironment(*args, **kw):
"""
Initial public entry point for creating the default construction
Environment.
After creating the environment, we overwrite our name
(DefaultEnvironment) with the _fetch_DefaultEnvironment() function,
which more efficiently returns the initialized default construction
environment without checking for its existence.
(This function still exists with its _default_check because someone
else (*cough* Script/__init__.py *cough*) may keep a reference
to this function. So we can't use the fully functional idiom of
    having the name originally be something that *only* creates the
construction environment and then overwrites the name.)
"""
global _default_env
if not _default_env:
import SCons.Util
_default_env = SCons.Environment.Environment(*args, **kw)
if SCons.Util.md5:
_default_env.Decider('MD5')
else:
_default_env.Decider('timestamp-match')
global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment
_default_env._CacheDir_path = None
return _default_env
# Emitters for setting the shared attribute on object files,
# and an action for checking that all of the source files
# going into a shared library are, in fact, shared.
def StaticObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = None
return (target, source)
def SharedObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = 1
return (target, source)
def SharedFlagChecker(source, target, env):
same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME')
if same == '0' or same == '' or same == 'False':
for src in source:
try:
shared = src.attributes.shared
except AttributeError:
shared = None
if not shared:
raise SCons.Errors.UserError("Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]))
SharedCheck = SCons.Action.Action(SharedFlagChecker, None)
# Some people were using these variable names before we made
# SourceFileScanner part of the public interface. Don't break their
# SConscript files until we've given them some fair warning and a
# transition period.
CScan = SCons.Tool.CScanner
DScan = SCons.Tool.DScanner
LaTeXScan = SCons.Tool.LaTeXScanner
ObjSourceScan = SCons.Tool.SourceFileScanner
ProgScan = SCons.Tool.ProgramScanner
# These aren't really tool scanners, so they don't quite belong with
# the rest of those in Tool/__init__.py, but I'm not sure where else
# they should go. Leave them here for now.
import SCons.Scanner.Dir
DirScanner = SCons.Scanner.Dir.DirScanner()
DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner()
# Actions for common languages.
CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR")
ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR")
CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR")
ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR")
ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR")
ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR")
LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR")
ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR")
LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
# Common tasks that we allow users to perform in platform-independent
# ways by creating ActionFactory instances.
ActionFactory = SCons.Action.ActionFactory
def get_paths_str(dest):
# If dest is a list, we need to manually call str() on each element
if SCons.Util.is_List(dest):
elem_strs = []
for element in dest:
elem_strs.append('"' + str(element) + '"')
return '[' + ', '.join(elem_strs) + ']'
else:
return '"' + str(dest) + '"'
def chmod_func(dest, mode):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for element in dest:
os.chmod(str(element), mode)
def chmod_strfunc(dest, mode):
return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode)
Chmod = ActionFactory(chmod_func, chmod_strfunc)
def copy_func(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
if SCons.Util.is_List(src) and os.path.isdir(dest):
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.isfile(src):
return shutil.copy2(src, dest)
else:
return shutil.copytree(src, dest, 1)
Copy = ActionFactory(copy_func,
lambda dest, src: 'Copy("%s", "%s")' % (dest, src),
convert=str)
def delete_func(dest, must_exist=0):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
entry = str(entry)
# os.path.exists returns False with broken links that exist
entry_exists = os.path.exists(entry) or os.path.islink(entry)
if not entry_exists and not must_exist:
continue
# os.path.isdir returns True when entry is a link to a dir
if os.path.isdir(entry) and not os.path.islink(entry):
shutil.rmtree(entry, 1)
continue
os.unlink(entry)
def delete_strfunc(dest, must_exist=0):
return 'Delete(%s)' % get_paths_str(dest)
Delete = ActionFactory(delete_func, delete_strfunc)
def mkdir_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
try:
os.makedirs(str(entry))
        except os.error as e:
p = str(entry)
if (e.args[0] == errno.EEXIST or
(sys.platform=='win32' and e.args[0]==183)) \
and os.path.isdir(str(entry)):
pass # not an error if already exists
else:
raise
Mkdir = ActionFactory(mkdir_func,
lambda dir: 'Mkdir(%s)' % get_paths_str(dir))
def move_func(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
SCons.Node.FS.invalidate_node_memos(src)
shutil.move(src, dest)
Move = ActionFactory(move_func,
lambda dest, src: 'Move("%s", "%s")' % (dest, src),
convert=str)
def touch_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for file in dest:
file = str(file)
mtime = int(time.time())
if os.path.exists(file):
atime = os.path.getatime(file)
else:
            open(file, 'w').close()
atime = mtime
os.utime(file, (atime, mtime))
Touch = ActionFactory(touch_func,
lambda file: 'Touch(%s)' % get_paths_str(file))
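# Illustrative SConscript usage of the ActionFactory instances above (a
# sketch, assuming a build directory named 'build'):
#   env.Command('stamp', [], [Delete('build'), Mkdir('build'), Touch('stamp')])
# Each factory reports the operation for the build log via its strfunc and
# performs it via the corresponding *_func when the action executes.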
# Internal utility functions
def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None):
"""
Creates a new list from 'list' by first interpolating each element
in the list using the 'env' dictionary and then calling f on the
list, and finally calling _concat_ixes to concatenate 'prefix' and
'suffix' onto each element of the list.
"""
if not list:
return list
l = f(SCons.PathList.PathList(list).subst_path(env, target, source))
if l is not None:
list = l
return _concat_ixes(prefix, list, suffix, env)
def _concat_ixes(prefix, list, suffix, env):
"""
Creates a new list from 'list' by concatenating the 'prefix' and
'suffix' arguments onto each element of the list. A trailing space
on 'prefix' or leading space on 'suffix' will cause them to be put
into separate list elements rather than being concatenated.
"""
result = []
# ensure that prefix and suffix are strings
prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW))
suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW))
for x in list:
if isinstance(x, SCons.Node.FS.File):
result.append(x)
continue
x = str(x)
if x:
if prefix:
if prefix[-1] == ' ':
result.append(prefix[:-1])
elif x[:len(prefix)] != prefix:
x = prefix + x
result.append(x)
if suffix:
if suffix[0] == ' ':
result.append(suffix[1:])
elif x[-len(suffix):] != suffix:
result[-1] = result[-1]+suffix
return result
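# Illustrative sketch of _concat_ixes (assuming an env with no substitutions
# to perform on the prefix or elements):
#   _concat_ixes('-I', ['include', 'src'], '', env)  -> ['-Iinclude', '-Isrc']
#   _concat_ixes('-I ', ['include'], '', env)        -> ['-I', 'include']
# i.e. a trailing space on the prefix keeps it as a separate list element.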
def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
"""
This is a wrapper around _concat()/_concat_ixes() that checks for
the existence of prefixes or suffixes on list items and strips them
where it finds them. This is used by tools (like the GNU linker)
that need to turn something like 'libfoo.a' into '-lfoo'.
"""
if not itms:
return itms
if not callable(c):
env_c = env['_concat']
if env_c != _concat and callable(env_c):
# There's a custom _concat() method in the construction
# environment, and we've allowed people to set that in
# the past (see test/custom-concat.py), so preserve the
# backwards compatibility.
c = env_c
else:
c = _concat_ixes
stripprefixes = list(map(env.subst, SCons.Util.flatten(stripprefixes)))
stripsuffixes = list(map(env.subst, SCons.Util.flatten(stripsuffixes)))
stripped = []
for l in SCons.PathList.PathList(itms).subst_path(env, None, None):
if isinstance(l, SCons.Node.FS.File):
stripped.append(l)
continue
if not SCons.Util.is_String(l):
l = str(l)
for stripprefix in stripprefixes:
lsp = len(stripprefix)
if l[:lsp] == stripprefix:
l = l[lsp:]
# Do not strip more than one prefix
break
for stripsuffix in stripsuffixes:
lss = len(stripsuffix)
if l[-lss:] == stripsuffix:
l = l[:-lss]
# Do not strip more than one suffix
break
stripped.append(l)
return c(prefix, stripped, suffix, env)
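# Illustrative sketch of _stripixes (assumed values, and assuming
# env['_concat'] is the default _concat so c falls back to _concat_ixes):
#   _stripixes('-l', ['libfoo.a'], '', ['lib'], ['.a'], env)
# strips one prefix and one suffix from 'libfoo.a' and hands 'foo' to
# _concat_ixes, yielding ['-lfoo'] -- the GNU-linker-style case mentioned in
# the docstring above.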
def processDefines(defs):
"""process defines, resolving strings, lists, dictionaries, into a list of
strings
"""
if SCons.Util.is_List(defs):
l = []
for d in defs:
if d is None:
continue
elif SCons.Util.is_List(d) or isinstance(d, tuple):
if len(d) >= 2:
l.append(str(d[0]) + '=' + str(d[1]))
else:
l.append(str(d[0]))
elif SCons.Util.is_Dict(d):
for macro,value in d.iteritems():
if value is not None:
l.append(str(macro) + '=' + str(value))
else:
l.append(str(macro))
elif SCons.Util.is_String(d):
l.append(str(d))
else:
raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d))
elif SCons.Util.is_Dict(defs):
# The items in a dictionary are stored in random order, but
# if the order of the command-line options changes from
# invocation to invocation, then the signature of the command
# line will change and we'll get random unnecessary rebuilds.
# Consequently, we have to sort the keys to ensure a
# consistent order...
l = []
for k,v in sorted(defs.items()):
if v is None:
l.append(str(k))
else:
l.append(str(k) + '=' + str(v))
else:
l = [str(defs)]
return l
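# Illustrative examples of processDefines (assumed inputs):
#   processDefines(['FOO', ('BAR', 1)])     -> ['FOO', 'BAR=1']
#   processDefines({'B': None, 'A': 2})     -> ['A=2', 'B']   (keys sorted)
#   processDefines('NDEBUG')                -> ['NDEBUG']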
def _defines(prefix, defs, suffix, env, c=_concat_ixes):
"""A wrapper around _concat_ixes that turns a list or string
into a list of C preprocessor command-line definitions.
"""
return c(prefix, env.subst_path(processDefines(defs)), suffix, env)
class NullCmdGenerator(object):
"""This is a callable class that can be used in place of other
command generators if you don't want them to do anything.
The __call__ method for this class simply returns the thing
you instantiated it with.
Example usage:
env["DO_NOTHING"] = NullCmdGenerator
env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
"""
def __init__(self, cmd):
self.cmd = cmd
def __call__(self, target, source, env, for_signature=None):
return self.cmd
class Variable_Method_Caller(object):
"""A class for finding a construction variable on the stack and
calling one of its methods.
We use this to support "construction variables" in our string
eval()s that actually stand in for methods--specifically, use
of "RDirs" in call to _concat that should actually execute the
"TARGET.RDirs" method. (We used to support this by creating a little
"build dictionary" that mapped RDirs to the method, but this got in
the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.)
"""
def __init__(self, variable, method):
self.variable = variable
self.method = method
def __call__(self, *args, **kw):
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
variable = self.variable
while frame:
if variable in frame.f_locals:
v = frame.f_locals[variable]
if v:
method = getattr(v, self.method)
return method(*args, **kw)
frame = frame.f_back
return None
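# Rough illustration (not original commentary): with the construction
# environment below, a string such as
#   '${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
# is evaluated during substitution; the name 'RDirs' resolves, via the
# dictionary below, to Variable_Method_Caller('TARGET', 'RDirs'), which when
# called walks up the Python stack until it finds a local named TARGET and
# then invokes TARGET.RDirs(...) on the arguments it was given.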
ConstructionEnvironment = {
'BUILDERS' : {},
'SCANNERS' : [],
'CONFIGUREDIR' : '#/.sconf_temp',
'CONFIGURELOG' : '#/config.log',
'CPPSUFFIXES' : SCons.Tool.CSuffixes,
'DSUFFIXES' : SCons.Tool.DSuffixes,
'ENV' : {},
'IDLSUFFIXES' : SCons.Tool.IDLSuffixes,
# 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes, # moved to the TeX tools generate functions
'_concat' : _concat,
'_defines' : _defines,
'_stripixes' : _stripixes,
'_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}',
'_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}',
'TEMPFILE' : NullCmdGenerator,
'Dir' : Variable_Method_Caller('TARGET', 'Dir'),
'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'),
'File' : Variable_Method_Caller('TARGET', 'File'),
'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'),
}
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
import reversion
import logging
import json
from django.contrib.gis.db import models as gis_models
from rest_framework.exceptions import ValidationError
from common.models import AbstractBase, County, Constituency, Ward
from facilities.models import Facility
LOGGER = logging.getLogger(__name__)
class GISAbstractBase(AbstractBase, gis_models.Model):
"""
We've intentionally duplicated the `AbstractBase` in the `common` app
because we wanted to confine the impact of GIS ( Geographic ) stuff
to this app.
The GIS stuff should have only one touch-point with the rest of the
models: the link to the Facility model.
We've kept the fields that are in the `common` `AbstractBase` because
we want to have the same kind of base behavior.
"""
objects = gis_models.GeoManager()
everything = gis_models.GeoManager()
class Meta(AbstractBase.Meta):
abstract = True
@reversion.register
class GeoCodeSource(GISAbstractBase):
"""
Where the geo-code came from.
This is the organization collecting the code.
For example, DHMT, the Service Availability Mapping survey (SAM),
Kenya Medical Research Institute (KEMRI), the Regional Center for
Mapping of Resources for Development (RCMRD), the AIDS, Population
and Health Integrated Assistance (APHIA) II, or another source.
It is not the individual who collected the code
"""
name = gis_models.CharField(
max_length=100, unique=True,
help_text="The name of the collecting organization")
description = gis_models.TextField(
help_text="A short summary of the collecting organization",
null=True, blank=True)
abbreviation = gis_models.CharField(
max_length=10, null=True, blank=True,
help_text="An acronym of the collecting or e.g SAM")
def __unicode__(self):
return self.name
@reversion.register
class GeoCodeMethod(GISAbstractBase):
"""
Method used to capture the geo-code.
Examples:
1= Taken with GPS device,
2= Calculated from proximity to school, village, markets
3= Calculated from 1:50,000 scale topographic maps,
4= Scanned from hand-drawn maps,
5= Centroid calculation from sub-location
8= No geo-code
9= Other
"""
name = gis_models.CharField(
max_length=100, unique=True, help_text="The name of the method.")
description = gis_models.TextField(
help_text="A short description of the method",
null=True, blank=True)
def __unicode__(self):
return self.name
@reversion.register
class FacilityCoordinates(GISAbstractBase):
"""
    Location derived by use of GPS satellites and a GPS device or receiver.
    It is three-dimensional.
    The three-dimensional readings from a GPS device are latitude, longitude,
    and altitude. The date/time the reading was taken is also important, as
    are the source and method of the reading.
"""
facility = gis_models.OneToOneField(Facility)
coordinates = gis_models.PointField()
source = gis_models.ForeignKey(
GeoCodeSource,
help_text="where the geo code came from", on_delete=gis_models.PROTECT)
method = gis_models.ForeignKey(
GeoCodeMethod,
help_text="Method used to obtain the geo codes. e.g"
" taken with GPS device")
collection_date = gis_models.DateTimeField(auto_now_add=True)
def validate_longitude_and_latitude_within_kenya(self):
try:
boundary = WorldBorder.objects.get(code='KEN')
if not boundary.mpoly.contains(self.coordinates):
# This validation was relaxed ( temporarily? )
# The Kenyan boundaries that we have loaded have low fidelity
# at the edges, so that facilities that are, say, 100 meters
# from the border are reported as not in Kenya
# If higher fidelity map data is obtained, this validation
# can be brought back
LOGGER.error(
'{} is not within the Kenyan boundaries that we have'
.format(self.coordinates)
)
except WorldBorder.DoesNotExist:
raise ValidationError('Setup error: Kenyan boundaries not loaded')
def validate_longitude_and_latitude_within_constituency(self):
try:
boundary = ConstituencyBoundary.objects.get(
area=self.facility.ward.constituency)
if not boundary.mpoly.contains(self.coordinates):
raise ValidationError(
'{} not contained in boundary of {}'.format(
self.coordinates,
self.facility.ward.constituency
)
)
except ConstituencyBoundary.DoesNotExist:
raise ValidationError(
'No boundary for {}'.format(
self.facility.ward.constituency
)
)
def validate_longitude_and_latitude_within_county(self):
try:
boundary = CountyBoundary.objects.get(
area=self.facility.ward.constituency.county)
if not boundary.mpoly.contains(self.coordinates):
raise ValidationError(
'{} not contained in boundary of {}'.format(
self.coordinates,
self.facility.ward.constituency.county
)
)
except CountyBoundary.DoesNotExist:
raise ValidationError(
'No boundary for {}'.format(
self.facility.ward.constituency.county
)
)
def validate_longitude_and_latitude_within_ward(self):
try:
boundary = WardBoundary.objects.get(area=self.facility.ward)
if not boundary.mpoly.contains(self.coordinates):
raise ValidationError(
'{} not contained in boundary of {}'.format(
self.coordinates, self.facility.ward
)
)
except WardBoundary.DoesNotExist:
LOGGER.error(
'Ward {} does not have boundary info'.format(
self.facility.ward)
)
def clean(self):
self.validate_longitude_and_latitude_within_kenya()
self.validate_longitude_and_latitude_within_county()
self.validate_longitude_and_latitude_within_constituency()
self.validate_longitude_and_latitude_within_ward()
super(FacilityCoordinates, self).clean()
def __unicode__(self):
return self.facility.name
class Meta(GISAbstractBase.Meta):
verbose_name_plural = 'facility coordinates'
verbose_name = 'facility coordinates'
class AdministrativeUnitBoundary(GISAbstractBase):
"""Base class for the models that implement administrative boundaries
All common operations and fields are here.
We retain the default SRID ( 4326 - WGS84 ).
"""
# These two fields should mirror the contents of the relevant admin
# area model
name = gis_models.CharField(max_length=100)
code = gis_models.CharField(max_length=10, unique=True)
# Making this field nullable is a temporary band-aid for a deficiency
# in model_mommy ( a testing tool )
# The impact of this is minimal; these models hold setup data that is
# loaded and tested during each build
mpoly = gis_models.MultiPolygonField(null=True, blank=True)
@property
def bound(self):
return json.dumps(self.mpoly.envelope.geojson) if self.mpoly else None
@property
def center(self):
return json.loads(self.mpoly.centroid.geojson) if self.mpoly else None
@property
def surface_area(self):
return self.mpoly.area if self.mpoly else 0
@property
def facility_count(self):
return FacilityCoordinates.objects.filter(
coordinates__contained=self.mpoly
).count() if self and self.mpoly else 0
@property
def density(self):
"""This is a synthetic value
The units matter less than the relative density compared to other
administrative units
"""
return self.facility_count / (self.surface_area * 10000) \
if self.surface_area else 0
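    # Illustrative arithmetic (assumed figures, not real data): an area with
    # facility_count == 5 and surface_area == 0.02 (square degrees, SRID 4326)
    # gets a density of 5 / (0.02 * 10000) == 0.025; only the relative
    # ordering of these values across administrative units is meaningful.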
@property
def facility_coordinates(self):
from common.models.model_declarations import \
_lookup_facility_coordinates
return _lookup_facility_coordinates(self)
def __unicode__(self):
return self.name
class Meta(GISAbstractBase.Meta):
abstract = True
@reversion.register
class WorldBorder(AdministrativeUnitBoundary):
"""World boundaries
Source: http://thematicmapping.org/downloads/TM_WORLD_BORDERS-0.3.zip
"""
longitude = gis_models.FloatField()
latitude = gis_models.FloatField()
@reversion.register
class CountyBoundary(AdministrativeUnitBoundary):
area = gis_models.OneToOneField(County)
@property
def constituency_ids(self):
return Constituency.objects.filter(
county=self.area).values_list('id', flat=True)
@property
def constituency_boundary_ids(self):
constituency_boundary_ids = ConstituencyBoundary.objects.filter(
area__id__in=self.constituency_ids
).values_list('id', flat=True)
return constituency_boundary_ids
class Meta(GISAbstractBase.Meta):
verbose_name_plural = 'county boundaries'
@reversion.register
class ConstituencyBoundary(AdministrativeUnitBoundary):
area = gis_models.OneToOneField(Constituency)
@property
def ward_ids(self):
return Ward.objects.filter(
constituency=self.area).values_list('id', flat=True)
@property
def ward_boundary_ids(self):
ward_boundary_ids = WardBoundary.objects.filter(
area__id__in=self.ward_ids
).values_list('id', flat=True)
return ward_boundary_ids
class Meta(GISAbstractBase.Meta):
verbose_name_plural = 'constituency boundaries'
@reversion.register
class WardBoundary(AdministrativeUnitBoundary):
area = gis_models.OneToOneField(Ward)
@property
def facility_ids(self):
return FacilityCoordinates.objects.filter(
coordinates__contained=self.mpoly
).values_list('id', flat=True) if self and self.mpoly else []
class Meta(GISAbstractBase.Meta):
verbose_name_plural = 'ward boundaries'
|
|
from __future__ import absolute_import
from __future__ import print_function
from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union)
from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.http import HttpResponse
from django.db.utils import IntegrityError
from django.utils.translation import ugettext as _
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_realm,
get_stream,
get_user_profile_by_email,
get_realm_by_email_domain,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import text_type, binary_type
from zerver.lib.str_utils import NonBinaryStr
import six
API_KEYS = {} # type: Dict[text_type, text_type]
class ZulipTestCase(TestCase):
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
    django_client to fool the regex.
'''
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
# This method should be removed when we migrate to version 3 of Python
import six
if six.PY2:
self.assertRaisesRegex = self.assertRaisesRegexp
super(ZulipTestCase, self).__init__(*args, **kwargs)
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
@instrument_url
def client_patch(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_put_multipart(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for put requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not put.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, content_type=MULTIPART_CONTENT, **kwargs)
@instrument_url
def client_delete(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_post(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url, req):
# type: (text_type, Any) -> HttpResponse
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url, info={}, **kwargs):
# type: (text_type, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.get(url, info, **kwargs)
def login_with_return(self, email, password=None):
# type: (text_type, Optional[text_type]) -> HttpResponse
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password})
def login(self, email, password=None, fails=False):
# type: (text_type, Optional[text_type], bool) -> HttpResponse
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password))
else:
self.assertFalse(self.client.login(username=email, password=password))
def register(self, username, password, domain="zulip.com"):
# type: (text_type, text_type, text_type) -> HttpResponse
self.client_post('/accounts/home/',
{'email': username + "@" + domain})
return self.submit_reg_form_for_user(username, password, domain=domain)
def submit_reg_form_for_user(self, username, password, domain="zulip.com",
realm_name="Zulip Test", realm_subdomain="zuliptest",
realm_org_type=Realm.COMMUNITY,
from_confirmation='', **kwargs):
# type: (text_type, text_type, text_type, Optional[text_type], Optional[text_type], int, Optional[text_type], **Any) -> HttpResponse
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
return self.client_post('/accounts/register/',
{'full_name': username, 'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(username + '@' + domain),
'realm_org_type': realm_org_type,
'terms': True,
'from_confirmation': from_confirmation},
**kwargs)
def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
# type: (text_type, text_type) -> text_type
from django.core.mail import outbox
for message in reversed(outbox):
if email_address in message.to:
return re.search(settings.EXTERNAL_HOST + path_pattern,
message.body).groups()[0]
else:
raise ValueError("Couldn't find a confirmation email.")
def get_api_key(self, email):
# type: (text_type) -> text_type
if email not in API_KEYS:
API_KEYS[email] = get_user_profile_by_email(email).api_key
return API_KEYS[email]
def api_auth(self, email):
# type: (text_type) -> Dict[str, text_type]
credentials = u"%s:%s" % (email, self.get_api_key(email))
return {
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
}
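    # Illustrative result (hypothetical address and key, not taken from the
    # fixtures): api_auth('hamlet@zulip.com') with API key 'abc123' returns
    # {'HTTP_AUTHORIZATION': u'Basic ' + base64('hamlet@zulip.com:abc123')},
    # which the Django test client forwards as the HTTP Authorization header.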
def get_streams(self, email):
# type: (text_type) -> List[text_type]
"""
Helper function to get the stream names for a user
"""
user_profile = get_user_profile_by_email(email)
subs = Subscription.objects.filter(
user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return [cast(text_type, get_display_recipient(sub.recipient)) for sub in subs]
def send_message(self, sender_name, raw_recipients, message_type,
content=u"test content", subject=u"test", **kwargs):
# type: (text_type, Union[text_type, List[text_type]], int, text_type, text_type, **Any) -> int
sender = get_user_profile_by_email(sender_name)
if message_type == Recipient.PERSONAL:
message_type_name = "private"
else:
message_type_name = "stream"
if isinstance(raw_recipients, six.string_types):
recipient_list = [raw_recipients]
else:
recipient_list = raw_recipients
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, message_type_name, recipient_list, subject,
content, forged=False, forged_timestamp=None,
forwarder_user_profile=sender, realm=sender.realm, **kwargs)
def get_old_messages(self, anchor=1, num_before=100, num_after=100):
# type: (int, int, int) -> List[Dict[str, Any]]
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after}
result = self.client_get("/json/messages", dict(post_params))
data = ujson.loads(result.content)
return data['messages']
def users_subscribed_to_stream(self, stream_name, realm):
# type: (text_type, Realm) -> List[UserProfile]
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url, result):
# type: (str, bytes) -> None
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEquals(result, data)
def assert_json_success(self, result):
# type: (HttpResponse) -> Dict[str, Any]
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
self.assertEqual(result.status_code, 200, result)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
return json
def get_json_error(self, result, status_code=400):
# type: (HttpResponse, int) -> Dict[str, Any]
self.assertEqual(result.status_code, status_code)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "error")
return json['msg']
def assert_json_error(self, result, msg, status_code=400):
# type: (HttpResponse, text_type, int) -> None
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count == count,
"len(%s) == %s, != %s" % (queries, actual_count, count))
def assert_max_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count <= count,
"len(%s) == %s, > %s" % (queries, actual_count, count))
def assert_json_error_contains(self, result, msg_substring, status_code=400):
# type: (HttpResponse, text_type, int) -> None
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_equals_response(self, string, response):
# type: (text_type, HttpResponse) -> None
self.assertEqual(string, response.content.decode('utf-8'))
def assert_in_response(self, substring, response):
# type: (text_type, HttpResponse) -> None
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings, response):
# type: (Iterable[text_type], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def fixture_data(self, type, action, file_type='json'):
# type: (text_type, text_type, text_type) -> text_type
return force_text(open(os.path.join(os.path.dirname(__file__),
"../fixtures/%s/%s_%s.%s" % (type, type, action, file_type))).read())
def make_stream(self, stream_name, realm=None, invite_only=False):
# type: (text_type, Optional[Realm], Optional[bool]) -> Stream
if realm is None:
realm = self.DEFAULT_REALM
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
)
except IntegrityError:
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe_to_stream(self, email, stream_name, realm=None):
# type: (text_type, text_type, Optional[Realm]) -> None
if realm is None:
realm = get_realm_by_email_domain(email)
stream = get_stream(stream_name, realm)
if stream is None:
stream, _ = create_stream_if_needed(realm, stream_name)
user_profile = get_user_profile_by_email(email)
bulk_add_subscriptions([stream], [user_profile])
def unsubscribe_from_stream(self, email, stream_name):
# type: (text_type, text_type) -> None
user_profile = get_user_profile_by_email(email)
stream = get_stream(stream_name, user_profile.realm)
bulk_remove_subscriptions([user_profile], [stream])
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
# type: (text_type, Iterable[text_type], Dict[str, Any], bool) -> HttpResponse
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
return result
def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
# type: (text_type, text_type, Union[text_type, Dict[str, Any]], Optional[text_type], **Any) -> Message
if stream_name is not None:
self.subscribe_to_stream(email, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self):
# type: () -> Message
return Message.objects.latest('id')
def get_second_to_last_message(self):
# type: () -> Message
return Message.objects.all().order_by('-id')[1]
@contextmanager
def simulated_markdown_failure(self):
# type: () -> Iterator[None]
'''
This raises a failure inside of the try/except block of
bugdown.__init__.do_convert.
'''
with \
self.settings(ERROR_BOT=None), \
mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
mock.patch('zerver.lib.bugdown.log_bugdown_error'):
yield
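# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original helpers): how the ZulipTestCase
# helpers above are typically combined in a test.  It assumes the standard
# Zulip development fixtures, i.e. that the user '[email protected]' and the
# stream 'Denmark' already exist in the test database.
#
#     class ExampleMessageTest(ZulipTestCase):
#         def test_stream_message_roundtrip(self):
#             # type: () -> None
#             self.login('[email protected]')
#             self.subscribe_to_stream('[email protected]', 'Denmark')
#             self.send_message('[email protected]', 'Denmark', Recipient.STREAM,
#                               content=u'hello world')
#             msg = self.get_last_message()
#             self.assertEqual(msg.content, u'hello world')
#             self.assertIn('Denmark', self.get_streams('[email protected]'))
# ----------------------------------------------------------------------------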
class WebhookTestCase(ZulipTestCase):
"""
Common for all webhooks tests
Override below class attributes and run send_and_test_message
If you create your url in uncommon way you can override build_webhook_url method
In case that you need modify body or create it without using fixture you can also override get_body method
"""
STREAM_NAME = None # type: Optional[text_type]
TEST_USER_EMAIL = '[email protected]'
URL_TEMPLATE = None # type: Optional[text_type]
FIXTURE_DIR_NAME = None # type: Optional[text_type]
def setUp(self):
# type: () -> None
self.url = self.build_webhook_url()
def send_and_test_stream_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (text_type, Optional[text_type], Optional[text_type], Optional[text_type], **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
self.STREAM_NAME, **kwargs)
self.do_test_subject(msg, expected_subject)
self.do_test_message(msg, expected_message)
return msg
def send_and_test_private_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
        # type: (text_type, Optional[text_type], Optional[text_type], Optional[str], **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
stream_name=None, **kwargs)
self.do_test_message(msg, expected_message)
return msg
def build_webhook_url(self):
# type: () -> text_type
api_key = self.get_api_key(self.TEST_USER_EMAIL)
return self.URL_TEMPLATE.format(stream=self.STREAM_NAME, api_key=api_key)
def get_body(self, fixture_name):
# type: (text_type) -> Union[text_type, Dict[str, text_type]]
"""Can be implemented either as returning a dictionary containing the
        post parameters or as a string containing the body of the request."""
return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))
def do_test_subject(self, msg, expected_subject):
# type: (Message, Optional[text_type]) -> None
if expected_subject is not None:
self.assertEqual(msg.topic_name(), expected_subject)
def do_test_message(self, msg, expected_message):
# type: (Message, Optional[text_type]) -> None
if expected_message is not None:
self.assertEqual(msg.content, expected_message)
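# ----------------------------------------------------------------------------
# Illustrative WebhookTestCase subclass sketch (all names below are invented
# for the example; a real integration would point URL_TEMPLATE at its own
# incoming-webhook endpoint and ship matching fixtures):
#
#     class ExampleWebhookTests(WebhookTestCase):
#         STREAM_NAME = 'example'
#         URL_TEMPLATE = u"/api/v1/external/example?api_key={api_key}&stream={stream}"
#         FIXTURE_DIR_NAME = 'example'
#
#         def test_issue_opened(self):
#             # type: () -> None
#             self.send_and_test_stream_message(
#                 'issue_opened',
#                 expected_subject=u"Issue #1",
#                 expected_message=u"An issue was opened.")
# ----------------------------------------------------------------------------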
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.asset.v1p2beta1 AssetService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import google.api_core.path_template
import grpc
from google.cloud.asset_v1p2beta1.gapic import asset_service_client_config
from google.cloud.asset_v1p2beta1.gapic import enums
from google.cloud.asset_v1p2beta1.gapic.transports import asset_service_grpc_transport
from google.cloud.asset_v1p2beta1.proto import asset_service_pb2
from google.cloud.asset_v1p2beta1.proto import asset_service_pb2_grpc
from google.cloud.asset_v1p2beta1.proto import assets_pb2
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-asset").version
class AssetServiceClient(object):
"""Asset service definition."""
SERVICE_ADDRESS = "cloudasset.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.cloud.asset.v1p2beta1.AssetService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AssetServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def feed_path(cls, project, feed):
"""Return a fully-qualified feed string."""
return google.api_core.path_template.expand(
"projects/{project}/feeds/{feed}", project=project, feed=feed
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.AssetServiceGrpcTransport,
Callable[[~.Credentials, type], ~.AssetServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):
Client options used to set user options on the client. API Endpoint
should be set through client_options.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = asset_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=asset_service_grpc_transport.AssetServiceGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = asset_service_grpc_transport.AssetServiceGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
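    # Construction sketch (illustrative, not generated code; the key-file path
    # is a placeholder).  With application-default credentials:
    #
    #     client = asset_v1p2beta1.AssetServiceClient()
    #
    # or with an explicit service account and an endpoint override passed via
    # client_options (a plain dict is accepted, per the constructor above):
    #
    #     client = asset_v1p2beta1.AssetServiceClient(
    #         credentials=service_account.Credentials.from_service_account_file(
    #             "/path/to/key.json"),
    #         client_options={"api_endpoint": "cloudasset.googleapis.com:443"},
    #     )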
# Service calls
def export_assets(
self,
parent,
output_config,
read_time=None,
asset_types=None,
content_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Exports assets with time and resource types to a given Cloud Storage
location. The output format is newline-delimited JSON. This API
implements the ``google.longrunning.Operation`` API allowing you to keep
track of the export.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> # TODO: Initialize `parent`:
>>> parent = ''
>>>
>>> # TODO: Initialize `output_config`:
>>> output_config = {}
>>>
>>> response = client.export_assets(parent, output_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The relative name of the root asset. This can only be an
organization number (such as "organizations/123"), a project ID (such as
"projects/my-project-id"), or a project number (such as "projects/12345").
output_config (Union[dict, ~google.cloud.asset_v1p2beta1.types.OutputConfig]): Required. Output configuration indicating where the results will be output
to. All results will be in newline delimited JSON format.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.OutputConfig`
read_time (Union[dict, ~google.cloud.asset_v1p2beta1.types.Timestamp]): Timestamp to take an asset snapshot. This can only be set to a timestamp
between 2018-10-02 UTC (inclusive) and the current time. If not specified,
the current time will be used. Due to delays in resource data collection
and indexing, there is a volatile window during which running the same
query may get different results.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.Timestamp`
            asset_types (list[str]): A list of asset types to take a snapshot of. For example:
"compute.googleapis.com/Disk". If specified, only matching assets will
be returned. See `Introduction to Cloud Asset
Inventory <https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview>`__
for all supported asset types.
content_type (~google.cloud.asset_v1p2beta1.types.ContentType): Asset content type. If not specified, no content but the asset name will be
returned.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "export_assets" not in self._inner_api_calls:
self._inner_api_calls[
"export_assets"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.export_assets,
default_retry=self._method_configs["ExportAssets"].retry,
default_timeout=self._method_configs["ExportAssets"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.ExportAssetsRequest(
parent=parent,
output_config=output_config,
read_time=read_time,
asset_types=asset_types,
content_type=content_type,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["export_assets"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
asset_service_pb2.ExportAssetsResponse,
metadata_type=asset_service_pb2.ExportAssetsRequest,
)
def batch_get_assets_history(
self,
parent,
asset_names,
content_type,
read_time_window=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
        Batch gets the update history of assets that overlap a time window. For
        RESOURCE content, this API outputs history for assets in both deleted
        and non-deleted states. For IAM\_POLICY content, this API outputs history
        only when the asset and its attached IAM policy both exist. This can
        create gaps in the output history.
Example:
>>> from google.cloud import asset_v1p2beta1
>>> from google.cloud.asset_v1p2beta1 import enums
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> # TODO: Initialize `parent`:
>>> parent = ''
>>>
>>> # TODO: Initialize `asset_names`:
>>> asset_names = []
>>>
>>> # TODO: Initialize `content_type`:
>>> content_type = enums.ContentType.CONTENT_TYPE_UNSPECIFIED
>>>
>>> response = client.batch_get_assets_history(parent, asset_names, content_type)
Args:
parent (str): Required. The relative name of the root asset. It can only be an
organization number (such as "organizations/123"), a project ID (such as
"projects/my-project-id")", or a project number (such as "projects/12345").
asset_names (list[str]): A list of the full names of the assets. For example:
``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``.
See `Resource
Names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__
and `Resource Name
Format <https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/resource-name-format>`__
for more info.
The request becomes a no-op if the asset name list is empty, and the max
size of the asset name list is 100 in one request.
content_type (~google.cloud.asset_v1p2beta1.types.ContentType): Required. The content type.
            read_time_window (Union[dict, ~google.cloud.asset_v1p2beta1.types.TimeWindow]): Optional. The time window for the asset history. Both start\_time and
                end\_time are optional and, if set, must be after 2018-10-02 UTC. If
                end\_time is not set, it defaults to the current timestamp. If start\_time
                is not set, the snapshot of the assets at end\_time will be returned.
                The returned results contain all temporal assets whose time window
                overlaps with read\_time\_window.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.TimeWindow`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types.BatchGetAssetsHistoryResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "batch_get_assets_history" not in self._inner_api_calls:
self._inner_api_calls[
"batch_get_assets_history"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.batch_get_assets_history,
default_retry=self._method_configs["BatchGetAssetsHistory"].retry,
default_timeout=self._method_configs["BatchGetAssetsHistory"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.BatchGetAssetsHistoryRequest(
parent=parent,
asset_names=asset_names,
content_type=content_type,
read_time_window=read_time_window,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["batch_get_assets_history"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_feed(
self,
parent,
feed_id,
feed,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a feed in a parent project/folder/organization to listen to its
asset updates.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> # TODO: Initialize `parent`:
>>> parent = ''
>>>
>>> # TODO: Initialize `feed_id`:
>>> feed_id = ''
>>>
>>> # TODO: Initialize `feed`:
>>> feed = {}
>>>
>>> response = client.create_feed(parent, feed_id, feed)
Args:
parent (str): Required. The name of the project/folder/organization where this feed
should be created in. It can only be an organization number (such as
"organizations/123"), a folder number (such as "folders/123"), a project ID
(such as "projects/my-project-id")", or a project number (such as
"projects/12345").
feed_id (str): Required. This is the client-assigned asset feed identifier and it needs to
be unique under a specific parent project/folder/organization.
feed (Union[dict, ~google.cloud.asset_v1p2beta1.types.Feed]): The feed details. The field ``name`` must be empty and it will be
generated in the format of: projects/project\_number/feeds/feed\_id
folders/folder\_number/feeds/feed\_id
organizations/organization\_number/feeds/feed\_id
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.Feed`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types.Feed` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_feed" not in self._inner_api_calls:
self._inner_api_calls[
"create_feed"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_feed,
default_retry=self._method_configs["CreateFeed"].retry,
default_timeout=self._method_configs["CreateFeed"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.CreateFeedRequest(
parent=parent, feed_id=feed_id, feed=feed
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_feed"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_feed(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets details about an asset feed.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> name = client.feed_path('[PROJECT]', '[FEED]')
>>>
>>> response = client.get_feed(name)
Args:
            name (str): The name of the Feed. It must be in the format of:
projects/project\_number/feeds/feed\_id
folders/folder\_number/feeds/feed\_id
organizations/organization\_number/feeds/feed\_id
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types.Feed` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_feed" not in self._inner_api_calls:
self._inner_api_calls[
"get_feed"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_feed,
default_retry=self._method_configs["GetFeed"].retry,
default_timeout=self._method_configs["GetFeed"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.GetFeedRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_feed"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_feeds(
self,
parent,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists all asset feeds in a parent project/folder/organization.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> # TODO: Initialize `parent`:
>>> parent = ''
>>>
>>> response = client.list_feeds(parent)
Args:
            parent (str): Required. The parent project/folder/organization whose feeds are to be
                listed. It can only be a project/folder/organization number (such as
                "folders/12345"), or a project ID (such as "projects/my-project-id").
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types.ListFeedsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_feeds" not in self._inner_api_calls:
self._inner_api_calls[
"list_feeds"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_feeds,
default_retry=self._method_configs["ListFeeds"].retry,
default_timeout=self._method_configs["ListFeeds"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.ListFeedsRequest(parent=parent)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["list_feeds"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def update_feed(
self,
feed,
update_mask,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an asset feed configuration.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> # TODO: Initialize `feed`:
>>> feed = {}
>>>
>>> # TODO: Initialize `update_mask`:
>>> update_mask = {}
>>>
>>> response = client.update_feed(feed, update_mask)
Args:
feed (Union[dict, ~google.cloud.asset_v1p2beta1.types.Feed]): The new values of feed details. It must match an existing feed and the
field ``name`` must be in the format of:
projects/project\_number/feeds/feed\_id or
folders/folder\_number/feeds/feed\_id or
organizations/organization\_number/feeds/feed\_id.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.Feed`
update_mask (Union[dict, ~google.cloud.asset_v1p2beta1.types.FieldMask]): Only updates the ``feed`` fields indicated by this mask. The field mask
must not be empty, and it must not contain fields that are immutable or
only set by the server.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.asset_v1p2beta1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.asset_v1p2beta1.types.Feed` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_feed" not in self._inner_api_calls:
self._inner_api_calls[
"update_feed"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_feed,
default_retry=self._method_configs["UpdateFeed"].retry,
default_timeout=self._method_configs["UpdateFeed"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.UpdateFeedRequest(
feed=feed, update_mask=update_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("feed.name", feed.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["update_feed"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_feed(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes an asset feed.
Example:
>>> from google.cloud import asset_v1p2beta1
>>>
>>> client = asset_v1p2beta1.AssetServiceClient()
>>>
>>> name = client.feed_path('[PROJECT]', '[FEED]')
>>>
>>> client.delete_feed(name)
Args:
            name (str): The name of the feed. It must be in the format of:
projects/project\_number/feeds/feed\_id
folders/folder\_number/feeds/feed\_id
organizations/organization\_number/feeds/feed\_id
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_feed" not in self._inner_api_calls:
self._inner_api_calls[
"delete_feed"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_feed,
default_retry=self._method_configs["DeleteFeed"].retry,
default_timeout=self._method_configs["DeleteFeed"].timeout,
client_info=self._client_info,
)
request = asset_service_pb2.DeleteFeedRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["delete_feed"](
request, retry=retry, timeout=timeout, metadata=metadata
)
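# ----------------------------------------------------------------------------
# Illustrative feed lifecycle sketch (not generated code; the project number
# and feed id are placeholders, and the Feed/FieldMask payloads are elided --
# see the docstrings above for their expected shapes):
#
#     client = AssetServiceClient()
#     parent = "projects/123456"
#     feed = client.create_feed(parent, "my-feed", {...})   # Feed dict elided
#     client.list_feeds(parent)
#     client.get_feed(client.feed_path("123456", "my-feed"))
#     client.delete_feed(feed.name)
# ----------------------------------------------------------------------------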
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
try:
from .application_gateway_sku_py3 import ApplicationGatewaySku
from .sub_resource_py3 import SubResource
from .application_gateway_ip_configuration_py3 import ApplicationGatewayIPConfiguration
from .application_gateway_ssl_certificate_py3 import ApplicationGatewaySslCertificate
from .application_gateway_frontend_ip_configuration_py3 import ApplicationGatewayFrontendIPConfiguration
from .application_gateway_frontend_port_py3 import ApplicationGatewayFrontendPort
from .application_gateway_backend_address_py3 import ApplicationGatewayBackendAddress
from .backend_address_pool_py3 import BackendAddressPool
from .inbound_nat_rule_py3 import InboundNatRule
from .security_rule_py3 import SecurityRule
from .network_interface_dns_settings_py3 import NetworkInterfaceDnsSettings
from .network_interface_py3 import NetworkInterface
from .network_security_group_py3 import NetworkSecurityGroup
from .route_py3 import Route
from .route_table_py3 import RouteTable
from .public_ip_address_dns_settings_py3 import PublicIPAddressDnsSettings
from .public_ip_address_py3 import PublicIPAddress
from .ip_configuration_py3 import IPConfiguration
from .subnet_py3 import Subnet
from .network_interface_ip_configuration_py3 import NetworkInterfaceIPConfiguration
from .application_gateway_backend_address_pool_py3 import ApplicationGatewayBackendAddressPool
from .application_gateway_backend_http_settings_py3 import ApplicationGatewayBackendHttpSettings
from .application_gateway_http_listener_py3 import ApplicationGatewayHttpListener
from .application_gateway_path_rule_py3 import ApplicationGatewayPathRule
from .application_gateway_probe_py3 import ApplicationGatewayProbe
from .application_gateway_request_routing_rule_py3 import ApplicationGatewayRequestRoutingRule
from .application_gateway_url_path_map_py3 import ApplicationGatewayUrlPathMap
from .application_gateway_py3 import ApplicationGateway
from .resource_py3 import Resource
from .dns_name_availability_result_py3 import DnsNameAvailabilityResult
from .express_route_circuit_authorization_py3 import ExpressRouteCircuitAuthorization
from .express_route_circuit_peering_config_py3 import ExpressRouteCircuitPeeringConfig
from .express_route_circuit_stats_py3 import ExpressRouteCircuitStats
from .express_route_circuit_peering_py3 import ExpressRouteCircuitPeering
from .express_route_circuit_sku_py3 import ExpressRouteCircuitSku
from .express_route_circuit_service_provider_properties_py3 import ExpressRouteCircuitServiceProviderProperties
from .express_route_circuit_py3 import ExpressRouteCircuit
from .express_route_circuit_arp_table_py3 import ExpressRouteCircuitArpTable
from .express_route_circuit_routes_table_py3 import ExpressRouteCircuitRoutesTable
from .express_route_service_provider_bandwidths_offered_py3 import ExpressRouteServiceProviderBandwidthsOffered
from .express_route_service_provider_py3 import ExpressRouteServiceProvider
from .frontend_ip_configuration_py3 import FrontendIPConfiguration
from .load_balancing_rule_py3 import LoadBalancingRule
from .probe_py3 import Probe
from .inbound_nat_pool_py3 import InboundNatPool
from .outbound_nat_rule_py3 import OutboundNatRule
from .load_balancer_py3 import LoadBalancer
from .error_details_py3 import ErrorDetails
from .error_py3 import Error
from .azure_async_operation_result_py3 import AzureAsyncOperationResult
from .usage_name_py3 import UsageName
from .usage_py3 import Usage
from .address_space_py3 import AddressSpace
from .dhcp_options_py3 import DhcpOptions
from .virtual_network_py3 import VirtualNetwork
from .virtual_network_gateway_ip_configuration_py3 import VirtualNetworkGatewayIPConfiguration
from .virtual_network_gateway_sku_py3 import VirtualNetworkGatewaySku
from .vpn_client_root_certificate_py3 import VpnClientRootCertificate
from .vpn_client_revoked_certificate_py3 import VpnClientRevokedCertificate
from .vpn_client_configuration_py3 import VpnClientConfiguration
from .bgp_settings_py3 import BgpSettings
from .virtual_network_gateway_py3 import VirtualNetworkGateway
from .vpn_client_parameters_py3 import VpnClientParameters
from .local_network_gateway_py3 import LocalNetworkGateway
from .virtual_network_gateway_connection_py3 import VirtualNetworkGatewayConnection
from .connection_shared_key_result_py3 import ConnectionSharedKeyResult
from .connection_reset_shared_key_py3 import ConnectionResetSharedKey
from .connection_shared_key_py3 import ConnectionSharedKey
except (SyntaxError, ImportError):
from .application_gateway_sku import ApplicationGatewaySku
from .sub_resource import SubResource
from .application_gateway_ip_configuration import ApplicationGatewayIPConfiguration
from .application_gateway_ssl_certificate import ApplicationGatewaySslCertificate
from .application_gateway_frontend_ip_configuration import ApplicationGatewayFrontendIPConfiguration
from .application_gateway_frontend_port import ApplicationGatewayFrontendPort
from .application_gateway_backend_address import ApplicationGatewayBackendAddress
from .backend_address_pool import BackendAddressPool
from .inbound_nat_rule import InboundNatRule
from .security_rule import SecurityRule
from .network_interface_dns_settings import NetworkInterfaceDnsSettings
from .network_interface import NetworkInterface
from .network_security_group import NetworkSecurityGroup
from .route import Route
from .route_table import RouteTable
from .public_ip_address_dns_settings import PublicIPAddressDnsSettings
from .public_ip_address import PublicIPAddress
from .ip_configuration import IPConfiguration
from .subnet import Subnet
from .network_interface_ip_configuration import NetworkInterfaceIPConfiguration
from .application_gateway_backend_address_pool import ApplicationGatewayBackendAddressPool
from .application_gateway_backend_http_settings import ApplicationGatewayBackendHttpSettings
from .application_gateway_http_listener import ApplicationGatewayHttpListener
from .application_gateway_path_rule import ApplicationGatewayPathRule
from .application_gateway_probe import ApplicationGatewayProbe
from .application_gateway_request_routing_rule import ApplicationGatewayRequestRoutingRule
from .application_gateway_url_path_map import ApplicationGatewayUrlPathMap
from .application_gateway import ApplicationGateway
from .resource import Resource
from .dns_name_availability_result import DnsNameAvailabilityResult
from .express_route_circuit_authorization import ExpressRouteCircuitAuthorization
from .express_route_circuit_peering_config import ExpressRouteCircuitPeeringConfig
from .express_route_circuit_stats import ExpressRouteCircuitStats
from .express_route_circuit_peering import ExpressRouteCircuitPeering
from .express_route_circuit_sku import ExpressRouteCircuitSku
from .express_route_circuit_service_provider_properties import ExpressRouteCircuitServiceProviderProperties
from .express_route_circuit import ExpressRouteCircuit
from .express_route_circuit_arp_table import ExpressRouteCircuitArpTable
from .express_route_circuit_routes_table import ExpressRouteCircuitRoutesTable
from .express_route_service_provider_bandwidths_offered import ExpressRouteServiceProviderBandwidthsOffered
from .express_route_service_provider import ExpressRouteServiceProvider
from .frontend_ip_configuration import FrontendIPConfiguration
from .load_balancing_rule import LoadBalancingRule
from .probe import Probe
from .inbound_nat_pool import InboundNatPool
from .outbound_nat_rule import OutboundNatRule
from .load_balancer import LoadBalancer
from .error_details import ErrorDetails
from .error import Error
from .azure_async_operation_result import AzureAsyncOperationResult
from .usage_name import UsageName
from .usage import Usage
from .address_space import AddressSpace
from .dhcp_options import DhcpOptions
from .virtual_network import VirtualNetwork
from .virtual_network_gateway_ip_configuration import VirtualNetworkGatewayIPConfiguration
from .virtual_network_gateway_sku import VirtualNetworkGatewaySku
from .vpn_client_root_certificate import VpnClientRootCertificate
from .vpn_client_revoked_certificate import VpnClientRevokedCertificate
from .vpn_client_configuration import VpnClientConfiguration
from .bgp_settings import BgpSettings
from .virtual_network_gateway import VirtualNetworkGateway
from .vpn_client_parameters import VpnClientParameters
from .local_network_gateway import LocalNetworkGateway
from .virtual_network_gateway_connection import VirtualNetworkGatewayConnection
from .connection_shared_key_result import ConnectionSharedKeyResult
from .connection_reset_shared_key import ConnectionResetSharedKey
from .connection_shared_key import ConnectionSharedKey
from .application_gateway_paged import ApplicationGatewayPaged
from .express_route_circuit_authorization_paged import ExpressRouteCircuitAuthorizationPaged
from .express_route_circuit_peering_paged import ExpressRouteCircuitPeeringPaged
from .express_route_circuit_arp_table_paged import ExpressRouteCircuitArpTablePaged
from .express_route_circuit_routes_table_paged import ExpressRouteCircuitRoutesTablePaged
from .express_route_circuit_stats_paged import ExpressRouteCircuitStatsPaged
from .express_route_circuit_paged import ExpressRouteCircuitPaged
from .express_route_service_provider_paged import ExpressRouteServiceProviderPaged
from .load_balancer_paged import LoadBalancerPaged
from .network_interface_paged import NetworkInterfacePaged
from .network_security_group_paged import NetworkSecurityGroupPaged
from .security_rule_paged import SecurityRulePaged
from .public_ip_address_paged import PublicIPAddressPaged
from .route_table_paged import RouteTablePaged
from .route_paged import RoutePaged
from .usage_paged import UsagePaged
from .virtual_network_paged import VirtualNetworkPaged
from .subnet_paged import SubnetPaged
from .virtual_network_gateway_paged import VirtualNetworkGatewayPaged
from .virtual_network_gateway_connection_paged import VirtualNetworkGatewayConnectionPaged
from .local_network_gateway_paged import LocalNetworkGatewayPaged
from .network_management_client_enums import (
ApplicationGatewaySkuName,
ApplicationGatewayTier,
IPAllocationMethod,
TransportProtocol,
SecurityRuleProtocol,
SecurityRuleAccess,
SecurityRuleDirection,
RouteNextHopType,
ApplicationGatewayProtocol,
ApplicationGatewayCookieBasedAffinity,
ApplicationGatewayRequestRoutingRuleType,
ApplicationGatewayOperationalState,
AuthorizationUseStatus,
ExpressRouteCircuitPeeringAdvertisedPublicPrefixState,
ExpressRouteCircuitPeeringType,
ExpressRouteCircuitPeeringState,
ExpressRouteCircuitSkuTier,
ExpressRouteCircuitSkuFamily,
ServiceProviderProvisioningState,
LoadDistribution,
ProbeProtocol,
NetworkOperationStatus,
VirtualNetworkGatewayType,
VpnType,
VirtualNetworkGatewaySkuName,
VirtualNetworkGatewaySkuTier,
ProcessorArchitecture,
VirtualNetworkGatewayConnectionType,
VirtualNetworkGatewayConnectionStatus,
)
__all__ = [
'ApplicationGatewaySku',
'SubResource',
'ApplicationGatewayIPConfiguration',
'ApplicationGatewaySslCertificate',
'ApplicationGatewayFrontendIPConfiguration',
'ApplicationGatewayFrontendPort',
'ApplicationGatewayBackendAddress',
'BackendAddressPool',
'InboundNatRule',
'SecurityRule',
'NetworkInterfaceDnsSettings',
'NetworkInterface',
'NetworkSecurityGroup',
'Route',
'RouteTable',
'PublicIPAddressDnsSettings',
'PublicIPAddress',
'IPConfiguration',
'Subnet',
'NetworkInterfaceIPConfiguration',
'ApplicationGatewayBackendAddressPool',
'ApplicationGatewayBackendHttpSettings',
'ApplicationGatewayHttpListener',
'ApplicationGatewayPathRule',
'ApplicationGatewayProbe',
'ApplicationGatewayRequestRoutingRule',
'ApplicationGatewayUrlPathMap',
'ApplicationGateway',
'Resource',
'DnsNameAvailabilityResult',
'ExpressRouteCircuitAuthorization',
'ExpressRouteCircuitPeeringConfig',
'ExpressRouteCircuitStats',
'ExpressRouteCircuitPeering',
'ExpressRouteCircuitSku',
'ExpressRouteCircuitServiceProviderProperties',
'ExpressRouteCircuit',
'ExpressRouteCircuitArpTable',
'ExpressRouteCircuitRoutesTable',
'ExpressRouteServiceProviderBandwidthsOffered',
'ExpressRouteServiceProvider',
'FrontendIPConfiguration',
'LoadBalancingRule',
'Probe',
'InboundNatPool',
'OutboundNatRule',
'LoadBalancer',
'ErrorDetails',
'Error',
'AzureAsyncOperationResult',
'UsageName',
'Usage',
'AddressSpace',
'DhcpOptions',
'VirtualNetwork',
'VirtualNetworkGatewayIPConfiguration',
'VirtualNetworkGatewaySku',
'VpnClientRootCertificate',
'VpnClientRevokedCertificate',
'VpnClientConfiguration',
'BgpSettings',
'VirtualNetworkGateway',
'VpnClientParameters',
'LocalNetworkGateway',
'VirtualNetworkGatewayConnection',
'ConnectionSharedKeyResult',
'ConnectionResetSharedKey',
'ConnectionSharedKey',
'ApplicationGatewayPaged',
'ExpressRouteCircuitAuthorizationPaged',
'ExpressRouteCircuitPeeringPaged',
'ExpressRouteCircuitArpTablePaged',
'ExpressRouteCircuitRoutesTablePaged',
'ExpressRouteCircuitStatsPaged',
'ExpressRouteCircuitPaged',
'ExpressRouteServiceProviderPaged',
'LoadBalancerPaged',
'NetworkInterfacePaged',
'NetworkSecurityGroupPaged',
'SecurityRulePaged',
'PublicIPAddressPaged',
'RouteTablePaged',
'RoutePaged',
'UsagePaged',
'VirtualNetworkPaged',
'SubnetPaged',
'VirtualNetworkGatewayPaged',
'VirtualNetworkGatewayConnectionPaged',
'LocalNetworkGatewayPaged',
'ApplicationGatewaySkuName',
'ApplicationGatewayTier',
'IPAllocationMethod',
'TransportProtocol',
'SecurityRuleProtocol',
'SecurityRuleAccess',
'SecurityRuleDirection',
'RouteNextHopType',
'ApplicationGatewayProtocol',
'ApplicationGatewayCookieBasedAffinity',
'ApplicationGatewayRequestRoutingRuleType',
'ApplicationGatewayOperationalState',
'AuthorizationUseStatus',
'ExpressRouteCircuitPeeringAdvertisedPublicPrefixState',
'ExpressRouteCircuitPeeringType',
'ExpressRouteCircuitPeeringState',
'ExpressRouteCircuitSkuTier',
'ExpressRouteCircuitSkuFamily',
'ServiceProviderProvisioningState',
'LoadDistribution',
'ProbeProtocol',
'NetworkOperationStatus',
'VirtualNetworkGatewayType',
'VpnType',
'VirtualNetworkGatewaySkuName',
'VirtualNetworkGatewaySkuTier',
'ProcessorArchitecture',
'VirtualNetworkGatewayConnectionType',
'VirtualNetworkGatewayConnectionStatus',
]
|
|
"""
(nose)tests.
"""
# HACK
import sys
import os
import inspect
from nose.tools import raises
from six import iteritems
path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.append(os.path.join(path, "../../judyz"))
from judyz_cffi import Judy1, JudyL, JudySL
def test_j1_compiled_ok():
"""
Check miscompiled libJudy
"""
items = [
39895168241613,
72383693324832,
395899889036069,
847472082254022,
946081064318053,
1037167590154045,
1633874457044695,
1693551557777793,
1699866756097333,
2297933432179674,
2340748542246111,
2490696201066604,
2928757784612027,
3419613478295142,
3477583438964521,
3487665594607298,
3714788097418699,
3721974488148864,
3758589574777127,
4156020789217938,
4459711081140573,
4682530741276476,
4731624807195863,
4846840683894723,
4857387254864689,
4873346723597917,
4966839149608974,
5631406002858271,
5722255428668219,
5820718729024077,
6209639118315956,
6406299749329887,
6454295835737737,
6503048444249319,
6520786252857121,
6906836761168795,
6926132865086029,
6954533820994232,
]
with Judy1() as j:
for i in items:
j.add(i)
assert len(j) == len(items)
j_items = list(j)
assert j_items == items
def test_j1_bool():
j = Judy1()
assert not bool(j)
j.add(42)
assert bool(j)
j.clear()
def test_j1_in():
j = Judy1()
assert 42 not in j
j.add(42)
assert 42 in j
assert 43 not in j
j.clear()
def test_j1_len():
j = Judy1()
assert len(j) == 0
j.add(42)
assert len(j) == 1
j.add(42)
assert len(j) == 1
j.add(2)
assert len(j) == 2
j.clear()
@raises(KeyError)
def test_j1_remove_absent():
with Judy1() as j:
j.remove(42)
def test_j1_remove():
with Judy1() as j:
j.add(42)
j.remove(42)
assert len(j) == 0
def test_j1_discard():
with Judy1() as j:
j.add(42)
j.discard(43)
assert len(j) == 1
def test_j1_from_list():
with Judy1([6, 5, 4, 3, 2, 1]) as j:
assert len(j) == 6
n = 0
for i in j:
n += i
assert n == 21
def test_j1_signed():
with Judy1([-1]) as j:
assert -1 in j
for k in j:
assert k == -1
def test_jl_bool():
j = JudyL()
assert not bool(j)
j[42] = 1
assert bool(j)
j.clear()
def test_jl_in():
j = JudyL()
assert 42 not in j
j[42] = 1
assert 42 in j
assert 43 not in j
j.clear()
def test_jl_len():
with JudyL() as j:
for i in range(10):
j[i + 10] = i
assert len(j) == 10
@raises(KeyError)
def test_jl_getitem_absent():
with JudyL() as j:
x = j[12]
def test_jl_get_absent():
with JudyL() as j:
x = j.get(12, 1)
assert x == 1
def test_jl_from_dict():
with JudyL({10: 1, 2: 11}) as j:
d = dict(j)
assert d == {2: 11, 10: 1}
def test_jl_from_list():
with JudyL([(10, 1), (2, 11)]) as j:
d = dict(j)
assert d == {2: 11, 10: 1}
def test_jl_iteritems():
with JudyL() as j:
for i in range(10):
j[i + 10] = i
i = 0
start = True
for k, v in iteritems(j):
assert k == v + 10
if start:
assert k == 10
start = False
i += 1
assert i == 10
def test_jl_keys():
with JudyL() as j:
for i in range(10):
j[i + 10] = i
i = 0
start = True
for k in j.keys():
if start:
assert k == 10
start = False
i += 1
assert i == 10
def test_jl_signed():
with JudyL([(-1, -1)]) as j:
assert -1 in j
assert j[-1] == -1
for k, v in j:
assert k == -1
assert v == -1
for k, v in iteritems(j):
assert k == -1
assert v == -1
for k in j.keys():
assert k == -1
def test_jl_inc():
with JudyL() as j:
j[1] = 2
j.inc(1)
assert j[1] == 3
j.inc(1, 10)
assert j[1] == 13
def test_jsl_1():
with JudySL() as j:
assert not j
assert len(j) == 0
j["toto"] = 1
assert j
assert len(j) == 1
assert "toto" in j
assert j["toto"] == 1
assert list(j.keys()) == ["toto"]
def test_jsl_2():
kv = [("bingo", 1), ("zlithoa", -1), ("all", 42)]
with JudySL(kv) as j:
assert len(j) == 3
jitems = list(iteritems(j))
assert jitems == sorted(kv)
def test_jsl_3():
kv = [("a", 1), ("bb", 2), ("ccc", 3), ("dddd", 4), ("eeeee", 5)]
with JudySL(kv) as j:
jitems = list(iteritems(j))
assert jitems == kv
def test_jsl_4():
kv = [("aaaaa", 1), ("bbbb", 2), ("ccc", 3), ("dd", 4), ("e", 5)]
with JudySL(kv) as j:
jitems = list(iteritems(j))
assert jitems == kv
def test_jsl_first_next():
kv = [("bbbb", 2), ("aaaaa", 1), ("ccc", 3), ("dd", 4), ("e", 5)]
with JudySL(kv) as j:
key, value, buf = j.get_first()
assert key == "aaaaa"
assert value == 1
key, value, buf = j.get_next(buf)
assert key == "bbbb"
assert value == 2
key, value, buf = j.get_next(buf)
assert key == "ccc"
assert value == 3
key, value, buf = j.get_next(buf)
assert key == "dd"
assert value == 4
key, value, buf = j.get_next(buf)
assert key == "e"
assert value == 5
key, value, buf = j.get_next(buf)
assert key is None
assert value is None
key, value, buf = j.get_next(buf)
assert key is None
assert value is None
def jdsn(fd):
"""
Sort and count the lines in a file(-like object)
:return:
:rtype:
"""
with JudySL() as j:
for line in fd:
line = line.rstrip("\n")
j.inc(line)
for k, v in j:
print("{}\t{}".format(k, v))
if __name__ == "__main__":
    if len(sys.argv) > 1:
        with open(sys.argv[1]) as fd:
jdsn(fd)
else:
jdsn(sys.stdin)
|
|
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: More tests
import socket
import unittest
from urlparse import urlparse
# TODO: mock http connection class with more control over headers
from test.unit.proxy.test_server import fake_http_connect
from swift.common import client as c
class TestClientException(unittest.TestCase):
def test_is_exception(self):
self.assertTrue(issubclass(c.ClientException, Exception))
def test_format(self):
exc = c.ClientException('something failed')
self.assertTrue('something failed' in str(exc))
test_kwargs = (
'scheme',
'host',
'port',
'path',
'query',
'status',
'reason',
'device',
)
for value in test_kwargs:
kwargs = {
'http_%s' % value: value,
}
exc = c.ClientException('test', **kwargs)
self.assertTrue(value in str(exc))
class TestJsonImport(unittest.TestCase):
def tearDown(self):
try:
import json
except ImportError:
pass
else:
reload(json)
try:
import simplejson
except ImportError:
pass
else:
reload(simplejson)
def test_any(self):
self.assertTrue(hasattr(c, 'json_loads'))
def test_no_simplejson(self):
# break simplejson
try:
import simplejson
except ImportError:
# not installed, so we don't have to break it for these tests
pass
else:
delattr(simplejson, 'loads')
reload(c)
try:
from json import loads
except ImportError:
            # this case is tested in _no_json
pass
else:
self.assertEquals(loads, c.json_loads)
class MockHttpTest(unittest.TestCase):
def setUp(self):
def fake_http_connection(*args, **kwargs):
_orig_http_connection = c.http_connection
return_read = kwargs.get('return_read')
def wrapper(url, proxy=None):
parsed, _conn = _orig_http_connection(url, proxy=proxy)
conn = fake_http_connect(*args, **kwargs)()
def request(*args, **kwargs):
return
conn.request = request
conn.has_been_read = False
_orig_read = conn.read
def read(*args, **kwargs):
conn.has_been_read = True
return _orig_read(*args, **kwargs)
conn.read = return_read or read
return parsed, conn
return wrapper
self.fake_http_connection = fake_http_connection
def tearDown(self):
reload(c)
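# Note: MockHttpTest-derived tests monkey-patch c.http_connection with
# self.fake_http_connection(status, ...) so that client calls hit
# fake_http_connect() instead of the network; tearDown()'s reload(c) restores
# the real module state between tests.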
class TestHttpHelpers(MockHttpTest):
def test_quote(self):
value = 'standard string'
self.assertEquals('standard%20string', c.quote(value))
value = u'\u0075nicode string'
self.assertEquals('unicode%20string', c.quote(value))
def test_http_connection(self):
url = 'http://www.test.com'
_junk, conn = c.http_connection(url)
self.assertTrue(isinstance(conn, c.HTTPConnection))
url = 'https://www.test.com'
_junk, conn = c.http_connection(url)
self.assertTrue(isinstance(conn, c.HTTPSConnection))
url = 'ftp://www.test.com'
self.assertRaises(c.ClientException, c.http_connection, url)
def test_json_request(self):
def read(*args, **kwargs):
body = {'a': '1',
'b': '2'}
return c.json_dumps(body)
c.http_connection = self.fake_http_connection(200, return_read=read)
url = 'http://www.test.com'
_junk, conn = c.json_request('GET', url, body={'username': 'user1',
'password': 'secure'})
self.assertTrue(type(conn) is dict)
# TODO: following tests are placeholders, need more tests, better coverage
class TestGetAuth(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf')
self.assertEquals(url, None)
self.assertEquals(token, None)
def test_auth_v1(self):
c.http_connection = self.fake_http_connection(200)
url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf',
auth_version="1.0")
self.assertEquals(url, None)
self.assertEquals(token, None)
def test_auth_v2(self):
def read(*args, **kwargs):
            acct_url = 'http://127.0.0.1/AUTH_FOO'
body = {'access': {'serviceCatalog':
[{u'endpoints': [{'publicURL': acct_url}],
'type': 'object-store'}],
'token': {'id': 'XXXXXXX'}}}
return c.json_dumps(body)
c.http_connection = self.fake_http_connection(200, return_read=read)
url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf',
auth_version="2.0")
self.assertTrue(url.startswith("http"))
self.assertTrue(token)
class TestGetAccount(MockHttpTest):
def test_no_content(self):
c.http_connection = self.fake_http_connection(204)
value = c.get_account('http://www.test.com', 'asdf')[1]
self.assertEquals(value, [])
class TestHeadAccount(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
value = c.head_account('http://www.tests.com', 'asdf')
# TODO: Hmm. This doesn't really test too much as it uses a fake that
# always returns the same dict. I guess it "exercises" the code, so
# I'll leave it for now.
self.assertEquals(type(value), dict)
def test_server_error(self):
body = 'c' * 65
c.http_connection = self.fake_http_connection(500, body=body)
self.assertRaises(c.ClientException, c.head_account,
'http://www.tests.com', 'asdf')
try:
value = c.head_account('http://www.tests.com', 'asdf')
except c.ClientException as e:
new_body = "[first 60 chars of response] " + body[0:60]
self.assertEquals(e.__str__()[-89:], new_body)
class TestGetContainer(MockHttpTest):
def test_no_content(self):
c.http_connection = self.fake_http_connection(204)
value = c.get_container('http://www.test.com', 'asdf', 'asdf')[1]
self.assertEquals(value, [])
class TestHeadContainer(MockHttpTest):
def test_server_error(self):
body = 'c' * 60
c.http_connection = self.fake_http_connection(500, body=body)
self.assertRaises(c.ClientException, c.head_container,
'http://www.test.com', 'asdf', 'asdf',
)
try:
value = c.head_container('http://www.test.com', 'asdf', 'asdf')
except c.ClientException as e:
self.assertEquals(e.http_response_content, body)
class TestPutContainer(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
value = c.put_container('http://www.test.com', 'asdf', 'asdf')
self.assertEquals(value, None)
def test_server_error(self):
body = 'c' * 60
c.http_connection = self.fake_http_connection(500, body=body)
self.assertRaises(c.ClientException, c.put_container,
'http://www.test.com', 'asdf', 'asdf',
)
try:
value = c.put_container('http://www.test.com', 'asdf', 'asdf')
except c.ClientException as e:
self.assertEquals(e.http_response_content, body)
class TestDeleteContainer(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
value = c.delete_container('http://www.test.com', 'asdf', 'asdf')
self.assertEquals(value, None)
class TestGetObject(MockHttpTest):
def test_server_error(self):
c.http_connection = self.fake_http_connection(500)
self.assertRaises(c.ClientException, c.get_object,
'http://www.test.com', 'asdf', 'asdf', 'asdf')
class TestHeadObject(MockHttpTest):
def test_server_error(self):
c.http_connection = self.fake_http_connection(500)
self.assertRaises(c.ClientException, c.head_object,
'http://www.test.com', 'asdf', 'asdf', 'asdf')
class TestPutObject(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf')
value = c.put_object(*args)
self.assertTrue(isinstance(value, basestring))
def test_server_error(self):
body = 'c' * 60
c.http_connection = self.fake_http_connection(500, body=body)
args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf')
self.assertRaises(c.ClientException, c.put_object, *args)
try:
value = c.put_object(*args)
except c.ClientException as e:
self.assertEquals(e.http_response_content, body)
class TestPostObject(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', {})
value = c.post_object(*args)
def test_server_error(self):
body = 'c' * 60
c.http_connection = self.fake_http_connection(500, body=body)
args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', {})
self.assertRaises(c.ClientException, c.post_object, *args)
try:
value = c.post_object(*args)
except c.ClientException as e:
self.assertEquals(e.http_response_content, body)
class TestDeleteObject(MockHttpTest):
def test_ok(self):
c.http_connection = self.fake_http_connection(200)
value = c.delete_object('http://www.test.com', 'asdf', 'asdf', 'asdf')
def test_server_error(self):
c.http_connection = self.fake_http_connection(500)
self.assertRaises(c.ClientException, c.delete_object,
'http://www.test.com', 'asdf', 'asdf', 'asdf')
class TestConnection(MockHttpTest):
def test_instance(self):
conn = c.Connection('http://www.test.com', 'asdf', 'asdf')
self.assertEquals(conn.retries, 5)
def test_retry(self):
c.http_connection = self.fake_http_connection(500)
def quick_sleep(*args):
pass
c.sleep = quick_sleep
conn = c.Connection('http://www.test.com', 'asdf', 'asdf')
self.assertRaises(c.ClientException, conn.head_account)
self.assertEquals(conn.attempts, conn.retries + 1)
def test_resp_read_on_server_error(self):
c.http_connection = self.fake_http_connection(500)
conn = c.Connection('http://www.test.com', 'asdf', 'asdf', retries=0)
def get_auth(*args, **kwargs):
return 'http://www.new.com', 'new'
conn.get_auth = get_auth
self.url, self.token = conn.get_auth()
method_signatures = (
(conn.head_account, []),
(conn.get_account, []),
(conn.head_container, ('asdf',)),
(conn.get_container, ('asdf',)),
(conn.put_container, ('asdf',)),
(conn.delete_container, ('asdf',)),
(conn.head_object, ('asdf', 'asdf')),
(conn.get_object, ('asdf', 'asdf')),
(conn.put_object, ('asdf', 'asdf', 'asdf')),
(conn.post_object, ('asdf', 'asdf', {})),
(conn.delete_object, ('asdf', 'asdf')),
)
for method, args in method_signatures:
self.assertRaises(c.ClientException, method, *args)
try:
self.assertTrue(conn.http_conn[1].has_been_read)
except AssertionError:
msg = '%s did not read resp on server error' % method.__name__
self.fail(msg)
            except Exception as e:
raise e.__class__("%s - %s" % (method.__name__, e))
def test_reauth(self):
c.http_connection = self.fake_http_connection(401)
def get_auth(*args, **kwargs):
return 'http://www.new.com', 'new'
def swap_sleep(*args):
self.swap_sleep_called = True
c.get_auth = get_auth
c.http_connection = self.fake_http_connection(200)
c.sleep = swap_sleep
self.swap_sleep_called = False
conn = c.Connection('http://www.test.com', 'asdf', 'asdf',
preauthurl='http://www.old.com',
preauthtoken='old',
)
self.assertEquals(conn.attempts, 0)
self.assertEquals(conn.url, 'http://www.old.com')
self.assertEquals(conn.token, 'old')
value = conn.head_account()
self.assertTrue(self.swap_sleep_called)
self.assertEquals(conn.attempts, 2)
self.assertEquals(conn.url, 'http://www.new.com')
self.assertEquals(conn.token, 'new')
def test_reset_stream(self):
class LocalContents(object):
def __init__(self, tell_value=0):
self.already_read = False
self.seeks = []
self.tell_value = tell_value
def tell(self):
return self.tell_value
def seek(self, position):
self.seeks.append(position)
self.already_read = False
def read(self, size=-1):
if self.already_read:
return ''
else:
self.already_read = True
return 'abcdef'
class LocalConnection(object):
def putrequest(self, *args, **kwargs):
return
def putheader(self, *args, **kwargs):
return
def endheaders(self, *args, **kwargs):
return
def send(self, *args, **kwargs):
raise socket.error('oops')
def request(self, *args, **kwargs):
return
def getresponse(self, *args, **kwargs):
self.status = 200
return self
def getheader(self, *args, **kwargs):
return ''
def read(self, *args, **kwargs):
return ''
def local_http_connection(url, proxy=None):
parsed = urlparse(url)
return parsed, LocalConnection()
orig_conn = c.http_connection
try:
c.http_connection = local_http_connection
conn = c.Connection('http://www.example.com', 'asdf', 'asdf',
retries=1, starting_backoff=.0001)
contents = LocalContents()
exc = None
try:
conn.put_object('c', 'o', contents)
            except socket.error as err:
exc = err
self.assertEquals(contents.seeks, [0])
self.assertEquals(str(exc), 'oops')
contents = LocalContents(tell_value=123)
exc = None
try:
conn.put_object('c', 'o', contents)
            except socket.error as err:
exc = err
self.assertEquals(contents.seeks, [123])
self.assertEquals(str(exc), 'oops')
contents = LocalContents()
contents.tell = None
exc = None
try:
conn.put_object('c', 'o', contents)
            except c.ClientException as err:
exc = err
self.assertEquals(contents.seeks, [])
self.assertEquals(str(exc), "put_object('c', 'o', ...) failure "
"and no ability to reset contents for reupload.")
finally:
c.http_connection = orig_conn
if __name__ == '__main__':
unittest.main()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import sys
import numpy as np
from PIL import Image
import chainer
from chainer import cuda
import chainer.functions as F
from chainer.functions import caffe
from chainer import Variable, optimizers
import pickle
def subtract_mean(x0):
x = x0.copy()
x[0,0,:,:] -= 104
x[0,1,:,:] -= 117
x[0,2,:,:] -= 123
return x
def add_mean(x0):
x = x0.copy()
x[0,0,:,:] += 104
x[0,1,:,:] += 117
x[0,2,:,:] += 123
return x
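# Note: 104/117/123 are (approximately) the per-channel BGR means of the
# ImageNet training set used by Caffe reference models; inputs are
# mean-subtracted before the forward pass and the mean is added back for display.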
def image_resize(img_file, width):
gogh = Image.open(img_file)
orig_w, orig_h = gogh.size[0], gogh.size[1]
if orig_w>orig_h:
new_w = width
new_h = width*orig_h/orig_w
gogh = np.asarray(gogh.resize((new_w,new_h)))[:,:,:3].transpose(2, 0, 1)[::-1].astype(np.float32)
gogh = gogh.reshape((1,3,new_h,new_w))
print("image resized to: ", gogh.shape)
hoge= np.zeros((1,3,width,width), dtype=np.float32)
hoge[0,:,width-new_h:,:] = gogh[0,:,:,:]
gogh = subtract_mean(hoge)
else:
new_w = width*orig_w/orig_h
new_h = width
gogh = np.asarray(gogh.resize((new_w,new_h)))[:,:,:3].transpose(2, 0, 1)[::-1].astype(np.float32)
gogh = gogh.reshape((1,3,new_h,new_w))
print("image resized to: ", gogh.shape)
hoge= np.zeros((1,3,width,width), dtype=np.float32)
hoge[0,:,:,width-new_w:] = gogh[0,:,:,:]
gogh = subtract_mean(hoge)
return xp.asarray(gogh), new_w, new_h
def save_image(img, width, new_w, new_h, it):
def to_img(x):
im = np.zeros((new_h,new_w,3))
im[:,:,0] = x[2,:,:]
im[:,:,1] = x[1,:,:]
im[:,:,2] = x[0,:,:]
def clip(a):
return 0 if a<0 else (255 if a>255 else a)
im = np.vectorize(clip)(im).astype(np.uint8)
Image.fromarray(im).save(args.out_dir+"/im_%05d.png"%it)
if args.gpu>=0:
img_cpu = add_mean(img.get())
else:
img_cpu = add_mean(img)
if width==new_w:
to_img(img_cpu[0,:,width-new_h:,:])
else:
to_img(img_cpu[0,:,:,width-new_w:])
def nin_forward(x):
y0 = F.relu(model.conv1(x))
y1 = model.cccp2(F.relu(model.cccp1(y0)))
x1 = F.relu(model.conv2(F.average_pooling_2d(F.relu(y1), 3, stride=2)))
y2 = model.cccp4(F.relu(model.cccp3(x1)))
x2 = F.relu(model.conv3(F.average_pooling_2d(F.relu(y2), 3, stride=2)))
y3 = model.cccp6(F.relu(model.cccp5(x2)))
x3 = F.relu(getattr(model,"conv4-1024")(F.dropout(F.average_pooling_2d(F.relu(y3), 3, stride=2), train=False)))
return [y0,x1,x2,x3]
def vgg_forward(x):
y1 = model.conv1_2(F.relu(model.conv1_1(x)))
x1 = F.average_pooling_2d(F.relu(y1), 2, stride=2)
y2 = model.conv2_2(F.relu(model.conv2_1(x1)))
x2 = F.average_pooling_2d(F.relu(y2), 2, stride=2)
y3 = model.conv3_3(F.relu(model.conv3_2(F.relu(model.conv3_1(x2)))))
x3 = F.average_pooling_2d(F.relu(y3), 2, stride=2)
y4 = model.conv4_3(F.relu(model.conv4_2(F.relu(model.conv4_1(x3)))))
# x4 = F.average_pooling_2d(F.relu(y4), 2, stride=2)
# y5 = model.conv5_3(F.relu(model.conv5_2(F.relu(model.conv5_1(x4)))))
return [y1,y2,y3,y4]
def get_matrix(y):
ch = y.data.shape[1]
wd = y.data.shape[2]
gogh_y = F.reshape(y, (ch,wd**2))
gogh_matrix = F.matmul(gogh_y, gogh_y, transb=True)/np.float32(ch*wd**2)
return gogh_matrix
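# get_matrix() returns the normalized Gram matrix of a layer's feature maps
# (channel-by-channel correlations), which serves as the style representation
# in "A Neural Algorithm of Artistic Style" (Gatys et al.).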
class Clip(chainer.Function):
def forward(self, x):
x = x[0]
ret = cuda.elementwise(
'T x','T ret',
'''
ret = x<-100?-100:(x>100?100:x);
''','clip')(x)
return ret
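# Clip is a CUDA elementwise kernel that clamps generated pixel values to
# [-100, 100]; the CPU branch in generate_image() applies the same clamp with
# np.vectorize(clip).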
def generate_image(img_orig, img_style, width, nw, nh, max_iter, lr, alpha, beta, img_gen=None):
mid_orig = nin_forward(Variable(img_orig))
style_mats = [get_matrix(y) for y in nin_forward(Variable(img_style))]
if img_gen is None:
if args.gpu >= 0:
img_gen = xp.random.uniform(-20,20,(1,3,width,width),dtype=np.float32)
else:
img_gen = np.random.uniform(-20,20,(1,3,width,width)).astype(np.float32)
x = Variable(img_gen)
xg = xp.zeros_like(x.data)
optimizer = optimizers.Adam(alpha=lr)
optimizer.setup((img_gen,xg))
for i in range(max_iter):
x = Variable(img_gen)
y = nin_forward(x)
optimizer.zero_grads()
L = Variable(xp.zeros((), dtype=np.float32))
for l in range(4):
ch = y[l].data.shape[1]
wd = y[l].data.shape[2]
gogh_y = F.reshape(y[l], (ch,wd**2))
gogh_matrix = F.matmul(gogh_y, gogh_y, transb=True)/np.float32(ch*wd**2)
L1 = np.float32(alpha[l])*F.mean_squared_error(y[l], Variable(mid_orig[l].data))
L2 = np.float32(beta[l])*F.mean_squared_error(gogh_matrix, Variable(style_mats[l].data))/np.float32(4)
L += L1+L2
if i%100==0:
print i,l,L1.data,L2.data
L.backward()
xg += x.grad
optimizer.update()
tmp_shape = img_gen.shape
if args.gpu >= 0:
img_gen += Clip().forward(img_gen).reshape(tmp_shape) - img_gen
else:
def clip(x):
return -100 if x<-100 else (100 if x>100 else x)
img_gen += np.vectorize(clip)(img_gen).reshape(tmp_shape) - img_gen
if i%50==0:
save_image(img_gen, W, nw, nh, i)
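# Per layer l, the loss accumulated above is
#   L += alpha[l] * MSE(y_l, content_l) + beta[l] * MSE(Gram(y_l), Gram(style_l)) / 4
# i.e. a weighted sum of content and style terms over the four extracted layers.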
parser = argparse.ArgumentParser(
description='A Neural Algorithm of Artistic Style')
parser.add_argument('--model', '-m', default='nin_imagenet.caffemodel',
help='model file')
parser.add_argument('--orig_img', '-i', default='orig.png',
help='Original image')
parser.add_argument('--style_img', '-s', default='style.png',
help='Style image')
parser.add_argument('--out_dir', '-o', default='output',
help='Output directory')
parser.add_argument('--gpu', '-g', default=-1, type=int,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--iter', default=5000, type=int,
help='number of iteration')
parser.add_argument('--lr', default=4.0, type=float,
help='learning rate')
parser.add_argument('--lam', default=0.005, type=float,
help='original image weight / style weight ratio')
parser.add_argument('--width', '-w', default=435, type=int,
help='image width, height')
args = parser.parse_args()
try:
os.mkdir(args.out_dir)
except OSError:
pass
if args.gpu >= 0:
cuda.check_cuda_available()
cuda.get_device(args.gpu).use()
xp = cuda.cupy
else:
xp = np
chainer.Function.type_check_enable = False
print "load model... %s"%args.model
func = caffe.CaffeFunction(args.model)
model = func.fs
if args.gpu>=0:
model.to_gpu()
W = args.width
img_gogh,_,_ = image_resize(args.style_img, W)
img_hongo,nw,nh = image_resize(args.orig_img, W)
generate_image(img_hongo, img_gogh, W, nw, nh, img_gen=None, max_iter=args.iter, lr=args.lr, alpha=[args.lam * x for x in [0,0,1,1]], beta=[1,1,1,1])
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
This module contains the Girder events framework. It maintains a global mapping
of events to listeners, and contains utilities for callers to handle or trigger
events identified by a name.
Listeners should bind to events by calling:
``girder.events.bind('event.name', 'my.handler', handlerFunction)``
And events should be fired in one of two ways; if the event should be handled
synchronously, fire it with:
``girder.events.trigger('event.name', info)``
And if the event should be handled asynchronously, use:
``girder.events.daemon.trigger('event.name', info, callback)``
For obvious reasons, the asynchronous method does not return a value to the
caller. Instead, the caller may optionally pass the callback argument as a
function to be called when the task is finished. That callback function will
receive the Event object as its only argument.
"""
import threading
import types
from .constants import TerminalColor
from girder import logger
from six.moves import queue
class Event(object):
"""
An Event object is created when an event is triggered. It is passed to
each of the listeners of the event, which have a chance to add information
to the event, and also optionally stop the event from being further
propagated to other listeners, and also optionally instruct the caller that
it should not execute its default behavior.
"""
# We might have a lot of events, so we use __slots__ to make them smaller
__slots__ = (
'async',
'info',
'name',
'propagate',
'defaultPrevented',
'responses',
'currentHandlerName'
)
def __init__(self, name, info, async=False):
self.name = name
self.info = info
self.propagate = True
self.defaultPrevented = False
self.responses = []
self.currentHandlerName = None
self.async = async
def preventDefault(self):
"""
This can be used to instruct the triggerer of the event that the default
behavior it would normally perform should not be performed. The
semantics of this action are specific to the context of the event
being handled, but a common use of this method is for a plugin to
provide an alternate behavior that will replace the normal way the
event is handled by the core system.
"""
self.defaultPrevented = True
return self
def stopPropagation(self):
"""
Listeners should call this on the event they were passed in order to
stop any other listeners to the event from being executed.
"""
self.propagate = False
return self
def addResponse(self, response):
"""
Listeners that wish to return data back to the caller who triggered this
event should call this to append their own response to the event.
:param response: The response value, which can be any type.
"""
self.responses.append(response)
class AsyncEventsThread(threading.Thread):
"""
This class is used to execute the pipeline for events asynchronously.
This should not be invoked directly by callers; instead, they should use
girder.events.daemon.trigger().
"""
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
self.terminate = False
self.eventQueue = queue.Queue()
def run(self):
"""
Loops over all queued events. If the queue is empty, this thread gets
put to sleep until someone calls trigger() on it with a new event to
dispatch.
"""
print(TerminalColor.info('Started asynchronous event manager thread.'))
while not self.terminate:
eventName, info, callback = self.eventQueue.get(block=True)
try:
event = trigger(eventName, info, async=True)
if isinstance(callback, types.FunctionType):
callback(event)
            except Exception:
                # Must continue the event loop even if a handler failed
                logger.exception('In handler for event "%s":' % eventName)
print(TerminalColor.info('Stopped asynchronous event manager thread.'))
def trigger(self, eventName, info=None, callback=None):
"""
        Adds a new event to the queue to be triggered asynchronously.
        :param eventName: The event name to pass to girder.events.trigger.
        :param info: The info object to pass to girder.events.trigger.
        :param callback: Optional function to call with the resulting Event
            once the handlers have run.
"""
self.eventQueue.put((eventName, info, callback))
def stop(self):
"""
Gracefully stops this thread. Will finish the currently processing
event before stopping.
"""
self.terminate = True
def bind(eventName, handlerName, handler):
"""
Bind a listener (handler) to the event identified by eventName. It is
convention that plugins will use their own name as the handlerName, so that
the trigger() caller can see which plugin(s) responded to the event.
:param eventName: The name that identifies the event.
:type eventName: str
:param handlerName: The name that identifies the handler calling bind().
:type handlerName: str
:param handler: The function that will be called when the event is fired.
It must accept a single argument, which is the Event that
was created by trigger(). This function should not return
a value; any data that it needs to pass back to the
triggerer should be passed via the addResponse() method of
the Event.
:type handler: function
"""
global _mapping
if eventName not in _mapping:
_mapping[eventName] = []
_mapping[eventName].append({
'name': handlerName,
'handler': handler
})
def unbind(eventName, handlerName):
"""
Removes the binding between the event and the given listener.
:param eventName: The name that identifies the event.
:type eventName: str
:param handlerName: The name that identifies the handler calling bind().
:type handlerName: str
"""
global _mapping
if eventName not in _mapping:
return
for handler in _mapping[eventName]:
if handler['name'] == handlerName:
_mapping[eventName].remove(handler)
break
def unbindAll():
"""
Clears the entire event map. Any bound listeners will be unbound.
"""
global _mapping
_mapping = {}
def trigger(eventName, info=None, pre=None, async=False):
"""
Fire an event with the given name. All listeners bound on that name will be
called until they are exhausted or one of the handlers calls the
stopPropagation() method on the event.
:param eventName: The name that identifies the event.
:type eventName: str
:param info: The info argument to pass to the handler function. The type of
this argument is opaque, and can be anything.
:param pre: A function that will be executed prior to the handler being
executed. It will receive a dict with a "handler" key, (the function),
"info" key (the info arg to this function), and "eventName" and
"handlerName" values.
:type pre: function or None
:param async: Whether this event is executing on the background daemon
(True) or on the request thread (False).
:type async: bool
"""
global _mapping
e = Event(eventName, info, async=async)
for handler in _mapping.get(eventName, ()):
e.currentHandlerName = handler['name']
if pre is not None:
pre(info=info, handler=handler['handler'], eventName=eventName,
handlerName=handler['name'])
handler['handler'](e)
if e.propagate is False:
break
return e
_mapping = {}
daemon = AsyncEventsThread()
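# Usage sketch (illustrative names only; see the module docstring above):
#
#     def _on_save(event):
#         event.addResponse('saved %s' % event.info)
#
#     bind('model.save', 'my.plugin', _on_save)
#     event = trigger('model.save', info={'_id': 'abc'})
#     if not event.defaultPrevented:
#         pass  # the caller performs its default behavior
#     unbind('model.save', 'my.plugin')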
|
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import requests
from functools import partial
from pathlib import Path
from typing import Dict, List, Any
from pandas import concat, DataFrame
from lib.concurrent import thread_map
from lib.data_source import DataSource
from lib.metadata_utils import country_subregion1s, country_subregion2s
from lib.utils import table_merge, table_rename
_subregion1_code_to_api_id_map = {
"AC": 1,
"BA": 2,
"BT": 3,
"BE": 4,
"YO": 5,
"JK": 6,
"GO": 7,
"JA": 8,
"JB": 9,
"JT": 10,
"JI": 11,
"KB": 12,
"KS": 13,
"KT": 14,
"KI": 15,
"KU": 16,
"BB": 17,
"KR": 18,
"LA": 19,
"MA": 20,
"MU": 21,
"NB": 22,
"NT": 23,
"PA": 24,
"PB": 25,
"RI": 26,
"SR": 27,
"SN": 28,
"ST": 29,
"SG": 30,
"SA": 31,
"SB": 32,
"SS": 33,
"SU": 34,
}
_subregion2_code_to_api_id_map = {
"1101": 262,
"1102": 266,
"1103": 267,
"1104": 265,
"1105": 258,
"1106": 260,
"1107": 276,
"1108": 268,
"1109": 279,
"1110": 263,
"1111": 271,
"1112": 259,
"1113": 272,
"1114": 261,
"1115": 275,
"1116": 264,
"1117": 270,
"1118": 277,
"1171": 269,
"1172": 278,
"1173": 274,
"1174": 273,
"1175": 280,
"5101": 6,
"5102": 9,
"5103": 1,
"5104": 5,
"5105": 8,
"5106": 2,
"5107": 7,
"5108": 3,
"5171": 4,
"1901": 10,
"1902": 14,
"1903": 12,
"1904": 13,
"1905": 11,
"1906": 15,
"1971": 16,
"1701": 26,
"1702": 33,
"1703": 28,
"1704": 29,
"1705": 34,
"1706": 32,
"1707": 31,
"1708": 30,
"1709": 27,
"1771": 25,
"3601": 19,
"3602": 18,
"3603": 22,
"3604": 20,
"3671": 23,
"3672": 17,
"3673": 21,
"3674": 24,
"7501": 48,
"7502": 46,
"7503": 47,
"7504": 51,
"7505": 50,
"7571": 49,
"1501": 55,
"1502": 56,
"1503": 58,
"1504": 52,
"1505": 57,
"1506": 60,
"1507": 61,
"1508": 53,
"1509": 62,
"1571": 54,
"1572": 59,
"3201": 69,
"3202": 84,
"3203": 72,
"3204": 63,
"3205": 77,
"3206": 87,
"3207": 71,
"3208": 80,
"3209": 74,
"3210": 81,
"3211": 86,
"3212": 78,
"3213": 83,
"3214": 82,
"3215": 79,
"3216": 67,
"3217": 65,
"3218": 498,
"3271": 70,
"3272": 85,
"3273": 64,
"3274": 75,
"3275": 68,
"3276": 76,
"3277": 73,
"3278": 88,
"3279": 66,
"3501": 147,
"3502": 151,
"3503": 159,
"3504": 161,
"3505": 127,
"3506": 134,
"3507": 141,
"3508": 137,
"3509": 132,
"3510": 125,
"3511": 130,
"3512": 156,
"3513": 152,
"3514": 149,
"3515": 155,
"3516": 143,
"3517": 133,
"3518": 145,
"3519": 138,
"3520": 140,
"3521": 146,
"3522": 129,
"3523": 160,
"3524": 136,
"3525": 131,
"3526": 124,
"3527": 154,
"3528": 148,
"3529": 157,
"3571": 135,
"3572": 128,
"3573": 142,
"3574": 153,
"3575": 150,
"3576": 144,
"3577": 139,
"3578": 158,
"3579": 126,
"3101": 45,
"3171": 41,
"3172": 44,
"3173": 40,
"3174": 42,
"3175": 43,
"3301": 95,
"3302": 90,
"3303": 110,
"3304": 89,
"3305": 100,
"3306": 111,
"3307": 123,
"3308": 104,
"3309": 93,
"3310": 102,
"3311": 117,
"3312": 122,
"3313": 99,
"3314": 116,
"3315": 97,
"3316": 92,
"3317": 112,
"3318": 106,
"3319": 103,
"3320": 98,
"3321": 96,
"3322": 114,
"3323": 121,
"3324": 101,
"3325": 91,
"3326": 107,
"3327": 109,
"3328": 119,
"3329": 94,
"3371": 105,
"3372": 118,
"3373": 113,
"3374": 115,
"3375": 108,
"3376": 120,
"6101": 171,
"6102": 169,
"6103": 172,
"6104": 165,
"6105": 175,
"6106": 163,
"6107": 162,
"6108": 167,
"6109": 173,
"6110": 168,
"6111": 164,
"6112": 166,
"6171": 170,
"6172": 174,
"6401": 212,
"6402": 208,
"6403": 204,
"6407": 207,
"6408": 209,
"6409": 213,
"6411": 502,
"6471": 203,
"6472": 214,
"6474": 205,
"2101": 218,
"2102": 219,
"2103": 222,
"2104": 221,
"2105": 220,
"2171": 217,
"2172": 223,
"6301": 187,
"6302": 184,
"6303": 177,
"6304": 180,
"6305": 188,
"6306": 181,
"6307": 182,
"6308": 183,
"6309": 185,
"6310": 186,
"6311": 176,
"6371": 179,
"6372": 178,
"6201": 195,
"6202": 196,
"6203": 193,
"6204": 189,
"6205": 191,
"6206": 194,
"6207": 201,
"6208": 202,
"6209": 197,
"6210": 192,
"6211": 200,
"6212": 198,
"6213": 190,
"6271": 199,
"6501": 206,
"6502": 210,
"6503": 211,
"6504": 215,
"6571": 216,
"1801": 226,
"1802": 227,
"1803": 229,
"1804": 225,
"1805": 235,
"1806": 234,
"1807": 228,
"1808": 237,
"1809": 232,
"1810": 233,
"1811": 230,
"1812": 236,
"1813": 499,
"1871": 224,
"1872": 231,
"8101": 243,
"8102": 244,
"8103": 245,
"8104": 239,
"8105": 247,
"8106": 246,
"8107": 241,
"8108": 242,
"8109": 240,
"8171": 238,
"8172": 248,
"8201": 249,
"8202": 251,
"8203": 253,
"8204": 250,
"8205": 254,
"8206": 252,
"8207": 255,
"8208": 503,
"8271": 256,
"8272": 257,
"5201": 284,
"5202": 285,
"5203": 286,
"5204": 289,
"5205": 283,
"5206": 281,
"5207": 290,
"5208": 287,
"5271": 288,
"5272": 282,
"5301": 295,
"5302": 310,
"5303": 311,
"5304": 292,
"5305": 291,
"5306": 294,
"5307": 305,
"5308": 293,
"5309": 302,
"5310": 298,
"5311": 309,
"5312": 306,
"5313": 297,
"5314": 303,
"5315": 299,
"5316": 301,
"5317": 308,
"5318": 307,
"5319": 300,
"5320": 304,
"5321": 504,
"5371": 296,
"9101": 327,
"9102": 320,
"9103": 318,
"9104": 329,
"9105": 322,
"9106": 313,
"9107": 334,
"9108": 331,
"9109": 328,
"9110": 335,
"9111": 321,
"9112": 332,
"9113": 339,
"9114": 337,
"9115": 338,
"9116": 314,
"9117": 326,
"9118": 312,
"9119": 336,
"9120": 324,
"9121": 325,
"9122": 340,
"9123": 323,
"9124": 330,
"9125": 333,
"9126": 316,
"9127": 317,
"9128": 315,
"9171": 319,
"9201": 346,
"9202": 343,
"9203": 341,
"9204": 348,
"9205": 345,
"9206": 350,
"9207": 351,
"9208": 342,
"9209": 349,
"9210": 344,
"9211": 500,
"9212": 501,
"9271": 347,
"1401": 356,
"1402": 355,
"1403": 352,
"1404": 354,
"1405": 359,
"1406": 361,
"1407": 360,
"1408": 362,
"1409": 357,
"1410": 497,
"1471": 358,
"1472": 353,
"7101": 416,
"7102": 425,
"7103": 420,
"7104": 422,
"7105": 426,
"7106": 428,
"7107": 427,
"7108": 419,
"7109": 421,
"7110": 418,
"7111": 417,
"7171": 424,
"7172": 415,
"7173": 429,
"7174": 423,
"1301": 442,
"1302": 445,
"1303": 444,
"1304": 448,
"1305": 437,
"1306": 430,
"1307": 434,
"1308": 439,
"1309": 433,
"1310": 432,
"1311": 447,
"1312": 440,
"1371": 435,
"1372": 446,
"1373": 443,
"1374": 436,
"1375": 431,
"1376": 441,
"1377": 438,
"7401": 408,
"7402": 410,
"7403": 413,
"7404": 405,
"7405": 411,
"7406": 404,
"7407": 414,
"7408": 409,
"7409": 412,
"7410": 406,
"7411": 511,
"7412": 512,
"7413": 510,
"7414": 508,
"7415": 509,
"7471": 407,
"7472": 403,
"7301": 384,
"7302": 371,
"7303": 368,
"7304": 374,
"7305": 388,
"7306": 373,
"7307": 386,
"7308": 370,
"7309": 379,
"7310": 381,
"7311": 369,
"7312": 387,
"7313": 391,
"7314": 385,
"7315": 383,
"7316": 372,
"7317": 375,
"7318": 389,
"7322": 377,
"7324": 376,
"7326": 390,
"7371": 378,
"7372": 382,
"7373": 380,
"7601": 366,
"7602": 365,
"7603": 364,
"7604": 367,
"7605": 363,
"7606": 505,
"1601": 458,
"1602": 457,
"1603": 453,
"1604": 451,
"1605": 455,
"1606": 454,
"1607": 449,
"1608": 460,
"1609": 459,
"1610": 456,
"1611": 450,
"1612": 514,
"1613": 513,
"1671": 462,
"1672": 461,
"1673": 452,
"1674": 463,
"7201": 392,
"7202": 399,
"7203": 395,
"7204": 402,
"7205": 394,
"7206": 396,
"7207": 393,
"7208": 398,
"7209": 401,
"7210": 400,
"7211": 506,
"7212": 507,
"7271": 397,
"1201": 493,
"1202": 494,
"1203": 492,
"1204": 478,
"1205": 475,
"1206": 471,
"1207": 468,
"1208": 490,
"1209": 464,
"1210": 472,
"1211": 467,
"1212": 496,
"1213": 476,
"1214": 480,
"1215": 485,
"1216": 470,
"1217": 487,
"1218": 488,
"1219": 465,
"1220": 483,
"1221": 482,
"1222": 473,
"1223": 474,
"1224": 481,
"1225": 479,
"1271": 477,
"1272": 486,
"1273": 489,
"1274": 491,
"1275": 466,
"1276": 495,
"1277": 484,
"1278": 469,
"3401": 37,
"3402": 35,
"3403": 36,
"3404": 38,
"3471": 39,
}
_col_name_map = {
"date": "date",
"key": "key",
"kasus": "total_confirmed",
"kasus_baru": "new_confirmed",
"kematian": "total_deceased",
"kematian_baru": "new_deceased",
"sembuh": "total_recovered",
"sembuh_perhari": "new_recovered",
}
def _get_records(
url_tpl: str, subregion_code_to_api_id_map: Dict[str, int], subregion_code: str
) -> List[Dict[str, Any]]:
url = url_tpl.format(subregion_code_to_api_id_map[subregion_code])
res = requests.get(url, timeout=60).json()
if isinstance(res, dict):
# province API like https://andrafarm.com/api/covid19/prov/11 returns a list but city/region API like
# https://andrafarm.com/api/covid19/kota/43 returns a dict
records = list(res.values())
else:
records = res
    for record in records:
        record["subregion_code"] = subregion_code
return records
def _indonesian_date_to_isoformat(indo_date: str) -> str:
""" Convert date like '18 Desember 2020' or '31 JulI 2020' to iso format"""
indonesian_to_english_months = {
"januari": "Jan",
"februari": "Feb",
"maret": "Mar",
"april": "Apr",
"mei": "May",
"juni": "Jun",
"juli": "Jul",
"agustus": "Aug",
"september": "Sep",
"oktober": "Oct",
"november": "Nov",
"desember": "Dec",
}
eng_date = indo_date.lower()
for indo, eng in indonesian_to_english_months.items():
eng_date = eng_date.replace(indo, eng)
date = datetime.datetime.strptime(eng_date, "%d %b %Y")
return date.date().isoformat()
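# For example, _indonesian_date_to_isoformat("18 Desember 2020") returns
# "2020-12-18".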
def _get_data(
url_tpl: str,
subregion_code_col: str,
subregion_code_to_api_id_map: Dict[str, int],
subregions: DataFrame,
) -> DataFrame:
subregion_codes = subregions[subregion_code_col].values
map_func = partial(_get_records, url_tpl, subregion_code_to_api_id_map)
data = DataFrame.from_records(sum(thread_map(map_func, subregion_codes), []))
data["date"] = data.apply(lambda r: _indonesian_date_to_isoformat(r.tgl), axis=1)
# add location keys
data = table_merge(
[data, subregions], left_on="subregion_code", right_on=subregion_code_col, how="left"
)
data = table_rename(data, _col_name_map, drop=True)
return data
# pylint: disable=missing-class-docstring,abstract-method
class IndonesiaAndrafarmDataSource(DataSource):
def fetch(
self,
output_folder: Path,
cache: Dict[str, str],
fetch_opts: List[Dict[str, Any]],
skip_existing: bool = False,
) -> Dict[str, str]:
        # The URL is just a template, so pass it through for parse() to handle manually
return {source["name"]: source["url"] for source in fetch_opts}
def parse(self, sources: Dict[str, str], aux: Dict[str, DataFrame], **parse_opts) -> DataFrame:
subregion1s = country_subregion1s(aux["metadata"], "ID")
subregion2s = country_subregion2s(aux["metadata"], "ID")
data = concat(
[
_get_data(
sources["subregion1_url"],
"subregion1_code",
_subregion1_code_to_api_id_map,
subregion1s,
),
_get_data(
sources["subregion2_url"],
"subregion2_code",
_subregion2_code_to_api_id_map,
subregion2s,
),
]
)
return data
|
|
# Generated from BindPreferenceGrammar.g4 by ANTLR 4.7
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3")
buf.write(u")\u0097\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t")
buf.write(u"\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write(u"\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4")
buf.write(u"\23\t\23\3\2\3\2\3\2\3\3\3\3\5\3,\n\3\3\4\3\4\3\4\3\4")
buf.write(u"\7\4\62\n\4\f\4\16\4\65\13\4\3\5\5\58\n\5\3\5\3\5\3\6")
buf.write(u"\3\6\5\6>\n\6\3\7\3\7\3\7\3\7\5\7D\n\7\3\b\3\b\3\b\3")
buf.write(u"\b\7\bJ\n\b\f\b\16\bM\13\b\3\t\3\t\3\t\3\t\3\t\5\tT\n")
buf.write(u"\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3")
buf.write(u"\t\3\t\3\t\3\t\5\tf\n\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3")
buf.write(u"\t\3\t\3\t\3\t\3\t\5\tt\n\t\3\n\3\n\3\n\3\n\3\n\3\n\3")
buf.write(u"\n\3\n\3\n\3\n\3\n\5\n\u0081\n\n\3\13\3\13\5\13\u0085")
buf.write(u"\n\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20\3")
buf.write(u"\21\3\21\3\22\3\22\3\23\3\23\3\23\2\2\24\2\4\6\b\n\f")
buf.write(u"\16\20\22\24\26\30\32\34\36 \"$\2\7\3\2\26\27\4\2\17")
buf.write(u"\20\24\25\3\2 \"\3\2\32\37\3\2\22\23\2\u0095\2&\3\2\2")
buf.write(u"\2\4+\3\2\2\2\6-\3\2\2\2\b\67\3\2\2\2\n=\3\2\2\2\f?\3")
buf.write(u"\2\2\2\16E\3\2\2\2\20s\3\2\2\2\22\u0080\3\2\2\2\24\u0084")
buf.write(u"\3\2\2\2\26\u0086\3\2\2\2\30\u0088\3\2\2\2\32\u008a\3")
buf.write(u"\2\2\2\34\u008c\3\2\2\2\36\u008e\3\2\2\2 \u0090\3\2\2")
buf.write(u"\2\"\u0092\3\2\2\2$\u0094\3\2\2\2&\'\5\4\3\2\'(\7\2\2")
buf.write(u"\3(\3\3\2\2\2),\7\3\2\2*,\5\16\b\2+)\3\2\2\2+*\3\2\2")
buf.write(u"\2,\5\3\2\2\2-\63\5\b\5\2./\5\26\f\2/\60\5\b\5\2\60\62")
buf.write(u"\3\2\2\2\61.\3\2\2\2\62\65\3\2\2\2\63\61\3\2\2\2\63\64")
buf.write(u"\3\2\2\2\64\7\3\2\2\2\65\63\3\2\2\2\668\5\36\20\2\67")
buf.write(u"\66\3\2\2\2\678\3\2\2\289\3\2\2\29:\5\n\6\2:\t\3\2\2")
buf.write(u"\2;>\5 \21\2<>\5\f\7\2=;\3\2\2\2=<\3\2\2\2>\13\3\2\2")
buf.write(u"\2?C\5\16\b\2@A\5\34\17\2AB\5\16\b\2BD\3\2\2\2C@\3\2")
buf.write(u"\2\2CD\3\2\2\2D\r\3\2\2\2EK\5\20\t\2FG\5\30\r\2GH\5\20")
buf.write(u"\t\2HJ\3\2\2\2IF\3\2\2\2JM\3\2\2\2KI\3\2\2\2KL\3\2\2")
buf.write(u"\2L\17\3\2\2\2MK\3\2\2\2NO\t\2\2\2OS\5\"\22\2PQ\7\4\2")
buf.write(u"\2QR\7\5\2\2RT\5\22\n\2SP\3\2\2\2ST\3\2\2\2TU\3\2\2\2")
buf.write(u"UV\7\6\2\2VW\5\24\13\2WX\7\7\2\2XY\5\6\4\2Yt\3\2\2\2")
buf.write(u"Zt\7(\2\2[\\\5\"\22\2\\]\7\b\2\2]^\7\t\2\2^_\5\"\22\2")
buf.write(u"_t\3\2\2\2`a\7\30\2\2ae\5\"\22\2bc\7\4\2\2cd\7\5\2\2")
buf.write(u"df\5\22\n\2eb\3\2\2\2ef\3\2\2\2fg\3\2\2\2gh\7\6\2\2h")
buf.write(u"i\5\24\13\2ij\7\7\2\2jk\5\16\b\2kt\3\2\2\2lm\5\32\16")
buf.write(u"\2mn\5\16\b\2nt\3\2\2\2op\7\n\2\2pq\5\6\4\2qr\7\13\2")
buf.write(u"\2rt\3\2\2\2sN\3\2\2\2sZ\3\2\2\2s[\3\2\2\2s`\3\2\2\2")
buf.write(u"sl\3\2\2\2so\3\2\2\2t\21\3\2\2\2u\u0081\7\'\2\2v\u0081")
buf.write(u"\5\"\22\2wx\7\'\2\2xy\7\f\2\2yz\7\'\2\2z\u0081\7\r\2")
buf.write(u"\2{|\7\'\2\2|}\7\f\2\2}~\5$\23\2~\177\7\r\2\2\177\u0081")
buf.write(u"\3\2\2\2\u0080u\3\2\2\2\u0080v\3\2\2\2\u0080w\3\2\2\2")
buf.write(u"\u0080{\3\2\2\2\u0081\23\3\2\2\2\u0082\u0085\7%\2\2\u0083")
buf.write(u"\u0085\5$\23\2\u0084\u0082\3\2\2\2\u0084\u0083\3\2\2")
buf.write(u"\2\u0085\25\3\2\2\2\u0086\u0087\t\3\2\2\u0087\27\3\2")
buf.write(u"\2\2\u0088\u0089\t\4\2\2\u0089\31\3\2\2\2\u008a\u008b")
buf.write(u"\7#\2\2\u008b\33\3\2\2\2\u008c\u008d\t\5\2\2\u008d\35")
buf.write(u"\3\2\2\2\u008e\u008f\7\21\2\2\u008f\37\3\2\2\2\u0090")
buf.write(u"\u0091\t\6\2\2\u0091!\3\2\2\2\u0092\u0093\7&\2\2\u0093")
buf.write(u"#\3\2\2\2\u0094\u0095\7\16\2\2\u0095%\3\2\2\2\r+\63\67")
buf.write(u"=CKSes\u0080\u0084")
return buf.getvalue()
class BindPreferenceGrammarParser ( Parser ):
grammarFileName = "BindPreferenceGrammar.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ u"<INVALID>", u"'local'", u"'of'", u"'type'", u"'in'",
u"':'", u"'used'", u"'by'", u"'('", u"')'", u"'['",
u"']'", u"<INVALID>", u"'and'", u"'or'", u"'not'",
u"'true'", u"'false'", u"'impl'", u"'iff'", u"'exists'",
u"'forall'", u"'sum'", u"'cost'", u"'<='", u"'='",
u"'>='", u"'<'", u"'>'", u"'!='", u"'+'", u"'-'", u"'*'",
u"'abs'", u"'obj'", u"'DC'" ]
symbolicNames = [ u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
u"RE", u"AND", u"OR", u"NOT", u"TRUE", u"FALSE", u"IMPL",
u"IFF", u"EXISTS", u"FORALL", u"SUM", u"COST", u"LEQ",
u"EQ", u"GEQ", u"LT", u"GT", u"NEQ", u"PLUS", u"MINUS",
u"TIMES", u"ABS", u"OBJ", u"DC", u"VARIABLE", u"ID",
u"INT", u"WS" ]
RULE_statement = 0
RULE_preference = 1
RULE_b_expr = 2
RULE_b_term = 3
RULE_b_factor = 4
RULE_relation = 5
RULE_expr = 6
RULE_term = 7
RULE_objId = 8
RULE_typeV = 9
RULE_bool_binary_op = 10
RULE_arith_binary_op = 11
RULE_arith_unary_op = 12
RULE_comparison_op = 13
RULE_unaryOp = 14
RULE_boolFact = 15
RULE_variable = 16
RULE_re = 17
ruleNames = [ u"statement", u"preference", u"b_expr", u"b_term", u"b_factor",
u"relation", u"expr", u"term", u"objId", u"typeV", u"bool_binary_op",
u"arith_binary_op", u"arith_unary_op", u"comparison_op",
u"unaryOp", u"boolFact", u"variable", u"re" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
RE=12
AND=13
OR=14
NOT=15
TRUE=16
FALSE=17
IMPL=18
IFF=19
EXISTS=20
FORALL=21
SUM=22
COST=23
LEQ=24
EQ=25
GEQ=26
LT=27
GT=28
NEQ=29
PLUS=30
MINUS=31
TIMES=32
ABS=33
OBJ=34
DC=35
VARIABLE=36
ID=37
INT=38
WS=39
def __init__(self, input, output=sys.stdout):
super(BindPreferenceGrammarParser, self).__init__(input, output=output)
self.checkVersion("4.7")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class StatementContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.StatementContext, self).__init__(parent, invokingState)
self.parser = parser
def preference(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.PreferenceContext,0)
def EOF(self):
return self.getToken(BindPreferenceGrammarParser.EOF, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_statement
def accept(self, visitor):
if hasattr(visitor, "visitStatement"):
return visitor.visitStatement(self)
else:
return visitor.visitChildren(self)
def statement(self):
localctx = BindPreferenceGrammarParser.StatementContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_statement)
try:
self.enterOuterAlt(localctx, 1)
self.state = 36
self.preference()
self.state = 37
self.match(BindPreferenceGrammarParser.EOF)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PreferenceContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.PreferenceContext, self).__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_preference
def copyFrom(self, ctx):
super(BindPreferenceGrammarParser.PreferenceContext, self).copyFrom(ctx)
class ApreferenceExprContext(PreferenceContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.PreferenceContext)
super(BindPreferenceGrammarParser.ApreferenceExprContext, self).__init__(parser)
self.copyFrom(ctx)
def expr(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ExprContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitApreferenceExpr"):
return visitor.visitApreferenceExpr(self)
else:
return visitor.visitChildren(self)
class ApreferenceLocalContext(PreferenceContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.PreferenceContext)
super(BindPreferenceGrammarParser.ApreferenceLocalContext, self).__init__(parser)
self.copyFrom(ctx)
def accept(self, visitor):
if hasattr(visitor, "visitApreferenceLocal"):
return visitor.visitApreferenceLocal(self)
else:
return visitor.visitChildren(self)
def preference(self):
localctx = BindPreferenceGrammarParser.PreferenceContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_preference)
try:
self.state = 41
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [BindPreferenceGrammarParser.T__0]:
localctx = BindPreferenceGrammarParser.ApreferenceLocalContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 39
self.match(BindPreferenceGrammarParser.T__0)
pass
elif token in [BindPreferenceGrammarParser.T__7, BindPreferenceGrammarParser.EXISTS, BindPreferenceGrammarParser.FORALL, BindPreferenceGrammarParser.SUM, BindPreferenceGrammarParser.ABS, BindPreferenceGrammarParser.VARIABLE, BindPreferenceGrammarParser.INT]:
localctx = BindPreferenceGrammarParser.ApreferenceExprContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 40
self.expr()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class B_exprContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.B_exprContext, self).__init__(parent, invokingState)
self.parser = parser
def b_term(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.B_termContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.B_termContext,i)
def bool_binary_op(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.Bool_binary_opContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.Bool_binary_opContext,i)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_b_expr
def accept(self, visitor):
if hasattr(visitor, "visitB_expr"):
return visitor.visitB_expr(self)
else:
return visitor.visitChildren(self)
def b_expr(self):
localctx = BindPreferenceGrammarParser.B_exprContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_b_expr)
try:
self.enterOuterAlt(localctx, 1)
self.state = 43
self.b_term()
self.state = 49
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,1,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 44
self.bool_binary_op()
self.state = 45
self.b_term()
self.state = 51
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,1,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class B_termContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.B_termContext, self).__init__(parent, invokingState)
self.parser = parser
def b_factor(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.B_factorContext,0)
def unaryOp(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.UnaryOpContext,0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_b_term
def accept(self, visitor):
if hasattr(visitor, "visitB_term"):
return visitor.visitB_term(self)
else:
return visitor.visitChildren(self)
def b_term(self):
localctx = BindPreferenceGrammarParser.B_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_b_term)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 53
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==BindPreferenceGrammarParser.NOT:
self.state = 52
self.unaryOp()
self.state = 55
self.b_factor()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class B_factorContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.B_factorContext, self).__init__(parent, invokingState)
self.parser = parser
def boolFact(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.BoolFactContext,0)
def relation(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.RelationContext,0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_b_factor
def accept(self, visitor):
if hasattr(visitor, "visitB_factor"):
return visitor.visitB_factor(self)
else:
return visitor.visitChildren(self)
def b_factor(self):
localctx = BindPreferenceGrammarParser.B_factorContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_b_factor)
try:
self.state = 59
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [BindPreferenceGrammarParser.TRUE, BindPreferenceGrammarParser.FALSE]:
self.enterOuterAlt(localctx, 1)
self.state = 57
self.boolFact()
pass
elif token in [BindPreferenceGrammarParser.T__7, BindPreferenceGrammarParser.EXISTS, BindPreferenceGrammarParser.FORALL, BindPreferenceGrammarParser.SUM, BindPreferenceGrammarParser.ABS, BindPreferenceGrammarParser.VARIABLE, BindPreferenceGrammarParser.INT]:
self.enterOuterAlt(localctx, 2)
self.state = 58
self.relation()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class RelationContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.RelationContext, self).__init__(parent, invokingState)
self.parser = parser
def expr(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.ExprContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.ExprContext,i)
def comparison_op(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.Comparison_opContext,0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_relation
def accept(self, visitor):
if hasattr(visitor, "visitRelation"):
return visitor.visitRelation(self)
else:
return visitor.visitChildren(self)
def relation(self):
localctx = BindPreferenceGrammarParser.RelationContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_relation)
try:
self.enterOuterAlt(localctx, 1)
self.state = 61
self.expr()
self.state = 65
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
if la_ == 1:
self.state = 62
self.comparison_op()
self.state = 63
self.expr()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ExprContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.ExprContext, self).__init__(parent, invokingState)
self.parser = parser
def term(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.TermContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.TermContext,i)
def arith_binary_op(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.Arith_binary_opContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.Arith_binary_opContext,i)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_expr
def accept(self, visitor):
if hasattr(visitor, "visitExpr"):
return visitor.visitExpr(self)
else:
return visitor.visitChildren(self)
def expr(self):
localctx = BindPreferenceGrammarParser.ExprContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_expr)
try:
self.enterOuterAlt(localctx, 1)
self.state = 67
self.term()
self.state = 73
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,5,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 68
self.arith_binary_op()
self.state = 69
self.term()
self.state = 75
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,5,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TermContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.TermContext, self).__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_term
def copyFrom(self, ctx):
super(BindPreferenceGrammarParser.TermContext, self).copyFrom(ctx)
class AexprQuantifierContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprQuantifierContext, self).__init__(parser)
self.copyFrom(ctx)
def variable(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.VariableContext,0)
def typeV(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.TypeVContext,0)
def b_expr(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.B_exprContext,0)
def EXISTS(self):
return self.getToken(BindPreferenceGrammarParser.EXISTS, 0)
def FORALL(self):
return self.getToken(BindPreferenceGrammarParser.FORALL, 0)
def objId(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ObjIdContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAexprQuantifier"):
return visitor.visitAexprQuantifier(self)
else:
return visitor.visitChildren(self)
class AexprSumContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprSumContext, self).__init__(parser)
self.copyFrom(ctx)
def SUM(self):
return self.getToken(BindPreferenceGrammarParser.SUM, 0)
def variable(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.VariableContext,0)
def typeV(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.TypeVContext,0)
def expr(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ExprContext,0)
def objId(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ObjIdContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAexprSum"):
return visitor.visitAexprSum(self)
else:
return visitor.visitChildren(self)
class AexprBindContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprBindContext, self).__init__(parser)
self.copyFrom(ctx)
def variable(self, i=None):
if i is None:
return self.getTypedRuleContexts(BindPreferenceGrammarParser.VariableContext)
else:
return self.getTypedRuleContext(BindPreferenceGrammarParser.VariableContext,i)
def accept(self, visitor):
if hasattr(visitor, "visitAexprBind"):
return visitor.visitAexprBind(self)
else:
return visitor.visitChildren(self)
class AexprBracketsContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprBracketsContext, self).__init__(parser)
self.copyFrom(ctx)
def b_expr(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.B_exprContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAexprBrackets"):
return visitor.visitAexprBrackets(self)
else:
return visitor.visitChildren(self)
class AexprUnaryArithmeticContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprUnaryArithmeticContext, self).__init__(parser)
self.copyFrom(ctx)
def arith_unary_op(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.Arith_unary_opContext,0)
def expr(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ExprContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAexprUnaryArithmetic"):
return visitor.visitAexprUnaryArithmetic(self)
else:
return visitor.visitChildren(self)
class AexprIntContext(TermContext):
def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.TermContext)
super(BindPreferenceGrammarParser.AexprIntContext, self).__init__(parser)
self.copyFrom(ctx)
def INT(self):
return self.getToken(BindPreferenceGrammarParser.INT, 0)
def accept(self, visitor):
if hasattr(visitor, "visitAexprInt"):
return visitor.visitAexprInt(self)
else:
return visitor.visitChildren(self)
def term(self):
localctx = BindPreferenceGrammarParser.TermContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_term)
self._la = 0 # Token type
try:
self.state = 113
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [BindPreferenceGrammarParser.EXISTS, BindPreferenceGrammarParser.FORALL]:
localctx = BindPreferenceGrammarParser.AexprQuantifierContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 76
_la = self._input.LA(1)
if not(_la==BindPreferenceGrammarParser.EXISTS or _la==BindPreferenceGrammarParser.FORALL):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 77
self.variable()
self.state = 81
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==BindPreferenceGrammarParser.T__1:
self.state = 78
self.match(BindPreferenceGrammarParser.T__1)
self.state = 79
self.match(BindPreferenceGrammarParser.T__2)
self.state = 80
self.objId()
self.state = 83
self.match(BindPreferenceGrammarParser.T__3)
self.state = 84
self.typeV()
self.state = 85
self.match(BindPreferenceGrammarParser.T__4)
self.state = 86
self.b_expr()
pass
elif token in [BindPreferenceGrammarParser.INT]:
localctx = BindPreferenceGrammarParser.AexprIntContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 88
self.match(BindPreferenceGrammarParser.INT)
pass
elif token in [BindPreferenceGrammarParser.VARIABLE]:
localctx = BindPreferenceGrammarParser.AexprBindContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 89
self.variable()
self.state = 90
self.match(BindPreferenceGrammarParser.T__5)
self.state = 91
self.match(BindPreferenceGrammarParser.T__6)
self.state = 92
self.variable()
pass
elif token in [BindPreferenceGrammarParser.SUM]:
localctx = BindPreferenceGrammarParser.AexprSumContext(self, localctx)
self.enterOuterAlt(localctx, 4)
self.state = 94
self.match(BindPreferenceGrammarParser.SUM)
self.state = 95
self.variable()
self.state = 99
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==BindPreferenceGrammarParser.T__1:
self.state = 96
self.match(BindPreferenceGrammarParser.T__1)
self.state = 97
self.match(BindPreferenceGrammarParser.T__2)
self.state = 98
self.objId()
self.state = 101
self.match(BindPreferenceGrammarParser.T__3)
self.state = 102
self.typeV()
self.state = 103
self.match(BindPreferenceGrammarParser.T__4)
self.state = 104
self.expr()
pass
elif token in [BindPreferenceGrammarParser.ABS]:
localctx = BindPreferenceGrammarParser.AexprUnaryArithmeticContext(self, localctx)
self.enterOuterAlt(localctx, 5)
self.state = 106
self.arith_unary_op()
self.state = 107
self.expr()
pass
elif token in [BindPreferenceGrammarParser.T__7]:
localctx = BindPreferenceGrammarParser.AexprBracketsContext(self, localctx)
self.enterOuterAlt(localctx, 6)
self.state = 109
self.match(BindPreferenceGrammarParser.T__7)
self.state = 110
self.b_expr()
self.state = 111
self.match(BindPreferenceGrammarParser.T__8)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ObjIdContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.ObjIdContext, self).__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_objId
def copyFrom(self, ctx):
super(BindPreferenceGrammarParser.ObjIdContext, self).copyFrom(ctx)
class AobjIDScenarioContext(ObjIdContext):
        def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.ObjIdContext
super(BindPreferenceGrammarParser.AobjIDScenarioContext, self).__init__(parser)
self.copyFrom(ctx)
def ID(self, i=None):
if i is None:
return self.getTokens(BindPreferenceGrammarParser.ID)
else:
return self.getToken(BindPreferenceGrammarParser.ID, i)
def accept(self, visitor):
if hasattr(visitor, "visitAobjIDScenario"):
return visitor.visitAobjIDScenario(self)
else:
return visitor.visitChildren(self)
class AobjIDREContext(ObjIdContext):
        def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.ObjIdContext
super(BindPreferenceGrammarParser.AobjIDREContext, self).__init__(parser)
self.copyFrom(ctx)
def ID(self):
return self.getToken(BindPreferenceGrammarParser.ID, 0)
def re(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ReContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAobjIDRE"):
return visitor.visitAobjIDRE(self)
else:
return visitor.visitChildren(self)
class AobjIDVarContext(ObjIdContext):
        def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.ObjIdContext
super(BindPreferenceGrammarParser.AobjIDVarContext, self).__init__(parser)
self.copyFrom(ctx)
def variable(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.VariableContext,0)
def accept(self, visitor):
if hasattr(visitor, "visitAobjIDVar"):
return visitor.visitAobjIDVar(self)
else:
return visitor.visitChildren(self)
class AobjIDIDContext(ObjIdContext):
        def __init__(self, parser, ctx): # actually a BindPreferenceGrammarParser.ObjIdContext
super(BindPreferenceGrammarParser.AobjIDIDContext, self).__init__(parser)
self.copyFrom(ctx)
def ID(self):
return self.getToken(BindPreferenceGrammarParser.ID, 0)
def accept(self, visitor):
if hasattr(visitor, "visitAobjIDID"):
return visitor.visitAobjIDID(self)
else:
return visitor.visitChildren(self)
def objId(self):
localctx = BindPreferenceGrammarParser.ObjIdContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_objId)
try:
self.state = 126
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,9,self._ctx)
if la_ == 1:
localctx = BindPreferenceGrammarParser.AobjIDIDContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 115
self.match(BindPreferenceGrammarParser.ID)
pass
elif la_ == 2:
localctx = BindPreferenceGrammarParser.AobjIDVarContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 116
self.variable()
pass
elif la_ == 3:
localctx = BindPreferenceGrammarParser.AobjIDScenarioContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 117
self.match(BindPreferenceGrammarParser.ID)
self.state = 118
self.match(BindPreferenceGrammarParser.T__9)
self.state = 119
self.match(BindPreferenceGrammarParser.ID)
self.state = 120
self.match(BindPreferenceGrammarParser.T__10)
pass
elif la_ == 4:
localctx = BindPreferenceGrammarParser.AobjIDREContext(self, localctx)
self.enterOuterAlt(localctx, 4)
self.state = 121
self.match(BindPreferenceGrammarParser.ID)
self.state = 122
self.match(BindPreferenceGrammarParser.T__9)
self.state = 123
self.re()
self.state = 124
self.match(BindPreferenceGrammarParser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TypeVContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.TypeVContext, self).__init__(parent, invokingState)
self.parser = parser
def DC(self):
return self.getToken(BindPreferenceGrammarParser.DC, 0)
def re(self):
return self.getTypedRuleContext(BindPreferenceGrammarParser.ReContext,0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_typeV
def accept(self, visitor):
if hasattr(visitor, "visitTypeV"):
return visitor.visitTypeV(self)
else:
return visitor.visitChildren(self)
def typeV(self):
localctx = BindPreferenceGrammarParser.TypeVContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_typeV)
try:
self.state = 130
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [BindPreferenceGrammarParser.DC]:
self.enterOuterAlt(localctx, 1)
self.state = 128
self.match(BindPreferenceGrammarParser.DC)
pass
elif token in [BindPreferenceGrammarParser.RE]:
self.enterOuterAlt(localctx, 2)
self.state = 129
self.re()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Bool_binary_opContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.Bool_binary_opContext, self).__init__(parent, invokingState)
self.parser = parser
def AND(self):
return self.getToken(BindPreferenceGrammarParser.AND, 0)
def OR(self):
return self.getToken(BindPreferenceGrammarParser.OR, 0)
def IMPL(self):
return self.getToken(BindPreferenceGrammarParser.IMPL, 0)
def IFF(self):
return self.getToken(BindPreferenceGrammarParser.IFF, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_bool_binary_op
def accept(self, visitor):
if hasattr(visitor, "visitBool_binary_op"):
return visitor.visitBool_binary_op(self)
else:
return visitor.visitChildren(self)
def bool_binary_op(self):
localctx = BindPreferenceGrammarParser.Bool_binary_opContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_bool_binary_op)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 132
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << BindPreferenceGrammarParser.AND) | (1 << BindPreferenceGrammarParser.OR) | (1 << BindPreferenceGrammarParser.IMPL) | (1 << BindPreferenceGrammarParser.IFF))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Arith_binary_opContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.Arith_binary_opContext, self).__init__(parent, invokingState)
self.parser = parser
def PLUS(self):
return self.getToken(BindPreferenceGrammarParser.PLUS, 0)
def MINUS(self):
return self.getToken(BindPreferenceGrammarParser.MINUS, 0)
def TIMES(self):
return self.getToken(BindPreferenceGrammarParser.TIMES, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_arith_binary_op
def accept(self, visitor):
if hasattr(visitor, "visitArith_binary_op"):
return visitor.visitArith_binary_op(self)
else:
return visitor.visitChildren(self)
def arith_binary_op(self):
localctx = BindPreferenceGrammarParser.Arith_binary_opContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_arith_binary_op)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 134
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << BindPreferenceGrammarParser.PLUS) | (1 << BindPreferenceGrammarParser.MINUS) | (1 << BindPreferenceGrammarParser.TIMES))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Arith_unary_opContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.Arith_unary_opContext, self).__init__(parent, invokingState)
self.parser = parser
def ABS(self):
return self.getToken(BindPreferenceGrammarParser.ABS, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_arith_unary_op
def accept(self, visitor):
if hasattr(visitor, "visitArith_unary_op"):
return visitor.visitArith_unary_op(self)
else:
return visitor.visitChildren(self)
def arith_unary_op(self):
localctx = BindPreferenceGrammarParser.Arith_unary_opContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_arith_unary_op)
try:
self.enterOuterAlt(localctx, 1)
self.state = 136
self.match(BindPreferenceGrammarParser.ABS)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Comparison_opContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.Comparison_opContext, self).__init__(parent, invokingState)
self.parser = parser
def LEQ(self):
return self.getToken(BindPreferenceGrammarParser.LEQ, 0)
def EQ(self):
return self.getToken(BindPreferenceGrammarParser.EQ, 0)
def GEQ(self):
return self.getToken(BindPreferenceGrammarParser.GEQ, 0)
def LT(self):
return self.getToken(BindPreferenceGrammarParser.LT, 0)
def GT(self):
return self.getToken(BindPreferenceGrammarParser.GT, 0)
def NEQ(self):
return self.getToken(BindPreferenceGrammarParser.NEQ, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_comparison_op
def accept(self, visitor):
if hasattr(visitor, "visitComparison_op"):
return visitor.visitComparison_op(self)
else:
return visitor.visitChildren(self)
def comparison_op(self):
localctx = BindPreferenceGrammarParser.Comparison_opContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_comparison_op)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 138
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << BindPreferenceGrammarParser.LEQ) | (1 << BindPreferenceGrammarParser.EQ) | (1 << BindPreferenceGrammarParser.GEQ) | (1 << BindPreferenceGrammarParser.LT) | (1 << BindPreferenceGrammarParser.GT) | (1 << BindPreferenceGrammarParser.NEQ))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class UnaryOpContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.UnaryOpContext, self).__init__(parent, invokingState)
self.parser = parser
def NOT(self):
return self.getToken(BindPreferenceGrammarParser.NOT, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_unaryOp
def accept(self, visitor):
if hasattr(visitor, "visitUnaryOp"):
return visitor.visitUnaryOp(self)
else:
return visitor.visitChildren(self)
def unaryOp(self):
localctx = BindPreferenceGrammarParser.UnaryOpContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_unaryOp)
try:
self.enterOuterAlt(localctx, 1)
self.state = 140
self.match(BindPreferenceGrammarParser.NOT)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BoolFactContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.BoolFactContext, self).__init__(parent, invokingState)
self.parser = parser
def TRUE(self):
return self.getToken(BindPreferenceGrammarParser.TRUE, 0)
def FALSE(self):
return self.getToken(BindPreferenceGrammarParser.FALSE, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_boolFact
def accept(self, visitor):
if hasattr(visitor, "visitBoolFact"):
return visitor.visitBoolFact(self)
else:
return visitor.visitChildren(self)
def boolFact(self):
localctx = BindPreferenceGrammarParser.BoolFactContext(self, self._ctx, self.state)
self.enterRule(localctx, 30, self.RULE_boolFact)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 142
_la = self._input.LA(1)
if not(_la==BindPreferenceGrammarParser.TRUE or _la==BindPreferenceGrammarParser.FALSE):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class VariableContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.VariableContext, self).__init__(parent, invokingState)
self.parser = parser
def VARIABLE(self):
return self.getToken(BindPreferenceGrammarParser.VARIABLE, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_variable
def accept(self, visitor):
if hasattr(visitor, "visitVariable"):
return visitor.visitVariable(self)
else:
return visitor.visitChildren(self)
def variable(self):
localctx = BindPreferenceGrammarParser.VariableContext(self, self._ctx, self.state)
self.enterRule(localctx, 32, self.RULE_variable)
try:
self.enterOuterAlt(localctx, 1)
self.state = 144
self.match(BindPreferenceGrammarParser.VARIABLE)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ReContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(BindPreferenceGrammarParser.ReContext, self).__init__(parent, invokingState)
self.parser = parser
def RE(self):
return self.getToken(BindPreferenceGrammarParser.RE, 0)
def getRuleIndex(self):
return BindPreferenceGrammarParser.RULE_re
def accept(self, visitor):
if hasattr(visitor, "visitRe"):
return visitor.visitRe(self)
else:
return visitor.visitChildren(self)
def re(self):
localctx = BindPreferenceGrammarParser.ReContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_re)
try:
self.enterOuterAlt(localctx, 1)
self.state = 146
self.match(BindPreferenceGrammarParser.RE)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
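# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the ANTLR-generated output above).  A
# generated parser is normally driven together with its lexer and the antlr4
# Python runtime; the lexer module/class name below follows ANTLR's naming
# convention and is an assumption, as is MyVisitor, a user-written visitor
# implementing the visitAexpr*/visitAobjID* hooks declared in the contexts:
#
#     from antlr4 import InputStream, CommonTokenStream
#     from BindPreferenceGrammarLexer import BindPreferenceGrammarLexer
#
#     lexer = BindPreferenceGrammarLexer(InputStream(preference_text))
#     parser = BindPreferenceGrammarParser(CommonTokenStream(lexer))
#     tree = parser.term()            # or any other rule method, e.g. objId()
#     result = tree.accept(MyVisitor())
# ---------------------------------------------------------------------------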
|
|
'''
ProgressSpinner
===============
Android Lollipop style progress spinner.
'''
from kivy.lang import Builder
from kivy.core.image import Image as CoreImage
from kivy.properties import NumericProperty, ListProperty, BoundedNumericProperty, StringProperty, ObjectProperty
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.properties import BooleanProperty
from kivy.uix.widget import Widget
Builder.load_string('''
<ProgressSpinnerBase>:
_size: min(self.height, self.width)
_rsize: self._size / 2.
_stroke: max(0.1, self._rsize / 20. if self.stroke_width is None else self.stroke_width)
_radius: self._rsize - self._stroke * 2.
<ProgressSpinner>:
canvas:
Color:
rgba: self.color
Line:
circle:
(self.center_x, self.center_y, self._radius,
self._angle_center + self._angle_start,
self._angle_center + self._angle_end)
width: self._stroke
cap: 'none'
<TextureProgressSpinner>:
canvas:
StencilPush
Color:
rgba: 1, 1, 1, 1
Line:
circle:
(self.center_x, self.center_y, self._radius,
self._angle_center + self._angle_start,
self._angle_center + self._angle_end)
width: self._stroke
cap: 'none'
StencilUse
Color:
rgba: self.color
Rectangle:
pos: self.center_x - self._rsize, self.center_y - self._rsize
size: self._size, self._size
texture: self.texture
StencilUnUse
Color:
rgba: 1, 1, 1, 1
Line:
circle:
(self.center_x, self.center_y, self._radius,
self._angle_center + self._angle_start,
self._angle_center + self._angle_end)
width: self._stroke
cap: 'none'
StencilPop
<RotatingTextureProgressSpinner>:
canvas:
PushMatrix
Rotate:
angle: -self._angle_center
origin: self.center
StencilPush
Color:
rgba: 1, 1, 1, 1
Line:
circle:
(self.center_x, self.center_y, self._radius,
self._angle_start, self._angle_end)
width: self._stroke
cap: 'none'
StencilUse
Color:
rgba: self.color
Rectangle:
pos: self.center_x - self._rsize, self.center_y - self._rsize
size: self._size, self._size
texture: self.texture
StencilUnUse
Color:
rgba: 1, 1, 1, 1
Line:
circle:
(self.center_x, self.center_y, self._radius,
self._angle_start, self._angle_end)
width: self._stroke
cap: 'none'
StencilPop
PopMatrix
''')
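# Note (added): the <TextureProgressSpinner> and <RotatingTextureProgressSpinner>
# rules above rely on Kivy's stencil instructions.  The arc drawn between
# StencilPush and StencilUse defines a mask, the textured Rectangle is then
# clipped to that mask, and the arc is drawn again between StencilUnUse and
# StencilPop to remove it from the stencil buffer and restore the previous state.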
class ProgressSpinnerBase(Widget):
'''ProgressSpinnerBase - base class for progress spinner widgets
'''
color = ListProperty([1, 1, 1, 1])
'''Color to render the spinner.
:attr:`color` is a :class:`~kivy.properties.ListProperty` and defaults
to [1, 1, 1, 1] (white, full opacity).
'''
speed = BoundedNumericProperty(1, min=0.1)
'''Speed coefficient of the spinner. This value is multiplied by the
base speed of 90 degrees per second.
:attr:`speed` is a :class:`~kivy.properties.BoundedNumericProperty` and
defaults to 1.
'''
stroke_length = BoundedNumericProperty(25., min=1, max=180)
'''Base length of the stroke in degrees.
:attr:`stroke_length` is a :class:`~kivy.properties.BoundedNumericProperty`
and defaults to 25.
'''
stroke_width = NumericProperty(None, allownone=True)
'''Width of the stroke in pixels. If set to None, the width will be
calculated automatically as 1/20th of the radius.
:attr:`stroke_width` is a :class:`~kivy.properties.NumericProperty` and
defaults to None.
'''
auto_start = BooleanProperty(True)
'''Whether to automatically start spinning.
:attr:`auto_start` is a :class:`~kivy.properties.BooleanProperty` and
defaults to True.
'''
# internal properties
_angle_center = NumericProperty(0)
_angle_start = NumericProperty()
_angle_end = NumericProperty()
_size = NumericProperty()
_rsize = NumericProperty()
_stroke = NumericProperty(1)
_radius = NumericProperty(50)
def __init__(self, **kwargs):
super(ProgressSpinnerBase, self).__init__(**kwargs)
self._state = 'wait1'
self._next = None
self._spinning = False
if self.auto_start:
self.start_spinning()
def start_spinning(self, *args):
'''Start spinning the progress spinner. Ignores all positional args
for easy binding.
'''
if not self._spinning:
self._state = 'wait1'
self._next = None
self._angle_center = 0.
self._angle_start = 360.
self._angle_end = 360. + self.stroke_length
Clock.schedule_interval(self._update, 0)
Clock.schedule_once(self._rotate, 0.3)
self._spinning = True
def stop_spinning(self, *args):
'''Stop spinning the progress spinner. Ignores all positional args
for easy binding.
If you intend to keep the spinner around, you should stop it when
not using it and restart it when needed again.
'''
if self._spinning:
if self._next:
if isinstance(self._next, Animation):
self._next.cancel(self)
else:
self._next.cancel()
Clock.unschedule(self._update)
Clock.unschedule(self._rotate)
self._angle_start = self._angle_end = 0
self._spinning = False
def _update(self, dt):
angle_speed = 90. * self.speed
self._angle_center += dt * angle_speed
if self._angle_center > 360:
self._angle_center -= 360.
def _rotate(self, *args):
if not self._spinning:
return
rotate_speed = 0.6 / self.speed
wait_speed = 0.3 / self.speed
if self._state == 'wait1':
self._state = 'rotate1'
self._next = Animation(_angle_end=self._angle_start + 360. - self.stroke_length, d=rotate_speed,
t='in_quad')
self._next.bind(on_complete=self._rotate)
self._next.start(self)
elif self._state == 'rotate1':
self._state = 'wait2'
self._next = Clock.schedule_once(self._rotate, wait_speed)
elif self._state == 'wait2':
self._state = 'rotate2'
self._next = Animation(_angle_start=self._angle_end - self.stroke_length, d=rotate_speed, t='in_quad')
self._next.bind(on_complete=self._rotate)
self._next.start(self)
elif self._state == 'rotate2':
self._state = 'wait1'
self._next = Clock.schedule_once(self._rotate, wait_speed)
while self._angle_end > 720.:
self._angle_start -= 360.
self._angle_end -= 360.
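    # Note on _rotate() above: the spinner cycles through four states,
    # wait1 -> rotate1 -> wait2 -> rotate2 -> wait1.  rotate1 animates
    # _angle_end so the arc grows from stroke_length to 360 - stroke_length
    # degrees, rotate2 animates _angle_start so the arc shrinks back, and the
    # two wait states schedule a short pause in between.  The final while-loop
    # rewinds both angles by full turns so they do not grow without bound while
    # the widget keeps spinning.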
class ProgressSpinner(ProgressSpinnerBase):
'''ProgressSpinner class. Android Lollipop style progress spinner.
'''
pass
class TextureProgressSpinnerBase(ProgressSpinnerBase):
texture = ObjectProperty()
'''Texture to render for the spinner.
:attr:`texture` is a :class:`~kivy.properties.ObjectProperty` and
defaults to None.
'''
source = StringProperty('')
'''Source image to render for the spinner.
:attr:`source` is a :class:`~kivy.properties.StringProperty` and
defaults to an empty string.
'''
def on_source(self, inst, value):
if value:
self.texture = CoreImage(value).texture
class TextureProgressSpinner(TextureProgressSpinnerBase):
'''TextureProgressSpinner class.
Same as ProgressSpinner, but with a texture/image instead of a solid color.
'''
pass
class RotatingTextureProgressSpinner(TextureProgressSpinnerBase):
'''RotatingTextureProgressSpinner class.
Same as TextureProgressSpinner, but the texture/image rotates along with
the spinner.
'''
pass
if __name__ == '__main__':
from kivy.app import App
from kivy.graphics.texture import Texture
from textwrap import dedent
class TestApp(App):
texture = ObjectProperty()
def blittex(self, *args):
rgbpixels = [(x, 0, y, 255) for x in range(256) for y in range(256)]
            # bytes(bytearray(...)) yields raw bytes from the int tuples on both
            # Python 2 and 3 (b''.join(map(chr, ...)) breaks on Python 3, where
            # chr() returns str)
            pixels = b''.join(bytes(bytearray(pix)) for pix in rgbpixels)
self.texture = Texture.create(size=(256, 256))
self.texture.blit_buffer(pixels, colorfmt='rgba', bufferfmt='ubyte')
def build(self):
Clock.schedule_once(self.blittex, -1)
return Builder.load_string(dedent('''\
<ProgressSpinnerBase>:
on_touch_down: self.stop_spinning() if self._spinning else self.start_spinning()
<TTextureProgressSpinner@TextureProgressSpinner>:
texture: app.texture
<TRotatingTextureProgressSpinner@RotatingTextureProgressSpinner>:
texture: app.texture
<ITextureProgressSpinner@TextureProgressSpinner>:
source: 'demoimage.jpg'
<IRotatingTextureProgressSpinner@RotatingTextureProgressSpinner>:
source: 'demoimage.jpg'
BoxLayout:
BoxLayout:
orientation: 'vertical'
ProgressSpinner
TTextureProgressSpinner
TRotatingTextureProgressSpinner
BoxLayout:
orientation: 'vertical'
BoxLayout:
ProgressSpinner:
color: 0.3, 0.3, 1, 1
stroke_width: 1
ProgressSpinner:
speed: 0.5
color: 1, 0, 0, 1
ProgressSpinner:
speed: 2
color: 0, 1, 0, 1
BoxLayout:
TTextureProgressSpinner:
color: 1, 0, 0, 1
ITextureProgressSpinner:
stroke_width: 10
ITextureProgressSpinner:
stroke_length: 20
BoxLayout:
TRotatingTextureProgressSpinner:
color: 1, 0, 0, 1
IRotatingTextureProgressSpinner:
stroke_width: 10
IRotatingTextureProgressSpinner:
stroke_length: 20
'''))
TestApp().run()
|
|
import os
import pytest
from dvc.dependency.base import DependencyDoesNotExistError
from dvc.dvcfile import PIPELINE_FILE
from dvc.output import OutputDoesNotExistError
from dvc.stage.exceptions import StageCommitError
from tests.utils import clean_staging
def test_commit_recursive(tmp_dir, dvc):
tmp_dir.gen({"dir": {"file": "text1", "subdir": {"file2": "text2"}}})
stages = dvc.add("dir", recursive=True, no_commit=True)
assert len(stages) == 2
assert dvc.status() != {}
dvc.commit("dir", recursive=True)
assert dvc.status() == {}
def test_commit_force(tmp_dir, dvc):
tmp_dir.gen({"dir": {"file": "text1", "file2": "text2"}})
(stage,) = dvc.add("dir", no_commit=True)
assert stage.outs[0].changed_cache()
tmp_dir.gen("dir/file", "file content modified")
assert stage.outs[0].changed_cache()
with pytest.raises(StageCommitError):
dvc.commit(stage.path)
assert stage.outs[0].changed_cache()
dvc.commit(stage.path, force=True)
assert dvc.status([stage.path]) == {}
@pytest.mark.parametrize("run_kw", [{"single_stage": True}, {"name": "copy"}])
def test_commit_with_deps(tmp_dir, dvc, run_copy, run_kw):
tmp_dir.gen("foo", "foo")
(foo_stage,) = dvc.add("foo", no_commit=True)
assert foo_stage is not None
assert len(foo_stage.outs) == 1
stage = run_copy("foo", "file", no_commit=True, **run_kw)
assert stage is not None
assert len(stage.outs) == 1
assert foo_stage.outs[0].changed_cache()
assert stage.outs[0].changed_cache()
clean_staging()
dvc.commit(stage.path, with_deps=True)
assert not foo_stage.outs[0].changed_cache()
assert not stage.outs[0].changed_cache()
def test_commit_changed_md5(tmp_dir, dvc):
tmp_dir.gen({"file": "file content"})
(stage,) = dvc.add("file", no_commit=True)
stage_file_content = (tmp_dir / stage.path).parse()
stage_file_content["md5"] = "1111111111"
(tmp_dir / stage.path).dump(stage_file_content)
clean_staging()
with pytest.raises(StageCommitError):
dvc.commit(stage.path)
dvc.commit(stage.path, force=True)
assert "md5" not in (tmp_dir / stage.path).parse()
def test_commit_no_exec(tmp_dir, dvc):
tmp_dir.gen({"dep": "dep", "out": "out"})
stage = dvc.run(
name="my", cmd="mycmd", deps=["dep"], outs=["out"], no_exec=True
)
clean_staging()
assert dvc.status(stage.path)
dvc.commit(stage.path, force=True)
assert dvc.status(stage.path) == {}
def test_commit_granular_output(tmp_dir, dvc):
dvc.run(
name="mystage",
cmd=["echo foo>foo", "echo bar>bar"],
outs=["foo", "bar"],
no_commit=True,
)
clean_staging()
cache = tmp_dir / ".dvc" / "cache"
assert not list(cache.glob("*/*"))
dvc.commit("foo")
assert list(cache.glob("*/*")) == [
cache / "d3" / "b07384d113edec49eaa6238ad5ff00"
]
def test_commit_granular_output_file(tmp_dir, dvc):
tmp_dir.gen("foo", "foo")
dvc.add("foo", no_commit=True)
clean_staging()
dvc.commit("foo")
assert dvc.status() == {}
def test_commit_granular_output_dir(tmp_dir, dvc):
tmp_dir.gen(
{
"data": {
"foo": "foo",
"bar": "bar",
"subdir": {"subfoo": "subfoo", "subbar": "subbar"},
}
}
)
dvc.add("data", no_commit=True)
clean_staging()
dvc.commit("data")
assert dvc.status() == {}
def test_commit_granular_dir(tmp_dir, dvc):
tmp_dir.gen(
{
"data": {
"foo": "foo",
"bar": "bar",
"subdir": {"subfoo": "subfoo", "subbar": "subbar"},
}
}
)
dvc.add("data", no_commit=True)
clean_staging()
cache = tmp_dir / ".dvc" / "cache"
assert set(cache.glob("*/*")) == set()
dvc.commit(os.path.join("data", "foo"))
assert set(cache.glob("*/*")) == {
cache / "1a" / "ca2c799df82929bbdd976557975546.dir",
cache / "ac" / "bd18db4cc2f85cedef654fccc4a4d8",
}
clean_staging()
dvc.commit(os.path.join("data", "subdir"))
assert set(cache.glob("*/*")) == {
cache / "1a" / "ca2c799df82929bbdd976557975546.dir",
cache / "ac" / "bd18db4cc2f85cedef654fccc4a4d8",
cache / "4c" / "e8d2a2cf314a52fa7f315ca37ca445",
cache / "68" / "dde2c3c4e7953c2290f176bbdc9a54",
}
clean_staging()
dvc.commit(os.path.join("data"))
assert set(cache.glob("*/*")) == {
cache / "1a" / "ca2c799df82929bbdd976557975546.dir",
cache / "ac" / "bd18db4cc2f85cedef654fccc4a4d8",
cache / "4c" / "e8d2a2cf314a52fa7f315ca37ca445",
cache / "68" / "dde2c3c4e7953c2290f176bbdc9a54",
cache / "37" / "b51d194a7513e45b56f6524f2d51f2",
}
def test_commit_no_exec_missing_dep(tmp_dir, dvc):
stage = dvc.run(
name="my", cmd="mycmd", deps=["dep"], outs=["out"], no_exec=True
)
clean_staging()
assert dvc.status(stage.path)
with pytest.raises(DependencyDoesNotExistError):
dvc.commit(stage.path, force=True)
def test_commit_no_exec_missing_out(tmp_dir, dvc):
stage = dvc.run(name="my", cmd="mycmd", outs=["out"], no_exec=True)
clean_staging()
assert dvc.status(stage.path)
with pytest.raises(OutputDoesNotExistError):
dvc.commit(stage.path, force=True)
def test_commit_pipeline_stage(tmp_dir, dvc, run_copy):
tmp_dir.gen("foo", "foo")
stage = run_copy("foo", "bar", no_commit=True, name="copy-foo-bar")
clean_staging()
assert dvc.status(stage.addressing)
assert dvc.commit(stage.addressing, force=True) == [stage]
assert not dvc.status(stage.addressing)
# just to confirm different variants work
assert dvc.commit(f":{stage.addressing}") == [stage]
assert dvc.commit(f"{PIPELINE_FILE}:{stage.addressing}") == [stage]
assert dvc.commit(PIPELINE_FILE) == [stage]
def test_imported_entries_unchanged(tmp_dir, dvc, erepo_dir):
with erepo_dir.chdir():
erepo_dir.dvc_gen("file", "file content", "initial commit")
clean_staging()
stage = dvc.imp(os.fspath(erepo_dir), "file")
assert stage.changed_entries() == ([], [], None)
|
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
import json
import time
import datetime
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from decimal import Decimal
from tempfile import NamedTemporaryFile
PY3 = sys.version_info[0] == 3
class PostgresToGoogleCloudStorageOperator(BaseOperator):
"""
Copy data from Postgres to Google Cloud Storage in JSON format.
"""
template_fields = ('sql', 'bucket', 'filename', 'schema_filename',
'parameters')
template_ext = ('.sql', )
ui_color = '#a0e08c'
@apply_defaults
def __init__(self,
sql,
bucket,
filename,
schema_filename=None,
approx_max_file_size_bytes=1900000000,
postgres_conn_id='postgres_default',
google_cloud_storage_conn_id='google_cloud_default',
delegate_to=None,
parameters=None,
*args,
**kwargs):
"""
:param sql: The SQL to execute on the Postgres table.
:type sql: str
:param bucket: The bucket to upload to.
:type bucket: str
:param filename: The filename to use as the object name when uploading
to Google Cloud Storage. A {} should be specified in the filename
to allow the operator to inject file numbers in cases where the
file is split due to size.
:type filename: str
:param schema_filename: If set, the filename to use as the object name
when uploading a .json file containing the BigQuery schema fields
for the table that was dumped from Postgres.
:type schema_filename: str
:param approx_max_file_size_bytes: This operator supports the ability
to split large table dumps into multiple files (see notes in the
            filename param docs above). Google Cloud Storage allows for files
to be a maximum of 4GB. This param allows developers to specify the
file size of the splits.
:type approx_max_file_size_bytes: long
:param postgres_conn_id: Reference to a specific Postgres hook.
:type postgres_conn_id: str
:param google_cloud_storage_conn_id: Reference to a specific Google
cloud storage hook.
:type google_cloud_storage_conn_id: str
:param delegate_to: The account to impersonate, if any. For this to
work, the service account making the request must have domain-wide
delegation enabled.
:param parameters: a parameters dict that is substituted at query runtime.
:type parameters: dict
"""
super(PostgresToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
self.sql = sql
self.bucket = bucket
self.filename = filename
self.schema_filename = schema_filename
self.approx_max_file_size_bytes = approx_max_file_size_bytes
self.postgres_conn_id = postgres_conn_id
self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
self.delegate_to = delegate_to
self.parameters = parameters
def execute(self, context):
cursor = self._query_postgres()
files_to_upload = self._write_local_data_files(cursor)
# If a schema is set, create a BQ schema JSON file.
if self.schema_filename:
files_to_upload.update(self._write_local_schema_file(cursor))
# Flush all files before uploading
for file_handle in files_to_upload.values():
file_handle.flush()
self._upload_to_gcs(files_to_upload)
# Close all temp file handles.
for file_handle in files_to_upload.values():
file_handle.close()
def _query_postgres(self):
"""
Queries Postgres and returns a cursor to the results.
"""
postgres = PostgresHook(postgres_conn_id=self.postgres_conn_id)
conn = postgres.get_conn()
cursor = conn.cursor()
cursor.execute(self.sql, self.parameters)
return cursor
def _write_local_data_files(self, cursor):
"""
Takes a cursor, and writes results to a local file.
:return: A dictionary where keys are filenames to be used as object
names in GCS, and values are file handles to local files that
contain the data for the GCS objects.
"""
schema = list(map(lambda schema_tuple: schema_tuple[0], cursor.description))
file_no = 0
tmp_file_handle = NamedTemporaryFile(delete=True)
tmp_file_handles = {self.filename.format(file_no): tmp_file_handle}
for row in cursor:
# Convert datetime objects to utc seconds, and decimals to floats
row = map(self.convert_types, row)
row_dict = dict(zip(schema, row))
s = json.dumps(row_dict, sort_keys=True)
if PY3:
s = s.encode('utf-8')
tmp_file_handle.write(s)
# Append newline to make dumps BigQuery compatible.
tmp_file_handle.write(b'\n')
# Stop if the file exceeds the file size limit.
if tmp_file_handle.tell() >= self.approx_max_file_size_bytes:
file_no += 1
tmp_file_handle = NamedTemporaryFile(delete=True)
tmp_file_handles[self.filename.format(file_no)] = tmp_file_handle
return tmp_file_handles
def _write_local_schema_file(self, cursor):
"""
Takes a cursor, and writes the BigQuery schema for the results to a
local file system.
:return: A dictionary where key is a filename to be used as an object
name in GCS, and values are file handles to local files that
contains the BigQuery schema fields in .json format.
"""
schema = []
for field in cursor.description:
# See PEP 249 for details about the description tuple.
field_name = field[0]
field_type = self.type_map(field[1])
field_mode = 'REPEATED' if field[1] in (1009, 1005, 1007,
1016) else 'NULLABLE'
schema.append({
'name': field_name,
'type': field_type,
'mode': field_mode,
})
self.log.info('Using schema for %s: %s', self.schema_filename, schema)
tmp_schema_file_handle = NamedTemporaryFile(delete=True)
s = json.dumps(schema, sort_keys=True)
if PY3:
s = s.encode('utf-8')
tmp_schema_file_handle.write(s)
return {self.schema_filename: tmp_schema_file_handle}
def _upload_to_gcs(self, files_to_upload):
"""
Upload all of the file splits (and optionally the schema .json file) to
Google Cloud Storage.
"""
hook = GoogleCloudStorageHook(
google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
delegate_to=self.delegate_to)
        # 'obj' avoids shadowing the built-in name 'object'
        for obj, tmp_file_handle in files_to_upload.items():
            hook.upload(self.bucket, obj, tmp_file_handle.name,
                        'application/json')
@classmethod
def convert_types(cls, value):
"""
Takes a value from Postgres, and converts it to a value that's safe for
JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds.
Decimals are converted to floats. Times are converted to seconds.
"""
if type(value) in (datetime.datetime, datetime.date):
return time.mktime(value.timetuple())
        elif type(value) == datetime.time:
            formatted_time = time.strptime(str(value), "%H:%M:%S")
            return datetime.timedelta(
                hours=formatted_time.tm_hour,
                minutes=formatted_time.tm_min,
                seconds=formatted_time.tm_sec).seconds
elif isinstance(value, Decimal):
return float(value)
else:
return value
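    # Illustrative examples (added, not part of the original module):
    #   convert_types(Decimal('1.5'))           -> 1.5
    #   convert_types(datetime.time(0, 1, 30))  -> 90   (seconds past midnight)
    # Dates and datetimes become seconds since the epoch via time.mktime(), so
    # the exact value depends on the local timezone.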
@classmethod
def type_map(cls, postgres_type):
"""
Helper function that maps from Postgres fields to BigQuery fields. Used
when a schema_filename is set.
"""
d = {
1114: 'TIMESTAMP',
1184: 'TIMESTAMP',
1082: 'TIMESTAMP',
1083: 'TIMESTAMP',
1005: 'INTEGER',
1007: 'INTEGER',
1016: 'INTEGER',
20: 'INTEGER',
21: 'INTEGER',
23: 'INTEGER',
16: 'BOOLEAN',
700: 'FLOAT',
701: 'FLOAT',
1700: 'FLOAT'
}
return d[postgres_type] if postgres_type in d else 'STRING'
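# Hedged usage sketch (not part of this module): how the operator above might
# be wired into a DAG.  The dag object, task_id, SQL, bucket and object names
# below are illustrative assumptions; the connection ids are the defaults
# declared in __init__.
#
#     extract = PostgresToGoogleCloudStorageOperator(
#         task_id='postgres_to_gcs',
#         sql='SELECT * FROM my_table',
#         bucket='my-bucket',
#         filename='exports/my_table_{}.json',        # '{}' receives the split number
#         schema_filename='exports/my_table_schema.json',
#         postgres_conn_id='postgres_default',
#         google_cloud_storage_conn_id='google_cloud_default',
#         dag=dag,
#     )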
|
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from lxml import etree
from oslo_utils import timeutils
import six
import webob
from cinder.api.v2 import types
from cinder.api.views import types as views_types
from cinder import exception
from cinder import test
from cinder.tests.api import fakes
from cinder.volume import volume_types
def stub_volume_type(id):
specs = {
"key1": "value1",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5": "value5"
}
return dict(
id=id,
name='vol_type_%s' % six.text_type(id),
description='vol_type_desc_%s' % six.text_type(id),
extra_specs=specs,
)
def return_volume_types_get_all_types(context, search_opts=None):
return dict(
vol_type_1=stub_volume_type(1),
vol_type_2=stub_volume_type(2),
vol_type_3=stub_volume_type(3)
)
def return_empty_volume_types_get_all_types(context, search_opts=None):
return {}
def return_volume_types_get_volume_type(context, id):
if id == "777":
raise exception.VolumeTypeNotFound(volume_type_id=id)
return stub_volume_type(id)
def return_volume_types_get_by_name(context, name):
if name == "777":
raise exception.VolumeTypeNotFoundByName(volume_type_name=name)
return stub_volume_type(int(name.split("_")[2]))
def return_volume_types_get_default():
return stub_volume_type(1)
def return_volume_types_get_default_not_found():
return {}
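# The stub_* and return_* helpers above stand in for the cinder.volume.volume_types
# API; the tests below install them with self.stubs.Set so that no database or
# volume driver is touched.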
class VolumeTypesApiTest(test.TestCase):
def setUp(self):
super(VolumeTypesApiTest, self).setUp()
self.controller = types.VolumeTypesController()
def test_volume_types_index(self):
self.stubs.Set(volume_types, 'get_all_types',
return_volume_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types')
res_dict = self.controller.index(req)
self.assertEqual(3, len(res_dict['volume_types']))
expected_names = ['vol_type_1', 'vol_type_2', 'vol_type_3']
actual_names = map(lambda e: e['name'], res_dict['volume_types'])
self.assertEqual(set(actual_names), set(expected_names))
for entry in res_dict['volume_types']:
self.assertEqual('value1', entry['extra_specs']['key1'])
def test_volume_types_index_no_data(self):
self.stubs.Set(volume_types, 'get_all_types',
return_empty_volume_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types')
res_dict = self.controller.index(req)
self.assertEqual(0, len(res_dict['volume_types']))
def test_volume_types_show(self):
self.stubs.Set(volume_types, 'get_volume_type',
return_volume_types_get_volume_type)
type_id = str(uuid.uuid4())
req = fakes.HTTPRequest.blank('/v2/fake/types/' + type_id)
res_dict = self.controller.show(req, type_id)
self.assertEqual(1, len(res_dict))
self.assertEqual(type_id, res_dict['volume_type']['id'])
type_name = 'vol_type_' + type_id
self.assertEqual(type_name, res_dict['volume_type']['name'])
def test_volume_types_show_not_found(self):
self.stubs.Set(volume_types, 'get_volume_type',
return_volume_types_get_volume_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/777')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, '777')
def test_get_default(self):
self.stubs.Set(volume_types, 'get_default_volume_type',
return_volume_types_get_default)
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
req.method = 'GET'
res_dict = self.controller.show(req, 'default')
self.assertEqual(1, len(res_dict))
self.assertEqual('vol_type_1', res_dict['volume_type']['name'])
self.assertEqual('vol_type_desc_1',
res_dict['volume_type']['description'])
def test_get_default_not_found(self):
self.stubs.Set(volume_types, 'get_default_volume_type',
return_volume_types_get_default_not_found)
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
req.method = 'GET'
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, req, 'default')
def test_view_builder_show(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_volume_type = dict(
name='new_type',
description='new_type_desc',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
deleted_at=None,
id=42,
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.show(request, raw_volume_type)
self.assertIn('volume_type', output)
expected_volume_type = dict(
name='new_type',
description='new_type_desc',
extra_specs={},
id=42,
)
self.assertDictMatch(output['volume_type'], expected_volume_type)
def test_view_builder_list(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_volume_types = []
for i in range(0, 10):
raw_volume_types.append(
dict(
name='new_type',
description='new_type_desc',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
deleted_at=None,
id=42 + i
)
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.index(request, raw_volume_types)
self.assertIn('volume_types', output)
for i in range(0, 10):
expected_volume_type = dict(
name='new_type',
description='new_type_desc',
extra_specs={},
id=42 + i
)
self.assertDictMatch(output['volume_types'][i],
expected_volume_type)
class VolumeTypesSerializerTest(test.TestCase):
def _verify_volume_type(self, vtype, tree):
self.assertEqual('volume_type', tree.tag)
self.assertEqual(vtype['name'], tree.get('name'))
self.assertEqual(vtype['description'], tree.get('description'))
self.assertEqual(str(vtype['id']), tree.get('id'))
self.assertEqual(1, len(tree))
extra_specs = tree[0]
self.assertEqual('extra_specs', extra_specs.tag)
seen = set(vtype['extra_specs'].keys())
for child in extra_specs:
self.assertIn(child.tag, seen)
self.assertEqual(vtype['extra_specs'][child.tag], child.text)
seen.remove(child.tag)
self.assertEqual(len(seen), 0)
def test_index_serializer(self):
serializer = types.VolumeTypesTemplate()
# Just getting some input data
vtypes = return_volume_types_get_all_types(None)
text = serializer.serialize({'volume_types': vtypes.values()})
tree = etree.fromstring(text)
self.assertEqual('volume_types', tree.tag)
self.assertEqual(len(vtypes), len(tree))
for child in tree:
name = child.get('name')
self.assertIn(name, vtypes)
self._verify_volume_type(vtypes[name], child)
def test_voltype_serializer(self):
serializer = types.VolumeTypeTemplate()
vtype = stub_volume_type(1)
text = serializer.serialize(dict(volume_type=vtype))
tree = etree.fromstring(text)
self._verify_volume_type(vtype, tree)
|
|
"""
This script builds the design matrix for our linear regression.
We explore the influence of linear and quadratic drifts on the model
performance.
Script for the raw data.
Run with:
python noise-pca_script.py
from this directory
"""
from __future__ import print_function, division
import sys, os, pdb
from scipy import ndimage
from scipy.ndimage import gaussian_filter
from matplotlib import colors
from os.path import splitext
from scipy.stats import t as t_dist
import numpy as np
import numpy.linalg as npl
import matplotlib.pyplot as plt
import nibabel as nib
import scipy
import pprint as pp
import json
# Specify the path for functions
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
sys.path.append(os.path.join(os.path.dirname(__file__), "./"))
from smoothing import *
from diagnostics import *
from glm import *
from plot_mosaic import *
from mask_filtered_data import *
# Locate the paths
project_path = '../../../'
data_path = project_path+'data/ds005/'
path_dict = {'data_filtered':{
'type' : 'filtered',
'feat' : '.feat/',
'bold_img_name' : 'filtered_func_data_mni.nii.gz',
'run_path' : 'model/model001/'
},
'data_original':{
'type' : '',
'feat': '',
'bold_img_name' : 'bold.nii.gz',
'run_path' : 'BOLD/'
}}
# TODO: uncomment for final version
#subject_list = [str(i) for i in range(1,17)]
#run_list = [str(i) for i in range(1,4)]
# Run only for subject 1 and 5 - run 1
run_list = [str(i) for i in range(1,2)]
subject_list = ['1', '5']
d_path = path_dict['data_original']  # choose 'data_original' or 'data_filtered'
images_paths = [('ds005' + '_sub' + s.zfill(3) + '_t1r' + r, \
data_path + 'sub%s/'%(s.zfill(3)) + d_path['run_path'] \
+ 'task001_run%s%s/%s' %(r.zfill(3),d_path['feat'],\
d_path['bold_img_name'])) \
for r in run_list \
for s in subject_list]
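# Each entry of images_paths is a (name, path) pair, e.g.
# ('ds005_sub001_t1r1', data_path + 'sub001/BOLD/task001_run001/bold.nii.gz')
# for the original (unfiltered) layout selected above.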
# set gray colormap and nearest neighbor interpolation by default
plt.rcParams['image.cmap'] = 'gray'
plt.rcParams['image.interpolation'] = 'nearest'
# Mask
# To be used with the normal data
thres = 375 #From analysis of the histograms
# To be used with the filtered data
mask_path = project_path+'data/mni_icbm152_t1_tal_nlin_asym_09c_mask_2mm.nii'
sm = ''
#sm='not_smooth/'
project_path = project_path + sm
# Create the needed directories if they do not exist
dirs = [project_path+'fig/',\
project_path+'fig/BOLD',\
project_path+'fig/drifts',\
project_path+'fig/pca',\
project_path+'fig/pca/projections/',\
project_path+'fig/linear_model/mosaic',\
project_path+'fig/linear_model/mosaic/middle_slice',\
project_path+'txt_output/',\
project_path+'txt_output/MRSS/',\
project_path+'txt_output/pca/',\
project_path+'txt_output/drifts/']
for d in dirs:
if not os.path.exists(d):
os.makedirs(d)
print("Starting noise-pca for the raw data analysis\n")
for image_path in images_paths:
name = image_path[0]
if d_path['type']=='filtered':
in_brain_img = nib.load('../../../'+
'data/ds005/sub001/model/model001/task001_run001.feat/'\
+ 'masked_filtered_func_data_mni.nii.gz')
# Image shape (91, 109, 91, 240)
#in_brain_img = make_mask_filtered_data(image_path[1],mask_path)
data_int = in_brain_img.get_data()
data = data_int.astype(float)
mean_data = np.mean(data, axis=-1)
in_brain_mask = (mean_data - 0.0) < 0.01
Transpose = False
else:
img = nib.load(image_path[1])
data_int = img.get_data()
data = data_int.astype(float)
mean_data = np.mean(data, axis=-1)
in_brain_mask = mean_data > thres
Transpose = True
# Smoothing with Gaussian filter
smooth_data = smoothing(data,1,range(data.shape[-1]))
# Selecting the voxels in the brain
in_brain_tcs = smooth_data[in_brain_mask, :]
#in_brain_tcs = data[in_brain_mask, :]
vol_shape = data.shape[:-1]
# Plotting the voxels in the brain
plt.imshow(plot_mosaic(mean_data, transpose=Transpose), cmap='gray', alpha=1)
plt.colorbar()
plt.contour(plot_mosaic(in_brain_mask, transpose=Transpose),colors='blue')
plt.title('In brain voxel mean values' + '\n' + (d_path['type'] + str(name)))
plt.savefig(project_path+'fig/BOLD/%s_mean_voxels_countour.png'\
%(d_path['type'] + str(name)))
#plt.show()
plt.clf()
# Convolution with 1 to 4 conditions
convolved = np.zeros((240,5))
for i in range(1,5):
#convolved = np.loadtxt(\
# '../../../txt_output/conv_normal/%s_conv_00%s_canonical.txt'\
# %(str(name),str(i)))
convolved[:,i] = np.loadtxt(\
'../../../txt_output/conv_high_res/%s_conv_00%s_high_res.txt'\
%(str(name),str(i)))
reg_str = ['Intercept','Task', 'Gain', 'Loss', 'Distance', 'Linear Drift',\
'Quadratic drift', 'PC#1', 'PC#2', 'PC#3', 'PC#4']
# Create design matrix X - Including drifts
    P = 7  # number of regressors in X, including the intercept column
n_trs = data.shape[-1]
X = np.ones((n_trs, P))
for i in range(1,5):
X[:,i] = convolved[:,i]
linear_drift = np.linspace(-1, 1, n_trs)
X[:,5] = linear_drift
quadratic_drift = linear_drift ** 2
quadratic_drift -= np.mean(quadratic_drift)
X[:,6] = quadratic_drift
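    # Columns of X at this point, matching reg_str: intercept, the four
    # convolved task regressors (Task, Gain, Loss, Distance), a linear drift
    # and a demeaned quadratic drift.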
# Save the design matrix
np.savetxt(project_path+\
'txt_output/drifts/%s_design_matrix_with_drift.txt'\
%(d_path['type'] + str(name)), X)
# Linear Model - Including drifts
Y = in_brain_tcs.T
betas = npl.pinv(X).dot(Y)
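    # npl.pinv(X).dot(Y) is the ordinary least-squares fit: betas has shape
    # (P, n_voxels), one column of coefficients per in-brain voxel time course.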
# Save the betas for the linear model including drifts
np.savetxt(project_path+\
'txt_output/drifts/%s_betas_with_drift.txt'%(d_path['type'] + str(name)), betas)
betas_vols = np.zeros(vol_shape + (P,))
betas_vols[in_brain_mask] = betas.T
# Plot
# Set regions outside mask as missing with np.nan
mean_data[~in_brain_mask] = np.nan
betas_vols[~in_brain_mask] = np.nan
nice_cmap_values = np.loadtxt('actc.txt')
nice_cmap = colors.ListedColormap(nice_cmap_values, 'actc')
# Plot each slice on the 3rd dimension of the image in a mosaic
for k in range(1,P):
plt.imshow(plot_mosaic(mean_data, transpose=Transpose), cmap='gray', alpha=1)
#plt.imshow(plot_mosaic(betas_vols[...,k], transpose=Transpose), cmap='gray', alpha=1)
plt.imshow(plot_mosaic(betas_vols[...,k], transpose=Transpose), cmap=nice_cmap, alpha=1)
plt.colorbar()
plt.title('Beta (with drift) values for brain voxel related to ' \
+ str(reg_str[k]) + '\n' + d_path['type'] + str(name))
plt.savefig(project_path+'fig/linear_model/mosaic/%s_withdrift_%s'\
%(d_path['type'] + str(name), str(reg_str[k]))+'.png')
plt.close()
#plt.show()
plt.clf()
#Show the middle slice only
plt.imshow(betas_vols[:, :, 18, k], cmap='gray', alpha=0.5)
plt.colorbar()
plt.title('In brain voxel - Slice 18 Projection on %s\n%s'\
%(str(reg_str[k]), d_path['type'] + str(name)))
plt.savefig(\
project_path+'fig/linear_model/mosaic/middle_slice/%s_withdrift_middleslice_%s'\
%(d_path['type'] + str(name), str(k))+'.png')
#plt.show()
plt.clf()
plt.close()
# PCA Analysis
Y_demeaned = Y - np.mean(Y, axis=1).reshape([-1, 1])
unscaled_cov = Y_demeaned.dot(Y_demeaned.T)
U, S, V = npl.svd(unscaled_cov)
projections = U.T.dot(Y_demeaned)
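    # PCA via SVD of the time-by-time (unscaled) covariance matrix: the columns
    # of U are the principal-component time courses, S their unscaled variances,
    # and `projections` the projection of every voxel time course onto each
    # component.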
projection_vols = np.zeros(data.shape)
projection_vols[in_brain_mask, :] = projections.T
# Plot the projection of the data on the 5 first principal component
# from SVD
for i in range(1,5):
plt.plot(U[:, i])
plt.title('U' + str(i) + ' vector from SVD \n' + str(name))
plt.imshow(projection_vols[:, :, 18, i])
plt.colorbar()
plt.title('PCA - 18th slice projection on PC#' + str(i) + ' from SVD \n ' +\
d_path['type'] + str(name))
plt.savefig(project_path+'fig/pca/projections/%s_PC#%s.png' \
%((d_path['type'] + str(name),str(i))))
#plt.show()
plt.clf()
plt.close()
# Variance Explained analysis
s = []
#S is diag -> trace = sum of the elements of S
for i in S:
s.append(i/np.sum(S))
np.savetxt(project_path+\
'txt_output/pca/%s_variance_explained' % (d_path['type'] + str(name)) +\
'.txt', np.array(s[:40]))
ind = np.arange(len(s[1:40]))
plt.bar(ind, s[1:40], width=0.5)
plt.xlabel('Principal Components indices')
plt.ylabel('Explained variance in percent')
plt.title('Variance explained graph \n' + (d_path['type'] + str(name)))
plt.savefig(project_path+\
'fig/pca/%s_variance_explained.png' %(d_path['type'] + str(name)))
#plt.show()
plt.close()
# Linear Model - including PCs from PCA analysis
PC = 3 # Number of PCs to include in the design matrix
P_pca = P + PC
X_pca = np.ones((n_trs, P_pca))
for i in range(1,5):
X_pca[:,i] = convolved[:,i]
linear_drift = np.linspace(-1, 1, n_trs)
X_pca[:,5] = linear_drift
quadratic_drift = linear_drift ** 2
quadratic_drift -= np.mean(quadratic_drift)
X_pca[:,6] = quadratic_drift
for i in range(3):
X_pca[:,7+i] = U[:, i]
# Save the design matrix - with PCs
np.savetxt(project_path+'txt_output/pca/%s_design_matrix_pca.txt'\
%(d_path['type'] + str(name)), X_pca)
#plt.imshow(X_pca, aspect=0.25)
B_pca = npl.pinv(X_pca).dot(Y)
np.savetxt(project_path+'txt_output/pca/%s_betas_pca.txt'\
%(d_path['type'] + str(name)), B_pca)
b_pca_vols = np.zeros(vol_shape + (P_pca,))
b_pca_vols[in_brain_mask, :] = B_pca.T
# Save betas as nii files
# Plot - with PCs
# Set regions outside mask as missing with np.nan
mean_data[~in_brain_mask] = np.nan
b_pca_vols[~in_brain_mask] = np.nan
# Plot each slice on the 3rd dimension of the image in a mosaic
for k in range(1,P_pca):
fig = plt.figure(figsize = (8, 5))
#plt.imshow(plot_mosaic(b_pca_vols[...,k], transpose=Transpose), cmap='gray', alpha=0.5)
plt.imshow(plot_mosaic(mean_data, transpose=Transpose), cmap='gray', alpha=1)
plt.imshow(plot_mosaic(b_pca_vols[...,k], transpose=Transpose), cmap=nice_cmap, alpha=1)
plt.colorbar()
plt.title('Beta (with PCA) values for brain voxel related to ' \
+ str(reg_str[k]) + '\n' + d_path['type'] + str(name))
plt.savefig(project_path+'fig/linear_model/mosaic/%s_withPCA_%s'\
%(d_path['type'] + str(name), str(reg_str[k]))+'.png')
#plt.show()
plt.close()
#Show the middle slice only
plt.imshow(b_pca_vols[:, :, 18, k], cmap='gray', alpha=0.5)
plt.colorbar()
plt.title('In brain voxel model - Slice 18 \n' \
'Projection on X%s \n %s'\
%(str(reg_str[k]),d_path['type'] + str(name)))
plt.savefig(\
project_path+\
'fig/linear_model/mosaic/middle_slice/%s_withPCA_middle_slice_%s'\
%(d_path['type'] + str(name), str(k))+'.png')
#plt.show()
plt.clf()
plt.close()
# Residuals
MRSS_dict = {}
MRSS_dict['ds005' + d_path['type']] = {}
MRSS_dict['ds005' + d_path['type']]['drifts'] = {}
MRSS_dict['ds005' + d_path['type']]['pca'] = {}
for z in MRSS_dict['ds005' + d_path['type']]:
MRSS_dict['ds005' + d_path['type']][z]['MRSS'] = []
residuals = Y - X.dot(betas)
df = X.shape[0] - npl.matrix_rank(X)
MRSS = np.sum(residuals ** 2 , axis=0) / df
residuals_pca = Y - X_pca.dot(B_pca)
df_pca = X_pca.shape[0] - npl.matrix_rank(X_pca)
MRSS_pca = np.sum(residuals_pca ** 2 , axis=0) / df_pca
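    # MRSS = residual sum of squares / residual degrees of freedom (number of
    # time points minus the rank of the design matrix); comparing the mean MRSS
    # of the drift-only and drift+PCA models shows which design fits better.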
MRSS_dict['ds005' + d_path['type']]['drifts']['mean_MRSS'] = np.mean(MRSS)
MRSS_dict['ds005' + d_path['type']]['pca']['mean_MRSS'] = np.mean(MRSS_pca)
# Save the mean MRSS values to compare the performance
# of the design matrices
for design_matrix, beta, mrss, name in \
[(X, betas, MRSS, 'drifts'), (X_pca, B_pca, MRSS_pca, 'pca')]:
MRSS_dict['ds005' + d_path['type']][name]['p-values'] = []
MRSS_dict['ds005' + d_path['type']][name]['t-test'] = []
with open(project_path+'txt_output/MRSS/ds005%s_MRSS.json'\
%(d_path['type']), 'w') as file_out:
json.dump(MRSS_dict, file_out)
# SE = np.zeros(beta.shape)
# for i in range(design_matrix.shape[-1]):
# c = np.zeros(design_matrix.shape[-1])
# c[i]=1
# c = np.atleast_2d(c).T
# SE[i,:]= np.sqrt(\
# mrss* c.T.dot(npl.pinv(design_matrix.T.dot(design_matrix)).dot(c)))
# zeros = np.where(SE==0)
# SE[zeros] = 1
# t = beta / SE
# t[:,zeros] = 0
# # Get p value for t value using CDF of t distribution
# ltp = t_dist.cdf(abs(t), df)
# p = 1 - ltp # upper tail
# t_brain = t[in_brain_mask]
# p_brain = p[in_brain_mask]
#
# # Save 3D data in .nii files
# for k in range(1,4):
#     t_nib = nib.Nifti1Image(t_brain[..., k], affine)
#     nib.save(t_nib, project_path+'txt_output/%s/%s_t-test_%s.nii.gz'\
#         %(name, d_path['type'] + str(name),str(reg_str[k])))
#     p_nib = nib.Nifti1Image(p_brain[..., k], affine)
#     nib.save(p_nib, project_path+'txt_output/%s/%s_p-values_%s.nii.gz'\
#         %(name, d_path['type'] + str(name),str(reg_str[k])))
# pdb.set_trace()
# pdb.set_trace()
print("======================================")
print("\n Noise and PCA analysis done")
print("Design Matrix including drift terms stored in project_epsilon/txt_output/drifts/ \n\n")
print("Design Matrix including PCs terms stored in project_epsilon/txt_output/pca/\n\n")
print("Mean MRSS models results in project_epsilon/txt_output/MRSS/ds005_MRSS.json\n\n")
|
|
"""
Render the examples to images and add them to the documentation.
"""
# Standard library imports
import glob
import os
import shutil
import token, tokenize
import textwrap
import itertools
# Enthought imports
from mayavi import mlab
# A global counter, for substitutions.
global_counter = itertools.count()
EXAMPLE_DIR = '../../examples/mayavi'
def is_mlab_example(filename):
tokens = tokenize.generate_tokens(open(filename).readline)
code_only = ''.join([tok_content
for tok_type, tok_content, _, _, _ in tokens
if not token.tok_name[tok_type] in ('COMMENT',
'STRING')])
return ('mlab.show()' in code_only)
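# Hedged usage sketch (illustrative only): is_mlab_example() tokenizes the file
# and drops comments and strings before searching for 'mlab.show()', so a file
# that merely mentions mlab.show() in its docstring is not picked up. A caller
# could filter a directory of examples like this:
def _list_mlab_examples(directory):
    """Illustrative only: return the mlab example scripts found in ``directory``."""
    return [name for name in glob.glob(os.path.join(directory, '*.py'))
            if is_mlab_example(name)]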
def run_mlab_file(filename, image_file):
## XXX: Monkey-patch mlab.show, so that we keep control of the
    ## mainloop
old_show = mlab.show
def my_show(func=None):
pass
mlab.show = my_show
mlab.clf()
e = mlab.get_engine()
e.close_scene(mlab.gcf())
exec(
compile(open(filename).read(), filename, 'exec'),
{'__name__': '__main__'}
)
mlab.savefig(image_file)
size = mlab.gcf().scene.get_size()
for scene in e.scenes:
e.close_scene(scene)
mlab.show = old_show
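# Hedged aside (illustrative only, not used by the build): run_mlab_file()
# restores mlab.show manually, so an exception raised by an example would
# leave the patched no-op in place. The same monkey-patch pattern wrapped in
# try/finally guarantees restoration:
def _call_with_show_disabled(callable_, *args, **kwargs):
    """Illustrative only: call ``callable_`` with mlab.show() turned into a no-op."""
    old_show = mlab.show
    mlab.show = lambda func=None: None
    try:
        return callable_(*args, **kwargs)
    finally:
        mlab.show = old_show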
def extract_docstring(filename):
# Extract a module-level docstring, if any
lines = open(filename).readlines()
start_row = 0
if lines[0].startswith('#!'):
lines.pop(0)
start_row = 1
docstring = ''
first_par = ''
li = lines.__iter__()
li_next = li.__next__ if hasattr(li, '__next__') else li.next
tokens = tokenize.generate_tokens(li_next)
for tok_type, tok_content, _, (erow, _), _ in tokens:
tok_type = token.tok_name[tok_type]
if tok_type in ('NEWLINE', 'COMMENT', 'NL', 'INDENT', 'DEDENT'):
continue
elif tok_type == 'STRING':
docstring = eval(tok_content)
# If the docstring is formatted with several paragraphs, extract
# the first one:
paragraphs = '\n'.join(line.rstrip()
for line in docstring.split('\n')).split('\n\n')
if len(paragraphs) > 0:
first_par = paragraphs[0]
break
return docstring, first_par, erow+1+start_row
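# Hedged usage sketch (illustrative only): extract_docstring() returns the full
# module docstring, its first paragraph (used as the gallery blurb), and the
# source line just past the docstring; that last value feeds the ``:lines:``
# option of literalinclude so the docstring is not repeated above the listing.
def _describe_example(filename):
    """Illustrative only: summarise one example file for the gallery."""
    docstring, first_par, end_row = extract_docstring(filename)
    return {'blurb': first_par, 'code_starts_at_line': end_row}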
################################################################################
# class `ExampleLister`
################################################################################
class ExampleLister(object):
""" Builds a rst-formatted list of examples from a list of files.
"""
# Header template, for the example gallery.
header_tpl = """
%(title)s
--------------------------------------------------
%(intro)s
.. toctree::
:hidden:
%(toctree)s
.. A comment to split paragraphs
"""
# Template used to create the example rst file
example_rst_file_tpl = """
.. _example_%(short_file_name)s:
%(title)s example
--------------------------------------------
%(docstring)s
**Python source code:** :download:`%(short_file_name)s.py`
.. literalinclude:: %(short_file_name)s.py
:lines: %(end_row)s-
"""
# The title of the corresponding section in the example gallery.
title = ''
# The introductory text of the subsection
    intro = ''
def __init__(self, **kwargs):
# Cheap unique hash for substitutions
self._unique_hash = next(global_counter)
for name, value in kwargs.items():
setattr(self, name, value)
def render_all(self, stream, file_list):
""" Render the example list to the given
stream (file-like object).
"""
self._stream = stream
files_details = self.render_header(file_list)
for index, file_details in enumerate(files_details):
filename, short_file_name, short_desc, title, docstring, \
end_row = file_details
self.render_example_page(open(os.path.join(self.out_dir,
'example_%s.rst') %
short_file_name, 'w'), index, file_details)
self.gallery_entry(index, file_details)
del self._stream
def render_header(self, filenames):
files_details = list()
toctree = list()
for filename in filenames:
docstring, short_desc, end_row = extract_docstring(filename)
short_file_name = os.path.basename(filename)[:-3]
title = short_file_name.replace('_', ' ')
title = title[0].upper() + title[1:]
shutil.copy(filename,
os.path.join(self.out_dir, os.path.basename(filename)))
toctree.append(""" example_%s.rst""" % short_file_name)
files_details.append((filename, short_file_name, short_desc,
title, docstring, end_row))
toctree = '\n'.join(toctree)
title = self.title
intro = self.intro
self._stream.write(self.header_tpl % locals())
return files_details
def render_example_page(self, stream, index, file_details):
""" Render an individual example page.
"""
filename, short_file_name, short_desc, title, docstring, end_row \
= file_details
stream.write(self.example_rst_file_tpl % locals())
def gallery_entry(self, index, file_details):
""" Write the entry in the main example gallery file
corresponding to the given file details.
"""
filename, short_file_name, short_desc, title, docstring, \
end_row = file_details
self._stream.write(
"\n* :ref:`example_%(short_file_name)s`\n" % locals()
)
        short_desc = short_desc.strip()
for line in short_desc.split('\n'):
self._stream.write(4*" " + line.lstrip() + "\n")
################################################################################
# class `ImagesExampleLister`
################################################################################
class ImagesExampleLister(ExampleLister):
""" ExampleLister that looks for thumbnails.
"""
# Relative directory to images
images_dir = 'mayavi/images/'
def render_all(self, stream, file_list):
self._stream = stream
files_details = self.render_header(file_list)
unique_hash = self._unique_hash
for index, (filename, short_file_name, _, _, _, _) in \
enumerate(files_details):
image_file = os.path.join(self.images_dir,
'example_%(short_file_name)s.jpg' % locals())
if os.path.exists(image_file):
short_image_file = os.path.join(*(
image_file.split(os.sep)[1:]))
self._stream.write("""
.. |%(unique_hash)02i%(index)02i| image:: ../%(short_image_file)s
:width: 150
""" % locals())
else:
self._stream.write("""
.. |%(unique_hash)02i%(index)02i| raw:: html
<br/>
""" % locals())
self._stream.write(2*('\n' + 7*"=" + " " + 45*"="))
for index, file_details in enumerate(files_details):
filename, short_file_name, short_desc, title, docstring, end_row = \
file_details
self.render_example_page(open(os.path.join(self.out_dir,
'example_%s.rst') %
short_file_name, 'w'), index, file_details)
self.gallery_entry(index, file_details)
self._stream.write("\n"+7*"=" + " " + 45*"=" + '\n')
del self._stream
def render_example_page(self, stream, index, file_details):
""" Hijack this method to, optionally, render images.
"""
# Jump one step up, and do not call ImagesExampleLister
filename, short_file_name, short_desc, title, docstring, end_row = \
file_details
image_file = os.path.join(self.images_dir,
'example_%(short_file_name)s.jpg' % locals())
if os.path.exists(image_file):
docstring += """
.. image:: ../%s
:align: center
""" % os.path.join(*(image_file.split(os.sep)[1:]))
file_details = \
filename, short_file_name, short_desc, title, docstring, end_row
stream.write(self.example_rst_file_tpl % locals())
def gallery_entry(self, index, file_details):
filename, short_file_name, short_desc, title, docstring, end_row = \
file_details
short_desc = textwrap.wrap(short_desc, width=40)
unique_hash = self._unique_hash
self._stream.write(
("\n|%(unique_hash)02i%(index)02i|" % locals()).ljust(9) +
":ref:`example_%(short_file_name)s`\n" % locals()
)
for line in short_desc:
self._stream.write(9*" " + line.lstrip() + "\n")
################################################################################
# class `MlabExampleLister`
################################################################################
class MlabExampleLister(ImagesExampleLister):
header_tpl = """
Mlab functions gallery
----------------------
These are the examples of the mlab plotting functions. They are
copied out here for convenience. Please refer to the corresponding
section of the user guide for more information (
:ref:`mlab_plotting_functions`).
.. currentmodule:: mayavi.mlab
+------------------+-------------------------+---------------------+
| :func:`plot3d` | :func:`points3d` | :func:`imshow` |
| | | |
| |plot3d.jpg| | |points3d.jpg| | |imshow.jpg| |
+------------------+-------------------------+---------------------+
| :func:`surf` | :func:`contour_surf` | :func:`mesh` |
| | | |
| |surf.jpg| | |contour_surf.jpg| | |mesh.jpg| |
+------------------+-------------------------+---------------------+
| :func:`barchart` | :func:`triangular_mesh` | :func:`contour3d` |
| | | |
| |barchart.jpg| | |triangular_mesh.jpg| | |contour3d.jpg| |
+------------------+-------------------------+---------------------+
| :func:`quiver3d` | :func:`flow` | |
| | | |
| |quiver3d.jpg| | |flow.jpg| | |
+------------------+-------------------------+---------------------+
.. |plot3d.jpg| image:: ../generated_images/enthought_mayavi_mlab_plot3d.jpg
:width: 150
.. |points3d.jpg| image:: ../generated_images/enthought_mayavi_mlab_points3d.jpg
:width: 150
.. |imshow.jpg| image:: ../generated_images/enthought_mayavi_mlab_imshow.jpg
:width: 150
.. |contour_surf.jpg| image:: ../generated_images/enthought_mayavi_mlab_contour_surf.jpg
:width: 150
.. |triangular_mesh.jpg| image:: ../generated_images/enthought_mayavi_mlab_triangular_mesh.jpg
:width: 150
.. |surf.jpg| image:: ../generated_images/enthought_mayavi_mlab_surf.jpg
:width: 150
.. |mesh.jpg| image:: ../generated_images/enthought_mayavi_mlab_mesh.jpg
:width: 150
.. |barchart.jpg| image:: ../generated_images/enthought_mayavi_mlab_barchart.jpg
:width: 150
.. |contour3d.jpg| image:: ../generated_images/enthought_mayavi_mlab_contour3d.jpg
:width: 150
.. |quiver3d.jpg| image:: ../generated_images/enthought_mayavi_mlab_quiver3d.jpg
:width: 150
.. |flow.jpg| image:: ../generated_images/enthought_mayavi_mlab_flow.jpg
:width: 150
Advanced mlab examples
-----------------------
.. toctree::
:hidden:
%(toctree)s
"""
example_rst_file_tpl = """
.. _example_%(short_file_name)s:
%(title)s example
--------------------------------------------------------------------
%(docstring)s
**Python source code:** :download:`%(short_file_name)s.py`
.. literalinclude:: %(short_file_name)s.py
:lines: %(end_row)s-
"""
render_images = False
images_dir = 'mayavi/generated_images'
def render_example_page(self, stream, index, file_details):
""" Hijack this method to, optionally, render images.
"""
filename, short_file_name, short_desc, title, docstring, end_row = \
file_details
if self.render_images:
print("Generating images for %s" % filename)
image_file = os.path.join(self.images_dir, 'example_%s.jpg' \
% short_file_name)
run_mlab_file(filename, image_file=image_file)
ImagesExampleLister.render_example_page(self, stream,
index, file_details)
################################################################################
# Main entry point
def render_examples(render_images=False, out_dir='mayavi/auto'):
if not os.path.exists(out_dir):
os.makedirs(out_dir)
example_gallery_file = open(os.path.join(out_dir, 'examples.rst'), 'w')
example_gallery_file.write("""
.. _example_gallery:
Example gallery
=================
""")
##########################################################################
# Mlab examples
example_files = [ filename
for filename in glob.glob(os.path.join(EXAMPLE_DIR,
'mlab', '*.py'))
if is_mlab_example(filename)]
# Sort by file length (gives a measure of the complexity of the
# example)
example_files.sort(key=lambda name: len(open(name, 'r').readlines()))
mlab_example_lister = MlabExampleLister(render_images=render_images,
out_dir=out_dir,
images_dir='mayavi/generated_images')
if render_images:
pass
        # XXX: Add logic to deal with rerendering examples cleverly
mlab_example_lister.render_all(example_gallery_file, example_files)
##########################################################################
# Interactive application examples
example_files = [ filename
for filename in glob.glob(os.path.join(EXAMPLE_DIR,
'interactive', '*.py'))]
# Sort by file length (gives a measure of the complexity of the
# example)
example_files.sort(key=lambda name: len(open(name, 'r').readlines()))
example_lister = ImagesExampleLister(
title="Interactive examples",
out_dir=out_dir,
intro="""
Examples showing how to use the interactive features of Mayavi, either
via the mayavi2 application, or via specially-crafted dialogs and
applications.
""")
example_lister.render_all(example_gallery_file, example_files)
##########################################################################
# Advanced visualization examples
example_files = [ filename
for filename in glob.glob(os.path.join(EXAMPLE_DIR,
'advanced_visualization', '*.py'))]
# Sort by file length (gives a measure of the complexity of the
# example)
example_files.sort(key=lambda name: len(open(name, 'r').readlines()))
example_lister = ExampleLister(
title="Advanced visualization examples",
out_dir=out_dir,
intro="""
Data visualization using the core Mayavi API, object-oriented, and with
more fine control than mlab.
""")
example_lister.render_all(example_gallery_file, example_files)
##########################################################################
# Data interaction examples
example_files = [ filename
for filename in glob.glob(os.path.join(EXAMPLE_DIR,
'data_interaction', '*.py'))]
# Sort by file length (gives a measure of the complexity of the
# example)
example_files.sort(key=lambda name: len(open(name, 'r').readlines()))
example_lister = ExampleLister(
title="Data interaction examples",
out_dir=out_dir,
intro="""
Examples showing how you can query and interact with the data.
""")
example_lister.render_all(example_gallery_file, example_files)
##########################################################################
# The remaining files
example_files = [ filename
for filename in glob.glob(os.path.join(EXAMPLE_DIR,
'*.py'))]
# Sort by file length (gives a measure of the complexity of the
# example)
example_files.sort(key=lambda name: len(open(name, 'r').readlines()))
example_lister = ExampleLister(title="Misc examples",
out_dir=out_dir)
example_lister.render_all(example_gallery_file, example_files)
if __name__ == '__main__':
render_examples()
import shutil
shutil.copyfile('../CHANGES.txt', './mayavi/auto/changes.rst')
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import http.client
from oslo_serialization import jsonutils
from keystone.common.policies import grant as gp
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
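# Hedged sketch of the fixture pattern repeated throughout these tests: create
# the resources directly through the provider APIs, assign the bootstrapped
# reader role, then exercise the HTTP API as the persona under test. This
# helper mirrors the tests below; it is illustrative only and is not used by
# the test classes.
def _reader_grant_on_new_project(bootstrapper, domain_id):
    """Create a user and project in ``domain_id`` and grant the reader role."""
    user = PROVIDERS.identity_api.create_user(
        unit.new_user_ref(domain_id=domain_id)
    )
    project = PROVIDERS.resource_api.create_project(
        uuid.uuid4().hex, unit.new_project_ref(domain_id=domain_id)
    )
    PROVIDERS.assignment_api.create_grant(
        bootstrapper.reader_role_id, user_id=user['id'],
        project_id=project['id']
    )
    return user, project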
class _SystemUserGrantTests(object):
def test_can_list_grants_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
r = c.get(
'/v3/projects/%s/users/%s/roles' % (project['id'], user['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain['id']
)
with self.test_client() as c:
r = c.get(
'/v3/domains/%s/users/%s/roles' % (domain['id'], user['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
r = c.get(
'/v3/projects/%s/groups/%s/roles' % (
project['id'], group['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
r = c.get(
'/v3/domains/%s/groups/%s/roles' % (domain['id'], group['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_check_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
class _SystemMemberAndReaderGrantTests(object):
def test_cannot_create_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
class _DomainUserTests(object):
def test_can_list_grants_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
r = c.get(
'/v3/projects/%s/users/%s/roles' % (project['id'], user['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=self.domain_id
)
with self.test_client() as c:
r = c.get(
'/v3/domains/%s/users/%s/roles' % (self.domain_id, user['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
r = c.get(
'/v3/projects/%s/groups/%s/roles' % (
project['id'], group['id']),
headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_list_grants_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=self.domain_id
)
with self.test_client() as c:
r = c.get(
'/v3/domains/%s/groups/%s/roles' % (
self.domain_id, group['id']
), headers=self.headers
)
self.assertEqual(1, len(r.json['roles']))
def test_can_check_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=self.domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=self.domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles/%s' % (
self.domain_id, user['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_can_check_grant_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=self.domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles/%s' % (
self.domain_id, group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT
)
def test_cannot_list_grants_for_user_other_domain_on_project_own_domain(self): # noqa: E501
user_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles' % (project['id'], user['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_user_own_domain_on_project_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles' % (project['id'], user['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_user_own_domain_on_other_domain(self):
user_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles' % (domain_id, user['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_user_other_domain_on_own_domain(self):
user_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles' % (domain_id, user['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_group_other_domain_on_project_own_domain(self): # noqa: E501
group_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles' % (
project['id'], group['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_group_own_domain_on_project_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles' % (
project['id'], group['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_group_own_domain_on_other_domain(self):
group_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles' % (
domain_id, group['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_list_grants_for_group_other_domain_on_own_domain(self):
group_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles' % (
domain_id, group['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_other_domain_on_project_own_domain(self): # noqa: E501
user_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_own_domain_on_project_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=project_domain_id)
)
# NOTE(cmurphy) the grant for a domain-specific role cannot be created
# for a project in a different domain, so we don't try to create it,
# but we still need to test that checking the role results in a 403 and
# not a 404
with self.test_client() as c:
c.get(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'],
role['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_own_domain_on_other_domain(self):
user_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_other_domain_on_own_domain(self):
user_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex,
unit.new_role_ref(domain_id=role_domain_id))
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
# NOTE(cmurphy) the grant for a domain-specific role cannot be created
# for a project in a different domain, so we don't try to create it,
# but we still need to test that checking the role results in a 403 and
# not a 404
with self.test_client() as c:
c.get(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'],
role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_other_domain_on_project_own_domain(self): # noqa: E501
group_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'], group['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_own_domain_on_project_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'], group['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
        project_domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex,
unit.new_role_ref(domain_id=role_domain_id))
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id)
)
# NOTE(cmurphy) the grant for a domain-specific role cannot be created
# for a project in a different domain, so we don't try to create it,
# but we still need to test that checking the role results in a 403 and
# not a 404
with self.test_client() as c:
c.get(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'], group['id'],
role['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_own_domain_on_other_domain(self):
group_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_other_domain_on_own_domain(self):
group_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'],
self.bootstrapper.reader_role_id),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_check_grant_for_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
# NOTE(cmurphy) the grant for a domain-specific role cannot be created
# for a project in a different domain, so we don't try to create it,
# but we still need to test that checking the role results in a 403 and
# not a 404
with self.test_client() as c:
c.get(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'],
role['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_other_domain_on_project_own_domain(self): # noqa: E501
user_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_own_domain_on_project_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_other_domain_on_own_domain(self):
user_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_own_domain_on_other_domain(self):
user_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_other_domain_on_project_own_domain(self): # noqa: E501
group_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_own_domain_on_project_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
project_domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_other_domain_on_own_domain(self):
group_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_own_domain_on_other_domain(self):
group_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_other_domain_on_project_own_domain(self): # noqa: E501
user_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_own_domain_on_project_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_other_domain_on_own_domain(self):
user_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_own_domain_on_other_domain(self):
user_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
user_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id))
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=user_domain_id)
)
PROVIDERS.assignment_api.create_grant(
role['id'], user_id=user['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain_id, user['id'], role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_other_domain_on_project_own_domain(self): # noqa: E501
group_domain_id = CONF.identity.default_domain_id
project_domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_own_domain_on_project_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
project_domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=project_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_other_domain_on_own_domain(self):
group_domain_id = CONF.identity.default_domain_id
domain_id = self.domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_own_domain_on_other_domain(self):
group_domain_id = self.domain_id
domain_id = CONF.identity.default_domain_id
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501
group_domain_id = self.domain_id
domain_id = self.domain_id
role_domain_id = CONF.identity.default_domain_id
role = PROVIDERS.role_api.create_role(
uuid.uuid4().hex,
unit.new_role_ref(domain_id=role_domain_id))
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=group_domain_id)
)
PROVIDERS.assignment_api.create_grant(
role['id'], group_id=group['id'],
domain_id=domain_id
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain_id, group['id'], role['id']
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserGrantTests,
_SystemMemberAndReaderGrantTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_reader = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_reader
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.reader_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_reader['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserGrantTests,
_SystemMemberAndReaderGrantTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_member = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_member
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.member_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_member['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserGrantTests):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def test_can_create_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
class _DomainMemberAndReaderTests(object):
def test_cannot_create_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_create_grant_for_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_user_on_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(
domain_id=CONF.identity.default_domain_id
)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
def test_cannot_revoke_grant_from_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
class DomainReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainUserTests,
_DomainMemberAndReaderTests):
def setUp(self):
super(DomainReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_user = unit.new_user_ref(domain_id=self.domain_id)
self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=self.user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=domain_user['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class DomainMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainUserTests,
_DomainMemberAndReaderTests):
def setUp(self):
super(DomainMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_user = unit.new_user_ref(domain_id=self.domain_id)
self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.member_role_id, user_id=self.user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=domain_user['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class DomainAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainUserTests):
def setUp(self):
super(DomainAdminTests, self).setUp()
self.loadapp()
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
self.policy_file_name = self.policy_file.file_name
self.useFixture(
ksfixtures.Policy(
self.config_fixture, policy_file=self.policy_file_name
)
)
self._override_policy()
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_admin = unit.new_user_ref(domain_id=self.domain_id)
self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=self.user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.user_id,
password=domain_admin['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def _override_policy(self):
# TODO(lbragstad): Remove this once the deprecated policies in
# keystone.common.policies.grant have been removed. This is only
# here to make sure we test the new policies instead of the deprecated
# ones. Oslo.policy will OR deprecated policies with new policies to
# maintain compatibility and give operators a chance to update
# permissions or update policies without breaking users. This will
# cause these specific tests to fail since we're trying to correct this
# broken behavior with better scope checking.
with open(self.policy_file_name, 'w') as f:
overridden_policies = {
'identity:list_grants': gp.SYSTEM_READER_OR_DOMAIN_READER_LIST,
'identity:check_grant': gp.SYSTEM_READER_OR_DOMAIN_READER,
'identity:create_grant': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN,
'identity:revoke_grant': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN
}
f.write(jsonutils.dumps(overridden_policies))
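    # Illustrative shape of the file written above (added note): jsonutils
    # serializes a JSON mapping of policy names to oslo.policy rule strings,
    # roughly
    #
    #   {"identity:list_grants": "<system-or-domain-reader rule>",
    #    "identity:create_grant": "<system-or-domain-admin rule>", ...}
    #
    # so the tests below exercise only the new default policies rather than
    # the deprecated ones that oslo.policy would otherwise OR in.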
def test_can_create_grant_for_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_user_own_domain_on_own_domain(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/users/%s/roles/%s' % (
self.domain_id, user['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
with self.test_client() as c:
c.put(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_create_grant_for_group_own_domain_on_own_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
with self.test_client() as c:
c.put(
'/v3/domains/%s/groups/%s/roles/%s' % (
self.domain_id, group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_user_on_project(self):
user = PROVIDERS.identity_api.create_user(
unit.new_user_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, user_id=user['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/users/%s/roles/%s' % (
project['id'], user['id'], self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_can_revoke_grant_from_group_on_project(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=self.domain_id)
)
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id)
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
project_id=project['id']
)
with self.test_client() as c:
c.delete(
'/v3/projects/%s/groups/%s/roles/%s' % (
project['id'],
group['id'],
self.bootstrapper.reader_role_id
),
headers=self.headers
)
def test_cannot_revoke_grant_from_group_on_domain(self):
group = PROVIDERS.identity_api.create_group(
unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.reader_role_id, group_id=group['id'],
domain_id=domain['id']
)
with self.test_client() as c:
c.delete(
'/v3/domains/%s/groups/%s/roles/%s' % (
domain['id'], group['id'], self.bootstrapper.reader_role_id
),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN
)
|
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Various kinds of layout components.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from ..core.enums import Align, Location, SizingMode, SizingPolicy
from ..core.has_props import abstract
from ..core.properties import (
Auto,
Bool,
Color,
Dict,
Either,
Enum,
Float,
Instance,
Int,
List,
NonNegativeInt,
Seq,
String,
Struct,
Tuple,
)
from ..core.validation import error, warning
from ..core.validation.errors import MIN_PREFERRED_MAX_HEIGHT, MIN_PREFERRED_MAX_WIDTH
from ..core.validation.warnings import (
BOTH_CHILD_AND_ROOT,
EMPTY_LAYOUT,
FIXED_HEIGHT_POLICY,
FIXED_SIZING_MODE,
FIXED_WIDTH_POLICY,
)
from ..model import Model
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'Box',
'Column',
'GridBox',
'HTMLBox',
'LayoutDOM',
'Panel',
'Row',
'Spacer',
'Tabs',
'WidgetBox',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
@abstract
class LayoutDOM(Model):
""" The base class for layoutable components.
"""
disabled = Bool(False, help="""
Whether the widget will be disabled when rendered.
If ``True``, the widget will be greyed-out and not responsive to UI events.
""")
visible = Bool(True, help="""
Whether the component will be visible and a part of a layout.
""")
width = NonNegativeInt(default=None, help="""
The width of the component (in pixels).
This can be either fixed or preferred width, depending on width sizing policy.
""")
height = NonNegativeInt(default=None, help="""
The height of the component (in pixels).
This can be either fixed or preferred height, depending on height sizing policy.
""")
min_width = NonNegativeInt(default=None, help="""
Minimal width of the component (in pixels) if width is adjustable.
""")
min_height = NonNegativeInt(default=None, help="""
Minimal height of the component (in pixels) if height is adjustable.
""")
max_width = NonNegativeInt(default=None, help="""
    Maximal width of the component (in pixels) if width is adjustable.
""")
max_height = NonNegativeInt(default=None, help="""
    Maximal height of the component (in pixels) if height is adjustable.
""")
margin = Tuple(Int, Int, Int, Int, default=(0, 0, 0, 0), help="""
    Allows creating additional space around the component.
""").accepts(Tuple(Int, Int), lambda v_h: (v_h[0], v_h[1], v_h[0], v_h[1])) \
.accepts(Int, lambda m: (m, m, m, m))
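    # Added note (illustrative): the accepts() adapters above mean a single
    # int m is stored as (m, m, m, m) and a two-item (v, h) pair as
    # (v, h, v, h), so margin=10 and margin=(5, 20) are both valid spellings.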
width_policy = Either(Auto, Enum(SizingPolicy), default="auto", help="""
Describes how the component should maintain its width.
``"auto"``
Use component's preferred sizing policy.
``"fixed"``
Use exactly ``width`` pixels. Component will overflow if it can't fit in the
available horizontal space.
``"fit"``
Use component's preferred width (if set) and allow it to fit into the available
horizontal space within the minimum and maximum width bounds (if set). Component's
        width will neither be aggressively minimized nor maximized.
``"min"``
Use as little horizontal space as possible, not less than the minimum width (if set).
The starting point is the preferred width (if set). The width of the component may
shrink or grow depending on the parent layout, aspect management and other factors.
``"max"``
Use as much horizontal space as possible, not more than the maximum width (if set).
The starting point is the preferred width (if set). The width of the component may
shrink or grow depending on the parent layout, aspect management and other factors.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion. Prefer using ``sizing_mode`` if this level of control isn't
strictly necessary.
""")
height_policy = Either(Auto, Enum(SizingPolicy), default="auto", help="""
Describes how the component should maintain its height.
``"auto"``
Use component's preferred sizing policy.
``"fixed"``
Use exactly ``height`` pixels. Component will overflow if it can't fit in the
available vertical space.
``"fit"``
        Use component's preferred height (if set) and allow it to fit into the available
        vertical space within the minimum and maximum height bounds (if set). Component's
        height will neither be aggressively minimized nor maximized.
``"min"``
Use as little vertical space as possible, not less than the minimum height (if set).
The starting point is the preferred height (if set). The height of the component may
shrink or grow depending on the parent layout, aspect management and other factors.
``"max"``
Use as much vertical space as possible, not more than the maximum height (if set).
The starting point is the preferred height (if set). The height of the component may
shrink or grow depending on the parent layout, aspect management and other factors.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion. Prefer using ``sizing_mode`` if this level of control isn't
strictly necessary.
""")
aspect_ratio = Either(Enum("auto"), Float, default=None, help="""
Describes the proportional relationship between component's width and height.
This works if any of component's dimensions are flexible in size. If set to
a number, ``width / height = aspect_ratio`` relationship will be maintained.
Otherwise, if set to ``"auto"``, component's preferred width and height will
be used to determine the aspect (if not set, no aspect will be preserved).
""")
sizing_mode = Enum(SizingMode, default=None, help="""
How the component should size itself.
This is a high-level setting for maintaining width and height of the component. To
gain more fine grained control over sizing, use ``width_policy``, ``height_policy``
and ``aspect_ratio`` instead (those take precedence over ``sizing_mode``).
Possible scenarios:
``"fixed"``
Component is not responsive. It will retain its original width and height
regardless of any subsequent browser window resize events.
``"stretch_width"``
Component will responsively resize to stretch to the available width, without
maintaining any aspect ratio. The height of the component depends on the type
of the component and may be fixed or fit to component's contents.
``"stretch_height"``
Component will responsively resize to stretch to the available height, without
maintaining any aspect ratio. The width of the component depends on the type
of the component and may be fixed or fit to component's contents.
``"stretch_both"``
Component is completely responsive, independently in width and height, and
will occupy all the available horizontal and vertical space, even if this
changes the aspect ratio of the component.
``"scale_width"``
Component will responsively resize to stretch to the available width, while
maintaining the original or provided aspect ratio.
``"scale_height"``
Component will responsively resize to stretch to the available height, while
maintaining the original or provided aspect ratio.
``"scale_both"``
Component will responsively resize to both the available width and height, while
maintaining the original or provided aspect ratio.
""")
align = Either(Enum(Align), Tuple(Enum(Align), Enum(Align)), default="start", help="""
The alignment point within the parent container.
This property is useful only if this component is a child element of a layout
(e.g. a grid). Self alignment can be overridden by the parent container (e.g.
grid track align).
""")
background = Color(default=None, help="""
Background color of the component.
""")
# List in order for in-place changes to trigger changes, ref: https://github.com/bokeh/bokeh/issues/6841
css_classes = List(String, help="""
A list of CSS class names to add to this DOM element. Note: the class names are
simply added as-is, no other guarantees are provided.
It is also permissible to assign from tuples, however these are adapted -- the
property will always contain a list.
""").accepts(Seq(String), lambda x: list(x))
@warning(FIXED_SIZING_MODE)
def _check_fixed_sizing_mode(self):
if self.sizing_mode == "fixed" and (self.width is None or self.height is None):
return str(self)
@warning(FIXED_WIDTH_POLICY)
def _check_fixed_width_policy(self):
if self.width_policy == "fixed" and self.width is None:
return str(self)
@warning(FIXED_HEIGHT_POLICY)
def _check_fixed_height_policy(self):
if self.height_policy == "fixed" and self.height is None:
return str(self)
@error(MIN_PREFERRED_MAX_WIDTH)
def _min_preferred_max_width(self):
min_width = self.min_width if self.min_width is not None else 0
width = self.width if self.width is not None else min_width
max_width = self.max_width if self.max_width is not None else width
if not (min_width <= width <= max_width):
return str(self)
@error(MIN_PREFERRED_MAX_HEIGHT)
def _min_preferred_max_height(self):
min_height = self.min_height if self.min_height is not None else 0
height = self.height if self.height is not None else min_height
max_height = self.max_height if self.max_height is not None else height
if not (min_height <= height <= max_height):
return str(self)
@abstract
class HTMLBox(LayoutDOM):
    ''' A component whose size is determined by its HTML content.
'''
class Spacer(LayoutDOM):
''' A container for space used to fill an empty spot in a row or column.
'''
QuickTrackSizing = Either(Enum("auto", "min", "fit", "max"), Int)
TrackAlign = Either(Auto, Enum(Align))
RowSizing = Either(
QuickTrackSizing,
Struct(policy=Enum("auto", "min"), align=TrackAlign),
Struct(policy=Enum("fixed"), height=Int, align=TrackAlign),
Struct(policy=Enum("fit", "max"), flex=Float, align=TrackAlign))
ColSizing = Either(
QuickTrackSizing,
Struct(policy=Enum("auto", "min"), align=TrackAlign),
Struct(policy=Enum("fixed"), width=Int, align=TrackAlign),
Struct(policy=Enum("fit", "max"), flex=Float, align=TrackAlign))
IntOrString = Either(Int, String) # XXX: work around issue #8166
class GridBox(LayoutDOM):
children = List(Either(
Tuple(Instance(LayoutDOM), Int, Int),
Tuple(Instance(LayoutDOM), Int, Int, Int, Int)), default=[], help="""
A list of children with their associated position in the grid (row, column).
""")
rows = Either(QuickTrackSizing, Dict(IntOrString, RowSizing), default="auto", help="""
Describes how the grid should maintain its rows' heights.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion.
""")
cols = Either(QuickTrackSizing, Dict(IntOrString, ColSizing), default="auto", help="""
Describes how the grid should maintain its columns' widths.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion.
""")
spacing = Either(Int, Tuple(Int, Int), default=0, help="""
The gap between children (in pixels).
Either a number, if spacing is the same for both dimensions, or a pair
of numbers indicating spacing in the vertical and horizontal dimensions
respectively.
""")
@abstract
class Box(LayoutDOM):
''' Abstract base class for Row and Column. Do not use directly.
'''
def __init__(self, *args, **kwargs):
if len(args) > 0 and "children" in kwargs:
raise ValueError("'children' keyword cannot be used with positional arguments")
elif len(args) > 0:
kwargs["children"] = list(args)
super().__init__(**kwargs)
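    # Added note (illustrative): subclasses such as Row and Column therefore
    # accept children either positionally or via the keyword, but not both:
    #
    #     Row(Spacer(), Spacer())
    #     Row(children=[Spacer(), Spacer()])
    #     Row(Spacer(), children=[Spacer()])  # raises ValueError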
@warning(EMPTY_LAYOUT)
def _check_empty_layout(self):
from itertools import chain
if not list(chain(self.children)):
return str(self)
@warning(BOTH_CHILD_AND_ROOT)
def _check_child_is_also_root(self):
problems = []
for c in self.children:
if c.document is not None and c in c.document.roots:
problems.append(str(c))
if problems:
return ", ".join(problems)
else:
return None
children = List(Instance(LayoutDOM), help="""
The list of children, which can be other components including plots, rows, columns, and widgets.
""")
spacing = Int(default=0, help="""
The gap between children (in pixels).
""")
class Row(Box):
''' Lay out child components in a single horizontal row.
Children can be specified as positional arguments, as a single argument
that is a sequence, or using the ``children`` keyword argument.
'''
cols = Either(QuickTrackSizing, Dict(IntOrString, ColSizing), default="auto", help="""
Describes how the component should maintain its columns' widths.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion.
""")
class Column(Box):
    ''' Lay out child components in a single vertical column.
Children can be specified as positional arguments, as a single argument
that is a sequence, or using the ``children`` keyword argument.
'''
rows = Either(QuickTrackSizing, Dict(IntOrString, RowSizing), default="auto", help="""
Describes how the component should maintain its rows' heights.
.. note::
This is an experimental feature and may change in future. Use it at your
own discretion.
""")
class Panel(Model):
''' A single-widget container with title bar and controls.
'''
title = String(default="", help="""
The text title of the panel.
""")
child = Instance(LayoutDOM, help="""
The child widget. If you need more children, use a layout widget, e.g. a ``Column``.
""")
closable = Bool(False, help="""
Whether this panel is closable or not. If True, an "x" button will appear.
Closing a panel is equivalent to removing it from its parent container (e.g. tabs).
""")
class Tabs(LayoutDOM):
''' A panel widget with navigation tabs.
'''
__example__ = "sphinx/source/docs/user_guide/examples/interaction_tab_panes.py"
tabs = List(Instance(Panel), help="""
The list of child panel widgets.
""").accepts(List(Tuple(String, Instance(LayoutDOM))),
lambda items: [ Panel(title=title, child=child) for (title, child) in items ])
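    # Added note (illustrative): the accepts() adapter above also allows a
    # list of (title, child) tuples, each wrapped into a Panel, e.g.
    #
    #     Tabs(tabs=[("First", layout1), ("Second", layout2)])
    #
    # where layout1 and layout2 are any LayoutDOM instances.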
tabs_location = Enum(Location, default="above", help="""
The location of the buttons that activate tabs.
""")
active = Int(0, help="""
The index of the active tab.
""")
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
# TODO (bev) deprecation: 3.0
class WidgetBox(Column):
''' Create a column of bokeh widgets with predefined styling.
    WidgetBox is DEPRECATED and will be removed in Bokeh 3.0, use 'Column' instead.
'''
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
from ..util.deprecation import deprecated
deprecated("'WidgetBox' is deprecated and will be removed in Bokeh 3.0, use 'bokeh.models.Column' instead")
|
|
#!/usr/bin/env python
# Copyright (c) 2014-2015 Benjamin Althues <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import fileinput
import sys
import pycommand
__docformat__ = 'restructuredtext'
__author__ = "Benjamin Althues"
__copyright__ = "Copyright (C) 2014-2015 Benjamin Althues"
__version_info__ = (0, 2, 0, 'alpha', 0)
__version__ = '0.2.0'
# Setting defaults ###########################################################
DEFAULT_INPUT_INDENT = 2
'''The standard value of tuhinga's indentation is 2 spaces'''
DEFAULT_OUTPUT_INDENT = 2
'''The output indent can be set to a negative value to create condensed one-liners'''
# Default mapper for LexerXML ################################################
mapper = {
'html5': {
'area': {'v': True},
'base': {'v': True},
'br': {'v': True},
'col': {'v': True},
'embed': {'v': True},
'hr': {'v': True},
'img': {'v': True},
'keygen': {'v': True},
'param': {'v': True},
'source': {'v': True},
'track': {'v': True},
'wbr': {'v': True},
'css': {'v': True, 'e': 'link', 'c': 'href', 'h': 'rel="stylesheet"'},
'input': {'v': True, 'c': 'value'},
'input-button': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="button"'},
'input-checkbox': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="checkbox"'},
'input-color': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="color"'},
'input-date': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="date"'},
'input-datetime': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="datetime"'},
'input-datetime-local': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="datetime-local"'},
'input-email': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="email"'},
'input-file': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="file"'},
'input-hidden': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="hidden"'},
'input-image': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="image"'},
'input-month': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="month"'},
'input-number': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="number"'},
'input-password': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="password"'},
'input-radio': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="radio"'},
'input-range': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="range"'},
'input-reset': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="reset"'},
'input-search': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="search"'},
'input-submit': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="submit"'},
'input-tel': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="tel"'},
'input-text': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="text"'},
'input-time': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="time"'},
'input-url': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="url"'},
'input-week': {'v': True, 'e': 'input', 'c': 'value',
'h': 'type="week"'},
'js': {'e': 'script', 'c': 'src', 'h': 'type="text/javascript"'},
'link': {'v': True, 'c': 'href'},
'meta': {'v': True, 'c': 'content'},
'meta-charset': {'v': True, 'e': 'meta', 'c': 'charset'},
'script-src': {'e': 'script', 'c': 'src'},
},
}
'''Mapping of contents to arguments / list of void elements
Possible keys:
- 'v': True if void element like <meta>. Default = false
- 'e': HTML element. Default = <name_of_dict_key>
- 'c': Content mapping, see below. Default = '>'
- 'h': Extra html arguments. Default = false
Possible value of content:
- '>': print contents after start tag (default)
- '-': strip any contents
- 'some-string': map any contents to an html argument
'''
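# Added note (illustrative): reading one mapping as an example, the 'css'
# entry above means a line such as `css style.css` is emitted as a void
# <link> element with the content mapped to the href attribute and the extra
# arguments appended, i.e. roughly <link rel="stylesheet" href="style.css">.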
# Parser and Lexer objects ###################################################
class LexerError(Exception):
pass
class Parser:
'''Parse a tuhinga doc and create nodes to be processed with a lexer'''
def __init__(self, input_indent=DEFAULT_INPUT_INDENT):
'''Handle args and initialize instance variables'''
self.input_indent = input_indent
self.latest_indentlvl = 0
self.lineno = 0
self.current_indentlvl = 0
self.nodes = []
self.parsed = []
for i in range(0, 100):
self.parsed.append(None)
def string(self, string):
'''Parse a complete tuhinga document as string'''
for line in string.split('\n'):
self.parseLine(line)
return self.close()
def file(self, filename):
'''Parse a complete tuhinga document by filename'''
with open(filename) as f:
for line in f:
self.parseLine(line)
return self.close()
def fileinput(self):
'''Parse stdin or files with the fileinput module'''
for line in fileinput.input():
self.parseLine(line)
return self.close()
def close(self):
'''Close all open nodes'''
self._closeNodes(0)
return self
def parseLine(self, line):
'''Parse a single line of tuhinga markup
Make sure to run close() after the last call to parseLine.'''
self.lineno += 1
indentlvl = int((len(line) - len(line.lstrip())) / self.input_indent)
splitted = line.lstrip().split()
# Skip empty lines and comment lines
if not splitted or splitted[0].startswith(';'):
return self
# parse element, id and classes
identifier = splitted[0]
_id = None
_class = []
if '#' in identifier:
element = identifier[:identifier.find('#')]
if '.' in identifier:
_id = identifier[identifier.find('#') + 1:identifier.find('.')]
_class = identifier.split('.')[1:]
else:
_id = identifier[identifier.find('#') + 1:]
elif '.' in identifier:
element = identifier[:identifier.find('.')]
_class = identifier.split('.')[1:]
else:
element = identifier
if identifier.startswith('#') or identifier.startswith('.'):
element = 'div'
# parse content and arguments
remainder = splitted[1:]
content = []
args = []
# If a word starts with ':' and is not an argument,
# it should be escaped '\:'
for i in remainder:
if i.startswith(':'):
args.append(i[1:])
else:
content.append(i)
data = {
'indentlvl': indentlvl,
'element': element,
'id': _id,
'class': _class,
'arguments': args,
'content': ' '.join(content),
'line': self.lineno,
'splitted': splitted,
}
# register node to handle the tree structure
self._registerNode(indentlvl, data)
return self
def _registerNode(self, indentlvl, data):
if indentlvl < self.current_indentlvl:
self._closeNodes(indentlvl)
self.parsed[indentlvl] = data
self.nodes.append((1, data))
self.latest_indentlvl = indentlvl
self.current_indentlvl = indentlvl
def _closeNodes(self, indentlvl):
self.parsed[self.latest_indentlvl] = None
for i in range(99, indentlvl - 1, -1):
if self.parsed[i]:
self.nodes.append((0, self.parsed[i]))
self.parsed[i] = None
class LexerXML:
'''Lexical compilation of parsed nodes to XML markup'''
def __init__(self, parser, output_indent=DEFAULT_OUTPUT_INDENT):
'''Object init is the only public method'''
self.output = ''
self.doctype = 'html5'
self.output_indent = output_indent
n = 0
for node in parser.nodes:
if node[0] == 1:
try:
next_lvl = parser.nodes[n + 1][1]['indentlvl']
except IndexError:
raise LexerError('Markup Tree Error: parser did not '
'properly close all nodes')
self._startNode(data=node[1], next_lvl=next_lvl)
elif node[0] == 0:
self._endNode(data=node[1])
n += 1
def _startNode(self, data, next_lvl):
out = ''
is_element = True
# defaults, possibly overridden by mapping
element = data['element']
content_dest = '>'
extra_args = ''
void_elem = False
if data['element'] in mapper[self.doctype].keys():
# apply mapping
if 'e' in mapper[self.doctype][data['element']]:
element = mapper[self.doctype][data['element']]['e']
if 'v' in mapper[self.doctype][data['element']]:
void_elem = mapper[self.doctype][data['element']]['v']
if 'c' in mapper[self.doctype][data['element']]:
content_dest = mapper[self.doctype][data['element']]['c']
if 'h' in mapper[self.doctype][data['element']]:
extra_args = mapper[self.doctype][data['element']]['h']
# hardcoded special elements
if element == 'html5':
# Do not print a newline if output_indent setting <= -1
newl = '\n' if self.output_indent > -1 else ''
self._addOutput(
data['indentlvl'],
'<!doctype html>{newl}{indent}<html>'.format(
newl=newl,
indent=((' ' * self.output_indent) * data['indentlvl'])
)
)
return self
elif element == '::':
is_element = False
if is_element:
out += '<' + element # Begin start tag
out += ' id="{}"'.format(data['id']) if data['id'] else ''
if data['class']:
out += ' class="{}"'.format(' '.join(data['class']))
out += ' {}'.format(extra_args) if extra_args else ''
for a in data['arguments']:
arg = a.split('=')
out += ' {}="{}"'.format(arg[0], arg[1])
# Use content as argument according to mapping
if data['content'] and content_dest != '>' and content_dest != '-':
out += ' {}="{}"'.format(content_dest, data['content'])
out += '>' # Close start tag
# Add content, if any.
# Properly align content depending on children nodes
if data['content'] and content_dest == '>':
if data['indentlvl'] >= next_lvl:
out += data['content']
else:
out += '\n{}{}'.format(self._indent(next_lvl), data['content'])
# close tag if node has no children nodes
if is_element and not void_elem:
if data['indentlvl'] >= next_lvl:
out += '</{}>'.format(element)
self._addOutput(data['indentlvl'], out)
def _endNode(self, data):
if data['element'] == 'html5':
self._addOutput(data['indentlvl'], '</html>')
return self
self._addOutput(data['indentlvl'], '</{}>'.format(data['element']))
def _indent(self, indentlvl):
return (' ' * self.output_indent) * indentlvl
def _addOutput(self, indentlvl, contents):
# Do not print a newline if output_indent setting <= -1 and
# unescape any special tokens
newl = '\n' if self.output_indent > -1 else ''
contents = contents.replace('\\:', ':')
self.output += self._indent(indentlvl) + contents + newl
# Shortcut functions #########################################################
def string(string, input_indent=DEFAULT_INPUT_INDENT,
output_indent=DEFAULT_OUTPUT_INDENT):
'''Shortcut for parsing, lexing and mapping a document from a string'''
parser = Parser(input_indent=input_indent).string(string)
return LexerXML(parser, output_indent=output_indent).output
def file(filelocation, input_indent=DEFAULT_INPUT_INDENT,
output_indent=DEFAULT_OUTPUT_INDENT):
'''Shortcut for parsing, lexing and mapping a document from file'''
parser = Parser(input_indent=input_indent).file(filelocation)
return LexerXML(parser, output_indent=output_indent).output
def stdin(input_indent=DEFAULT_INPUT_INDENT,
output_indent=DEFAULT_OUTPUT_INDENT):
'''Shortcut for parsing, lexing and mapping from stdin/fileinput'''
parser = Parser(input_indent=input_indent).fileinput()
return LexerXML(parser, output_indent=output_indent).output
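# Added sketch (illustrative) of the shortcut functions above; the markup
# sample and the resulting HTML are examples and exact whitespace depends on
# the output_indent setting:
#
#     source = (
#         "html5\n"
#         "  head\n"
#         "    title Example\n"
#         "  body\n"
#         "    p#intro.lead Hello world\n"
#     )
#     print(string(source))
#
# This parses the indentation-based markup and emits the corresponding HTML,
# including a <p id="intro" class="lead"> element containing "Hello world".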
# Command handler ############################################################
class Command(pycommand.CommandBase):
'''Command handler for parsing arguments from shell and starting actions'''
description = 'Tuhinga Markup Language CLI'
optionList = (
('stdin', ('i', False, 'read from standard input')),
('help', ('h', False, 'show this help information')),
('version', ('V', False, 'show version information')),
)
usageTextExtra = 'Specify one or more files or read from stdin with -i'
def __init__(self, *args, **kwargs):
self.usagestr = ('usage: {name} [options] [<file>]'
.format(name=kwargs['execname']))
super(Command, self).__init__(*args)
def run(self):
if self.flags.help:
print(self.usage)
return 0
elif self.flags.version:
print('Tuhinga {} on Python {}'
.format(__version__, sys.version.split()[0]))
return 0
elif self.flags.stdin:
sys.argv = [] # reset sys.argv, prevent parsing "--stdin" filename
try:
print(stdin())
except KeyboardInterrupt:
print('Bye')
return 0
return 0
if not self.args:
print(self.usage)
else:
for f in self.args:
print(file(f))
return 0
if __name__ == '__main__':
cmd = Command(sys.argv[1:], execname=sys.argv[0])
if cmd.error:
print('error: {0}'.format(cmd.error))
sys.exit(1)
else:
sys.exit(cmd.run())
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
NOTE: This module shall not be used by external projects. It will be moved
to neutron-lib in due course, and then it can be used from there.
"""
from neutron_lib.api import attributes
from neutron_lib.db import utils as db_utils
from oslo_db.sqlalchemy import utils as sa_utils
from sqlalchemy import sql, or_, and_
from sqlalchemy.ext import associationproxy
from neutron.common import utils
from neutron.db import _utils as ndb_utils
from neutron.objects import utils as obj_utils
# Classes implementing extensions will register hooks into this dictionary
# for "augmenting" the "core way" of building a query for retrieving objects
# from a model class. Hooks are registered by invoking register_hook().
_model_query_hooks = {
# model1 : {
# hook1: {
# 'query': query_hook,
# 'filter': filter_hook,
# 'result_filters': result_filters
# },
# hook2: {
# 'query': query_hook,
# 'filter': filter_hook,
# 'result_filters': result_filters
# },
# ...
# },
# model2 : {
# hook1: {
# 'query': query_hook,
# 'filter': filter_hook,
# 'result_filters': result_filters
# },
# hook2: {
# 'query': query_hook,
# 'filter': filter_hook,
# 'result_filters': result_filters
# },
# ...
# },
# ...
}
def register_hook(model, name, query_hook, filter_hook,
result_filters=None):
"""Register a hook to be invoked when a query is executed.
:param model: The DB Model that the hook applies to.
:type model: sqlalchemy orm model
:param name: A name for the hook.
:type name: str
:param query_hook: The method to be called to augment the query.
:type query_hook: callable or None
:param filter_hook: A method to be called to augment the query filter.
:type filter_hook: callable or None
    :param result_filters: A method to be called to filter the query result.
:type result_filters: callable or None
Adds the hook components to the _model_query_hooks dict. Models are the
keys of this dict, whereas the value is another dict mapping hook names
to callables performing the hook.
Each hook has three components:
"query", used to build the query expression
"filter", used to build the filter expression
"result_filters", used for final filtering on the query result
Query hooks take as input the query being built and return a
transformed query expression.
def mymodel_query_hook(context, original_model, query):
augmented_query = ...
return augmented_query
Filter hooks take as input the filter expression being built and return
a transformed filter expression
def mymodel_filter_hook(context, original_model, filters):
refined_filters = ...
return refined_filters
Result filter hooks take as input the query expression and the filter
expression, and return a final transformed query expression.
def mymodel_result_filter_hook(query, filters):
final_filters = ...
return query.filter(final_filters)
"""
if callable(query_hook):
query_hook = utils.make_weak_ref(query_hook)
if callable(filter_hook):
filter_hook = utils.make_weak_ref(filter_hook)
if callable(result_filters):
result_filters = utils.make_weak_ref(result_filters)
_model_query_hooks.setdefault(model, {})[name] = {
'query': query_hook,
'filter': filter_hook,
'result_filters': result_filters
}
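# Illustrative registration (added sketch; MyModel and the hook callables are
# hypothetical and follow the signatures shown in the docstring above):
#
#     register_hook(MyModel, 'my_extension',
#                   query_hook=mymodel_query_hook,
#                   filter_hook=mymodel_filter_hook,
#                   result_filters=mymodel_result_filter_hook)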
def get_hooks(model):
"""Retrieve the model query hooks for a model.
:param model: The DB Model to look up for query hooks.
:type model: sqlalchemy orm model
:return: list of hooks
:rtype: list of dict of callable
"""
return _model_query_hooks.get(model, {}).values()
def query_with_hooks(context, model):
query = context.session.query(model)
# define basic filter condition for model query
query_filter = None
if ndb_utils.model_query_scope_is_project(context, model):
if hasattr(model, 'rbac_entries'):
query = query.outerjoin(model.rbac_entries)
rbac_model = model.rbac_entries.property.mapper.class_
query_filter = (
(model.tenant_id == context.tenant_id) |
((rbac_model.action == 'access_as_shared') &
((rbac_model.target_tenant == context.tenant_id) |
(rbac_model.target_tenant == '*'))))
elif hasattr(model, 'shared'):
query_filter = ((model.tenant_id == context.tenant_id) |
(model.shared == sql.true()))
else:
query_filter = (model.tenant_id == context.tenant_id)
# Execute query hooks registered from mixins and plugins
for hook in get_hooks(model):
query_hook = utils.resolve_ref(hook.get('query'))
if query_hook:
query = query_hook(context, model, query)
filter_hook = utils.resolve_ref(hook.get('filter'))
if filter_hook:
query_filter = filter_hook(context, model, query_filter)
    # NOTE(salvatore-orlando): 'if query_filter' would try to evaluate the
    # SQLAlchemy expression as a boolean, raising an exception, so the
    # comparison is made against None instead.
if query_filter is not None:
query = query.filter(query_filter)
return query
def get_by_id(context, model, object_id):
query = query_with_hooks(context=context, model=model)
return query.filter(model.id == object_id).one()
def apply_filters(query, model, filters, context=None):
if filters:
for key, value in filters.items():
column = getattr(model, key, None)
# NOTE(kevinbenton): if column is a hybrid property that
# references another expression, attempting to convert to
# a boolean will fail so we must compare to None.
# See "An Important Expression Language Gotcha" in:
# docs.sqlalchemy.org/en/rel_0_9/changelog/migration_06.html
if column is not None:
if not value:
query = query.filter(sql.false())
return query
if isinstance(column, associationproxy.AssociationProxy):
# association proxies don't support in_ so we have to
# do multiple equals matches
query = query.filter(
or_(*[column == v for v in value]))
elif isinstance(value, obj_utils.StringMatchingFilterObj):
if value.is_contains:
query = query.filter(
column.contains(value.contains))
elif value.is_starts:
query = query.filter(
column.startswith(value.starts))
elif value.is_ends:
query = query.filter(
column.endswith(value.ends))
elif None in value:
# in_() operator does not support NULL element so we have
# to do multiple equals matches
query = query.filter(
or_(*[column == v for v in value]))
else:
query = query.filter(column.in_(value))
elif key == 'shared' and hasattr(model, 'rbac_entries'):
# translate a filter on shared into a query against the
# object's rbac entries
rbac = model.rbac_entries.property.mapper.class_
matches = [rbac.target_tenant == '*']
if context:
matches.append(rbac.target_tenant == context.tenant_id)
# any 'access_as_shared' records that match the
# wildcard or requesting tenant
is_shared = and_(rbac.action == 'access_as_shared',
or_(*matches))
if not value[0]:
# NOTE(kevinbenton): we need to find objects that don't
# have an entry that matches the criteria above so
# we use a subquery to exclude them.
# We can't just filter the inverse of the query above
# because that will still give us a network shared to
# our tenant (or wildcard) if it's shared to another
# tenant.
# This is the column joining the table to rbac via
# the object_id. We can't just use model.id because
# subnets join on network.id so we have to inspect the
# relationship.
join_cols = model.rbac_entries.property.local_columns
oid_col = list(join_cols)[0]
is_shared = ~oid_col.in_(
query.session.query(rbac.object_id).filter(is_shared)
)
elif (not context or
not ndb_utils.model_query_scope_is_project(context,
model)):
# we only want to join if we aren't using the subquery
# and if we aren't already joined because this is a
# scoped query
query = query.outerjoin(model.rbac_entries)
query = query.filter(is_shared)
for hook in get_hooks(model):
result_filter = utils.resolve_ref(hook.get('result_filters', None))
if result_filter:
query = result_filter(query, filters)
return query
def get_collection_query(context, model, filters=None, sorts=None, limit=None,
marker_obj=None, page_reverse=False):
collection = query_with_hooks(context, model)
collection = apply_filters(collection, model, filters, context)
if sorts:
sort_keys = db_utils.get_and_validate_sort_keys(sorts, model)
sort_dirs = db_utils.get_sort_dirs(sorts, page_reverse)
# we always want deterministic results for sorted queries
# so add unique keys to limit queries when present.
# (http://docs.sqlalchemy.org/en/latest/orm/
# loading_relationships.html#subqueryload-ordering)
# (http://docs.sqlalchemy.org/en/latest/faq/
# ormconfiguration.html#faq-subqueryload-limit-sort)
for k in _unique_keys(model):
if k not in sort_keys:
sort_keys.append(k)
sort_dirs.append('asc')
collection = sa_utils.paginate_query(collection, model, limit,
marker=marker_obj,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
return collection
def _unique_keys(model):
# just grab first set of unique keys and use them.
    # if model has no unique sets, 'paginate_query' will
# warn if sorting is unstable
uk_sets = sa_utils.get_unique_keys(model)
return uk_sets[0] if uk_sets else []
def get_collection(context, model, dict_func,
filters=None, fields=None,
sorts=None, limit=None, marker_obj=None,
page_reverse=False):
query = get_collection_query(context, model,
filters=filters, sorts=sorts,
limit=limit, marker_obj=marker_obj,
page_reverse=page_reverse)
items = [
attributes.populate_project_info(
dict_func(c, fields) if dict_func else c)
for c in query
]
if limit and page_reverse:
items.reverse()
return items
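# Illustrative call (added sketch; Network and _make_network_dict are
# hypothetical stand-ins for a real model class and serializer function):
#
#     nets = get_collection(context, Network, _make_network_dict,
#                           filters={'status': ['ACTIVE']},
#                           sorts=[('name', True)], limit=10)
#
# Filters map column names to lists of acceptable values; sorts are
# (key, ascending) pairs consumed by get_collection_query() above.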
def get_collection_count(context, model, filters=None):
return get_collection_query(context, model, filters).count()
|
|
"""Twitter platform for notify component."""
from datetime import datetime, timedelta
from functools import partial
import json
import logging
import mimetypes
import os
from TwitterAPI import TwitterAPI
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_time
_LOGGER = logging.getLogger(__name__)
CONF_CONSUMER_KEY = "consumer_key"
CONF_CONSUMER_SECRET = "consumer_secret"
CONF_ACCESS_TOKEN_SECRET = "access_token_secret"
ATTR_MEDIA = "media"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Required(CONF_ACCESS_TOKEN_SECRET): cv.string,
vol.Required(CONF_CONSUMER_KEY): cv.string,
vol.Required(CONF_CONSUMER_SECRET): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
}
)
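# Illustrative configuration.yaml entry for this platform (added sketch; the
# keys mirror the schema above and the values are placeholders):
#
#   notify:
#     - platform: twitter
#       consumer_key: YOUR_CONSUMER_KEY
#       consumer_secret: YOUR_CONSUMER_SECRET
#       access_token: YOUR_ACCESS_TOKEN
#       access_token_secret: YOUR_ACCESS_TOKEN_SECRET
#       username: YOUR_TWITTER_HANDLE   # optional; switches to direct messages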
def get_service(hass, config, discovery_info=None):
"""Get the Twitter notification service."""
return TwitterNotificationService(
hass,
config[CONF_CONSUMER_KEY],
config[CONF_CONSUMER_SECRET],
config[CONF_ACCESS_TOKEN],
config[CONF_ACCESS_TOKEN_SECRET],
config.get(CONF_USERNAME),
)
class TwitterNotificationService(BaseNotificationService):
"""Implementation of a notification service for the Twitter service."""
def __init__(
self,
hass,
consumer_key,
consumer_secret,
access_token_key,
access_token_secret,
username,
):
"""Initialize the service."""
self.user = username
self.hass = hass
self.api = TwitterAPI(
consumer_key, consumer_secret, access_token_key, access_token_secret
)
def send_message(self, message="", **kwargs):
"""Tweet a message, optionally with media."""
data = kwargs.get(ATTR_DATA)
media = None
if data:
media = data.get(ATTR_MEDIA)
if not self.hass.config.is_allowed_path(media):
_LOGGER.warning("'%s' is not a whitelisted directory", media)
return
callback = partial(self.send_message_callback, message)
self.upload_media_then_callback(callback, media)
def send_message_callback(self, message, media_id=None):
"""Tweet a message, optionally with media."""
if self.user:
            user_resp = self.api.request("users/lookup", {"screen_name": self.user})
            if user_resp.status_code != 200:
                self.log_error_resp(user_resp)
                return
            _LOGGER.debug("User lookup response: %s", user_resp.json())
            user_id = user_resp.json()[0]["id"]
event = {
"event": {
"type": "message_create",
"message_create": {
"target": {"recipient_id": user_id},
"message_data": {"text": message},
},
}
}
resp = self.api.request("direct_messages/events/new", json.dumps(event))
else:
resp = self.api.request(
"statuses/update", {"status": message, "media_ids": media_id}
)
if resp.status_code != 200:
self.log_error_resp(resp)
else:
_LOGGER.debug("Message posted: %s", resp.json())
def upload_media_then_callback(self, callback, media_path=None):
"""Upload media."""
if not media_path:
return callback()
with open(media_path, "rb") as file:
total_bytes = os.path.getsize(media_path)
(media_category, media_type) = self.media_info(media_path)
resp = self.upload_media_init(media_type, media_category, total_bytes)
            if not 200 <= resp.status_code < 300:
self.log_error_resp(resp)
return None
media_id = resp.json()["media_id"]
media_id = self.upload_media_chunked(file, total_bytes, media_id)
resp = self.upload_media_finalize(media_id)
            if not 200 <= resp.status_code < 300:
self.log_error_resp(resp)
return None
if resp.json().get("processing_info") is None:
return callback(media_id)
self.check_status_until_done(media_id, callback)
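    # Upload flow summary (descriptive note for the methods below, phase names
    # taken from the calls above): INIT -> APPEND in 4 MiB chunks -> FINALIZE,
    # then STATUS polling via check_status_until_done() whenever Twitter
    # returns asynchronous processing_info.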
def media_info(self, media_path):
"""Determine mime type and Twitter media category for given media."""
(media_type, _) = mimetypes.guess_type(media_path)
media_category = self.media_category_for_type(media_type)
_LOGGER.debug(
"media %s is mime type %s and translates to %s",
media_path,
media_type,
media_category,
)
return media_category, media_type
def upload_media_init(self, media_type, media_category, total_bytes):
"""Upload media, INIT phase."""
return self.api.request(
"media/upload",
{
"command": "INIT",
"media_type": media_type,
"media_category": media_category,
"total_bytes": total_bytes,
},
)
def upload_media_chunked(self, file, total_bytes, media_id):
"""Upload media, chunked append."""
segment_id = 0
bytes_sent = 0
while bytes_sent < total_bytes:
chunk = file.read(4 * 1024 * 1024)
resp = self.upload_media_append(chunk, media_id, segment_id)
            if resp.status_code not in range(200, 300):
self.log_error_resp_append(resp)
return None
segment_id = segment_id + 1
bytes_sent = file.tell()
self.log_bytes_sent(bytes_sent, total_bytes)
return media_id
def upload_media_append(self, chunk, media_id, segment_id):
"""Upload media, APPEND phase."""
return self.api.request(
"media/upload",
{"command": "APPEND", "media_id": media_id, "segment_index": segment_id},
{"media": chunk},
)
def upload_media_finalize(self, media_id):
"""Upload media, FINALIZE phase."""
return self.api.request(
"media/upload", {"command": "FINALIZE", "media_id": media_id}
)
def check_status_until_done(self, media_id, callback, *args):
"""Upload media, STATUS phase."""
resp = self.api.request(
"media/upload",
{"command": "STATUS", "media_id": media_id},
method_override="GET",
)
        if resp.status_code != 200:
            _LOGGER.error("media processing error: %s", resp.json())
            return
processing_info = resp.json()["processing_info"]
_LOGGER.debug("media processing %s status: %s", media_id, processing_info)
if processing_info["state"] in {"succeeded", "failed"}:
return callback(media_id)
check_after_secs = processing_info["check_after_secs"]
_LOGGER.debug(
"media processing waiting %s seconds to check status", str(check_after_secs)
)
when = datetime.now() + timedelta(seconds=check_after_secs)
myself = partial(self.check_status_until_done, media_id, callback)
async_track_point_in_time(self.hass, myself, when)
@staticmethod
def media_category_for_type(media_type):
"""Determine Twitter media category by mime type."""
if media_type is None:
return None
if media_type.startswith("image/gif"):
return "tweet_gif"
if media_type.startswith("video/"):
return "tweet_video"
if media_type.startswith("image/"):
return "tweet_image"
return None
@staticmethod
def log_bytes_sent(bytes_sent, total_bytes):
"""Log upload progress."""
_LOGGER.debug("%s of %s bytes uploaded", str(bytes_sent), str(total_bytes))
@staticmethod
def log_error_resp(resp):
"""Log error response."""
obj = json.loads(resp.text)
error_message = obj["errors"]
_LOGGER.error("Error %s: %s", resp.status_code, error_message)
@staticmethod
def log_error_resp_append(resp):
"""Log error response, during upload append phase."""
obj = json.loads(resp.text)
error_message = obj["errors"][0]["message"]
error_code = obj["errors"][0]["code"]
_LOGGER.error(
"Error %s: %s (Code %s)", resp.status_code, error_message, error_code
)
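
# Illustrative configuration sketch (assumption, not part of this module): the
# platform above is wired up from configuration.yaml roughly as follows; the
# key values are placeholders.
#
#   notify:
#     - platform: twitter
#       consumer_key: YOUR_CONSUMER_KEY
#       consumer_secret: YOUR_CONSUMER_SECRET
#       access_token: YOUR_ACCESS_TOKEN
#       access_token_secret: YOUR_ACCESS_TOKEN_SECRET
#       username: some_screen_name   # optional; switches to direct messages
#
# A service call that attaches media (the path must be allowed by
# hass.config.is_allowed_path) then passes it under data -> media:
#
#   message: "Motion detected"
#   data:
#     media: /config/www/snapshot.jpg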
|
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools import chronos_tools
from paasta_tools import marathon_tools
from paasta_tools import monitoring_tools
class TestMonitoring_Tools:
general_page = True
fake_general_service_config = {
'team': 'general_test_team',
'runbook': 'y/general_test_runbook',
'tip': 'general_test_tip',
'notification_email': 'general_test_notification_email',
'page': general_page,
}
empty_service_config = marathon_tools.MarathonServiceConfig(
service='myservicename',
cluster='mycluster',
instance='myinstance',
config_dict={},
branch_dict=None,
)
job_page = False
fake_marathon_job_config = marathon_tools.MarathonServiceConfig(
service='myservicename',
cluster='myclustername',
instance='myinstance',
config_dict={
'team': 'job_test_team',
'runbook': 'y/job_test_runbook',
'tip': 'job_test_tip',
'notification_email': 'job_test_notification_email',
'page': job_page,
},
branch_dict=None,
)
fake_chronos_job_config = chronos_tools.ChronosJobConfig(
service='myservicename',
cluster='myclustername',
instance='myinstance',
config_dict={
'team': 'job_test_team',
'runbook': 'y/job_test_runbook',
'tip': 'job_test_tip',
'notification_email': 'job_test_notification_email',
'page': job_page,
},
branch_dict=None,
)
empty_job_config = {}
monitor_page = True
fake_monitor_config = {
'team': 'monitor_test_team',
'runbook': 'y/monitor_test_runbook',
'tip': 'monitor_test_tip',
'notification_email': 'monitor_test_notification_email',
'page': monitor_page,
}
empty_monitor_config = {}
framework = 'fake_framework'
overrides = {}
instance = 'fake_instance'
service = 'fake_service'
soa_dir = '/fake/soa/dir'
def test_get_team(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_team(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'team', self.overrides, self.service,
self.soa_dir,
)
def test_get_runbook(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_runbook(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'runbook', self.overrides, self.service,
self.soa_dir,
)
def test_get_tip(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_tip(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'tip', self.overrides, self.service,
self.soa_dir,
)
def test_get_notification_email(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_notification_email(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'notification_email', self.overrides,
self.service, self.soa_dir,
)
def test_get_page(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_page(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'page', self.overrides, self.service,
self.soa_dir,
)
def test_get_alert_after(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_alert_after(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'alert_after', self.overrides, self.service,
self.soa_dir,
)
def test_get_realert_every(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_defaults = mock.Mock()
monitoring_tools.get_realert_every(self.overrides, self.service, self.soa_dir, monitoring_defaults)
get_monitoring_config_value_patch.assert_called_once_with(
'realert_every', self.overrides,
self.service, self.soa_dir, monitoring_defaults,
)
def test_get_check_every(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_check_every(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'check_every', self.overrides, self.service,
self.soa_dir,
)
def test_get_irc_channels(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_irc_channels(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'irc_channels', self.overrides, self.service,
self.soa_dir,
)
def test_get_dependencies(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_dependencies(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'dependencies', self.overrides, self.service,
self.soa_dir,
)
def test_get_ticket(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_ticket(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'ticket', self.overrides, self.service,
self.soa_dir,
)
def test_get_project(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as get_monitoring_config_value_patch:
monitoring_tools.get_project(self.overrides, self.service, self.soa_dir)
get_monitoring_config_value_patch.assert_called_once_with(
'project', self.overrides, self.service,
self.soa_dir,
)
def test_get_monitoring_config_value_with_monitor_config(self):
expected = 'monitor_test_team'
with mock.patch(
'service_configuration_lib.read_service_configuration', autospec=True,
return_value=self.fake_general_service_config,
) as service_configuration_lib_patch, mock.patch(
'paasta_tools.monitoring_tools.read_monitoring_config',
autospec=True, return_value=self.fake_monitor_config,
) as read_monitoring_patch, mock.patch(
'paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True,
) as load_system_paasta_config_patch:
load_system_paasta_config_patch.return_value.get_cluster = mock.Mock(return_value='fake_cluster')
actual = monitoring_tools.get_team(self.overrides, self.service, self.soa_dir)
assert expected == actual
service_configuration_lib_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
read_monitoring_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
def test_get_monitoring_config_value_with_service_config(self):
expected = 'general_test_team'
with mock.patch(
'service_configuration_lib.read_service_configuration', autospec=True,
return_value=self.fake_general_service_config,
) as service_configuration_lib_patch, mock.patch(
'paasta_tools.monitoring_tools.read_monitoring_config',
autospec=True, return_value=self.empty_monitor_config,
) as read_monitoring_patch, mock.patch(
'paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True,
) as load_system_paasta_config_patch:
load_system_paasta_config_patch.return_value.get_cluster = mock.Mock(return_value='fake_cluster')
actual = monitoring_tools.get_team(self.overrides, self.service, self.soa_dir)
assert expected == actual
service_configuration_lib_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
read_monitoring_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
def test_get_monitoring_config_value_with_defaults(self):
expected = None
with mock.patch(
'service_configuration_lib.read_service_configuration', autospec=True,
return_value=self.empty_job_config,
) as service_configuration_lib_patch, mock.patch(
'paasta_tools.monitoring_tools.read_monitoring_config',
autospec=True, return_value=self.empty_monitor_config,
) as read_monitoring_patch, mock.patch(
'paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True,
) as load_system_paasta_config_patch:
load_system_paasta_config_patch.return_value.get_cluster = mock.Mock(return_value='fake_cluster')
actual = monitoring_tools.get_team(self.overrides, self.service, self.soa_dir)
assert expected == actual
service_configuration_lib_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
read_monitoring_patch.assert_called_once_with(self.service, soa_dir=self.soa_dir)
def test_get_team_email_address_uses_override_if_specified(self):
fake_email = 'fake_email'
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as mock_get_monitoring_config_value:
mock_get_monitoring_config_value.return_value = 'fake_email'
actual = monitoring_tools.get_team_email_address('fake_service', {'notification_email': fake_email})
assert actual == fake_email
def test_get_team_email_address_uses_instance_config_if_specified(self):
expected = 'fake_email'
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as mock_get_monitoring_config_value:
mock_get_monitoring_config_value.return_value = 'fake_email'
actual = monitoring_tools.get_team_email_address('fake_service')
assert actual == expected
def test_get_team_email_address_uses_team_data_as_last_resort(self):
expected = 'team_data_email'
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as mock_get_monitoring_config_value, mock.patch(
'paasta_tools.monitoring_tools.get_sensu_team_data', autospec=True,
) as mock_get_sensu_team_data, mock.patch(
'paasta_tools.monitoring_tools.get_team', autospec=True,
) as mock_get_team:
mock_get_team.return_value = 'test_team'
mock_get_monitoring_config_value.return_value = False
mock_get_sensu_team_data.return_value = {
'notification_email': expected,
}
actual = monitoring_tools.get_team_email_address('fake_service')
assert actual == expected
def test_get_team_email_address_returns_none_if_not_available(self):
with mock.patch(
'paasta_tools.monitoring_tools.__get_monitoring_config_value', autospec=True,
) as mock_get_monitoring_config_value, mock.patch(
'paasta_tools.monitoring_tools.get_sensu_team_data', autospec=True,
) as mock_get_sensu_team_data, mock.patch(
'paasta_tools.monitoring_tools.get_team', autospec=True,
) as mock_get_team:
mock_get_team.return_value = 'test_team'
mock_get_monitoring_config_value.return_value = False
mock_get_sensu_team_data.return_value = {}
actual = monitoring_tools.get_team_email_address('fake_service')
assert actual is None
def test_send_event(self):
fake_service = 'fake_service'
fake_monitoring_overrides = {}
fake_check_name = 'fake_check_name'
fake_status = '42'
fake_output = 'The http port is not open'
fake_team = 'fake_team'
fake_tip = 'fake_tip'
fake_notification_email = 'fake@notify'
fake_irc = '#fake'
fake_soa_dir = '/fake/soa/dir'
self.fake_cluster = 'fake_cluster'
fake_sensu_host = 'fake_sensu_host'
fake_sensu_port = 12345
expected_runbook = 'http://y/paasta-troubleshooting'
expected_check_name = fake_check_name
expected_kwargs = {
'tip': fake_tip,
'notification_email': fake_notification_email,
'irc_channels': fake_irc,
'project': None,
'ticket': False,
'page': True,
'alert_after': '5m',
'check_every': '1m',
'realert_every': -1,
'source': 'paasta-fake_cluster',
'ttl': None,
}
with mock.patch(
"paasta_tools.monitoring_tools.get_team",
return_value=fake_team,
autospec=True,
) as get_team_patch, mock.patch(
"paasta_tools.monitoring_tools.get_tip",
return_value=fake_tip,
autospec=True,
) as get_tip_patch, mock.patch(
"paasta_tools.monitoring_tools.get_notification_email",
return_value=fake_notification_email,
autospec=True,
) as get_notification_email_patch, mock.patch(
"paasta_tools.monitoring_tools.get_irc_channels",
return_value=fake_irc,
autospec=True,
) as get_irc_patch, mock.patch(
"paasta_tools.monitoring_tools.get_ticket",
return_value=False,
autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_project",
return_value=None,
autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_page",
return_value=True,
autospec=True,
) as get_page_patch, mock.patch(
"pysensu_yelp.send_event", autospec=True,
) as pysensu_yelp_send_event_patch, mock.patch(
'paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True,
) as load_system_paasta_config_patch:
load_system_paasta_config_patch.return_value.get_cluster = mock.Mock(return_value=self.fake_cluster)
load_system_paasta_config_patch.return_value.get_sensu_host = mock.Mock(return_value=fake_sensu_host)
load_system_paasta_config_patch.return_value.get_sensu_port = mock.Mock(return_value=fake_sensu_port)
monitoring_tools.send_event(
fake_service,
fake_check_name,
fake_monitoring_overrides,
fake_status,
fake_output,
fake_soa_dir,
)
get_team_patch.assert_called_once_with(
fake_monitoring_overrides,
fake_service,
fake_soa_dir,
)
get_tip_patch.assert_called_once_with(
fake_monitoring_overrides,
fake_service,
fake_soa_dir,
)
get_notification_email_patch.assert_called_once_with(
fake_monitoring_overrides,
fake_service,
fake_soa_dir,
)
get_irc_patch.assert_called_once_with(
fake_monitoring_overrides,
fake_service,
fake_soa_dir,
)
get_page_patch.assert_called_once_with(
fake_monitoring_overrides,
fake_service,
fake_soa_dir,
)
pysensu_yelp_send_event_patch.assert_called_once_with(
expected_check_name,
expected_runbook,
fake_status,
fake_output,
fake_team,
sensu_host=fake_sensu_host,
sensu_port=fake_sensu_port,
**expected_kwargs,
)
load_system_paasta_config_patch.return_value.get_cluster.assert_called_once_with()
def test_send_event_sensu_host_is_None(self):
fake_service = 'fake_service'
fake_monitoring_overrides = {}
fake_check_name = 'fake_check_name'
fake_status = '42'
fake_output = 'The http port is not open'
fake_soa_dir = '/fake/soa/dir'
self.fake_cluster = 'fake_cluster'
fake_sensu_port = 12345
with mock.patch(
"paasta_tools.monitoring_tools.get_team", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_tip", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_notification_email", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_irc_channels", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_ticket", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_project", autospec=True,
), mock.patch(
"paasta_tools.monitoring_tools.get_page", autospec=True,
), mock.patch(
"pysensu_yelp.send_event", autospec=True,
) as pysensu_yelp_send_event_patch, mock.patch(
'paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True,
) as load_system_paasta_config_patch:
load_system_paasta_config_patch.return_value.get_sensu_host = mock.Mock(return_value=None)
load_system_paasta_config_patch.return_value.get_sensu_port = mock.Mock(return_value=fake_sensu_port)
monitoring_tools.send_event(
fake_service,
fake_check_name,
fake_monitoring_overrides,
fake_status,
fake_output,
fake_soa_dir,
)
assert pysensu_yelp_send_event_patch.call_count == 0
def test_read_monitoring_config(self):
fake_name = 'partial'
fake_fname = 'acronyms'
fake_path = 'ever_patched'
fake_soa_dir = '/nail/cte/oas'
fake_dict = {'e': 'quail', 'v': 'snail'}
with mock.patch(
'os.path.abspath', autospec=True, return_value=fake_path,
) as abspath_patch, mock.patch(
'os.path.join', autospec=True, return_value=fake_fname,
) as join_patch, mock.patch(
'service_configuration_lib.read_monitoring', autospec=True, return_value=fake_dict,
) as read_monitoring_patch:
actual = monitoring_tools.read_monitoring_config(fake_name, fake_soa_dir)
assert fake_dict == actual
abspath_patch.assert_called_once_with(fake_soa_dir)
join_patch.assert_called_once_with(fake_path, fake_name, 'monitoring.yaml')
read_monitoring_patch.assert_called_once_with(fake_fname)
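
# Reading notes (assumptions drawn from the tests above, not from
# monitoring_tools itself):
# - The three test_get_monitoring_config_value_* cases exercise the fallback
#   order: overrides dict -> monitoring.yaml (read_monitoring_config) ->
#   service.yaml (read_service_configuration) -> None.
# - test_send_event pins the call shape:
#       monitoring_tools.send_event(service, check_name, overrides, status,
#                                   output, soa_dir)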
|
|
# pylint: skip-file
# flake8: noqa
class RouterException(Exception):
''' Router exception'''
pass
class RouterConfig(OpenShiftCLIConfig):
''' RouterConfig is a DTO for the router. '''
def __init__(self, rname, namespace, kubeconfig, router_options):
super(RouterConfig, self).__init__(rname, namespace, kubeconfig, router_options)
class Router(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
def __init__(self,
router_config,
verbose=False):
        ''' Constructor for Router
        a router consists of the following parts
- dc/router
- svc/router
- sa/router
- secret/router-certs
- clusterrolebinding/router-router-role
'''
super(Router, self).__init__('default', router_config.kubeconfig, verbose)
self.config = router_config
self.verbose = verbose
self.router_parts = [{'kind': 'dc', 'name': self.config.name},
{'kind': 'svc', 'name': self.config.name},
{'kind': 'sa', 'name': self.config.config_options['service_account']['value']},
{'kind': 'secret', 'name': self.config.name + '-certs'},
{'kind': 'clusterrolebinding', 'name': 'router-' + self.config.name + '-role'},
]
self.__prepared_router = None
self.dconfig = None
self.svc = None
self._secret = None
self._serviceaccount = None
self._rolebinding = None
@property
def prepared_router(self):
''' property for the prepared router'''
if self.__prepared_router is None:
results = self._prepare_router()
            if not results or ('returncode' in results and results['returncode'] != 0):
if 'stderr' in results:
raise RouterException('Could not perform router preparation: %s' % results['stderr'])
raise RouterException('Could not perform router preparation.')
self.__prepared_router = results
return self.__prepared_router
@prepared_router.setter
def prepared_router(self, obj):
'''setter for the prepared_router'''
self.__prepared_router = obj
@property
def deploymentconfig(self):
''' property deploymentconfig'''
return self.dconfig
@deploymentconfig.setter
def deploymentconfig(self, config):
''' setter for property deploymentconfig '''
self.dconfig = config
@property
def service(self):
''' property for service '''
return self.svc
@service.setter
def service(self, config):
''' setter for property service '''
self.svc = config
@property
def secret(self):
''' property secret '''
return self._secret
@secret.setter
def secret(self, config):
''' setter for property secret '''
self._secret = config
@property
def serviceaccount(self):
''' property for serviceaccount '''
return self._serviceaccount
@serviceaccount.setter
def serviceaccount(self, config):
''' setter for property serviceaccount '''
self._serviceaccount = config
@property
def rolebinding(self):
''' property rolebinding '''
return self._rolebinding
@rolebinding.setter
def rolebinding(self, config):
''' setter for property rolebinding '''
self._rolebinding = config
def get_object_by_kind(self, kind):
'''return the current object kind by name'''
if re.match("^(dc|deploymentconfig)$", kind, flags=re.IGNORECASE):
return self.deploymentconfig
elif re.match("^(svc|service)$", kind, flags=re.IGNORECASE):
return self.service
elif re.match("^(sa|serviceaccount)$", kind, flags=re.IGNORECASE):
return self.serviceaccount
elif re.match("secret", kind, flags=re.IGNORECASE):
return self.secret
elif re.match("clusterrolebinding", kind, flags=re.IGNORECASE):
return self.rolebinding
return None
def get(self):
''' return the self.router_parts '''
self.service = None
self.deploymentconfig = None
self.serviceaccount = None
self.secret = None
self.rolebinding = None
for part in self.router_parts:
result = self._get(part['kind'], name=part['name'])
if result['returncode'] == 0 and part['kind'] == 'dc':
self.deploymentconfig = DeploymentConfig(result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'svc':
self.service = Service(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'sa':
self.serviceaccount = ServiceAccount(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'secret':
self.secret = Secret(content=result['results'][0])
elif result['returncode'] == 0 and part['kind'] == 'clusterrolebinding':
self.rolebinding = RoleBinding(content=result['results'][0])
return {'deploymentconfig': self.deploymentconfig,
'service': self.service,
'serviceaccount': self.serviceaccount,
'secret': self.secret,
'clusterrolebinding': self.rolebinding,
}
def exists(self):
        '''return whether the dc, svc, secret and sa parts of the router exist'''
if self.deploymentconfig and self.service and self.secret and self.serviceaccount:
return True
return False
def delete(self):
        '''delete all of the router parts'''
parts = []
for part in self.router_parts:
parts.append(self._delete(part['kind'], part['name']))
rval = 0
for part in parts:
            if part['returncode'] != 0 and 'already exist' not in part['stderr']:
rval = part['returncode']
return {'returncode': rval, 'results': parts}
def add_modifications(self, deploymentconfig):
'''modify the deployment config'''
# We want modifications in the form of edits coming in from the module.
# Let's apply these here
edit_results = []
for edit in self.config.config_options['edits'].get('value', []):
if edit['action'] == 'put':
edit_results.append(deploymentconfig.put(edit['key'],
edit['value']))
if edit['action'] == 'update':
edit_results.append(deploymentconfig.update(edit['key'],
edit['value'],
edit.get('index', None),
edit.get('curr_value', None)))
if edit['action'] == 'append':
edit_results.append(deploymentconfig.append(edit['key'],
edit['value']))
if edit_results and not any([res[0] for res in edit_results]):
return None
return deploymentconfig
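    # Illustrative shape of the 'edits' option consumed above (inferred from
    # add_modifications, not a documented contract); each entry selects one of
    # the put/update/append actions:
    #
    #   edits:
    #     - action: put
    #       key: spec.strategy.rollingParams.intervalSeconds
    #       value: 1
    #     - action: append
    #       key: spec.template.spec.containers[0].env
    #       value: {name: EXTRA_VAR, value: "42"}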
# pylint: disable=too-many-branches
def _prepare_router(self):
'''prepare router for instantiation'''
# if cacert, key, and cert were passed, combine them into a pem file
if (self.config.config_options['cacert_file']['value'] and
self.config.config_options['cert_file']['value'] and
self.config.config_options['key_file']['value']):
router_pem = '/tmp/router.pem'
with open(router_pem, 'w') as rfd:
rfd.write(open(self.config.config_options['cert_file']['value']).read())
rfd.write(open(self.config.config_options['key_file']['value']).read())
if self.config.config_options['cacert_file']['value'] and \
os.path.exists(self.config.config_options['cacert_file']['value']):
rfd.write(open(self.config.config_options['cacert_file']['value']).read())
atexit.register(Utils.cleanup, [router_pem])
self.config.config_options['default_cert']['value'] = router_pem
elif self.config.config_options['default_cert']['value'] is None:
# No certificate was passed to us. do not pass one to oc adm router
self.config.config_options['default_cert']['include'] = False
options = self.config.to_option_list(ascommalist='labels')
cmd = ['router', self.config.name]
cmd.extend(options)
cmd.extend(['--dry-run=True', '-o', 'json'])
results = self.openshift_cmd(cmd, oadm=True, output=True, output_type='json')
# pylint: disable=maybe-no-member
if results['returncode'] != 0 or 'items' not in results['results']:
return results
oc_objects = {'DeploymentConfig': {'obj': None, 'path': None, 'update': False},
'Secret': {'obj': None, 'path': None, 'update': False},
'ServiceAccount': {'obj': None, 'path': None, 'update': False},
'ClusterRoleBinding': {'obj': None, 'path': None, 'update': False},
'Service': {'obj': None, 'path': None, 'update': False},
}
# pylint: disable=invalid-sequence-index
for res in results['results']['items']:
if res['kind'] == 'DeploymentConfig':
oc_objects['DeploymentConfig']['obj'] = DeploymentConfig(res)
elif res['kind'] == 'Service':
oc_objects['Service']['obj'] = Service(res)
elif res['kind'] == 'ServiceAccount':
oc_objects['ServiceAccount']['obj'] = ServiceAccount(res)
elif res['kind'] == 'Secret':
oc_objects['Secret']['obj'] = Secret(res)
elif res['kind'] == 'ClusterRoleBinding':
oc_objects['ClusterRoleBinding']['obj'] = RoleBinding(res)
# Currently only deploymentconfig needs updating
# Verify we got a deploymentconfig
if not oc_objects['DeploymentConfig']['obj']:
return results
# add modifications added
oc_objects['DeploymentConfig']['obj'] = self.add_modifications(oc_objects['DeploymentConfig']['obj'])
for oc_type, oc_data in oc_objects.items():
if oc_data['obj'] is not None:
oc_data['path'] = Utils.create_tmp_file_from_contents(oc_type, oc_data['obj'].yaml_dict)
return oc_objects
def create(self):
'''Create a router
This includes the different parts:
- deploymentconfig
- service
- serviceaccount
- secrets
- clusterrolebinding
'''
results = []
self.needs_update()
import time
# pylint: disable=maybe-no-member
for kind, oc_data in self.prepared_router.items():
if oc_data['obj'] is not None:
time.sleep(1)
if self.get_object_by_kind(kind) is None:
results.append(self._create(oc_data['path']))
elif oc_data['update']:
results.append(self._replace(oc_data['path']))
rval = 0
for result in results:
            if result['returncode'] != 0 and 'already exist' not in result['stderr']:
rval = result['returncode']
return {'returncode': rval, 'results': results}
def update(self):
'''run update for the router. This performs a replace'''
results = []
# pylint: disable=maybe-no-member
for _, oc_data in self.prepared_router.items():
if oc_data['update']:
results.append(self._replace(oc_data['path']))
rval = 0
for result in results:
if result['returncode'] != 0:
rval = result['returncode']
return {'returncode': rval, 'results': results}
# pylint: disable=too-many-return-statements,too-many-branches
def needs_update(self):
''' check to see if we need to update '''
# ServiceAccount:
# Need to determine changes from the pregenerated ones from the original
# Since these are auto generated, we can skip
skip = ['secrets', 'imagePullSecrets']
if self.serviceaccount is None or \
not Utils.check_def_equal(self.prepared_router['ServiceAccount']['obj'].yaml_dict,
self.serviceaccount.yaml_dict,
skip_keys=skip,
debug=self.verbose):
self.prepared_router['ServiceAccount']['update'] = True
# Secret:
# See if one was generated from our dry-run and verify it if needed
if self.prepared_router['Secret']['obj']:
if not self.secret:
self.prepared_router['Secret']['update'] = True
if self.secret is None or \
not Utils.check_def_equal(self.prepared_router['Secret']['obj'].yaml_dict,
self.secret.yaml_dict,
skip_keys=skip,
debug=self.verbose):
self.prepared_router['Secret']['update'] = True
# Service:
# Fix the ports to have protocol=TCP
for port in self.prepared_router['Service']['obj'].get('spec.ports'):
port['protocol'] = 'TCP'
skip = ['portalIP', 'clusterIP', 'sessionAffinity', 'type']
if self.service is None or \
not Utils.check_def_equal(self.prepared_router['Service']['obj'].yaml_dict,
self.service.yaml_dict,
skip_keys=skip,
debug=self.verbose):
self.prepared_router['Service']['update'] = True
# DeploymentConfig:
# Router needs some exceptions.
# We do not want to check the autogenerated password for stats admin
if self.deploymentconfig is not None:
if not self.config.config_options['stats_password']['value']:
for idx, env_var in enumerate(self.prepared_router['DeploymentConfig']['obj'].get(\
'spec.template.spec.containers[0].env') or []):
if env_var['name'] == 'STATS_PASSWORD':
env_var['value'] = \
self.deploymentconfig.get('spec.template.spec.containers[0].env[%s].value' % idx)
break
# dry-run doesn't add the protocol to the ports section. We will manually do that.
for idx, port in enumerate(self.prepared_router['DeploymentConfig']['obj'].get(\
'spec.template.spec.containers[0].ports') or []):
                if 'protocol' not in port:
port['protocol'] = 'TCP'
# These are different when generating
skip = ['dnsPolicy',
'terminationGracePeriodSeconds',
'restartPolicy', 'timeoutSeconds',
'livenessProbe', 'readinessProbe',
'terminationMessagePath', 'hostPort',
'defaultMode',
]
if self.deploymentconfig is None or \
not Utils.check_def_equal(self.prepared_router['DeploymentConfig']['obj'].yaml_dict,
self.deploymentconfig.yaml_dict,
skip_keys=skip,
debug=self.verbose):
self.prepared_router['DeploymentConfig']['update'] = True
# Check if any of the parts need updating, if so, return True
# else, no need to update
# pylint: disable=no-member
return any([self.prepared_router[oc_type]['update'] for oc_type in self.prepared_router.keys()])
@staticmethod
def run_ansible(params, check_mode):
'''run ansible idempotent code'''
rconfig = RouterConfig(params['name'],
params['namespace'],
params['kubeconfig'],
{'default_cert': {'value': params['default_cert'], 'include': True},
'cert_file': {'value': params['cert_file'], 'include': False},
'key_file': {'value': params['key_file'], 'include': False},
'images': {'value': params['images'], 'include': True},
'latest_images': {'value': params['latest_images'], 'include': True},
'labels': {'value': params['labels'], 'include': True},
'ports': {'value': ','.join(params['ports']), 'include': True},
'replicas': {'value': params['replicas'], 'include': True},
'selector': {'value': params['selector'], 'include': True},
'service_account': {'value': params['service_account'], 'include': True},
'router_type': {'value': params['router_type'], 'include': False},
'host_network': {'value': params['host_network'], 'include': True},
'external_host': {'value': params['external_host'], 'include': True},
'external_host_vserver': {'value': params['external_host_vserver'],
'include': True},
'external_host_insecure': {'value': params['external_host_insecure'],
'include': True},
'external_host_partition_path': {'value': params['external_host_partition_path'],
'include': True},
'external_host_username': {'value': params['external_host_username'],
'include': True},
'external_host_password': {'value': params['external_host_password'],
'include': True},
'external_host_private_key': {'value': params['external_host_private_key'],
'include': True},
'expose_metrics': {'value': params['expose_metrics'], 'include': True},
'metrics_image': {'value': params['metrics_image'], 'include': True},
'stats_user': {'value': params['stats_user'], 'include': True},
'stats_password': {'value': params['stats_password'], 'include': True},
'stats_port': {'value': params['stats_port'], 'include': True},
# extra
'cacert_file': {'value': params['cacert_file'], 'include': False},
# edits
'edits': {'value': params['edits'], 'include': False},
})
state = params['state']
ocrouter = Router(rconfig, verbose=params['debug'])
api_rval = ocrouter.get()
########
# get
########
if state == 'list':
return {'changed': False, 'results': api_rval, 'state': state}
########
# Delete
########
if state == 'absent':
if not ocrouter.exists():
return {'changed': False, 'state': state}
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a delete.'}
# In case of delete we return a list of each object
# that represents a router and its result in a list
# pylint: disable=redefined-variable-type
api_rval = ocrouter.delete()
return {'changed': True, 'results': api_rval, 'state': state}
if state == 'present':
########
# Create
########
if not ocrouter.exists():
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'}
api_rval = ocrouter.create()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
########
# Update
########
if not ocrouter.needs_update():
return {'changed': False, 'state': state}
if check_mode:
return {'changed': False, 'msg': 'CHECK_MODE: Would have performed an update.'}
api_rval = ocrouter.update()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
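
# Illustrative usage sketch (assumption, not part of this module): outside of
# Ansible the Router class is driven much like run_ansible() does it -- build a
# RouterConfig whose option dict uses {'value': ..., 'include': ...} entries,
# wrap it in a Router, then call get()/exists()/create()/update():
#
#   rconfig = RouterConfig(name, namespace, kubeconfig, {...option dict...})
#   ocrouter = Router(rconfig, verbose=False)
#   ocrouter.get()
#   if not ocrouter.exists():
#       api_rval = ocrouter.create()
#   elif ocrouter.needs_update():
#       api_rval = ocrouter.update()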
|
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import copy
from oslo_log import log
from oslo_log import versionutils
import six
import keystone.conf
from keystone import exception
from keystone.i18n import _
from keystone.i18n import _LE
CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)
def get_project_from_domain(domain_ref):
"""Create a project ref from the provided domain ref."""
project_ref = domain_ref.copy()
project_ref['is_domain'] = True
project_ref['domain_id'] = None
project_ref['parent_id'] = None
return project_ref
# The ResourceDriverBase class is the set of driver methods from earlier
# drivers that we still support, that have not been removed or modified. This
# class is then used to create the augmented V8 and V9 version abstract driver
# classes, without having to duplicate a lot of abstract method signatures.
# If you remove a method from V9, then move the abstract methods from this Base
# class to the V8 class. Do not modify any of the method signatures in the Base
# class - changes should only be made in the V8 and subsequent classes.
# Starting with V9, some drivers use a special value to represent a domain_id
# of None. See comment in Project class of resource/backends/sql.py for more
# details.
NULL_DOMAIN_ID = '<<keystone.domain.root>>'
@six.add_metaclass(abc.ABCMeta)
class ResourceDriverBase(object):
def _get_list_limit(self):
return CONF.resource.list_limit or CONF.list_limit
# project crud
@abc.abstractmethod
def list_projects(self, hints):
"""List projects in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_from_ids(self, project_ids):
"""List projects for the provided list of ids.
:param project_ids: list of ids
:returns: a list of project_refs.
This method is used internally by the assignment manager to bulk read
a set of projects given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_ids_from_domain_ids(self, domain_ids):
"""List project ids for the provided list of domain ids.
:param domain_ids: list of domain ids
:returns: a list of project ids owned by the specified domain ids.
This method is used internally by the assignment manager to bulk read
a set of project ids given a list of domain ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_in_domain(self, domain_id):
"""List projects in the domain.
:param domain_id: the driver MUST only return projects
within this domain.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project(self, project_id):
"""Get a project by ID.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_project(self, project_id, project):
"""Update an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
:raises keystone.exception.Conflict: if project name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_project(self, project_id):
"""Delete an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_parents(self, project_id):
"""List all parents from a project by its ID.
:param project_id: the driver will list the parents of this
project.
:returns: a list of project_refs or an empty list.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def list_projects_in_subtree(self, project_id):
"""List all projects in the subtree of a given project.
:param project_id: the driver will get the subtree under
this project.
:returns: a list of project_refs or an empty list
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def is_leaf_project(self, project_id):
"""Check if a project is a leaf in the hierarchy.
:param project_id: the driver will check if this project
is a leaf in the hierarchy.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
def _validate_default_domain(self, ref):
"""Validate that either the default domain or nothing is specified.
Also removes the domain from the ref so that LDAP doesn't have to
persist the attribute.
"""
ref = ref.copy()
domain_id = ref.pop('domain_id', CONF.identity.default_domain_id)
self._validate_default_domain_id(domain_id)
return ref
def _validate_default_domain_id(self, domain_id):
"""Validate that the domain ID belongs to the default domain."""
if domain_id != CONF.identity.default_domain_id:
raise exception.DomainNotFound(domain_id=domain_id)
class ResourceDriverV8(ResourceDriverBase):
"""Removed or redefined methods from V8.
Move the abstract methods of any methods removed or modified in later
versions of the driver from ResourceDriverBase to here. We maintain this
so that legacy drivers, which will be a subclass of ResourceDriverV8, can
still reference them.
"""
@abc.abstractmethod
def create_project(self, tenant_id, tenant):
"""Create a new project.
:param tenant_id: This parameter can be ignored.
:param dict tenant: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: string
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
        If the project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, tenant_name, domain_id):
"""Get a tenant by name.
:returns: tenant_ref
:raises keystone.exception.ProjectNotFound: if a project with the
tenant_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
# Domain management functions for backends that only allow a single
# domain. Although we no longer use this, a custom legacy driver might
# have made use of it, so keep it here in case.
def _set_default_domain(self, ref):
"""If the domain ID has not been set, set it to the default."""
if isinstance(ref, dict):
if 'domain_id' not in ref:
ref = ref.copy()
ref['domain_id'] = CONF.identity.default_domain_id
return ref
elif isinstance(ref, list):
return [self._set_default_domain(x) for x in ref]
else:
raise ValueError(_('Expected dict or list: %s') % type(ref))
# domain crud
@abc.abstractmethod
def create_domain(self, domain_id, domain):
"""Create a new domain.
:raises keystone.exception.Conflict: if the domain_id or domain name
already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_domains(self, hints):
"""List domains in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of domain_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_domains_from_ids(self, domain_ids):
"""List domains for the provided list of ids.
:param domain_ids: list of ids
:returns: a list of domain_refs.
This method is used internally by the assignment manager to bulk read
a set of domains given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_domain(self, domain_id):
"""Get a domain by ID.
:returns: domain_ref
:raises keystone.exception.DomainNotFound: if domain_id does not exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_domain_by_name(self, domain_name):
"""Get a domain by name.
:returns: domain_ref
:raises keystone.exception.DomainNotFound: if domain_name does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_domain(self, domain_id, domain):
"""Update an existing domain.
:raises keystone.exception.DomainNotFound: if domain_id does not exist
:raises keystone.exception.Conflict: if domain name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_domain(self, domain_id):
"""Delete an existing domain.
:raises keystone.exception.DomainNotFound: if domain_id does not exist
"""
raise exception.NotImplemented() # pragma: no cover
class ResourceDriverV9(ResourceDriverBase):
"""New or redefined methods from V8.
Add any new V9 abstract methods (or those with modified signatures) to
this class.
"""
@abc.abstractmethod
def create_project(self, project_id, project):
"""Create a new project.
:param project_id: This parameter can be ignored.
:param dict project: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: [string, null]
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
If the project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, project_name, domain_id):
"""Get a project by name.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if a project with the
project_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_projects_from_ids(self, project_ids):
"""Delete a given list of projects.
Deletes a list of projects. Ensures no project on the list exists
after it is successfully called. If an empty list is provided,
        it is silently ignored. In addition, if a project ID in the list
of project_ids is not found in the backend, no exception is raised,
but a message is logged.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_acting_as_domain(self, hints):
"""List all projects acting as domains.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
class V9ResourceWrapperForV8Driver(ResourceDriverV9):
"""Wrapper class to supported a V8 legacy driver.
In order to support legacy drivers without having to make the manager code
driver-version aware, we wrap legacy drivers so that they look like the
latest version. For the various changes made in a new driver, here are the
actions needed in this wrapper:
Method removed from new driver - remove the call-through method from this
class, since the manager will no longer be
calling it.
Method signature (or meaning) changed - wrap the old method in a new
signature here, and munge the input
and output parameters accordingly.
New method added to new driver - add a method to implement the new
functionality here if possible. If that is
not possible, then return NotImplemented,
since we do not guarantee to support new
functionality with legacy drivers.
This wrapper contains the following support for newer manager code:
- The current manager code expects domains to be represented as projects
acting as domains, something that may not be possible in a legacy driver.
Hence the wrapper will map any calls for projects acting as a domain back
    onto the driver domain methods. The caveat is that this assumes there
    cannot be a clash between a project_id and a domain_id, in which case it
    may not be able to locate the correct entry.
"""
@versionutils.deprecated(
as_of=versionutils.deprecated.MITAKA,
what='keystone.resource.ResourceDriverV8',
in_favor_of='keystone.resource.ResourceDriverV9',
remove_in=+2)
def __init__(self, wrapped_driver):
self.driver = wrapped_driver
def _get_domain_from_project(self, project_ref):
"""Create a domain ref from a project ref.
Based on the provided project ref (or partial ref), creates a
domain ref, so that the result can be passed to the driver
domain methods.
"""
domain_ref = project_ref.copy()
for k in ['parent_id', 'domain_id', 'is_domain']:
domain_ref.pop(k, None)
return domain_ref
def get_project_by_name(self, project_name, domain_id):
if domain_id is None:
try:
domain_ref = self.driver.get_domain_by_name(project_name)
return get_project_from_domain(domain_ref)
except exception.DomainNotFound:
raise exception.ProjectNotFound(project_id=project_name)
else:
return self.driver.get_project_by_name(project_name, domain_id)
def create_project(self, project_id, project):
if project['is_domain']:
new_domain = self._get_domain_from_project(project)
domain_ref = self.driver.create_domain(project_id, new_domain)
return get_project_from_domain(domain_ref)
else:
return self.driver.create_project(project_id, project)
def list_projects(self, hints):
"""List projects and/or domains.
We use the hints filter to determine whether we are listing projects,
domains or both.
If the filter includes domain_id==None, then we should only list
domains (convert to a project acting as a domain) since regular
projects always have a non-None value for domain_id.
Likewise, if the filter includes domain_id==<non-None value>, then we
should only list projects.
        If there is no domain_id filter, then we need to do a combined listing
of domains and projects, converting domains to projects acting as a
domain.
"""
domain_listing_filter = None
for f in hints.filters:
if (f['name'] == 'domain_id'):
domain_listing_filter = f
if domain_listing_filter is not None:
if domain_listing_filter['value'] is not None:
proj_list = self.driver.list_projects(hints)
else:
domains = self.driver.list_domains(hints)
proj_list = [get_project_from_domain(p) for p in domains]
hints.filters.remove(domain_listing_filter)
return proj_list
else:
# No domain_id filter, so combine domains and projects. Although
# we hand any remaining filters into each driver, since each filter
# might need to be carried out more than once, we use copies of the
# filters, allowing the original filters to be passed back up to
# controller level where a final filter will occur.
local_hints = copy.deepcopy(hints)
proj_list = self.driver.list_projects(local_hints)
local_hints = copy.deepcopy(hints)
domains = self.driver.list_domains(local_hints)
for domain in domains:
proj_list.append(get_project_from_domain(domain))
return proj_list
def list_projects_from_ids(self, project_ids):
        return [self.get_project(project_id) for project_id in project_ids]
def list_project_ids_from_domain_ids(self, domain_ids):
return self.driver.list_project_ids_from_domain_ids(domain_ids)
def list_projects_in_domain(self, domain_id):
return self.driver.list_projects_in_domain(domain_id)
def get_project(self, project_id):
try:
domain_ref = self.driver.get_domain(project_id)
return get_project_from_domain(domain_ref)
except exception.DomainNotFound:
return self.driver.get_project(project_id)
def _is_domain(self, project_id):
ref = self.get_project(project_id)
return ref.get('is_domain', False)
def update_project(self, project_id, project):
if self._is_domain(project_id):
update_domain = self._get_domain_from_project(project)
domain_ref = self.driver.update_domain(project_id, update_domain)
return get_project_from_domain(domain_ref)
else:
return self.driver.update_project(project_id, project)
def delete_project(self, project_id):
if self._is_domain(project_id):
try:
self.driver.delete_domain(project_id)
except exception.DomainNotFound:
raise exception.ProjectNotFound(project_id=project_id)
else:
self.driver.delete_project(project_id)
def delete_projects_from_ids(self, project_ids):
raise exception.NotImplemented() # pragma: no cover
def list_project_parents(self, project_id):
"""List a project's ancestors.
The current manager expects the ancestor tree to end with the project
acting as the domain (since that's now the top of the tree), but a
legacy driver will not have that top project in their projects table,
since it's still in the domain table. Hence we lift the algorithm for
traversing up the tree from the driver to here, so that our version of
get_project() is called, which will fetch the "project" from the right
table.
"""
project = self.get_project(project_id)
parents = []
examined = set()
while project.get('parent_id') is not None:
if project['id'] in examined:
msg = _LE('Circular reference or a repeated '
'entry found in projects hierarchy - '
'%(project_id)s.')
LOG.error(msg, {'project_id': project['id']})
return
examined.add(project['id'])
parent_project = self.get_project(project['parent_id'])
parents.append(parent_project)
project = parent_project
return parents
def list_projects_in_subtree(self, project_id):
return self.driver.list_projects_in_subtree(project_id)
def is_leaf_project(self, project_id):
return self.driver.is_leaf_project(project_id)
def list_projects_acting_as_domain(self, hints):
refs = self.driver.list_domains(hints)
return [get_project_from_domain(p) for p in refs]
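
# Minimal usage sketch (assumption, not part of keystone): how the manager
# layer can hide a legacy V8 driver behind the V9 interface defined above.
# 'MyLegacyDriver' and 'hints' are hypothetical.
#
#   legacy = MyLegacyDriver()                      # subclass of ResourceDriverV8
#   driver = V9ResourceWrapperForV8Driver(legacy)
#   driver.list_projects_acting_as_domain(hints)   # answered via list_domains()
#   driver.get_project(project_id)                 # tries domains first, then projects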
|
|
#!/usr/bin/env python
# Copyright (c) 2015. Mark E. Madsen <[email protected]>
#
# This work is licensed under the terms of the Apache Software License, Version 2.0. See the file LICENSE for details.
"""
Description here
"""
from mongoengine import *
from seriation import idss_version
import datetime
import logging as logger
def get_file_location_keys():
    return sorted([k for k, v in SeriationFileLocations._fields.items()])
class SeriationFileLocations(EmbeddedDocument):
# files used by IDSS.py itself
inputfile = StringField()
xyfile = StringField()
pairfile = StringField()
mstfile = StringField()
shapefile = StringField()
metadatafile = StringField()
frequencyvnafile = StringField()
frequencypairsvnafile = StringField()
frequencymstvnafile = StringField()
frequencymstdistvnafile = StringField()
frequencypointsshapefile = StringField()
frequencyatlasfile = StringField()
frequencyexceltxtfile = StringField()
frequencyexcelwookbookfile = StringField()
frequencysumgraphbyweightgmlfile = StringField()
frequencysumgraphbyweightshapefile = StringField()
frequencysumgraphbyweightvnafile = StringField()
frequencysumgraphbyweightpngfile = StringField()
frequencysumgraphbycountpngfile = StringField()
frequencysumgraphbycountgmlfile = StringField()
frequencyminmaxbyweightpngfile = StringField()
frequencyminmaxbyweightgmlfile = StringField()
frequencygeosignificancefile = StringField()
frequencymstpngfile = StringField()
contsumgraphfile = StringField()
contmstminfile = StringField()
contminmaxweightgml = StringField()
contminmaxcountgml = StringField()
continuityexceltxtfile = StringField()
continuityexcelworkbookfile = StringField()
continuityatlasfile = StringField()
continuityvalidseriationsatlasfile = StringField()
continuityuniquevalidseriationsatlasfile = StringField()
continuityvalidseriationsexceltxtfile = StringField()
continuityvalidseriationsexcelworkbookfile = StringField()
continuitygeosignificancefile = StringField()
continuitysumgraphbyweightpngfile = StringField()
continuitysumgraphbyweightgmlfile = StringField()
continuitysumgraphbycountpngfile = StringField()
continuitysumgraphbycountgmlfile = StringField()
continuityminmaxbyweightpngfile = StringField()
continuityminmaxbyweightgmlfile = StringField()
# files created by analysis scripts
# annotation in seriationct
annotatedfreqminmaxbyweightgmlfile = StringField()
annotatedfreqminmaxbyweightdotfile = StringField()
annotatedfreqminmaxbyweightpngfile = StringField()
annotatedcontminmaxbyweightgmlfile = StringField()
annotatedcontminmaxbyweightdotfile = StringField()
annotatedcontminmaxbyweightpngfile = StringField()
class SeriationRunParameters(EmbeddedDocument):
bootstrap_ci_flag = BooleanField()
bootstrap_significance = FloatField()
spatial_significance = BooleanField()
spatial_bootstrap_n = IntField()
xyfile_path = StringField(required=True)
inputfile = StringField(required=True)
outputdirectory = StringField(required=True)
continuity_seriation = BooleanField()
frequency_seriation = BooleanField()
full_cmdline = StringField()
class SeriationProfilingData(EmbeddedDocument):
bootstrap_ci_processing_time = FloatField()
total_frequency_processing_time = FloatField()
freq_main_processing_time = FloatField()
freq_spatial_processing_time = FloatField()
freq_output_processing_time = FloatField()
freq_minmaxweight_processing_time = FloatField()
freq_sumgraphweight_processing_time = FloatField()
freq_filter_processing_time = FloatField()
freq_excelmacro_processing_time = FloatField()
freq_excel_processing_time = FloatField()
freq_atlas_processing_time = FloatField()
freq_mst_processing_time = FloatField()
total_continuity_processing_time = FloatField()
total_occurrence_processing_time = FloatField()
class FrequencySeriationResult(EmbeddedDocument):
max_solution_size = IntField()
total_number_solutions = IntField()
spatial_significance_pvalue = FloatField()
class OccurrenceSeriationResult(EmbeddedDocument):
pass
class ContinuitySeriationResult(EmbeddedDocument):
spatial_significance_pvalue = FloatField()
class MinmaxSolutionMetrics(EmbeddedDocument):
"""
Scores or metrics from minmax seriation solutions
"""
score_chronological_accuracy = FloatField()
num_branch_points = IntField()
mean_degree = FloatField()
class SeriationRun(Document):
total_runtime = FloatField()
parameters = EmbeddedDocumentField(SeriationRunParameters)
profiling = EmbeddedDocumentField(SeriationProfilingData)
frequency_results = EmbeddedDocumentField(FrequencySeriationResult)
continuity_results = EmbeddedDocumentField(ContinuitySeriationResult)
occurrence_results = EmbeddedDocumentField(OccurrenceSeriationResult)
file_locations = EmbeddedDocumentField(SeriationFileLocations)
minmax_metrics = EmbeddedDocumentField(MinmaxSolutionMetrics)
version_used = StringField(required=True)
seriation_run_id = StringField(required=True)
num_assemblages = IntField()
num_classes = IntField()
date_seriation_run = DateTimeField(default=datetime.datetime.now)
source_identifier = StringField()
meta = {'allow_inheritance': True}
#TODO: Index the table to make annotation easy
class SeriationDatabase(object):
"""
persistence connection to the MongoDB database server
into which SeriationRun metadata, and in the future, primary
output, are stored.
"""
def __init__(self, args):
self.args = args
connect(db = args.database,
host = args.dbhost,
port = args.dbport,
username = args.dbuser,
password = args.dbpassword)
def store_run_metadata(self, stats_map, fileMap):
"""
Saves the metadata for a single seriation run. Parameter
subdocument is constructed from the command line args held
by the object, and the stats_map argument is a dictionary
returned by the seriate() method which contains timing
and solution statistics
:param stats_map :
"""
        if self.args.xyfile is None:
xyfile = "none"
else:
xyfile = self.args.xyfile
floc = SeriationFileLocations()
#logger.debug("fileMap: %s", fileMap)
        for fkey in fileMap:
            setattr(floc, fkey, str(fileMap[fkey]))
params = SeriationRunParameters()
params.inputfile = self.args.inputfile
params.bootstrap_ci_flag = bool(self.args.bootstrapCI)
params.bootstrap_significance = self.args.bootstrapSignificance
params.spatial_bootstrap_n = self.args.spatialbootstrapN
params.spatial_significance = bool(self.args.spatialsignificance)
params.xyfile_path = xyfile
params.outputdirectory = self.args.outputdirectory
params.continuity_seriation = bool(stats_map["continuity"])
params.frequency_seriation = bool(stats_map["frequency"])
params.full_cmdline = stats_map["cmdline"]
profile = SeriationProfilingData()
if 'bootstrap_ci_processing_time' in stats_map:
profile.bootstrap_ci_processing_time = stats_map["bootstrap_ci_processing_time"]
if 'frequency_processing_time' in stats_map:
profile.total_frequency_processing_time = stats_map['frequency_processing_time']
profile.freq_main_processing_time = stats_map['freq_main_processing_time']
profile.freq_filter_processing_time = stats_map['frequency_filter_solutions_time']
profile.freq_sumgraphweight_processing_time = stats_map["sumgraphweight_processing_time"]
profile.freq_output_processing_time = stats_map["frequency_output_processing_time"]
profile.freq_minmaxweight_processing_time = stats_map["minmax_weight_processing_time"]
if 'spatial_processing_time' in stats_map:
profile.freq_spatial_processing_time = stats_map["spatial_processing_time"]
if 'continuity_processing_time' in stats_map:
profile.total_continuity_processing_time = stats_map["continuity_processing_time"]
if 'occurrence_processing_time' in stats_map:
profile.total_occurrence_processing_time = stats_map["occurrence_processing_time"]
if 'mst_processing_time' in stats_map:
profile.freq_mst_processing_time = stats_map["mst_processing_time"]
if 'atlas_processing_time' in stats_map:
profile.freq_atlas_processing_time = stats_map["atlas_processing_time"]
if 'excel_processing_time' in stats_map:
profile.freq_excel_processing_time = stats_map["excel_processing_time"]
if 'excel_freqseriation_processing_time' in stats_map:
profile.freq_excelmacro_processing_time = stats_map["excel_freqseriation_processing_time"]
srun = SeriationRun()
srun.parameters = params
srun.profiling = profile
srun.file_locations = floc
srun.total_runtime = stats_map['execution_time']
srun.version_used = idss_version.__version__
srun.seriation_run_id = stats_map['seriation_run_id']
srun.num_assemblages = stats_map["num_assemblages"]
srun.num_classes = stats_map["num_classes"]
srun.source_identifier = self.args.source_identifier
# add the results from various seriation types
if self.args.frequency == 1:
freqres = FrequencySeriationResult()
freqres.max_solution_size = stats_map['max_seriation_size']
freqres.total_number_solutions = stats_map['total_number_solutions']
if 'frequency_geographic_pvalue' in stats_map:
freqres.spatial_significance_pvalue = stats_map['frequency_geographic_pvalue']
srun.frequency_results = freqres
if self.args.continuity == 1:
contres = ContinuitySeriationResult()
if 'continuity_geographic_pvalue' in stats_map:
contres.spatial_significance_pvalue = stats_map['continuity_geographic_pvalue']
srun.continuity_results = contres
# persist the entire set of results
srun.save()
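# A hypothetical usage sketch, not part of the original module: it assumes an
# argparse-style `args` object carrying the attributes referenced above
# (database, dbhost, dbport, inputfile, ...) plus the `stats_map` and
# `file_map` produced by a completed seriation run.
def _example_store_run(args, stats_map, file_map):
    """Sketch showing the intended use of SeriationDatabase."""
    db = SeriationDatabase(args)                 # opens the MongoDB connection
    db.store_run_metadata(stats_map, file_map)   # persists a single SeriationRun
    # the stored record can be fetched back by its run id
    return SeriationRun.objects(seriation_run_id=stats_map['seriation_run_id']).first()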
|
|
import time
from apps.user_settings.forms import EmailChangeForm
from canvas import stickers, bgwork
from canvas.models import CommentSticker
from canvas.notifications import expander
from canvas.notifications.actions import Actions
from canvas.notifications.email_channel import EmailChannel
from canvas.notifications.notification_models import Notification
from canvas.tests import tests_helpers as utils
from canvas.tests.tests_helpers import CanvasTestCase, create_user, create_comment, create_content
class TestExpander(CanvasTestCase):
def tearDown(self):
        # We disable performing the bgwork because we do not want to send the
        # email notifications just yet.
pass
def test_op_author_expander_notifies_author(self):
author = utils.create_user()
comment = utils.create_comment()
comment.author = author
comment.save()
self.assertEqual(comment.thread.op.author, author)
another_user = utils.create_user()
pn = Actions.replied(another_user, comment)
replied_expander = expander.get_expander(pn)()
notifications = replied_expander.expand(pn)
recipients = [n.recipient for n in notifications]
#self.assertEqual(len(recipients), 1)
self.assertIn(author, recipients)
def test_expander_honors_unsubscribe_per_channel(self):
author = utils.create_user()
comment = utils.create_comment()
comment.author = author
comment.save()
self.assertEqual(comment.thread.op.author, author)
another_user = utils.create_user()
pn = Actions.replied(another_user, comment)
notifications = expander.expand(pn)
self.assertTrue(notifications)
notification = filter(lambda n: n.channel == 'EmailChannel', notifications)[0]
self.assertEqual(author, notification.recipient)
# Now, let the user unsubscribe to the reply action.
author.kv.subscriptions.unsubscribe('thread_replied')
pn = Actions.replied(another_user, comment)
notifications = expander.expand(pn)
recipients = [n.recipient for n in notifications if n.channel == 'EmailChannel']
self.assertFalse(author in recipients)
def test_newsletter_expander(self):
user = create_user()
pn = Actions.newsletter(user)
notifications = expander.expand(pn)
self.assertEqual(len(notifications), 1)
notification = notifications.pop()
self.assertEqual(notification.recipient, user)
def test_newsletter_expander_for_user_with_no_email(self):
user = create_user(email="")
pn = Actions.newsletter(user)
self.assertFalse(user.email)
notifications = expander.expand(pn)
self.assertEqual(len(notifications), 0)
def test_digest_expander(self):
user = create_user()
pn = Actions.digest(user)
notifications = expander.expand(pn)
self.assertEqual(len(notifications), 1)
notification = notifications.pop()
self.assertEqual(notification.recipient, user)
class TestRepliedExpander(CanvasTestCase):
def test_replied_to_own_thread_does_not_email_self(self):
user = create_user()
comment = create_comment(author=user)
assert comment.author == user
reply = create_comment(replied_comment=comment)
assert comment == reply.replied_comment
pn = Actions.replied(user, reply)
notifications = expander.expand(pn)
recipients = [n.recipient for n in notifications]
self.assertNotIn(user, recipients)
def test_replied_op_only_tells_author(self):
author = create_user()
content = create_content()
op = create_comment(author=author, reply_content=content)
user = create_user()
reply = create_comment(replied_comment=op, author=user)
self.assertEqual(reply.replied_comment, op)
pn = Actions.replied(user, reply)
notifications = expander.expand(pn)
for n in notifications:
print n
self.assertEqual(len(filter(lambda n: n.channel == 'EmailChannel', notifications)), 1)
notification = notifications.pop()
# Note that it will be 'replied', not 'thread_replied'. The former is more specific.
self.assertEqual(notification.action, 'replied')
def test_replied_tells_author_and_op_author(self):
# Some dude starts a thread
author = create_user()
content = create_content()
op = create_comment(author=author, reply_content=content)
# Another dude posts a reply
guest_author = create_user()
reply = create_comment(replied_comment=op, author=guest_author, parent_comment=op)
self.assertEqual(reply.thread.op, op)
# A third dude replies to the guest author
guest_author_2 = create_user()
reply_2 = create_comment(replied_comment=reply, author=guest_author_2, parent_comment=op)
        self.assertEqual(reply_2.thread.op.author, author)
# Issue the action
pn = Actions.replied(guest_author_2, reply_2)
notifications = expander.expand(pn)
print notifications
# Now, we should tell both the OP author, and the guest author.
notifications = filter(lambda n: n.channel == 'EmailChannel', notifications)
self.assertEqual(len(notifications), 2)
n1 = notifications[0]
n2 = notifications[1]
self.assertEqual(n1.action, 'replied')
self.assertEqual(n1.recipient, guest_author)
self.assertEqual(n2.action, 'thread_replied')
self.assertEqual(n2.recipient, author)
class TestStickeredExpander(CanvasTestCase):
def test_notifies_author(self):
author = utils.create_user()
comment = utils.create_comment()
comment.author = author
comment.save()
another_user = utils.create_user()
comment_sticker = CommentSticker(comment=comment,
type_id=stickers.get("num1").type_id,
timestamp=time.time(),
ip="127.0.0.1",
user=another_user)
pn = Actions.stickered(another_user, comment_sticker)
ex = expander.get_expander(pn)()
recipients = ex.decide_recipients(pn)
self.assertEqual(len(recipients), 1)
self.assertIn(author, recipients)
class TestUserNotificationSubscription(CanvasTestCase):
def after_setUp(self):
self.user = create_user()
def test_unsubscribe_from_all(self):
user = self.user
self.assertTrue(user.kv.subscriptions.can_receive('remixed'))
self.assertTrue(user.kv.subscriptions.can_receive('replied'))
# Now unsubscribe from all
user.kv.subscriptions.unsubscribe_from_all()
self.assertFalse(user.kv.subscriptions.can_receive('remixed'))
self.assertFalse(user.kv.subscriptions.can_receive('replied'))
def test_unsubscribe_from_all_semantics(self):
user = create_user()
subs = user.kv.subscriptions
assert subs.can_receive("ALL")
assert subs.can_receive("ALL")
subs.unsubscribe_from_all()
assert not subs.can_receive("ALL")
for a in EmailChannel.all_handled_actions():
assert not subs.can_receive(a)
subs.subscribe("ALL")
assert subs.can_receive("ALL")
assert subs.can_receive("ALL")
for a in EmailChannel.all_handled_actions():
assert subs.can_receive(a)
def test_unsubscribe(self):
user = self.user
self.assertTrue(user.kv.subscriptions.can_receive('remixed'))
self.assertTrue(user.kv.subscriptions.can_receive('replied'))
user.kv.subscriptions.unsubscribe('remixed')
self.assertFalse(user.kv.subscriptions.can_receive('remixed'))
# Make sure it did not affect other subscriptions
self.assertTrue(user.kv.subscriptions.can_receive('replied'))
def test_subscribe(self):
user = self.user
subs = user.kv.subscriptions
self.assertTrue(subs.can_receive('replied'))
subs.unsubscribe('replied')
self.assertFalse(subs.can_receive('replied'))
subs.subscribe('replied')
print subs.hash.hgetall()
self.assertTrue(subs.can_receive('replied'))
class TestNotificationModels(CanvasTestCase):
def tearDown(self):
        # We disable performing the bgwork because we do not want to send the
        # email notifications just yet.
bgwork.clear()
def test_Notification_from_pn(self):
pn = Actions.replied(create_user(), create_comment())
notification = Notification.from_pending_notification(pn, create_user, "EmailChannel")
assert notification.recipient
for key in pn.data:
self.assertEqual(getattr(notification, key), getattr(pn, key))
assert pn.comment
assert notification.comment
|
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2007 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# $Id: __init__.py 1222 2007-09-01 11:00:40Z Alex.Holkner $
'''Audio and video playback.
pyglet can play WAV files, and if AVbin is installed, many other audio and
video formats.
Playback is handled by the `Player` class, which reads raw data from `Source`
objects and provides methods for pausing, seeking, adjusting the volume, and
so on. The `Player` class uses the best available audio device
(currently, only OpenAL is supported)::
player = Player()
A `Source` is used to decode arbitrary audio and video files. It is
associated with a single player by "queuing" it::
source = load('background_music.mp3')
player.queue(source)
Use the `Player` to control playback. Within your main run loop, you must
periodically call `dispatch_events` to ensure the audio buffers are refilled::
player.play()
while player.source: # While the source hasn't finished
player.dispatch_events()
If the source contains video, its `video_format` attribute will be non-None,
and the player's `texture` attribute will contain the current video image
synchronised to the audio.
Decoding sounds can be processor-intensive and may introduce latency,
particularly for short sounds that must be played quickly, such as bullets or
explosions. You can force such sounds to be decoded and retained in memory
rather than streamed from disk by wrapping the source in a `StaticSource`::
bullet_sound = StaticSource(load('bullet.wav'))
The other advantage of a `StaticSource` is that it can be queued on any number
of players, and so played many times simultaneously.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: __init__.py 1222 2007-09-01 11:00:40Z Alex.Holkner $'
import ctypes
import sys
import StringIO
from pyglet import event
class MediaException(Exception):
pass
class MediaFormatException(Exception):
pass
class CannotSeekException(MediaException):
pass
class AudioFormat(object):
'''Audio details.
An instance of this class is provided by sources with audio tracks. You
should not modify the fields, as they are used internally to describe the
format of data provided by the source.
:Ivariables:
`channels` : int
The number of channels: 1 for mono or 2 for stereo (pyglet does
not yet support surround-sound sources).
`sample_size` : int
Bits per sample; typically 8 or 16.
`sample_rate` : int
            Samples per second (in Hertz).
'''
def __init__(self, channels, sample_size, sample_rate):
self.channels = channels
self.sample_size = sample_size
self.sample_rate = sample_rate
# Convenience
self.bytes_per_sample = (sample_size >> 3) * channels
self.bytes_per_second = self.bytes_per_sample * sample_rate
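        # e.g. 16-bit stereo at 44100 Hz: bytes_per_sample = (16 >> 3) * 2 = 4,
        # bytes_per_second = 4 * 44100 = 176400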
class VideoFormat(object):
'''Video details.
An instance of this class is provided by sources with a video track. You
should not modify the fields.
Note that the sample aspect has no relation to the aspect ratio of the
video image. For example, a video image of 640x480 with sample aspect 2.0
should be displayed at 1280x480. It is the responsibility of the
application to perform this scaling.
:Ivariables:
`width` : int
Width of video image, in pixels.
`height` : int
Height of video image, in pixels.
`sample_aspect` : float
Aspect ratio (width over height) of a single video pixel.
'''
def __init__(self, width, height, sample_aspect=1.0):
self.width = width
self.height = height
self.sample_aspect = sample_aspect
class AudioData(object):
'''A single packet of audio data.
This class is used internally by pyglet.
:Ivariables:
`data` : str or ctypes array or pointer
Sample data.
`length` : int
Size of sample data, in bytes.
`timestamp` : float
Time of the first sample, in seconds.
`duration` : float
Total data duration, in seconds.
`is_eos` : bool
If True, this is the last audio packet in the source.
'''
def __init__(self, data, length, timestamp, duration, is_eos=False):
self.data = data
self.length = length
self.timestamp = timestamp
self.duration = duration
self.is_eos = is_eos
def consume(self, bytes, audio_format):
'''Remove some data from beginning of packet.'''
if not isinstance(self.data, str):
# XXX Create a string buffer for the whole packet then
# chop it up. Could do some pointer arith here and
# save a bit of data pushing, but my guess is this is
            # faster than fudging around with ctypes (and easier).
data = ctypes.create_string_buffer(self.length)
ctypes.memmove(data, self.data, self.length)
self.data = data
self.data = self.data[bytes:]
self.length -= bytes
self.duration -= bytes / float(audio_format.bytes_per_second)
self.timestamp += bytes / float(audio_format.bytes_per_second)
class Source(object):
'''An audio and/or video source.
:Ivariables:
`audio_format` : `AudioFormat`
Format of the audio in this source, or None if the source is
silent.
`video_format` : `VideoFormat`
Format of the video in this source, or None if there is no
video.
'''
_duration = None
audio_format = None
video_format = None
def _get_duration(self):
return self._duration
duration = property(lambda self: self._get_duration(),
doc='''The length of the source, in seconds.
Not all source durations can be determined; in this case the value
is None.
Read-only.
:type: float
''')
def play(self):
'''Play the source.
This is a convenience method which creates a ManagedSoundPlayer for
this source and plays it immediately.
:rtype: `ManagedSoundPlayer`
'''
player = ManagedSoundPlayer()
player.eos_action = player.EOS_STOP
player.queue(self)
player.play()
return player
# Internal methods that Players call on the source:
def _play(self):
'''Begin decoding in real-time.'''
pass
def _pause(self):
'''Pause decoding, but remain prerolled.'''
pass
def _stop(self):
'''Stop forever and clean up.'''
pass
def _seek(self, timestamp):
'''Seek to given timestamp.'''
raise CannotSeekException()
def _get_queue_source(self):
'''Return the `Source` to be used as the queue source for a player.
Default implementation returns self.'''
return self
def _get_audio_data(self, bytes):
'''Get next packet of audio data.
:Parameters:
`bytes` : int
Maximum number of bytes of data to return.
:rtype: `AudioData`
:return: Next packet of audio data, or None if there is no (more)
data.
'''
return None
def _init_texture(self, player):
'''Create the player's texture.'''
pass
def _update_texture(self, player, timestamp):
'''Update the texture on player.'''
pass
def _release_texture(self, player):
'''Release the player's texture.'''
pass
class StreamingSource(Source):
'''A source that is decoded as it is being played, and can only be
queued once.
'''
_is_queued = False
is_queued = property(lambda self: self._is_queued,
doc='''Determine if this source has been queued
on a `Player` yet.
Read-only.
:type: bool
''')
def _get_queue_source(self):
'''Return the `Source` to be used as the queue source for a player.
Default implementation returns self.'''
if self._is_queued:
raise MediaException('This source is already queued on a player.')
self._is_queued = True
return self
class StaticSource(Source):
'''A source that has been completely decoded in memory. This source can
be queued onto multiple players any number of times.
'''
def __init__(self, source):
'''Construct a `StaticSource` for the data in `source`.
:Parameters:
`source` : `Source`
The source to read and decode audio and video data from.
'''
if source.video_format:
            raise NotImplementedError(
'Static sources not supported for video yet.')
self.audio_format = source.audio_format
if not self.audio_format:
return
# TODO enable time-insensitive playback
source.play()
# Arbitrary: number of bytes to request at a time.
buffer_size = 1 << 20 # 1 MB
# Naive implementation. Driver-specific implementations may override
# to load static audio data into device (or at least driver) memory.
data = StringIO.StringIO()
while True:
audio_data = source._get_audio_data(buffer_size)
if not audio_data:
break
data.write(audio_data.data)
self._data = data.getvalue()
def _get_queue_source(self):
return StaticMemorySource(self._data, self.audio_format)
def _get_audio_data(self, bytes):
raise RuntimeError('StaticSource cannot be queued.')
class StaticMemorySource(StaticSource):
'''Helper class for default implementation of `StaticSource`. Do not use
directly.'''
def __init__(self, data, audio_format):
self._file = StringIO.StringIO(data)
self._max_offset = len(data)
self.audio_format = audio_format
self._duration = len(data) / float(audio_format.bytes_per_second)
def _seek(self, timestamp):
offset = int(timestamp * self.audio_format.bytes_per_second)
# Align to sample
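        # (a 2-byte sample frame is rounded down to an even offset; a 4-byte
        #  frame to a multiple of four, so a seek never lands mid-sample)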
if self.audio_format.bytes_per_sample == 2:
offset &= 0xfffffffe
        elif self.audio_format.bytes_per_sample == 4:
offset &= 0xfffffffc
self._file.seek(offset)
def _get_audio_data(self, bytes):
offset = self._file.tell()
timestamp = float(offset) / self.audio_format.bytes_per_second
data = self._file.read(bytes)
if not data:
return None
duration = float(len(data)) / self.audio_format.bytes_per_second
is_eos = self._file.tell() == self._max_offset
return AudioData(data,
len(data),
timestamp,
duration,
is_eos)
class BasePlayer(event.EventDispatcher):
'''A sound and/or video player.
Queue sources on this player to play them.
'''
#: The player will pause when it reaches the end of the stream.
EOS_PAUSE = 'pause'
    #: The player will loop the current stream continuously.
EOS_LOOP = 'loop'
#: The player will move on to the next queued stream when it reaches the
#: end of the current source. If there is no source queued, the player
#: will pause.
EOS_NEXT = 'next'
#: The player will stop entirely; valid only for ManagedSoundPlayer.
EOS_STOP = 'stop'
# Source and queuing attributes
_source = None
_eos_action = EOS_NEXT
_playing = False
    # Sound and spatialisation attributes
_volume = 1.0
_max_gain = 1.0
_min_gain = 0.0
_position = (0, 0, 0)
_velocity = (0, 0, 0)
_pitch = 1.0
_cone_orientation = (0, 0, 0)
_cone_inner_angle = 360.
_cone_outer_angle = 360.
_cone_outer_gain = 1.
# Video attributes
_texture = None
def queue(self, source):
'''Queue the source on this player.
If the player has no source, the player will be paused immediately
on this source.
:Parameters:
`source` : Source
The source to queue.
'''
def play(self):
'''Begin playing the current source.
This has no effect if the player is already playing.
'''
raise NotImplementedError('abstract')
def pause(self):
'''Pause playback of the current source.
This has no effect if the player is already paused.
'''
raise NotImplementedError('abstract')
def seek(self, timestamp):
'''Seek for playback to the indicated timestamp in seconds on the
current source. If the timestamp is outside the duration of the
source, it will be clamped to the end.
:Parameters:
`timestamp` : float
Timestamp to seek to.
'''
raise NotImplementedError('abstract')
def next(self):
'''Move immediately to the next queued source.
If the `eos_action` of this player is `EOS_NEXT`, and the source has
been queued for long enough, there will be no gap in the audio or
video playback. Otherwise, there may be some delay as the next source
is prerolled and the first frames decoded and buffered.
'''
raise NotImplementedError('abstract')
def dispatch_events(self):
'''Dispatch any pending events and perform regular heartbeat functions
to maintain playback.
'''
pass
def _get_time(self):
raise NotImplementedError('abstract')
time = property(lambda self: self._get_time(),
doc='''Retrieve the current playback time of the current
source.
The playback time is a float expressed in seconds, with 0.0 being
the beginning of the sound. The playback time returned represents
the time encoded in the source, and may not reflect actual time
passed due to pitch shifting or pausing.
Read-only.
:type: float
''')
def _get_source(self):
return self._source
source = property(lambda self: self._get_source(),
doc='''Return the current source.
Read-only.
:type: Source
''')
def _set_eos_action(self, action):
self._eos_action = action
eos_action = property(lambda self: self._eos_action,
_set_eos_action,
doc='''Set the behaviour of the player when it
reaches the end of the current source.
This must be one of the constants `EOS_NEXT`, `EOS_PAUSE` or
`EOS_LOOP`.
:type: str
''')
playing = property(lambda self: self._playing,
doc='''Determine if the player state is playing.
The `playing` property is irrespective of whether or not there is
actually a source to play. If `playing` is True and a source is
queued, it will begin playing immediately. If `playing` is False,
it is implied that the player is paused. There is no other possible
state.
Read-only.
:type: bool
''')
def _set_volume(self, volume):
raise NotImplementedError('abstract')
volume = property(lambda self: self._volume,
lambda self, volume: self._set_volume(volume),
doc='''The volume level of sound playback.
The nominal level is 1.0, and 0.0 is silence.
The volume level is affected by factors such as the distance from the
listener (if positioned), and is clamped (after distance attenuation)
to the range [min_gain, max_gain].
:type: float
''')
def _set_min_gain(self, min_gain):
raise NotImplementedError('abstract')
min_gain = property(lambda self: self._min_gain,
lambda self, min_gain: self._set_min_gain(min_gain),
                        doc='''The minimum gain to apply to the sound.
The gain is clamped after distance attenuation. The default value
is 0.0.
:type: float
''')
def _set_max_gain(self, max_gain):
raise NotImplementedError('abstract')
max_gain = property(lambda self: self._max_gain,
lambda self, max_gain: self._set_max_gain(max_gain),
doc='''The maximum gain to apply to the sound.
The gain is clamped after distance attenuation. The default value
is 1.0.
:type: float
''')
def _set_position(self, position):
raise NotImplementedError('abstract')
position = property(lambda self: self._position,
lambda self, position: self._set_position(position),
doc='''The position of the sound in 3D space.
The position is given as a tuple of floats (x, y, z). The unit
defaults to meters, but can be modified with the listener
properties.
:type: 3-tuple of float
''')
def _set_velocity(self, velocity):
raise NotImplementedError('abstract')
velocity = property(lambda self: self._velocity,
lambda self, velocity: self._set_velocity(velocity),
doc='''The velocity of the sound in 3D space.
The velocity is given as a tuple of floats (x, y, z). The unit
defaults to meters per second, but can be modified with the listener
properties.
:type: 3-tuple of float
''')
def _set_pitch(self, pitch):
raise NotImplementedError('abstract')
pitch = property(lambda self: self._pitch,
lambda self, pitch: self._set_pitch(pitch),
doc='''The pitch shift to apply to the sound.
The nominal pitch is 1.0. A pitch of 2.0 will sound one octave
higher, and play twice as fast. A pitch of 0.5 will sound one octave
lower, and play twice as slow. A pitch of 0.0 is not permitted.
The pitch shift is applied to the source before doppler effects.
:type: float
''')
def _set_cone_orientation(self, cone_orientation):
raise NotImplementedError('abstract')
cone_orientation = property(lambda self: self._cone_orientation,
lambda self, c: self._set_cone_orientation(c),
doc='''The direction of the sound in 3D space.
The direction is specified as a tuple of floats (x, y, z), and has no
unit. The default direction is (0, 0, -1). Directional effects are
only noticeable if the other cone properties are changed from their
default values.
:type: 3-tuple of float
''')
def _set_cone_inner_angle(self, cone_inner_angle):
raise NotImplementedError('abstract')
cone_inner_angle = property(lambda self: self._cone_inner_angle,
lambda self, a: self._set_cone_inner_angle(a),
doc='''The interior angle of the inner cone.
The angle is given in degrees, and defaults to 360. When the listener
is positioned within the volume defined by the inner cone, the sound
is played at normal gain (see `volume`).
:type: float
''')
def _set_cone_outer_angle(self, cone_outer_angle):
raise NotImplementedError('abstract')
cone_outer_angle = property(lambda self: self._cone_outer_angle,
lambda self, a: self._set_cone_outer_angle(a),
doc='''The interior angle of the outer cone.
The angle is given in degrees, and defaults to 360. When the listener
is positioned within the volume defined by the outer cone, but outside
the volume defined by the inner cone, the gain applied is a smooth
interpolation between `volume` and `cone_outer_gain`.
:type: float
''')
def _set_cone_outer_gain(self, cone_outer_gain):
raise NotImplementedError('abstract')
cone_outer_gain = property(lambda self: self._cone_outer_gain,
lambda self, g: self._set_cone_outer_gain(g),
doc='''The gain applied outside the cone.
When the listener is positioned outside the volume defined by the
outer cone, this gain is applied instead of `volume`.
:type: float
''')
texture = property(lambda self: self._texture,
doc='''The video texture.
You should rerequest this property every time you display a frame
of video, as multiple textures might be used. This property will
be `None` if there is no video in the current source.
:type: `pyglet.image.Texture`
''')
if getattr(sys, 'is_epydoc', False):
def on_eos():
'''The player has reached the end of the current source.
This event is dispatched regardless of the EOS action. You
can alter the EOS action in this event handler, however playback
may stutter as the media device will not have enough time to
decode and buffer the new data in advance.
:event:
'''
BasePlayer.register_event_type('on_eos')
class ManagedSoundPlayerMixIn(object):
def __init__(self):
super(ManagedSoundPlayerMixIn, self).__init__()
managed_players.append(self)
def stop(self):
managed_players.remove(self)
class Listener(object):
'''The listener properties for positional audio.
You can obtain the singleton instance of this class as
`pyglet.media.listener`.
'''
_volume = 1.0
_position = (0, 0, 0)
_velocity = (0, 0, 0)
_forward_orientation = (0, 0, -1)
_up_orientation = (0, 1, 0)
_doppler_factor = 1.
_speed_of_sound = 343.3
def _set_volume(self, volume):
raise NotImplementedError('abstract')
volume = property(lambda self: self._volume,
lambda self, volume: self._set_volume(volume),
doc='''The master volume for sound playback.
All sound volumes are multiplied by this master volume before being
played. A value of 0 will silence playback (but still consume
resources). The nominal volume is 1.0.
:type: float
''')
def _set_position(self, position):
raise NotImplementedError('abstract')
position = property(lambda self: self._position,
lambda self, position: self._set_position(position),
doc='''The position of the listener in 3D space.
The position is given as a tuple of floats (x, y, z). The unit
defaults to meters, but can be modified with the listener
properties.
:type: 3-tuple of float
''')
def _set_velocity(self, velocity):
raise NotImplementedError('abstract')
velocity = property(lambda self: self._velocity,
lambda self, velocity: self._set_velocity(velocity),
doc='''The velocity of the listener in 3D space.
The velocity is given as a tuple of floats (x, y, z). The unit
defaults to meters per second, but can be modified with the listener
properties.
:type: 3-tuple of float
''')
def _set_forward_orientation(self, orientation):
raise NotImplementedError('abstract')
forward_orientation = property(lambda self: self._forward_orientation,
lambda self, o: self._set_forward_orientation(o),
doc='''A vector giving the direction the
listener is facing.
The orientation is given as a tuple of floats (x, y, z), and has
        no unit. The forward orientation should be orthogonal to the
up orientation.
:type: 3-tuple of float
''')
def _set_up_orientation(self, orientation):
raise NotImplementedError('abstract')
up_orientation = property(lambda self: self._up_orientation,
lambda self, o: self._set_up_orientation(o),
doc='''A vector giving the "up" orientation
of the listener.
The orientation is given as a tuple of floats (x, y, z), and has
        no unit. The up orientation should be orthogonal to the
forward orientation.
:type: 3-tuple of float
''')
def _set_doppler_factor(self, factor):
raise NotImplementedError('abstract')
doppler_factor = property(lambda self: self._doppler_factor,
lambda self, f: self._set_doppler_factor(f),
doc='''The emphasis to apply to the doppler
effect for sounds that move relative to the listener.
The default value is 1.0, which results in a physically-based
calculation. The effect can be enhanced by using a higher factor,
or subdued using a fractional factor (negative factors are
ignored).
:type: float
''')
def _set_speed_of_sound(self, speed_of_sound):
raise NotImplementedError('abstract')
speed_of_sound = property(lambda self: self._speed_of_sound,
lambda self, s: self._set_speed_of_sound(s),
doc='''The speed of sound, in units per second.
The default value is 343.3, a typical result at sea-level on a mild
day, using meters as the distance unit.
The speed of sound only affects the calculation of pitch shift to
        apply due to doppler effects; in particular, no propagation delay
or relative phase adjustment is applied (in current implementations
of audio devices).
:type: float
''')
if getattr(sys, 'is_epydoc', False):
#: The singleton listener.
#:
#: :type: `Listener`
listener = Listener()
# Document imaginary Player class
Player = BasePlayer
Player.__name__ = 'Player'
del BasePlayer
# Document imaginary ManagedSoundPlayer class. (Actually implemented
# by ManagedSoundPlayerMixIn).
class ManagedSoundPlayer(Player):
'''A player which takes care of updating its own audio buffers.
This player will continue playing the sound until the sound is
finished, even if the application discards the player early.
There is no need to call `Player.dispatch_events` on this player,
though you must call `pyglet.media.dispatch_events`.
'''
#: The only possible end of stream action for a managed player.
EOS_STOP = 'stop'
eos_action = property(lambda self: EOS_STOP,
doc='''The fixed eos_action is `EOS_STOP`,
in which the player is discarded as soon as the source has
finished.
Read-only.
:type: str
''')
else:
# Find best available sound driver according to user preference
import pyglet
driver = None
for driver_name in pyglet.options['audio_driver']:
try:
driver_name = 'pyglet.media.drivers.' + driver_name
__import__(driver_name)
driver = sys.modules[driver_name]
break
except ImportError:
pass
if not driver:
raise ImportError('No suitable audio driver could be loaded.')
driver.driver_init()
Player = driver.DriverPlayer
ManagedSoundPlayer = driver.DriverManagedSoundPlayer
listener = driver.driver_listener
# Find best available source loader
try:
from pyglet.media import avbin
_source_class = avbin.AVbinSource
except ImportError:
from pyglet.media import riff
_source_class = riff.WaveSource
def load(filename, file=None, streaming=True):
'''Load a source from a file.
Currently the `file` argument is not supported; media files must exist
as real paths.
:Parameters:
`filename` : str
Filename of the media file to load.
`file` : file-like object
Not yet supported.
`streaming` : bool
If False, a `StaticSource` will be returned; otherwise (default) a
`StreamingSource` is created.
:rtype: `Source`
'''
source = _source_class(filename, file)
if not streaming:
source = StaticSource(source)
return source
managed_players = []
def dispatch_events():
'''Process managed audio events.
You must call this function regularly (typically once per run loop
iteration) in order to keep audio buffers of managed players full.
'''
for player in managed_players:
player.dispatch_events()
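# A minimal usage sketch, not part of the original module. It assumes a local
# 'example.wav' file and that a supported audio driver loaded successfully.
if __name__ == '__main__':
    music = load('example.wav', streaming=False)   # decode fully into memory
    player = music.play()                          # returns a ManagedSoundPlayer
    while player.source:                           # until the source finishes
        dispatch_events()                          # keep managed audio buffers full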
|
|
import operator
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import escape, format_html
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
@stringfilter
def trim(value, num):
return value[:num]
@register.filter
@mark_safe
def make_data_div(value):
"""A filter that uses a decorator (@mark_safe)."""
return '<div data-name="%s"></div>' % value
@register.filter
def noop(value, param=None):
"""A noop filter that always return its first argument and does nothing with
its second (optional) one.
Useful for testing out whitespace in filter arguments (see #19882)."""
return value
@register.simple_tag(takes_context=True)
def context_stack_length(context):
return len(context.dicts)
@register.simple_tag
def no_params():
"""Expected no_params __doc__"""
return "no_params - Expected result"
no_params.anything = "Expected no_params __dict__"
@register.simple_tag
def one_param(arg):
"""Expected one_param __doc__"""
return "one_param - Expected result: %s" % arg
one_param.anything = "Expected one_param __dict__"
@register.simple_tag(takes_context=False)
def explicit_no_context(arg):
"""Expected explicit_no_context __doc__"""
return "explicit_no_context - Expected result: %s" % arg
explicit_no_context.anything = "Expected explicit_no_context __dict__"
@register.simple_tag(takes_context=True)
def no_params_with_context(context):
"""Expected no_params_with_context __doc__"""
return "no_params_with_context - Expected result (context value: %s)" % context['value']
no_params_with_context.anything = "Expected no_params_with_context __dict__"
@register.simple_tag(takes_context=True)
def params_and_context(context, arg):
"""Expected params_and_context __doc__"""
return "params_and_context - Expected result (context value: %s): %s" % (context['value'], arg)
params_and_context.anything = "Expected params_and_context __dict__"
@register.simple_tag
def simple_two_params(one, two):
"""Expected simple_two_params __doc__"""
return "simple_two_params - Expected result: %s, %s" % (one, two)
simple_two_params.anything = "Expected simple_two_params __dict__"
@register.simple_tag
def simple_keyword_only_param(*, kwarg):
return "simple_keyword_only_param - Expected result: %s" % kwarg
@register.simple_tag
def simple_keyword_only_default(*, kwarg=42):
return "simple_keyword_only_default - Expected result: %s" % kwarg
@register.simple_tag
def simple_one_default(one, two='hi'):
"""Expected simple_one_default __doc__"""
return "simple_one_default - Expected result: %s, %s" % (one, two)
simple_one_default.anything = "Expected simple_one_default __dict__"
@register.simple_tag
def simple_unlimited_args(one, two='hi', *args):
"""Expected simple_unlimited_args __doc__"""
return "simple_unlimited_args - Expected result: %s" % (
', '.join(str(arg) for arg in [one, two, *args])
)
simple_unlimited_args.anything = "Expected simple_unlimited_args __dict__"
@register.simple_tag
def simple_only_unlimited_args(*args):
"""Expected simple_only_unlimited_args __doc__"""
return "simple_only_unlimited_args - Expected result: %s" % ', '.join(str(arg) for arg in args)
simple_only_unlimited_args.anything = "Expected simple_only_unlimited_args __dict__"
@register.simple_tag
def simple_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
"""Expected simple_unlimited_args_kwargs __doc__"""
# Sort the dictionary by key to guarantee the order for testing.
sorted_kwarg = sorted(kwargs.items(), key=operator.itemgetter(0))
return "simple_unlimited_args_kwargs - Expected result: %s / %s" % (
', '.join(str(arg) for arg in [one, two, *args]),
', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
)
simple_unlimited_args_kwargs.anything = "Expected simple_unlimited_args_kwargs __dict__"
@register.simple_tag(takes_context=True)
def simple_tag_without_context_parameter(arg):
"""Expected simple_tag_without_context_parameter __doc__"""
return "Expected result"
simple_tag_without_context_parameter.anything = "Expected simple_tag_without_context_parameter __dict__"
@register.simple_tag(takes_context=True)
def simple_tag_takes_context_without_params():
"""Expected simple_tag_takes_context_without_params __doc__"""
return 'Expected result'
simple_tag_takes_context_without_params.anything = (
'Expected simple_tag_takes_context_without_params __dict__'
)
@register.simple_tag(takes_context=True)
def escape_naive(context):
"""A tag that doesn't even think about escaping issues"""
return "Hello {}!".format(context['name'])
@register.simple_tag(takes_context=True)
def escape_explicit(context):
"""A tag that uses escape explicitly"""
return escape("Hello {}!".format(context['name']))
@register.simple_tag(takes_context=True)
def escape_format_html(context):
"""A tag that uses format_html"""
return format_html("Hello {0}!", context['name'])
@register.simple_tag(takes_context=True)
def current_app(context):
return str(context.current_app)
@register.simple_tag(takes_context=True)
def use_l10n(context):
return str(context.use_l10n)
@register.simple_tag(name='minustwo')
def minustwo_overridden_name(value):
return value - 2
register.simple_tag(lambda x: x - 1, name='minusone')
@register.tag('counter')
def counter(parser, token):
return CounterNode()
class CounterNode(template.Node):
def __init__(self):
self.count = 0
def render(self, context):
count = self.count
self.count = count + 1
return str(count)
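# Illustrative template usage of a few of the tags and filters above; the
# library name 'custom' is an assumption about how this module is registered.
#
#   {% load custom %}
#   {{ value|trim:3 }}            -> first three characters of value
#   {% simple_one_default "A" %}  -> "simple_one_default - Expected result: A, hi"
#   {% counter %}                 -> "0" on the first render of the compiled node,
#                                    incrementing on each subsequent render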
|
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# License: MIT. See LICENSE
import frappe
from frappe import _
import json
from frappe.model.document import Document
from frappe.desk.doctype.notification_log.notification_log import enqueue_create_notification,\
get_title, get_title_html
from frappe.desk.doctype.notification_settings.notification_settings\
import is_email_notifications_enabled_for_type, is_email_notifications_enabled
from frappe.utils import cint, get_fullname, getdate, get_link_to_form
class EnergyPointLog(Document):
def validate(self):
self.map_milestone_reference()
if self.type in ['Appreciation', 'Criticism'] and self.user == self.owner:
frappe.throw(_('You cannot give review points to yourself'))
def map_milestone_reference(self):
# link energy point to the original reference, if set by milestone
if self.reference_doctype == 'Milestone':
self.reference_doctype, self.reference_name = frappe.db.get_value('Milestone', self.reference_name,
['reference_type', 'reference_name'])
def after_insert(self):
alert_dict = get_alert_dict(self)
if alert_dict:
frappe.publish_realtime('energy_point_alert', message=alert_dict, user=self.user)
frappe.cache().hdel('energy_points', self.user)
frappe.publish_realtime('update_points', after_commit=True)
if self.type != 'Review' and \
frappe.get_cached_value('Notification Settings', self.user, 'energy_points_system_notifications'):
reference_user = self.user if self.type == 'Auto' else self.owner
notification_doc = {
'type': 'Energy Point',
'document_type': self.reference_doctype,
'document_name': self.reference_name,
'subject': get_notification_message(self),
'from_user': reference_user,
'email_content': '<div>{}</div>'.format(self.reason) if self.reason else None
}
enqueue_create_notification(self.user, notification_doc)
def on_trash(self):
if self.type == 'Revert':
reference_log = frappe.get_doc('Energy Point Log', self.revert_of)
reference_log.reverted = 0
reference_log.save()
@frappe.whitelist()
def revert(self, reason, ignore_permissions=False):
if not ignore_permissions:
frappe.only_for('System Manager')
if self.type != 'Auto':
frappe.throw(_('This document cannot be reverted'))
if self.get('reverted'):
return
self.reverted = 1
self.save(ignore_permissions=True)
revert_log = frappe.get_doc({
'doctype': 'Energy Point Log',
'points': -(self.points),
'type': 'Revert',
'user': self.user,
'reason': reason,
'reference_doctype': self.reference_doctype,
'reference_name': self.reference_name,
'revert_of': self.name
}).insert(ignore_permissions=True)
return revert_log
def get_notification_message(doc):
owner_name = get_fullname(doc.owner)
points = doc.points
title = get_title(doc.reference_doctype, doc.reference_name)
if doc.type == 'Auto':
owner_name = frappe.bold('You')
if points == 1:
message = _('{0} gained {1} point for {2} {3}')
else:
message = _('{0} gained {1} points for {2} {3}')
message = message.format(owner_name, frappe.bold(points), doc.rule, get_title_html(title))
elif doc.type == 'Appreciation':
if points == 1:
message = _('{0} appreciated your work on {1} with {2} point')
else:
message = _('{0} appreciated your work on {1} with {2} points')
message = message.format(frappe.bold(owner_name), get_title_html(title), frappe.bold(points))
elif doc.type == 'Criticism':
if points == 1:
message = _('{0} criticized your work on {1} with {2} point')
else:
message = _('{0} criticized your work on {1} with {2} points')
message = message.format(frappe.bold(owner_name), get_title_html(title), frappe.bold(points))
elif doc.type == 'Revert':
if points == 1:
message = _('{0} reverted your point on {1}')
else:
message = _('{0} reverted your points on {1}')
message = message.format(frappe.bold(owner_name), get_title_html(title))
return message
def get_alert_dict(doc):
alert_dict = frappe._dict()
owner_name = get_fullname(doc.owner)
if doc.reference_doctype:
doc_link = get_link_to_form(doc.reference_doctype, doc.reference_name)
points = doc.points
bold_points = frappe.bold(doc.points)
if doc.type == 'Auto':
if points == 1:
message = _('You gained {0} point')
else:
message = _('You gained {0} points')
alert_dict.message = message.format(bold_points)
alert_dict.indicator = 'green'
elif doc.type == 'Appreciation':
if points == 1:
message = _('{0} appreciated your work on {1} with {2} point')
else:
message = _('{0} appreciated your work on {1} with {2} points')
alert_dict.message = message.format(
owner_name,
doc_link,
bold_points
)
alert_dict.indicator = 'green'
elif doc.type == 'Criticism':
if points == 1:
message = _('{0} criticized your work on {1} with {2} point')
else:
message = _('{0} criticized your work on {1} with {2} points')
alert_dict.message = message.format(
owner_name,
doc_link,
bold_points
)
alert_dict.indicator = 'red'
elif doc.type == 'Revert':
if points == 1:
message = _('{0} reverted your point on {1}')
else:
message = _('{0} reverted your points on {1}')
alert_dict.message = message.format(
owner_name,
doc_link,
)
alert_dict.indicator = 'red'
return alert_dict
def create_energy_points_log(ref_doctype, ref_name, doc, apply_only_once=False):
doc = frappe._dict(doc)
log_exists = check_if_log_exists(ref_doctype,
ref_name, doc.rule, None if apply_only_once else doc.user)
if log_exists:
return frappe.get_doc('Energy Point Log', log_exists)
new_log = frappe.new_doc('Energy Point Log')
new_log.reference_doctype = ref_doctype
new_log.reference_name = ref_name
new_log.update(doc)
new_log.insert(ignore_permissions=True)
return new_log
def check_if_log_exists(ref_doctype, ref_name, rule, user=None):
	'''Checks if Energy Point Log already exists'''
filters = frappe._dict({
'rule': rule,
'reference_doctype': ref_doctype,
'reference_name': ref_name,
'reverted': 0
})
if user:
filters.user = user
return frappe.db.exists('Energy Point Log', filters)
def create_review_points_log(user, points, reason=None, doctype=None, docname=None):
return frappe.get_doc({
'doctype': 'Energy Point Log',
'points': points,
'type': 'Review',
'user': user,
'reason': reason,
'reference_doctype': doctype,
'reference_name': docname
}).insert(ignore_permissions=True)
@frappe.whitelist()
def add_review_points(user, points):
frappe.only_for('System Manager')
create_review_points_log(user, points)
@frappe.whitelist()
def get_energy_points(user):
# points = frappe.cache().hget('energy_points', user,
# lambda: get_user_energy_and_review_points(user))
# TODO: cache properly
points = get_user_energy_and_review_points(user)
return frappe._dict(points.get(user, {}))
@frappe.whitelist()
def get_user_energy_and_review_points(user=None, from_date=None, as_dict=True):
conditions = ''
given_points_condition = ''
values = frappe._dict()
if user:
conditions = 'WHERE `user` = %(user)s'
values.user = user
if from_date:
conditions += 'WHERE' if not conditions else 'AND'
given_points_condition += "AND `creation` >= %(from_date)s"
conditions += " `creation` >= %(from_date)s OR `type`='Review'"
values.from_date = from_date
points_list = frappe.db.sql("""
SELECT
SUM(CASE WHEN `type` != 'Review' THEN `points` ELSE 0 END) AS energy_points,
SUM(CASE WHEN `type` = 'Review' THEN `points` ELSE 0 END) AS review_points,
SUM(CASE
WHEN `type`='Review' AND `points` < 0 {given_points_condition}
THEN ABS(`points`)
ELSE 0
END) as given_points,
`user`
FROM `tabEnergy Point Log`
{conditions}
GROUP BY `user`
ORDER BY `energy_points` DESC
""".format(
conditions=conditions,
given_points_condition=given_points_condition
), values=values, as_dict=1)
if not as_dict:
return points_list
dict_to_return = frappe._dict()
for d in points_list:
dict_to_return[d.pop('user')] = d
return dict_to_return
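# Illustrative shape of the dict returned above when as_dict=True (the user
# email and point values are made up):
#   {'[email protected]': {'energy_points': 120, 'review_points': 3, 'given_points': 1}}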
@frappe.whitelist()
def review(doc, points, to_user, reason, review_type='Appreciation'):
current_review_points = get_energy_points(frappe.session.user).review_points
doc = doc.as_dict() if hasattr(doc, 'as_dict') else frappe._dict(json.loads(doc))
points = abs(cint(points))
if current_review_points < points:
frappe.msgprint(_('You do not have enough review points'))
return
review_doc = create_energy_points_log(doc.doctype, doc.name, {
'type': review_type,
'reason': reason,
'points': points if review_type == 'Appreciation' else -points,
'user': to_user
})
# deduct review points from reviewer
create_review_points_log(
user=frappe.session.user,
points=-points,
reason=reason,
doctype=review_doc.doctype,
docname=review_doc.name
)
return review_doc
@frappe.whitelist()
def get_reviews(doctype, docname):
return frappe.get_all('Energy Point Log', filters={
'reference_doctype': doctype,
'reference_name': docname,
'type': ['in', ('Appreciation', 'Criticism')],
}, fields=['points', 'owner', 'type', 'user', 'reason', 'creation'])
def send_weekly_summary():
send_summary('Weekly')
def send_monthly_summary():
send_summary('Monthly')
def send_summary(timespan):
from frappe.utils.user import get_enabled_system_users
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
if not is_energy_point_enabled():
return
if not is_email_notifications_enabled_for_type(frappe.session.user, 'Energy Point'):
return
from_date = frappe.utils.add_to_date(None, weeks=-1)
if timespan == 'Monthly':
from_date = frappe.utils.add_to_date(None, months=-1)
user_points = get_user_energy_and_review_points(from_date=from_date, as_dict=False)
# do not send report if no activity found
if not user_points or not user_points[0].energy_points: return
from_date = getdate(from_date)
to_date = getdate()
# select only those users that have energy point email notifications enabled
all_users = [user.email for user in get_enabled_system_users() if
is_email_notifications_enabled_for_type(user.name, 'Energy Point')]
frappe.sendmail(
subject = '{} energy points summary'.format(timespan),
recipients = all_users,
template = "energy_points_summary",
args = {
'top_performer': user_points[0],
'top_reviewer': max(user_points, key=lambda x:x['given_points']),
'standings': user_points[:10], # top 10
'footer_message': get_footer_message(timespan).format(from_date, to_date),
},
with_container = 1
)
def get_footer_message(timespan):
if timespan == 'Monthly':
return _("Stats based on last month's performance (from {0} to {1})")
else:
return _("Stats based on last week's performance (from {0} to {1})")
|
|
#!/usr/bin/python
# Takes a list of files as its arguments. These are the JSON data files
# from a Twitter archive. So, for example:
# python TwitterBookImages.py ~/TwitterArchive/data/js/tweets/*.js
# A very early version of this was based on:
# http://www.leancrew.com/all-this/2013/01/completing-my-twitter-archive/
# but any errors are mine (@amac)
from datetime import datetime, timedelta
import sys
import json
import urllib
import re
import os.path
import Image, ImageDraw, ImageFont, ImageEnhance
import textwrap
# Constants
# A ton, I don't generally use this many but was lazy
# Where to put the output files (a directory name without trailing slash)
OUTPUT_DIRECTORY = 'output'
# Number of hours to correct the date
# I used 8 because mostly my tweets were in SF
# I couldn't see a time zone and didn't geotag my tweets
DATE_CORRECTION = -8
# This is a font from Google that I liked
FONT = 'OpenSans-Regular.ttf'
# Some colours
# White for the background and text background
BACKGROUND_COLOUR = (255,255,255,255)
TEXT_BACKGROUND_COLOUR = BACKGROUND_COLOUR
# Black for the text
TEXT_COLOUR = (0,0,0,255)
DATE_TEXT_COLOUR = TEXT_COLOUR
# These are numbers and ratios I used for a 7x7 blurb.com book
# If doing a text only page (no image) I used 4096x4096 which makes a ~500K image
TEXT_ONLY_PIXEL_SIZE = 4096
# Below a certain size an image is too small to print well
# These are used in the program logic to test whether the size is too small
# and, if it is, to create a new image at the larger size
MINIMUM_PIXEL_SIZE = 1364
WITH_MEDIA_PIXEL_SIZE = 1400
# Constants to figure out font size and padding
FONT_SIZE_CONSTANT = 0.046875
NO_MEDIA_TEXT_SIZE_MULTIPLE = 1.6
DATE_SIZE_MULTIPLE = .5
PADDING_CONSTANT = 63
# Corresponding character counts to use with linewrap
MEDIA_TWEET_CHAR_LENGTH = 36
NO_MEDIA_TWEET_CHAR_LENGTH = 20
# this cleans up the Tweet text
# this is probably a subset of what should be done,
# but can be expanded if necessary
def process_tweet_text(text=''):
#remove links
text=re.sub(r"http\S+", "", text)
#change ampersand
text=re.sub(r"\&\;", "&", text)
#removing leading spaces
text=re.sub(r"^ ", "", text)
return (text)
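# Illustrative example of process_tweet_text (assumed input; output is approximate):
#   process_tweet_text("Check this out http://t.co/abc123 &amp; more")
#   would return roughly "Check this out  & more" (link stripped, "&amp;" decoded)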
# given a picture file, date, tweet text and output file name
# add a text overlay to the picture and save a new output file
#
# if no input file is given or the file is not openable,
# create a text only image for the tweet text
def add_tweet(in_file='', #this will be '.jpg' if none otherwise will be an image
date='', #the date expressed in text in the format for adding to the image
text='', #the tweet text
out_file='test.jpg', # the output filename
padding=50, # the padding to use around the text
opacity=0.5): # the opacity for the text box
# load the input image or create one
    # an in_file of '.jpg' (i.e. no base filename) signals that the tweet had
    # no media; a tiny file size is treated the same way
    # the variable "media" is set to 1 if there is a picture
if (in_file == '.jpg') or (os.path.getsize(in_file) < 100):
print "Text Only: "+out_file
img = Image.new("RGB", (TEXT_ONLY_PIXEL_SIZE, 1), BACKGROUND_COLOUR)
media=0
else:
media=1
try:
img = Image.open(in_file).convert('RGB')
except:
print "Media didn't open for "+in_file
print "Text Only: "+out_file
img = Image.new("RGB", (TEXT_ONLY_PIXEL_SIZE, 1), BACKGROUND_COLOUR)
media=0
# figure out whether it is a vertical or horizontal image
# use "vertical" as a boolean for it
# 1) make the image square based on the longest side
# 2) make sure it is bigger than the MINIMUM_PIXEL_SIZE
# 3) if it isn't make a new one that is WITH_MEDIA_PIXEL_SIZE big
# and place the old one in the new one with a border
vertical=0
# check which side is longer (width is [0])
if (img.size[0]<img.size[1]):
vertical=1
# make sure the bigger side is bigger than MINIMUM_PIXEL_SIZE
# if so, create a new square image and center the old one inside
# if not, create new bigger image and center the old one inside
if (img.size[1]>MINIMUM_PIXEL_SIZE):
new_img = Image.new("RGB", (img.size[1], img.size[1]), BACKGROUND_COLOUR)
new_img.paste(img, (((img.size[1]-img.size[0])/2),0))
else:
new_img = Image.new("RGB", (WITH_MEDIA_PIXEL_SIZE, WITH_MEDIA_PIXEL_SIZE), BACKGROUND_COLOUR)
new_img.paste(img, (((WITH_MEDIA_PIXEL_SIZE-img.size[0])/2),((WITH_MEDIA_PIXEL_SIZE-img.size[1])/2)))
img=new_img
else:
vertical=0
# make sure the bigger side is bigger than MINIMUM_PIXEL_SIZE
# if so, create a new square image and center the old one inside
# if not, create new bigger image and center the old one inside
if (img.size[0]>MINIMUM_PIXEL_SIZE):
new_img = Image.new("RGB", (img.size[0], img.size[0]), BACKGROUND_COLOUR)
new_img.paste(img, (0,0))
else:
new_img = Image.new("RGB", (WITH_MEDIA_PIXEL_SIZE, WITH_MEDIA_PIXEL_SIZE), BACKGROUND_COLOUR)
new_img.paste(img, (((WITH_MEDIA_PIXEL_SIZE-img.size[0])/2),((WITH_MEDIA_PIXEL_SIZE-img.size[1])/2)))
img=new_img
# The text will be overlaid with a level of transparency.
# that happens by:
    # 1) creating a new image with a transparent background
# 2) figuring out how much space the text will take
# 3) creating a translucent background for that text box
# 4) adding the text and date
# 5) putting the image with the text box over the image created above
# 6) write the resulting file
    # 1) create a new image with a transparent background
rect = Image.new('RGBA', (img.size[0],img.size[1]), (0,0,0,0))
# 2) figure out how much space the text will take
# calculate some variables to use for font size and padding
font_size = int((FONT_SIZE_CONSTANT)*(img.size[0]))
padding = int (padding*font_size/PADDING_CONSTANT)
#change this if you want vertical padding to be different
vert_padding = padding
# if this is a tweet with media, text should be small
# otherwise bigger
# NB there is a much more scientific way to do this
# but this seemed easier at the time
# the better way would be to use normal_font.getsize(text) to
# get the exact width and height for a particular text
if media:
lines = textwrap.wrap(text,MEDIA_TWEET_CHAR_LENGTH)
else:
lines = textwrap.wrap(text,NO_MEDIA_TWEET_CHAR_LENGTH)
font_size = int(font_size*NO_MEDIA_TEXT_SIZE_MULTIPLE)
# Load the fonts for the tweet text and date (small_font)
normal_font = ImageFont.truetype(FONT, font_size)
small_font = ImageFont.truetype(FONT, int(font_size*DATE_SIZE_MULTIPLE))
# Calculate beginning height of text box
# if media then the box should be as low as possible, given padding
# if not then should be centered
if media: rect_start=rect.size[1]-((vert_padding*1.5)+((len(lines)+DATE_SIZE_MULTIPLE)*font_size))
else: rect_start=(rect.size[1]/2)-((((len(lines)+DATE_SIZE_MULTIPLE)*font_size))/2)
# 3) create a translucent background for the text box
# Draw the text box rectangle
draw = ImageDraw.Draw(rect)
draw.rectangle((0,rect_start,rect.size[0],rect.size[1]), fill=TEXT_BACKGROUND_COLOUR)
# Make it translucent
alpha = rect.split()[3]
alpha = ImageEnhance.Brightness(alpha).enhance(opacity)
rect.putalpha(alpha)
    # 4) add the text & date
# this version puts the date immediately above the tweet text
# offset is the place to write text
    # start the text a bit below the beginning of the text box
offset = rect_start+(vert_padding*.1)
# write the date as Day of the Week, Month Day, Year
draw.text((padding, offset), date.strftime("%A, %B %d, %Y"), font=small_font, fill=DATE_TEXT_COLOUR)
#move the offset down by the size of the date text
offset += int(font_size*DATE_SIZE_MULTIPLE)
# for each line in all the lines of the tweet
# write the tweet text and increment the offset
for line in lines:
draw.text((padding, offset),
line, font=normal_font, fill=TEXT_COLOUR)
offset += font_size
# 5) put the image with the text box over the image created above
# 6) write the resulting file
Image.composite(rect, img, rect).save(OUTPUT_DIRECTORY+'/'+out_file+'.jpg', 'JPEG')
# Main body of the program
# The argument is the list of JSON files
# Count is used to ensure unique filenames and count the number of tweets
count=0
# Loop through each json file provided as an argument
for json_file in sys.argv[1:]:
print "JSON File: "+json_file
# open the file, get rid of the first line, and load its json as data
with open(json_file) as data_file:
#get rid of the first line
data_file.readline()
#read the rest in as json
data = json.load(data_file)
# for each tweet in the resulting data structure
for tweet in data:
count = count +1
# grab the "created_at", strip out the colons for the file name
# and add the count to the end
# NB that this is a little wrong now because I realized that it would
# be better to call the date the date minus 8hrs
# If I edit this, fix that.
filename=re.sub(r"\:", "-", tweet['created_at'])+str(count)
# the date below is used for the date to be put on the pic
date = datetime.strptime(tweet['created_at'][0:16], "%Y-%m-%d %H:%M") + timedelta(hours=DATE_CORRECTION)
# image_name is going to be used for the name of the image
# associated with the tweet
# If it is nothing, that will signify no media with the tweet
image_name = ''
# Loop through the media entities in the tweet data structure
if 'entities' in tweet:
entities = tweet['entities']
if 'media' in entities:
# for each bit of media, grab its url through regex matching
for media in entities['media']:
matched = re.search('media\/(.+?)\.jpg', media['media_url'])
image_name = matched.group(1)
print "IMAGE FOUND:"+image_name
# retrieve the image if it isn't already local
# I believe the ":large" is the largest size twitter returns
# Also, Twitter uses URL obscurity for security, so you can
# retrieve images from protected accounts
if not os.path.isfile(image_name+'.jpg'):
# would be great to have error handling here, but lazy
# also Twitter has an unsupported "feature" that allows original image
# size retrievals with :orig
# although, for my images I didn't see a difference
# between this and :large
urllib.urlretrieve(media['media_url']+':orig', image_name+'.jpg')
#Add the tweet text to the image
#or create a new image with just the tweet text
add_tweet(image_name+'.jpg', date, process_tweet_text(tweet['text']) , filename, 75, .5)
|
|
#!/usr/bin/env python
#
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Standard setup script.
"""
import sys
import os
import glob
from distutils.core import setup, Command
from buildbot import version
from distutils.command.install_data import install_data
from distutils.command.sdist import sdist
def include(d, e):
"""Generate a pair of (directory, file-list) for installation.
'd' -- A directory
'e' -- A glob pattern"""
return (d, [f for f in glob.glob('%s/%s'%(d, e)) if os.path.isfile(f)])
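# Illustrative use (assumes the files exist in a checkout):
#   include("buildbot/scripts", "*.cfg")
#   -> ("buildbot/scripts", ["buildbot/scripts/sample.cfg"])
# which is the shape expected by the data_files list further down.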
class _SetupBuildCommand(Command):
"""
Master setup build command to subclass from.
"""
user_options = []
def initialize_options(self):
"""
Setup the current dir.
"""
self._dir = os.getcwd()
def finalize_options(self):
"""
Required.
"""
pass
class TestCommand(_SetupBuildCommand):
"""
Executes tests from setup.
"""
description = "Run unittests inline"
def run(self):
"""
Public run method.
"""
self._run(os.path.normpath(os.path.abspath(
os.path.join('buildbot', 'test'))))
def _run(self, test_loc):
"""
Executes the test step.
@param test_loc: location of test module
@type test_loc: str
"""
from twisted.scripts.trial import run
# remove the 'test' option from argv
sys.argv.remove('test')
        # Mimic the trial script by adding the path as the last arg
sys.argv.append(test_loc)
# Add the current dir to path and pull it all together
sys.path.insert(0, os.path.curdir)
sys.path[:] = map(os.path.abspath, sys.path)
# GO!
run()
class SdistTestCommand(TestCommand):
"""
Runs unittests from the sdist output.
"""
description = "Run unittests from inside an sdist distribution"
def run(self):
"""
        Interesting magic to build a source dist and run trial on it.
NOTE: there is magic going on here! If you know a better way feel
free to update it.
"""
# Clean out dist/
if os.path.exists('dist'):
for root, dirs, files in os.walk('dist', topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
# Import setup making it as if we ran setup.py with the sdist arg
sys.argv.append('sdist')
import setup #@Reimport @UnresolvedImport @UnusedImport
try:
# attempt to extract the sdist data
from gzip import GzipFile
from tarfile import TarFile
# We open up the gzip as well as using the first item as the sdist
gz = GzipFile(os.path.join('dist', os.listdir('dist')[0]))
tf = TarFile(fileobj=gz)
# Make the output dir and generate the extract path
os.mkdir(os.path.join('dist', 'sdist_test'))
ex_path = os.path.join('dist', 'sdist_test',
tf.getmembers()[0].name, 'buildbot', 'test')
# Extract the data and run tests
print "Extracting to %s" % ex_path
tf.extractall(os.path.join('dist', 'sdist_test'))
print "Executing tests ..."
self._run(os.path.normpath(os.path.abspath(ex_path)))
except IndexError, ie:
# We get called twice and the IndexError is OK
pass
class install_data_twisted(install_data):
"""make sure data files are installed in package.
this is evil.
copied from Twisted/setup.py.
"""
def finalize_options(self):
self.set_undefined_options('install',
('install_lib', 'install_dir'),
)
install_data.finalize_options(self)
def run(self):
install_data.run(self)
# ensure there's a buildbot/VERSION file
fn = os.path.join(self.install_dir, 'buildbot', 'VERSION')
open(fn, 'w').write(version)
self.outfiles.append(fn)
class our_sdist(sdist):
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# ensure there's a buildbot/VERSION file
fn = os.path.join(base_dir, 'buildbot', 'VERSION')
open(fn, 'w').write(version)
long_description="""
The BuildBot is a system to automate the compile/test cycle required by
most software projects to validate code changes. By automatically
rebuilding and testing the tree each time something has changed, build
problems are pinpointed quickly, before other developers are
inconvenienced by the failure. The guilty developer can be identified
and harassed without human intervention. By running the builds on a
variety of platforms, developers who do not have the facilities to test
their changes everywhere before checkin will at least know shortly
afterwards whether they have broken the build or not. Warning counts,
lint checks, image size, compile time, and other build parameters can
be tracked over time, are more visible, and are therefore easier to
improve.
"""
scripts = ["bin/buildbot"]
# sdist is usually run on a non-Windows platform, but the buildslave.bat file
# still needs to get packaged.
if 'sdist' in sys.argv or sys.platform == 'win32':
scripts.append("contrib/windows/buildbot.bat")
scripts.append("contrib/windows/buildbot_service.py")
setup_args = {
'name': "buildbot",
'version': version,
'description': "BuildBot build automation system",
'long_description': long_description,
'author': "Brian Warner",
'author_email': "[email protected]",
'maintainer': "Dustin J. Mitchell",
'maintainer_email': "[email protected]",
'url': "http://buildbot.net/",
'license': "GNU GPL",
'classifiers': [
'Development Status :: 5 - Production/Stable',
'Environment :: No Input/Output (Daemon)',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Testing',
],
'packages': ["buildbot",
"buildbot.status", "buildbot.status.web","buildbot.status.web.hooks",
"buildbot.changes",
"buildbot.steps",
"buildbot.steps.package",
"buildbot.steps.package.rpm",
"buildbot.process",
"buildbot.clients",
"buildbot.monkeypatches",
"buildbot.schedulers",
"buildbot.scripts",
"buildbot.db",
"buildbot.db.migrate.versions",
"buildbot.util",
"buildbot.test",
"buildbot.test.fake",
"buildbot.test.unit",
"buildbot.test.util",
"buildbot.test.regressions",
],
'data_files': [
("buildbot", [
"buildbot/buildbot.png",
]),
("buildbot/db/migrate", [
"buildbot/db/migrate/migrate.cfg",
]),
include("buildbot/db/migrate/versions", "*.py"),
("buildbot/clients", [
"buildbot/clients/debug.glade",
]),
("buildbot/status/web/files", [
"buildbot/status/web/files/default.css",
"buildbot/status/web/files/bg_gradient.jpg",
"buildbot/status/web/files/robots.txt",
"buildbot/status/web/files/favicon.ico",
]),
include("buildbot/status/web/templates", '*.html'),
include("buildbot/status/web/templates", '*.xml'),
("buildbot/scripts", [
"buildbot/scripts/sample.cfg",
]),
],
'scripts': scripts,
'cmdclass': {'install_data': install_data_twisted,
'test': TestCommand,
'sdist_test': SdistTestCommand,
'sdist': our_sdist},
}
# set zip_safe to false to force Windows installs to always unpack eggs
# into directories, which seems to work better --
# see http://buildbot.net/trac/ticket/907
if sys.platform == "win32":
setup_args['zip_safe'] = False
py_25 = sys.version_info[0] > 2 or (sys.version_info[0] == 2 and sys.version_info[1] >= 5)
py_26 = sys.version_info[0] > 2 or (sys.version_info[0] == 2 and sys.version_info[1] >= 6)
try:
# If setuptools is installed, then we'll add setuptools-specific arguments
# to the setup args.
import setuptools #@UnusedImport
except ImportError:
pass
else:
## dependencies
setup_args['install_requires'] = [
'twisted >= 8.0.0',
'Jinja2 >= 2.1',
'sqlalchemy >= 0.6',
# buildbot depends on sqlalchemy internals. See buildbot.db.model.
'sqlalchemy-migrate == 0.6',
]
# Python-2.6 and up includes json
if not py_26:
setup_args['install_requires'].append('simplejson')
    # Python-2.6 and up includes a working sqlite (py25's is broken)
if not py_26:
setup_args['install_requires'].append('pysqlite')
if os.getenv('NO_INSTALL_REQS'):
setup_args['install_requires'] = None
setup(**setup_args)
# Local Variables:
# fill-column: 71
# End:
|
|
import unittest
from mock import patch, Mock
import os
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
import ConfigParser
from sqla_taskq import command
from sqla_taskq.models import (
DBSession,
Base,
Task,
)
import sqla_taskq.models as models
import transaction
import multiprocessing
DB_NAME = 'test_sqla_taskq.db'
DB_URL = 'sqlite:///%s' % DB_NAME
def func2lock(*args, **kw):
engine = create_engine(DB_URL)
models.engine = engine
DBSession.configure(bind=engine)
idtask = command.lock_task(models)
return idtask
def func4test(*args, **kw):
return 'test'
class TestSignal(unittest.TestCase):
def test_sigterm_handler(self):
self.assertEqual(command.loop, True)
command.sigterm_handler(666, None)
self.assertEqual(command.loop, False)
command.loop = True
def test_sigterm_kill_handler(self):
self.assertEqual(command.loop, True)
try:
command.sigterm_kill_handler(666, None)
assert(False) # pragma: no cover
except SystemExit, e:
self.assertEqual(str(e), '0')
self.assertEqual(command.loop, False)
finally:
command.loop = True
class TestCommand(unittest.TestCase):
def setUp(self):
engine = create_engine(DB_URL)
models.engine = engine
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
def tearDown(self):
transaction.abort()
if os.path.exists(DB_NAME):
os.remove(DB_NAME)
def test__lock_task(self):
Task.create(func2lock)
connection = models.engine.connect()
idtask = command._lock_task(connection, models)
self.assertEqual(idtask, 1)
task = models.Task.query.get(1)
self.assertEqual(task.status, models.TASK_STATUS_IN_PROGRESS)
self.assertTrue(task.pid)
self.assertTrue(task.lock_date)
idtask = command._lock_task(connection, models)
self.assertEqual(idtask, None)
Task.create(func2lock, unique_key='mykey')
idtask = command._lock_task(connection, models)
self.assertEqual(idtask, 2)
Task.create(func2lock, unique_key='mykey')
        # Will not lock this new task since it has the same unique key as the
        # previous one, which is not finished
idtask = command._lock_task(connection, models)
self.assertEqual(idtask, None)
def test_lock_task(self):
for i in range(4):
Task.create(func2lock)
pool = multiprocessing.Pool(processes=4)
res = pool.map(func2lock, [{}, {}, {}, {}])
res.sort()
self.assertEqual(res, [1, 2, 3, 4])
rows = models.Task.query.filter_by(pid=None).all()
self.assertEqual(len(rows), 0)
class Err(OperationalError):
def __init__(self):
pass
def f(*args, **kw):
raise Err()
with patch('sqla_taskq.command._lock_task', side_effect=f):
# Don't fail on sqla error
idtask = func2lock()
self.assertEqual(idtask, None)
def test__run(self):
res = command._run(models)
self.assertEqual(res, False)
Task.create(func4test)
res = command._run(models)
self.assertEqual(res, True)
task = Task.query.get(1)
self.assertEqual(task.status, models.TASK_STATUS_FINISHED)
self.assertTrue(task.pid)
self.assertEqual(task.result, 'test')
def test_run(self):
command.loop = False
res = command.run(models)
self.assertEqual(res, None)
Task.create(func4test)
res = command.run(models, kill=True)
self.assertEqual(res, None)
command.loop = True
def f(*args, **kw):
command.loop = False
with patch('sqla_taskq.command._run', side_effect=f):
res = command.run(models, kill=True)
self.assertEqual(res, None)
command.loop = True
def test_parse_config_file(self):
config = ConfigParser.RawConfigParser()
with patch('ConfigParser.ConfigParser', return_value=config):
res = command.parse_config_file('/fake')
self.assertEqual(res, None)
# Make sure loggers are loaded
with patch('logging.config.fileConfig', return_value=None) as m:
config.add_section('loggers')
config.set('loggers', 'key', 'value')
res = command.parse_config_file('/fake')
self.assertEqual(res, None)
m.assert_called_with('/fake')
# No option in sqla_taskq section, we get the default
config.add_section('sqla_taskq')
res = command.parse_config_file('/fake')
expected = {
'kill': False,
'timeout': 60,
}
self.assertEqual(res, expected)
config.set('sqla_taskq', 'kill', 'true')
config.set('sqla_taskq', 'timeout', '5')
config.set('sqla_taskq', 'sqla_url', '//my_url')
res = command.parse_config_file('/fake')
expected = {
'kill': True,
'timeout': 5,
'sqla_url': '//my_url',
}
self.assertEqual(res, expected)
def test_parse_options(self):
res = command.parse_options([])
expected = {
'kill': False,
'sqla_url': None,
'config_filename': None,
}
self.assertEqual(res, expected)
res = command.parse_options([], parse_timeout=True)
expected = {
'kill': False,
'sqla_url': None,
'config_filename': None,
'timeout': 60,
}
self.assertEqual(res, expected)
options = ['-k', '-t', '90', '-u', 'sqlite://fake.db']
res = command.parse_options(options, parse_timeout=True)
expected = {
'kill': True,
'sqla_url': 'sqlite://fake.db',
'config_filename': None,
'timeout': 90,
}
self.assertEqual(res, expected)
options = ['-k', '-t', '90',
'-u', 'sqlite://fake.db',
'-c', 'fake.ini']
res = command.parse_options(options, parse_timeout=True)
expected = {
'kill': True,
'sqla_url': 'sqlite://fake.db',
'config_filename': 'fake.ini',
'timeout': 90,
}
self.assertEqual(res, expected)
config = ConfigParser.RawConfigParser()
with patch('ConfigParser.ConfigParser', return_value=config):
config.add_section('sqla_taskq')
config.set('sqla_taskq', 'timeout', '5')
res = command.parse_config_file('/fake')
expected = {
'kill': False,
'timeout': 5,
}
self.assertEqual(res, expected)
|
|
# Copyright (c) 2011 Sam Rushing
#
# key.py - OpenSSL wrapper
#
# This file is modified from python-korelib.
#
"""ECC secp256k1 crypto routines
WARNING: This module does not mlock() secrets; your private keys may end up on
disk in swap! Use with caution!
"""
import ctypes
import ctypes.util
import hashlib
import sys
ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32')
ssl.BN_new.restype = ctypes.c_void_p
ssl.BN_new.argtypes = []
ssl.BN_bin2bn.restype = ctypes.c_void_p
ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p]
ssl.BN_CTX_free.restype = None
ssl.BN_CTX_free.argtypes = [ctypes.c_void_p]
ssl.BN_CTX_new.restype = ctypes.c_void_p
ssl.BN_CTX_new.argtypes = []
ssl.ECDH_compute_key.restype = ctypes.c_int
ssl.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
ssl.ECDSA_sign.restype = ctypes.c_int
ssl.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
ssl.ECDSA_verify.restype = ctypes.c_int
ssl.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
ssl.EC_KEY_free.restype = None
ssl.EC_KEY_free.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
ssl.EC_KEY_get0_group.restype = ctypes.c_void_p
ssl.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_get0_public_key.restype = ctypes.c_void_p
ssl.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_set_private_key.restype = ctypes.c_int
ssl.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.EC_KEY_set_conv_form.restype = None
ssl.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, ctypes.c_int]
ssl.EC_KEY_set_public_key.restype = ctypes.c_int
ssl.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.i2o_ECPublicKey.restype = ctypes.c_void_p
ssl.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.EC_POINT_new.restype = ctypes.c_void_p
ssl.EC_POINT_new.argtypes = [ctypes.c_void_p]
ssl.EC_POINT_free.restype = None
ssl.EC_POINT_free.argtypes = [ctypes.c_void_p]
ssl.EC_POINT_mul.restype = ctypes.c_int
ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
# this specifies the curve used with ECDSA.
NID_secp256k1 = 714 # from openssl/obj_mac.h
# Thx to Sam Devlin for the ctypes magic 64-bit fix.
def _check_result(val, func, args):
if val == 0:
raise ValueError
else:
return ctypes.c_void_p (val)
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
class CECKey(object):
"""Wrapper around OpenSSL's EC_KEY"""
POINT_CONVERSION_COMPRESSED = 2
POINT_CONVERSION_UNCOMPRESSED = 4
def __init__(self):
self.k = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
def __del__(self):
if ssl:
ssl.EC_KEY_free(self.k)
self.k = None
def set_secretbytes(self, secret):
priv_key = ssl.BN_bin2bn(secret, 32, ssl.BN_new())
group = ssl.EC_KEY_get0_group(self.k)
pub_key = ssl.EC_POINT_new(group)
ctx = ssl.BN_CTX_new()
if not ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
raise ValueError("Could not derive public key from the supplied secret.")
ssl.EC_KEY_set_private_key(self.k, priv_key)
ssl.EC_KEY_set_public_key(self.k, pub_key)
ssl.EC_POINT_free(pub_key)
ssl.BN_CTX_free(ctx)
return self.k
def set_privkey(self, key):
self.mb = ctypes.create_string_buffer(key)
return ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
def set_pubkey(self, key):
self.mb = ctypes.create_string_buffer(key)
return ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
def get_privkey(self):
size = ssl.i2d_ECPrivateKey(self.k, 0)
mb_pri = ctypes.create_string_buffer(size)
ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
return mb_pri.raw
def get_pubkey(self):
size = ssl.i2o_ECPublicKey(self.k, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
return mb.raw
def get_raw_ecdh_key(self, other_pubkey):
ecdh_keybuffer = ctypes.create_string_buffer(32)
r = ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
ssl.EC_KEY_get0_public_key(other_pubkey.k),
self.k, 0)
if r != 32:
raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
return ecdh_keybuffer.raw
def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
# FIXME: be warned it's not clear what the kdf should be as a default
r = self.get_raw_ecdh_key(other_pubkey)
return kdf(r)
def sign(self, hash):
# FIXME: need unit tests for below cases
if not isinstance(hash, bytes):
raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
if len(hash) != 32:
raise ValueError('Hash must be exactly 32 bytes long')
sig_size0 = ctypes.c_uint32()
sig_size0.value = ssl.ECDSA_size(self.k)
mb_sig = ctypes.create_string_buffer(sig_size0.value)
result = ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
assert 1 == result
return mb_sig.raw[:sig_size0.value]
def verify(self, hash, sig):
"""Verify a DER signature"""
return ssl.ECDSA_verify(0, hash, len(hash), sig, len(sig), self.k) == 1
def set_compressed(self, compressed):
if compressed:
form = self.POINT_CONVERSION_COMPRESSED
else:
form = self.POINT_CONVERSION_UNCOMPRESSED
ssl.EC_KEY_set_conv_form(self.k, form)
class CPubKey(bytes):
"""An encapsulated public key
Attributes:
is_valid - Corresponds to CPubKey.IsValid()
is_fullyvalid - Corresponds to CPubKey.IsFullyValid()
is_compressed - Corresponds to CPubKey.IsCompressed()
"""
def __new__(cls, buf, _cec_key=None):
self = super(CPubKey, cls).__new__(cls, buf)
if _cec_key is None:
_cec_key = CECKey()
self._cec_key = _cec_key
self.is_fullyvalid = _cec_key.set_pubkey(self) != 0
return self
@property
def is_valid(self):
return len(self) > 0
@property
def is_compressed(self):
return len(self) == 33
def verify(self, hash, sig):
return self._cec_key.verify(hash, sig)
def __str__(self):
return repr(self)
def __repr__(self):
        # Always represent as b'<secret>' so test cases don't have to
# change for py2/3
if sys.version > '3':
return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
else:
return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
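if __name__ == "__main__":
    # Minimal sign/verify sketch, illustrative only: it assumes the OpenSSL
    # build loaded above exposes the wrapped EC/ECDSA symbols (OpenSSL 1.0.x)
    # and uses a throwaway, hard-coded 32-byte secret.
    key = CECKey()
    key.set_secretbytes(b'\x01' * 32)
    key.set_compressed(True)
    digest = hashlib.sha256(b'hello world').digest()
    signature = key.sign(digest)
    print(CPubKey(key.get_pubkey()).verify(digest, signature))  # expected: True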
|
|
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import imath
import IECore
import IECoreImage
class ImagePrimitiveTest( unittest.TestCase ) :
def testConstructor( self ) :
""" Test IECoreImage.ImagePrimitive constructor """
windowMin = imath.V2i( 0, 0 )
windowMax = imath.V2i( 100, 100 )
w = imath.Box2i( windowMin, windowMax )
i = IECoreImage.ImagePrimitive( w, w )
self.assertEqual( i.dataWindow, w )
self.assertEqual( i.displayWindow, w )
i.dataWindow = imath.Box2i( windowMin, imath.V2i( 10, 10 ) )
self.assertEqual( i.dataWindow, imath.Box2i( windowMin, imath.V2i( 10, 10 ) ) )
self.assertEqual( i.displayWindow, w )
i.displayWindow = imath.Box2i( windowMin, imath.V2i( 10, 10 ) )
self.assertEqual( i.displayWindow, imath.Box2i( windowMin, imath.V2i( 10, 10 ) ) )
def testDataWindow( self ) :
""" Test IECoreImage.ImagePrimitive data window """
displayWindow = imath.Box2i( imath.V2i( 0, 0 ), imath.V2i( 99, 99 ) )
dataWindow = imath.Box2i( imath.V2i( 50, 50 ), imath.V2i( 99, 99 ) )
img = IECoreImage.ImagePrimitive( dataWindow, displayWindow )
dataWindowArea = 50 * 50
img["R"] = IECore.FloatVectorData( dataWindowArea )
img["G"] = IECore.FloatVectorData( dataWindowArea )
img["B"] = IECore.FloatVectorData( dataWindowArea )
self.assertTrue( img.channelsValid() )
# \todo Verify behaviour when dataWindow and displayWindow are contradictory or inconsistent
def testLoadSave( self ) :
""" Test IECoreImage.ImagePrimitive load/save """
windowMin = imath.V2i( 0, 0 )
windowMax = imath.V2i( 100, 100 )
w = imath.Box2i( windowMin, windowMax )
i = IECoreImage.ImagePrimitive( w, w )
i["R"] = IECore.FloatVectorData( 101 * 101 )
i["G"] = IECore.FloatVectorData( 101 * 101 )
i["B"] = IECore.FloatVectorData( 101 * 101 )
self.assertTrue( i.channelsValid() )
IECore.Writer.create( i, "test/IECore/data/output.cob" ).write()
i2 = IECore.Reader.create( "test/IECore/data/output.cob" ).read()
self.assertEqual( type( i2 ), IECoreImage.ImagePrimitive )
self.assertEqual( i.displayWindow, i2.displayWindow )
self.assertEqual( i.dataWindow, i2.dataWindow )
self.assertEqual( i.channelNames(), i2.channelNames() )
self.assertTrue( i2.channelsValid() )
def testChannelNames( self ) :
""" Test IECoreImage.ImagePrimitive channel names """
windowMin = imath.V2i( 0, 0 )
windowMax = imath.V2i( 99, 99 )
w = imath.Box2i( windowMin, windowMax )
i = IECoreImage.ImagePrimitive( w, w )
r = IECore.FloatVectorData()
r.resize( 100 * 100 )
i["R"] = r
self.assertTrue( "R" in i.channelNames() )
self.assertTrue( i.channelsValid() )
b = IECore.FloatData()
i["B"] = b
self.failIf( "B" in i.channelNames() )
self.failIf( i.channelsValid() )
# Deliberately make a primvar too small!
g = IECore.FloatVectorData()
g.resize( 50 * 100 )
i["G"] = g
self.failIf( "G" in i.channelNames() )
self.failIf( i.channelsValid() )
i["B"] = i["R"]
i["G"].resize( 100 * 100 )
self.assertTrue( "R" in i.channelNames() )
self.assertTrue( "G" in i.channelNames() )
self.assertTrue( "B" in i.channelNames() )
self.assertTrue( i.channelsValid() )
def testCreateChannel( self ) :
windowMin = imath.V2i( 0, 0 )
windowMax = imath.V2i( 99, 99 )
w = imath.Box2i( windowMin, windowMax )
i = IECoreImage.ImagePrimitive( w, w )
i.createFloatChannel( "R" )
i.createHalfChannel( "G" )
i.createUIntChannel( "B" )
self.assertTrue( "R" in i )
self.assertTrue( "G" in i )
self.assertTrue( "B" in i )
def testErrors( self ) :
windowMin = imath.V2i( 0, 0 )
windowMax = imath.V2i( 99, 99 )
w = imath.Box2i( windowMin, windowMax )
i = IECoreImage.ImagePrimitive( w, w )
empty = imath.Box2i()
self.assertRaises( RuntimeError, setattr, i, "displayWindow", empty )
self.assertRaises( RuntimeError, IECoreImage.ImagePrimitive, empty, empty )
def testChannelValid( self ) :
b = imath.Box2i( imath.V2i( 0 ), imath.V2i( 9 ) )
i = IECoreImage.ImagePrimitive( b, b )
d = IECore.FloatVectorData( [1] )
i["Y"] = d
self.assertEqual( i.channelValid( d ), False )
self.assertEqual( i.channelValid( "Y" ), False )
self.assertEqual( i.getChannel( "Y" ), None )
t = i.channelValid( d, True )
self.assert_( isinstance( t, tuple ) )
self.assertEqual( t[0], False )
self.assert_( isinstance( t[1], str ) )
d.resize( 100 )
self.assertEqual( i.channelValid( d ), True )
self.assertEqual( i.channelValid( "Y" ), True )
self.assertTrue( d.isSame( i.getChannel( "Y" ) ) )
dd = IECore.FloatData( 1 )
self.assertEqual( i.channelValid( dd ), False )
i["PP"] = dd
self.assertEqual( i.channelValid( "PP" ), False )
self.assertEqual( i.getChannel( "PP" ), None )
def testConvenienceConstructors( self ) :
""" Test IECoreImage.ImagePrimitive convenience constructors """
window1Min = imath.V2i( 0, 0 )
window1Max = imath.V2i( 15, 15 )
w1 = imath.Box2i( window1Min, window1Max )
window2Min = imath.V2i( 4, 4 )
window2Max = imath.V2i( 11, 11 )
w2 = imath.Box2i( window2Min, window2Max )
fill = imath.Color3f( 0.49, 0.50, 0.51 )
i = IECoreImage.ImagePrimitive.createRGBFloat( fill, w1, w2 )
self.assert_( i.isInstanceOf( IECoreImage.ImagePrimitive.staticTypeId() ) )
self.assert_( "R" in i )
self.assert_( "G" in i )
self.assert_( "B" in i )
self.assert_( "Y" not in i )
self.assertEqual( i.dataWindow, w1 )
self.assertEqual( i.displayWindow, w2 )
self.assert_( i["R"].isInstanceOf( IECore.FloatVectorData.staticTypeId() ) )
self.assert_( i["G"].isInstanceOf( IECore.FloatVectorData.staticTypeId() ) )
self.assert_( i["B"].isInstanceOf( IECore.FloatVectorData.staticTypeId() ) )
self.assertEqual( i["R"].size(), 256 )
self.assertEqual( i["G"].size(), 256 )
self.assertEqual( i["B"].size(), 256 )
for p in (0, 63, 127, 255) :
self.assertEqual( i["R"][p], fill[0] )
self.assertEqual( i["G"][p], fill[1] )
self.assertEqual( i["B"][p], fill[2] )
fill = 0.5
i = IECoreImage.ImagePrimitive.createGreyscaleFloat( fill, w1, w2 )
self.assert_( i.isInstanceOf( IECoreImage.ImagePrimitive.staticTypeId() ) )
self.assert_( "R" not in i )
self.assert_( "G" not in i )
self.assert_( "B" not in i )
self.assert_( "Y" in i )
self.assertEqual( i.dataWindow, w1 )
self.assertEqual( i.displayWindow, w2 )
self.assert_( i["Y"].isInstanceOf( IECore.FloatVectorData.staticTypeId() ) )
self.assertEqual( i["Y"].size(), 256 )
for p in (0, 63, 127, 255) :
self.assertEqual( i["Y"][p], fill )
def testSpaces( self ) :
# one pixel image 0,0 -> 0,0
onePixelWindow = imath.Box2i( imath.V2i( 0 ), imath.V2i( 0 ) )
i = IECoreImage.ImagePrimitive( onePixelWindow, onePixelWindow )
m = i.pixelToObjectMatrix()
self.assertEqual( imath.V2f( 0 ) * m, imath.V2f( 0 ) )
m2 = i.objectToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.pixelToUVMatrix()
self.assertEqual( imath.V2f( 0 ) * m, imath.V2f( 0.5 ) )
m2 = i.uvToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.objectToUVMatrix()
self.assertEqual( imath.V2f( -0.5 ) * m, imath.V2f( 0, 1 ) )
self.assertEqual( imath.V2f( 0.5 ) * m, imath.V2f( 1, 0 ) )
m2 = i.uvToObjectMatrix()
self.assertEqual( m2, m.inverse() )
self.failUnless( (i.objectToUVMatrix() * i.uvToPixelMatrix()).equalWithAbsError( i.objectToPixelMatrix(), 0.00001 ) )
self.failUnless( (i.pixelToUVMatrix() * i.uvToObjectMatrix()).equalWithAbsError( i.pixelToObjectMatrix(), 0.00001 ) )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Pixel ), i.uvToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Object ), i.uvToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.UV ), i.pixelToUVMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.Object ), i.pixelToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.Pixel ), i.objectToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.UV ), i.objectToUVMatrix() )
# two pixel image 0,0 -> 1,1
twoPixelWindow = imath.Box2i( imath.V2i( 0 ), imath.V2i( 1 ) )
i = IECoreImage.ImagePrimitive( twoPixelWindow, twoPixelWindow )
m = i.pixelToObjectMatrix()
self.assertEqual( imath.V2f( 0 ) * m, imath.V2f( -0.5, 0.5 ) )
self.assertEqual( imath.V2f( 1 ) * m, imath.V2f( 0.5, -0.5 ) )
m2 = i.objectToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.pixelToUVMatrix()
self.assertEqual( imath.V2f( 0 ) * m, imath.V2f( 0.25 ) )
self.assertEqual( imath.V2f( 1 ) * m, imath.V2f( 0.75 ) )
m2 = i.uvToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.objectToUVMatrix()
self.assertEqual( imath.V2f( -1 ) * m, imath.V2f( 0, 1 ) )
self.assertEqual( imath.V2f( 1 ) * m, imath.V2f( 1, 0 ) )
m2 = i.uvToObjectMatrix()
self.assertEqual( m2, m.inverse() )
self.failUnless( (i.objectToUVMatrix() * i.uvToPixelMatrix()).equalWithAbsError( i.objectToPixelMatrix(), 0.00001 ) )
self.failUnless( (i.pixelToUVMatrix() * i.uvToObjectMatrix()).equalWithAbsError( i.pixelToObjectMatrix(), 0.00001 ) )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Pixel ), i.uvToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Object ), i.uvToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.UV ), i.pixelToUVMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.Object ), i.pixelToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.Pixel ), i.objectToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.UV ), i.objectToUVMatrix() )
# three by two pixel image 10,20 -> 12,21
threeTwoPixelWindowOffset = imath.Box2i( imath.V2i( 10, 20 ), imath.V2i( 12, 21 ) )
i = IECoreImage.ImagePrimitive( threeTwoPixelWindowOffset, threeTwoPixelWindowOffset )
m = i.pixelToObjectMatrix()
self.assertEqual( imath.V2f( 10, 20 ) * m, imath.V2f( -1, 0.5 ) )
self.assertEqual( imath.V2f( 12, 21 ) * m, imath.V2f( 1, -0.5 ) )
m2 = i.objectToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.pixelToUVMatrix()
self.failUnless( (imath.V2f( 10, 20 ) * m).equalWithAbsError( imath.V2f( 1 / 6.0, 1 / 4.0 ), 0.00001 ) )
self.failUnless( (imath.V2f( 12, 21 ) * m).equalWithAbsError( imath.V2f( 5 / 6.0, 3 / 4.0 ), 0.00001 ) )
m2 = i.uvToPixelMatrix()
self.assertEqual( m2, m.inverse() )
m = i.objectToUVMatrix()
self.assertEqual( imath.V2f( -1.5, -1 ) * m, imath.V2f( 0, 1 ) )
self.assertEqual( imath.V2f( 1.5, 1 ) * m, imath.V2f( 1, 0 ) )
m2 = i.uvToObjectMatrix()
self.assertEqual( m2, m.inverse() )
self.failUnless( (i.objectToUVMatrix() * i.uvToPixelMatrix()).equalWithAbsError( i.objectToPixelMatrix(), 0.00001 ) )
self.failUnless( (i.pixelToUVMatrix() * i.uvToObjectMatrix()).equalWithAbsError( i.pixelToObjectMatrix(), 0.00001 ) )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Pixel ), i.uvToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.UV, IECoreImage.ImagePrimitive.Space.Object ), i.uvToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.UV ), i.pixelToUVMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Pixel, IECoreImage.ImagePrimitive.Space.Object ), i.pixelToObjectMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.Pixel ), i.objectToPixelMatrix() )
self.assertEqual( i.matrix( IECoreImage.ImagePrimitive.Space.Object, IECoreImage.ImagePrimitive.Space.UV ), i.objectToUVMatrix() )
def testHash( self ) :
w = imath.Box2i( imath.V2i( 0 ), imath.V2i( 10 ) )
i = IECoreImage.ImagePrimitive( w, w )
h = i.hash()
i.displayWindow = imath.Box2i( imath.V2i( 10 ), imath.V2i( 20 ) )
self.assertNotEqual( i.hash(), h )
h = i.hash()
i.dataWindow = imath.Box2i( imath.V2i( 10 ), imath.V2i( 20 ) )
self.assertNotEqual( i.hash(), h )
h = i.hash()
i["R"] = IECore.IntData( 10 )
self.assertNotEqual( i.hash(), h )
h = i.hash()
i["R"] = IECore.FloatData( 10 )
self.assertNotEqual( i.hash(), h )
h = i.hash()
i["G"] = IECore.IntData( 10 )
self.assertNotEqual( i.hash(), h )
h = i.hash()
def tearDown( self ) :
if os.path.exists( "test/IECore/data/output.cob" ) :
os.remove( "test/IECore/data/output.cob" )
if __name__ == "__main__" :
unittest.main()
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
import re
from json import loads
from pyvows import Vows, expect
ctx = Vows.Context
from thumbor.engines.json_engine import JSONEngine
from thumbor.point import FocalPoint
class MockImage:
def __init__(self, size, data=None):
self.size = size
self.data = data
class MockEngine:
def __init__(self, size):
self.context = None
self.image = MockImage(size)
self.frame_count = 1
def get_image_mode(self):
return 'RGB'
def get_image_data(self):
return self.image.data
def set_image_data(self, data):
self.image.data = data
def resize(self, width, height):
self.image.size = (width, height)
def crop(self, left, top, right, bottom):
self.image.size = (right - left, bottom - top)
def image_data_as_rgb(self, update_image=True):
return 'RGB', self.image.data
@property
def size(self):
return self.image.size
IMAGE_PATH = '/some/image/path.jpg'
IMAGE_SIZE = (300, 200)
@Vows.batch
class JsonEngineVows(ctx):
class CreateInstanceVows(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH)
return json
def should_not_be_null_or_error(self, topic):
expect(topic).not_to_be_null()
expect(topic).not_to_be_an_error()
def should_have_proper_engine(self, topic):
expect(topic.engine).to_be_instance_of(MockEngine)
def should_have_proper_dimensions(self, topic):
expect(topic.width).to_equal(300)
expect(topic.height).to_equal(200)
def should_have_proper_path(self, topic):
expect(topic.path).to_equal(IMAGE_PATH)
def should_have_null_callback_name(self, topic):
expect(topic.callback_name).to_be_null()
def should_have_empty_operations(self, topic):
expect(topic.operations).to_be_empty()
def should_have_empty_focal_points(self, topic):
expect(topic.focal_points).to_be_empty()
def should_have_proper_image(self, topic):
expect(topic.image).to_be_instance_of(MockImage)
def should_return_size(self, topic):
expect(topic.size).to_equal((300, 200))
class GetImageMode(ctx):
def topic(self, engine):
return engine.get_image_mode()
def should_return_proper_image_mode(self, topic):
expect(topic).to_equal('RGB')
class GetImageDataAsRgb(ctx):
def topic(self, engine):
engine.set_image_data('SOME DATA')
return engine.image_data_as_rgb()
def should_return_proper_image_data(self, (mode, data)):
expect(mode).to_equal('RGB')
expect(data).to_equal('SOME DATA')
class GetImageData(ctx):
def topic(self, engine):
engine.set_image_data('SOME DATA')
return engine.get_image_data()
def should_return_proper_image_data(self, topic):
expect(topic).to_equal('SOME DATA')
class Read(ctx):
def topic(self, engine):
return loads(engine.read('jpg', 100))
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 300,
"height": 200
}
}
}
expect(topic).to_be_like(expected)
class ReadWithCallbackName(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH, callback_name="callback")
jsonp = json.read('jpg', 100)
match = re.match('^callback\((.+)\);', jsonp)
return match
def should_not_be_null(self, topic):
expect(topic).not_to_be_null()
class JsonCompare(ctx):
def topic(self, match):
json = match.groups()[0]
return loads(json)
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 300,
"height": 200
}
}
}
expect(topic).to_be_like(expected)
class ResizeVows(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH)
json.resize(200, 300)
return loads(json.read('jpg', 100))
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [
{u'width': 200, u'type': u'resize', u'height': 300}
],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 200,
"height": 300
}
}
}
expect(topic).to_be_like(expected)
class CropVows(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH)
json.crop(100, 100, 200, 150)
return loads(json.read('jpg', 100))
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [
{u'top': 100, u'right': 200, u'type': u'crop', u'left': 100, u'bottom': 150}
],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 100,
"height": 50
}
}
}
expect(topic).to_be_like(expected)
class FlipVows(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH)
json.flip_vertically()
json.flip_horizontally()
return loads(json.read('jpg', 100))
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [
{u'type': u'flip_vertically'},
{u'type': u'flip_horizontally'}
],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 300,
"height": 200
}
}
}
expect(topic).to_be_like(expected)
class FocalVows(ctx):
def topic(self):
engine = MockEngine(size=IMAGE_SIZE)
json = JSONEngine(engine=engine, path=IMAGE_PATH)
json.focus([
FocalPoint(100, 100),
FocalPoint(200, 200)
])
return loads(json.read('jpg', 100))
def should_be_proper_json(self, topic):
expected = {
"thumbor": {
"operations": [
],
"focal_points": [
{u'origin': u'alignment', u'height': 1, u'width': 1, u'y': 100, u'x': 100, u'z': 1.0},
{u'origin': u'alignment', u'height': 1, u'width': 1, u'y': 200, u'x': 200, u'z': 1.0}
],
"source": {
"url": "/some/image/path.jpg",
"width": 300,
"height": 200,
"frameCount": 1,
},
"target": {
"width": 300,
"height": 200
}
}
}
expect(topic).to_be_like(expected)
|
|
# -*- coding: utf-8 -*-
"""The FSI for dropbox."""
# this module is named 'DropBox' instead of 'dropbox' to avoid a
# naming conflict.
import os
import logging
import stat
import tempfile
import dropbox
from stashutils.fsi.errors import OperationFailure, IsDir, IsFile
from stashutils.fsi.errors import AlreadyExists
from stashutils.fsi.base import BaseFSI, make_stat, calc_mode
from stashutils.dbutils import get_dropbox_client
# turn down requests log verbosity
logging.getLogger('requests').setLevel(logging.CRITICAL)
OVERWRITE = dropbox.files.WriteMode("overwrite", None)
class DropboxFSI(BaseFSI):
"""A FSI for accessing dropbox."""
def __init__(self, logger):
self.logger = logger
self.path = "/"
self.client = None
def abspath(self, path):
"""returns thr absolute path for path."""
p = os.path.join(self.path, path)
if p == "/":
return ""
else:
return p
def connect(self, *args):
"""connects to the dropbox. args[0] is the username."""
if len(args) != 1:
return "expected one argument!"
try:
dbci = get_dropbox_client(args[0], False, None, None)
except Exception as e:
return e.message
else:
if dbci is None:
return "No Dropbox configured for '{u}'.".format(u=args[0])
else:
self.client = dbci
return True
def get_path(self):
return self.path
def repr(self):
return "Dropbox [CWD: {p}]".format(p=self.path)
def close(self):
pass
def cd(self, name):
path = self.abspath(name)
if name == "..":
self.path = "/".join(self.path.split("/")[:-1])
if self.path == "":
self.path = "/"
return
try:
# test
self.client.files_list_folder(path, recursive=False)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_other():
raise OperationFailure(repr(e))
elif e.is_path():
pe = e.get_path()
if pe.is_not_folder():
raise IsFile()
elif pe.is_not_found():
raise OperationFailure("Not Found!")
else:
raise OperationFailure(repr(e))
else:
raise OperationFailure("Not found!")
else:
self.path = path
def listdir(self, path="."):
p = self.abspath(path)
e = []
try:
c = self.client.files_list_folder(p, recursive=False)
e += c.entries
while True:
if c.has_more:
c = self.client.files_list_folder_continue(p)
e += c.entries
else:
break
except dropbox.exceptions.ApiError as e:
raise OperationFailure(e.message)
return [str(m.name) for m in e]
def mkdir(self, name):
path = self.abspath(name)
try:
self.client.files_create_folder(path)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_path():
pe = e.get_path()
if pe.is_conflict():
raise AlreadyExists("Already exists!")
elif pe.is_insufficient_space():
raise OperationFailure("Not enough Space available!")
elif pe.is_disallowed_name():
raise OperationFailure("Disallowed name!")
elif pe.is_no_write_permission():
raise OperationFailure("Permission denied!")
else:
raise OperationFailure(api_e.message)
else:
raise OperationFailure("Can not create dir!")
def remove(self, name):
path = self.abspath(name)
try:
self.client.files_delete(path)
except dropbox.exceptions.ApiError:
raise OperationFailure("Can not delete target!")
def isdir(self, name):
path = self.abspath(name)
try:
self.client.files_list_folder(path, recursive=False)
return True
except dropbox.exceptions.ApiError:
return False
def isfile(self, name):
return not self.isdir(name)
def open(self, name, mode="rb", buffering=0):
mode = mode.replace("+", "")
ap = self.abspath(name)
if mode in ("r", "rb", "rU"):
try:
response = self.client.files_download(ap)[1]
                # unfortunately, we can't return response.raw because it does not
                # support seek(), which is required by tarfile (used in ls)
return Dropbox_Download(
self.client,
name,
mode,
buffering,
response,
)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_path():
pe = e.get_path()
if pe.is_not_file():
raise IsDir()
raise OperationFailure(api_e.message)
elif "w" in mode:
return Dropbox_Upload(self.client, ap, mode)
else:
raise OperationFailure("Mode not supported!")
def stat(self, name):
ap = self.abspath(name)
if ap in ("/", "/.", "./", "//", ""):
bytes = 0
isdir = True
else:
try:
meta = self.client.files_get_metadata(ap)
except dropbox.exceptions.ApiError as e:
raise OperationFailure(e.message)
if isinstance(meta, (dropbox.files.FolderMetadata, dropbox.sharing.SharedFolderMetadata)):
bytes = 0
isdir = True
else:
bytes = meta.size
isdir = False
type_ = (stat.S_IFDIR if isdir else stat.S_IFREG)
m = calc_mode(type=type_)
s = make_stat(size=bytes, mode=m)
return s
class Dropbox_Upload(object):
"""utility file-like class used for Dropbox-uploads."""
def __init__(self, client, path, mode):
self.client = client
self.path = path
self.mode = mode
self.session = None
self.cursor = None
self.closed = False
def write(self, data):
"""writes some data to the file."""
if self.closed:
raise ValueError("I/O operation on closed file")
if self.session is None:
# first call
self.session = self.client.files_upload_session_start(
data,
close=False,
)
self.cursor = dropbox.files.UploadSessionCursor(self.session.session_id, offset=0)
else:
self.client.files_upload_session_append_v2(data, self.cursor, close=False)
self.cursor.offset += len(data)
def close(self):
"""closes the file"""
if self.closed:
return
if self.session is None:
self.client.files_upload("", self.path, mute=True)
else:
commit = dropbox.files.CommitInfo(self.path, mode=OVERWRITE)
self.client.files_upload_session_finish("", self.cursor, commit)
self.session = None
self.closed = True
def __del__(self):
"""called on deletion"""
self.close()
def __enter__(self):
"""called when entering a 'with'-context."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""called when exiting a 'with'-context."""
self.close()
def flush(self):
"""no-op"""
pass
def truncate(self, size=-1):
"""no-op"""
pass
class Dropbox_Download(object):
"""
utility file-like class used for Dropbox-downloads.
There are two reasons to use this class:
1. requests.Response.raw does not support seek() and tell()
2. the 'ls' command checks for filetypes. Due to this, each
    file in a directory is opened. This class improves performance
by only downloading as much as required into a temporary file.
"""
def __init__(self, client, path, mode, buffering, response):
self.client = client
self.path = path
self.mode = mode
self.buffering = buffering
self.name = path
self._response = response
self._raw = response.raw
self.closed = False
self._read = 0
if "U" in mode:
tfmode = "w+bU"
else:
tfmode = "w+b"
self._tf = tempfile.TemporaryFile(mode=tfmode)
self.newlines = None
def close(self):
"""closes the file"""
if self.closed:
return
self.closed = True
self._tf.close()
self._raw.close()
p = self._tf.name
if os.path.exists(p):
os.remove(p)
def __enter__(self):
"""called when entering a 'with'-context"""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""called when exiting a 'with'-context."""
self.close()
def __del__(self):
"""called when the object will be deleted"""
self.close()
def read(self, size=-1):
"""read at most size bytes from the file"""
if self.closed:
raise ValueError("I/O operation on closed file")
if ((size + self._tf.tell()) > self._read) or (size < 0):
ccp = self._tf.tell()
if size >= 0:
tr = size - (self._read - ccp)
content = self._raw.read(tr)
else:
content = self._raw.read()
self._read += len(content)
self._tf.seek(0, os.SEEK_END)
self._tf.write(content)
self._tf.seek(ccp, os.SEEK_SET)
return self._tf.read(size)
def tell(self):
"""tells the cursor position"""
return self._tf.tell()
def seek(self, offset, whence=os.SEEK_SET):
"""sets the cursor position"""
ccp = self._tf.tell()
if whence == os.SEEK_SET:
ncp = offset
elif whence == os.SEEK_CUR:
ncp = ccp + offset
elif whence == os.SEEK_END:
size = int(self._response.headers["Content-Length"])
ncp = size + offset
else:
raise ValueError("Invalid Value")
if ncp > self._read:
toread = ncp - ccp
self.read(toread)
self.seek(ccp, os.SEEK_SET)
        # we need to seek twice to support relative seeks
self._tf.seek(offset, whence)
def readline(self, size=-1):
"""Read one entire line from the file."""
if "U" in self.mode:
ends = ("\n", "\r", "\r\n")
else:
ends = ("\n", )
buff = ""
while True:
d = self.read(1)
buff += d
if any([e in buff for e in ends]):
return buff
if (size <= len(buff)) or (not d):
return buff
def readlines(self, sizehint=None):
"""
Read until EOF using readline() and return a list containing the
lines thus read.
"""
# sizehint ignored; see the documentation of file.readlines
lines = []
while True:
line = self.readline()
if not line:
break
lines.append(line)
return lines
def xreadlines(self):
"""This method returns the same thing as iter(f)."""
return self
def __iter__(self):
if self.closed:
raise ValueError("I/O operation on closed file")
return self
def next(self):
"""returns the next line"""
line = self.readline()
if line:
return line
else:
raise StopIteration()
def flush(self):
"""no-op"""
pass
def truncate(self, size=-1):
"""no-op"""
pass
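# A minimal usage sketch (illustration only): Dropbox_Download wraps the raw
# response returned by the Dropbox SDK's files_download(), which yields a
# (metadata, requests.Response) tuple. The helper name is hypothetical.
def _example_lazy_download(client, path):
    """Hypothetical helper: open a remote file and read only its first bytes."""
    metadata, response = client.files_download(path)
    with Dropbox_Download(client, path, "rb", -1, response) as f:
        header = f.read(16)  # only this much is pulled into the temporary file
    return header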
|
|
"""
The :mod:`sklearn.pls` module implements Partial Least Squares (PLS).
"""
# Author: Edouard Duchesnay <[email protected]>
# License: BSD 3 clause
from distutils.version import LooseVersion
from sklearn.utils.extmath import svd_flip
from ..base import BaseEstimator, RegressorMixin, TransformerMixin
from ..utils import check_array, check_consistent_length
from ..externals import six
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import linalg
from ..utils import arpack
from ..utils.validation import check_is_fitted, FLOAT_DTYPES
__all__ = ['PLSCanonical', 'PLSRegression', 'PLSSVD']
import scipy
pinv2_args = {}
if LooseVersion(scipy.__version__) >= LooseVersion('0.12'):
# check_finite=False is an optimization available only in scipy >=0.12
pinv2_args = {'check_finite': False}
def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06,
norm_y_weights=False):
"""Inner loop of the iterative NIPALS algorithm.
    Provides an alternative to svd(X'Y); returns the first left and right
    singular vectors of X'Y. See PLS for the meaning of the parameters. It is
    similar to the power method for determining the eigenvectors and
    eigenvalues of X'Y.
"""
y_score = Y[:, [0]]
x_weights_old = 0
ite = 1
X_pinv = Y_pinv = None
eps = np.finfo(X.dtype).eps
# Inner loop of the Wold algo.
while True:
# 1.1 Update u: the X weights
if mode == "B":
if X_pinv is None:
# We use slower pinv2 (same as np.linalg.pinv) for stability
# reasons
X_pinv = linalg.pinv2(X, **pinv2_args)
x_weights = np.dot(X_pinv, y_score)
else: # mode A
# Mode A regress each X column on y_score
x_weights = np.dot(X.T, y_score) / np.dot(y_score.T, y_score)
# 1.2 Normalize u
x_weights /= np.sqrt(np.dot(x_weights.T, x_weights)) + eps
# 1.3 Update x_score: the X latent scores
x_score = np.dot(X, x_weights)
# 2.1 Update y_weights
if mode == "B":
if Y_pinv is None:
Y_pinv = linalg.pinv2(Y, **pinv2_args) # compute once pinv(Y)
y_weights = np.dot(Y_pinv, x_score)
else:
# Mode A regress each Y column on x_score
y_weights = np.dot(Y.T, x_score) / np.dot(x_score.T, x_score)
# 2.2 Normalize y_weights
if norm_y_weights:
y_weights /= np.sqrt(np.dot(y_weights.T, y_weights)) + eps
# 2.3 Update y_score: the Y latent scores
y_score = np.dot(Y, y_weights) / (np.dot(y_weights.T, y_weights) + eps)
# y_score = np.dot(Y, y_weights) / np.dot(y_score.T, y_score) ## BUG
x_weights_diff = x_weights - x_weights_old
if np.dot(x_weights_diff.T, x_weights_diff) < tol or Y.shape[1] == 1:
break
if ite == max_iter:
warnings.warn('Maximum number of iterations reached')
break
x_weights_old = x_weights
ite += 1
return x_weights, y_weights, ite
def _svd_cross_product(X, Y):
C = np.dot(X.T, Y)
U, s, Vh = linalg.svd(C, full_matrices=False)
u = U[:, [0]]
v = Vh.T[:, [0]]
return u, v
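# Illustrative sketch (not part of the scikit-learn API): on random data the
# NIPALS inner loop and the SVD of X'Y should agree on the first weight
# vectors up to sign, as described in the docstrings above. The helper name
# and the default sizes are arbitrary.
def _example_compare_nipals_and_svd(n=50, p=4, q=3, seed=0):
    rng = np.random.RandomState(seed)
    X = rng.randn(n, p)
    Y = rng.randn(n, q)
    x_w, y_w, _ = _nipals_twoblocks_inner_loop(X, Y, mode="A",
                                               norm_y_weights=True)
    u, v = _svd_cross_product(X, Y)
    # The weight vectors are defined up to a sign flip, so compare magnitudes.
    return (np.abs(np.abs(x_w) - np.abs(u)).max(),
            np.abs(np.abs(y_w) - np.abs(v)).max())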
def _center_scale_xy(X, Y, scale=True):
""" Center X, Y and scale if the scale parameter==True
Returns
-------
X, Y, x_mean, y_mean, x_std, y_std
"""
# center
x_mean = X.mean(axis=0)
X -= x_mean
y_mean = Y.mean(axis=0)
Y -= y_mean
# scale
if scale:
x_std = X.std(axis=0, ddof=1)
x_std[x_std == 0.0] = 1.0
X /= x_std
y_std = Y.std(axis=0, ddof=1)
y_std[y_std == 0.0] = 1.0
Y /= y_std
else:
x_std = np.ones(X.shape[1])
y_std = np.ones(Y.shape[1])
return X, Y, x_mean, y_mean, x_std, y_std
class _PLS(six.with_metaclass(ABCMeta), BaseEstimator, TransformerMixin,
RegressorMixin):
"""Partial Least Squares (PLS)
    This class implements the generic PLS algorithm; its constructor parameters
    allow obtaining specific implementations such as:
- PLS2 regression, i.e., PLS 2 blocks, mode A, with asymmetric deflation
and unnormalized y weights such as defined by [Tenenhaus 1998] p. 132.
With univariate response it implements PLS1.
- PLS canonical, i.e., PLS 2 blocks, mode A, with symmetric deflation and
normalized y weights such as defined by [Tenenhaus 1998] (p. 132) and
[Wegelin et al. 2000]. This parametrization implements the original Wold
algorithm.
We use the terminology defined by [Wegelin et al. 2000].
This implementation uses the PLS Wold 2 blocks algorithm based on two
nested loops:
    (i) The outer loop iterates over components.
    (ii) The inner loop estimates the weight vectors. This can be done
    with two algorithms: (a) the inner loop of the original NIPALS algorithm,
    or (b) an SVD on the residual cross-covariance matrices.
    Parameters
    ----------
    n_components : int, number of components to keep. (default 2).
scale : boolean, scale data? (default True)
deflation_mode : str, "canonical" or "regression". See notes.
mode : "A" classical PLS and "B" CCA. See notes.
norm_y_weights: boolean, normalize Y weights to one? (default False)
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : an integer, the maximum number of iterations (default 500)
of the NIPALS inner loop (used only if algorithm="nipals")
tol : non-negative real, default 1e-06
The tolerance used in the iterative algorithm.
copy : boolean, default True
        Whether the deflation should be done on a copy. Leave the default
        value set to True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
coef_: array, [p, q]
The coefficients of the linear model: ``Y = X coef_ + Err``
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component. Not useful if the algorithm given is "svd".
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
PLSCanonical
PLSRegression
CCA
PLS_SVD
"""
@abstractmethod
def __init__(self, n_components=2, scale=True, deflation_mode="regression",
mode="A", algorithm="nipals", norm_y_weights=False,
max_iter=500, tol=1e-06, copy=True):
self.n_components = n_components
self.deflation_mode = deflation_mode
self.mode = mode
self.norm_y_weights = norm_y_weights
self.scale = scale
self.algorithm = algorithm
self.max_iter = max_iter
self.tol = tol
self.copy = copy
def fit(self, X, Y):
"""Fit model to data.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
n_features is the number of predictors.
Y : array-like of response, shape = [n_samples, n_targets]
            Target vectors, where n_samples is the number of samples and
n_targets is the number of response variables.
"""
        # copy since this will contain the residuals (deflated) matrices
check_consistent_length(X, Y)
X = check_array(X, dtype=np.float64, copy=self.copy)
Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
n = X.shape[0]
p = X.shape[1]
q = Y.shape[1]
if self.n_components < 1 or self.n_components > p:
raise ValueError('Invalid number of components: %d' %
self.n_components)
if self.algorithm not in ("svd", "nipals"):
raise ValueError("Got algorithm %s when only 'svd' "
"and 'nipals' are known" % self.algorithm)
if self.algorithm == "svd" and self.mode == "B":
raise ValueError('Incompatible configuration: mode B is not '
'implemented with svd algorithm')
if self.deflation_mode not in ["canonical", "regression"]:
raise ValueError('The deflation mode is unknown')
# Scale (in place)
X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_ = (
_center_scale_xy(X, Y, self.scale))
# Residuals (deflated) matrices
Xk = X
Yk = Y
# Results matrices
self.x_scores_ = np.zeros((n, self.n_components))
self.y_scores_ = np.zeros((n, self.n_components))
self.x_weights_ = np.zeros((p, self.n_components))
self.y_weights_ = np.zeros((q, self.n_components))
self.x_loadings_ = np.zeros((p, self.n_components))
self.y_loadings_ = np.zeros((q, self.n_components))
self.n_iter_ = []
# NIPALS algo: outer loop, over components
for k in range(self.n_components):
if np.all(np.dot(Yk.T, Yk) < np.finfo(np.double).eps):
# Yk constant
warnings.warn('Y residual constant at iteration %s' % k)
break
# 1) weights estimation (inner loop)
# -----------------------------------
if self.algorithm == "nipals":
x_weights, y_weights, n_iter_ = \
_nipals_twoblocks_inner_loop(
X=Xk, Y=Yk, mode=self.mode, max_iter=self.max_iter,
tol=self.tol, norm_y_weights=self.norm_y_weights)
self.n_iter_.append(n_iter_)
elif self.algorithm == "svd":
x_weights, y_weights = _svd_cross_product(X=Xk, Y=Yk)
# Forces sign stability of x_weights and y_weights
            # Sign indeterminacy issue from svd if algorithm == "svd"
# and from platform dependent computation if algorithm == 'nipals'
x_weights, y_weights = svd_flip(x_weights, y_weights.T)
y_weights = y_weights.T
# compute scores
x_scores = np.dot(Xk, x_weights)
if self.norm_y_weights:
y_ss = 1
else:
y_ss = np.dot(y_weights.T, y_weights)
y_scores = np.dot(Yk, y_weights) / y_ss
# test for null variance
if np.dot(x_scores.T, x_scores) < np.finfo(np.double).eps:
warnings.warn('X scores are null at iteration %s' % k)
break
# 2) Deflation (in place)
# ----------------------
            # A possible memory footprint reduction may be done here: in order
            # to avoid the allocation of a data chunk for the rank-one
            # approximations matrix which is then subtracted from Xk, we
            # suggest performing a column-wise deflation.
#
# - regress Xk's on x_score
x_loadings = np.dot(Xk.T, x_scores) / np.dot(x_scores.T, x_scores)
# - subtract rank-one approximations to obtain remainder matrix
Xk -= np.dot(x_scores, x_loadings.T)
if self.deflation_mode == "canonical":
# - regress Yk's on y_score, then subtract rank-one approx.
y_loadings = (np.dot(Yk.T, y_scores)
/ np.dot(y_scores.T, y_scores))
Yk -= np.dot(y_scores, y_loadings.T)
if self.deflation_mode == "regression":
# - regress Yk's on x_score, then subtract rank-one approx.
y_loadings = (np.dot(Yk.T, x_scores)
/ np.dot(x_scores.T, x_scores))
Yk -= np.dot(x_scores, y_loadings.T)
# 3) Store weights, scores and loadings # Notation:
self.x_scores_[:, k] = x_scores.ravel() # T
self.y_scores_[:, k] = y_scores.ravel() # U
self.x_weights_[:, k] = x_weights.ravel() # W
self.y_weights_[:, k] = y_weights.ravel() # C
self.x_loadings_[:, k] = x_loadings.ravel() # P
self.y_loadings_[:, k] = y_loadings.ravel() # Q
# Such that: X = TP' + Err and Y = UQ' + Err
# 4) rotations from input space to transformed space (scores)
# T = X W(P'W)^-1 = XW* (W* : p x k matrix)
        # U = Y C(Q'C)^-1 = YC* (C* : q x k matrix)
self.x_rotations_ = np.dot(
self.x_weights_,
linalg.pinv2(np.dot(self.x_loadings_.T, self.x_weights_),
**pinv2_args))
if Y.shape[1] > 1:
self.y_rotations_ = np.dot(
self.y_weights_,
linalg.pinv2(np.dot(self.y_loadings_.T, self.y_weights_),
**pinv2_args))
else:
self.y_rotations_ = np.ones(1)
if True or self.deflation_mode == "regression":
# FIXME what's with the if?
# Estimate regression coefficient
# Regress Y on T
# Y = TQ' + Err,
# Then express in function of X
# Y = X W(P'W)^-1Q' + Err = XB + Err
# => B = W*Q' (p x q)
self.coef_ = np.dot(self.x_rotations_, self.y_loadings_.T)
self.coef_ = (1. / self.x_std_.reshape((p, 1)) * self.coef_ *
self.y_std_)
return self
def transform(self, X, Y=None, copy=True):
"""Apply the dimension reduction learned on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
q is the number of response variables.
copy : boolean, default True
Whether to copy X and Y, or perform in-place normalization.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
check_is_fitted(self, 'x_mean_')
X = check_array(X, copy=copy, dtype=FLOAT_DTYPES)
# Normalize
X -= self.x_mean_
X /= self.x_std_
# Apply rotation
x_scores = np.dot(X, self.x_rotations_)
if Y is not None:
Y = check_array(Y, ensure_2d=False, copy=copy, dtype=FLOAT_DTYPES)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
Y -= self.y_mean_
Y /= self.y_std_
y_scores = np.dot(Y, self.y_rotations_)
return x_scores, y_scores
return x_scores
def predict(self, X, copy=True):
"""Apply the dimension reduction learned on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
p is the number of predictors.
copy : boolean, default True
Whether to copy X and Y, or perform in-place normalization.
Notes
-----
This call requires the estimation of a p x q matrix, which may
be an issue in high dimensional space.
"""
check_is_fitted(self, 'x_mean_')
X = check_array(X, copy=copy, dtype=FLOAT_DTYPES)
# Normalize
X -= self.x_mean_
X /= self.x_std_
Ypred = np.dot(X, self.coef_)
return Ypred + self.y_mean_
def fit_transform(self, X, y=None, **fit_params):
"""Learn and apply the dimension reduction on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
q is the number of response variables.
copy : boolean, default True
Whether to copy X and Y, or perform in-place normalization.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
return self.fit(X, y, **fit_params).transform(X, y)
class PLSRegression(_PLS):
"""PLS regression
PLSRegression implements the PLS 2 blocks regression known as PLS2 or PLS1
in case of one dimensional response.
This class inherits from _PLS with mode="A", deflation_mode="regression",
norm_y_weights=False and algorithm="nipals".
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, (default 2)
Number of components to keep.
scale : boolean, (default True)
whether to scale the data
max_iter : an integer, (default 500)
the maximum number of iterations of the NIPALS inner loop (used
only if algorithm="nipals")
tol : non-negative real
Tolerance used in the iterative algorithm default 1e-06.
copy : boolean, default True
        Whether the deflation should be done on a copy. Leave the default
        value set to True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
coef_: array, [p, q]
The coefficients of the linear model: ``Y = X coef_ + Err``
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component.
Notes
-----
Matrices :
T: x_scores_
U: y_scores_
W: x_weights_
C: y_weights_
P: x_loadings_
        Q: y_loadings_
Are computed such that:
X = T P.T + Err and Y = U Q.T + Err
T[:, k] = Xk W[:, k] for k in range(n_components)
U[:, k] = Yk C[:, k] for k in range(n_components)
x_rotations_ = W (P.T W)^(-1)
y_rotations_ = C (Q.T C)^(-1)
where Xk and Yk are residual matrices at iteration k.
Slides explaining PLS
:ref:http://www.eigenvector.com/Docs/Wise_pls_properties.pdf
    For each component k, find weights u, v that optimize:
    ``max corr(Xk u, Yk v) * std(Xk u) std(Yk v)``, such that ``|u| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on
the current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
current X score. This performs the PLS regression known as PLS2. This
mode is prediction oriented.
    This implementation provides the same results as three PLS packages
    available in the R language (R-project):
    - "mixOmics" with function pls(X, Y, mode = "regression")
    - "plspm" with function plsreg2(X, Y)
    - "pls" with function oscorespls.fit(X, Y)
Examples
--------
>>> from sklearn.cross_decomposition import PLSRegression
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> pls2 = PLSRegression(n_components=2)
>>> pls2.fit(X, Y)
... # doctest: +NORMALIZE_WHITESPACE
PLSRegression(copy=True, max_iter=500, n_components=2, scale=True,
tol=1e-06)
>>> Y_pred = pls2.predict(X)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
    In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
"""
def __init__(self, n_components=2, scale=True,
max_iter=500, tol=1e-06, copy=True):
super(PLSRegression, self).__init__(
n_components=n_components, scale=scale,
deflation_mode="regression", mode="A",
norm_y_weights=False, max_iter=max_iter, tol=tol,
copy=copy)
class PLSCanonical(_PLS):
""" PLSCanonical implements the 2 blocks canonical PLS of the original Wold
    algorithm [Tenenhaus 1998] p.204, referred to as PLS-C2A in [Wegelin 2000].
This class inherits from PLS with mode="A" and deflation_mode="canonical",
norm_y_weights=True and algorithm="nipals", but svd should provide similar
results up to numerical errors.
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
scale : boolean, scale data? (default True)
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : an integer, (default 500)
the maximum number of iterations of the NIPALS inner loop (used
only if algorithm="nipals")
tol : non-negative real, default 1e-06
the tolerance used in the iterative algorithm
copy : boolean, default True
        Whether the deflation should be done on a copy. Leave the default
        value set to True unless you don't care about side effects.
n_components : int, number of components to keep. (default 2).
Attributes
----------
x_weights_ : array, shape = [p, n_components]
X block weights vectors.
y_weights_ : array, shape = [q, n_components]
Y block weights vectors.
x_loadings_ : array, shape = [p, n_components]
X block loadings vectors.
y_loadings_ : array, shape = [q, n_components]
Y block loadings vectors.
x_scores_ : array, shape = [n_samples, n_components]
X scores.
y_scores_ : array, shape = [n_samples, n_components]
Y scores.
x_rotations_ : array, shape = [p, n_components]
X block to latents rotations.
y_rotations_ : array, shape = [q, n_components]
Y block to latents rotations.
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component. Not useful if the algorithm provided is "svd".
Notes
-----
Matrices :
T: x_scores_
U: y_scores_
W: x_weights_
C: y_weights_
P: x_loadings_
        Q: y_loadings_
Are computed such that:
X = T P.T + Err and Y = U Q.T + Err
T[:, k] = Xk W[:, k] for k in range(n_components)
U[:, k] = Yk C[:, k] for k in range(n_components)
x_rotations_ = W (P.T W)^(-1)
y_rotations_ = C (Q.T C)^(-1)
where Xk and Yk are residual matrices at iteration k.
Slides explaining PLS
:ref:http://www.eigenvector.com/Docs/Wise_pls_properties.pdf
    For each component k, find weights u, v that optimize::
        max corr(Xk u, Yk v) * std(Xk u) std(Yk v), such that ``|u| = |v| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on the
current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current Y score. This performs a canonical symmetric version of PLS
    regression, which is slightly different from CCA. It is mostly used
    for modeling.
    This implementation provides the same results as the "plspm" package
    available in the R language (R-project), using the function plsca(X, Y).
    Results are equal or collinear with the function
    ``pls(..., mode = "canonical")`` of the "mixOmics" package. The difference
    lies in the fact that the mixOmics implementation does not exactly
    implement the Wold algorithm, since it does not normalize y_weights to one.
Examples
--------
>>> from sklearn.cross_decomposition import PLSCanonical
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> plsca = PLSCanonical(n_components=2)
>>> plsca.fit(X, Y)
... # doctest: +NORMALIZE_WHITESPACE
PLSCanonical(algorithm='nipals', copy=True, max_iter=500, n_components=2,
scale=True, tol=1e-06)
>>> X_c, Y_c = plsca.transform(X, Y)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
CCA
PLSSVD
"""
def __init__(self, n_components=2, scale=True, algorithm="nipals",
max_iter=500, tol=1e-06, copy=True):
super(PLSCanonical, self).__init__(
n_components=n_components, scale=scale,
deflation_mode="canonical", mode="A",
norm_y_weights=True, algorithm=algorithm,
max_iter=max_iter, tol=tol, copy=copy)
class PLSSVD(BaseEstimator, TransformerMixin):
"""Partial Least Square SVD
    Simply performs an SVD on the cross-covariance matrix X'Y.
    There is no iterative deflation here.
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, default 2
Number of components to keep.
scale : boolean, default True
Whether to scale X and Y.
copy : boolean, default True
Whether to copy X and Y, or perform in-place computations.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
See also
--------
PLSCanonical
CCA
"""
def __init__(self, n_components=2, scale=True, copy=True):
self.n_components = n_components
self.scale = scale
self.copy = copy
def fit(self, X, Y):
        # copy since this will contain the centered data
check_consistent_length(X, Y)
X = check_array(X, dtype=np.float64, copy=self.copy)
Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
if self.n_components > max(Y.shape[1], X.shape[1]):
raise ValueError("Invalid number of components n_components=%d"
" with X of shape %s and Y of shape %s."
% (self.n_components, str(X.shape), str(Y.shape)))
# Scale (in place)
X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_ = (
_center_scale_xy(X, Y, self.scale))
# svd(X'Y)
C = np.dot(X.T, Y)
# The arpack svds solver only works if the number of extracted
# components is smaller than rank(X) - 1. Hence, if we want to extract
# all the components (C.shape[1]), we have to use another one. Else,
        # let's use arpack to compute only the interesting components.
if self.n_components >= np.min(C.shape):
U, s, V = linalg.svd(C, full_matrices=False)
else:
U, s, V = arpack.svds(C, k=self.n_components)
# Deterministic output
U, V = svd_flip(U, V)
V = V.T
self.x_scores_ = np.dot(X, U)
self.y_scores_ = np.dot(Y, V)
self.x_weights_ = U
self.y_weights_ = V
return self
def transform(self, X, Y=None):
"""Apply the dimension reduction learned on the train data."""
check_is_fitted(self, 'x_mean_')
X = check_array(X, dtype=np.float64)
Xr = (X - self.x_mean_) / self.x_std_
x_scores = np.dot(Xr, self.x_weights_)
if Y is not None:
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
Yr = (Y - self.y_mean_) / self.y_std_
y_scores = np.dot(Yr, self.y_weights_)
return x_scores, y_scores
return x_scores
def fit_transform(self, X, y=None, **fit_params):
"""Learn and apply the dimension reduction on the train data.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
q is the number of response variables.
Returns
-------
x_scores if Y is not given, (x_scores, y_scores) otherwise.
"""
return self.fit(X, y, **fit_params).transform(X, y)
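# A minimal usage sketch for PLSSVD (illustration only), mirroring the data of
# the PLSRegression/PLSCanonical docstring examples above:
#
#     >>> from sklearn.cross_decomposition import PLSSVD
#     >>> X = [[0., 0., 1.], [1., 0., 0.], [2., 2., 2.], [2., 5., 4.]]
#     >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
#     >>> pls_svd = PLSSVD(n_components=2).fit(X, Y)
#     >>> X_scores, Y_scores = pls_svd.transform(X, Y)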
|
|
# Copyright 2016 Nexenta Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
from oslo_log import log as logging
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import interface
from cinder.volume.drivers.nexenta.ns5 import jsonrpc
from cinder.volume.drivers.nexenta import options
from cinder.volume.drivers.nexenta import utils
from cinder.volume.drivers import nfs
VERSION = '1.0.0'
LOG = logging.getLogger(__name__)
@interface.volumedriver
class NexentaNfsDriver(nfs.NfsDriver): # pylint: disable=R0921
"""Executes volume driver commands on Nexenta Appliance.
Version history:
1.0.0 - Initial driver version.
"""
driver_prefix = 'nexenta'
volume_backend_name = 'NexentaNfsDriver'
VERSION = VERSION
def __init__(self, *args, **kwargs):
super(NexentaNfsDriver, self).__init__(*args, **kwargs)
if self.configuration:
self.configuration.append_config_values(
options.NEXENTA_CONNECTION_OPTS)
self.configuration.append_config_values(
options.NEXENTA_NFS_OPTS)
self.configuration.append_config_values(
options.NEXENTA_DATASET_OPTS)
self.nfs_mount_point_base = self.configuration.nexenta_mount_point_base
self.dataset_compression = (
self.configuration.nexenta_dataset_compression)
self.dataset_deduplication = self.configuration.nexenta_dataset_dedup
self.dataset_description = (
self.configuration.nexenta_dataset_description)
self.sparsed_volumes = self.configuration.nexenta_sparsed_volumes
self.nef = None
self.nef_protocol = self.configuration.nexenta_rest_protocol
self.nef_host = self.configuration.nas_host
self.share = self.configuration.nas_share_path
self.nef_port = self.configuration.nexenta_rest_port
self.nef_user = self.configuration.nexenta_user
self.nef_password = self.configuration.nexenta_password
@property
def backend_name(self):
backend_name = None
if self.configuration:
backend_name = self.configuration.safe_get('volume_backend_name')
if not backend_name:
backend_name = self.__class__.__name__
return backend_name
def do_setup(self, context):
if self.nef_protocol == 'auto':
protocol, auto = 'http', True
else:
protocol, auto = self.nef_protocol, False
self.nef = jsonrpc.NexentaJSONProxy(
protocol, self.nef_host, self.nef_port, self.nef_user,
self.nef_password, auto=auto)
def check_for_setup_error(self):
"""Verify that the volume for our folder exists.
:raise: :py:exc:`LookupError`
"""
pool_name, fs = self._get_share_datasets(self.share)
url = 'storage/pools/%s' % (pool_name)
if not self.nef.get(url):
raise LookupError(_("Pool %s does not exist in Nexenta "
"Store appliance") % pool_name)
url = 'storage/pools/%s/filesystems/%s' % (
pool_name, fs)
if not self.nef.get(url):
raise LookupError(_("filesystem %s does not exist in "
"Nexenta Store appliance") % fs)
path = '/'.join([pool_name, fs])
shared = False
response = self.nef.get('nas/nfs')
for share in response['data']:
if share.get('filesystem') == path:
shared = True
break
if not shared:
raise LookupError(_("Dataset %s is not shared in Nexenta "
"Store appliance") % path)
def initialize_connection(self, volume, connector):
"""Allow connection to connector and return connection info.
:param volume: volume reference
:param connector: connector reference
"""
data = {'export': volume['provider_location'], 'name': 'volume'}
if volume['provider_location'] in self.shares:
data['options'] = self.shares[volume['provider_location']]
return {
'driver_volume_type': self.driver_volume_type,
'data': data
}
def create_volume(self, volume):
"""Creates a volume.
:param volume: volume reference
:returns: provider_location update dict for database
"""
self._do_create_volume(volume)
return {'provider_location': volume['provider_location']}
def _do_create_volume(self, volume):
pool, fs = self._get_share_datasets(self.share)
filesystem = '%s/%s/%s' % (pool, fs, volume['name'])
LOG.debug('Creating filesystem on NexentaStor %s', filesystem)
url = 'storage/pools/%s/filesystems' % pool
data = {
'name': '/'.join([fs, volume['name']]),
'compressionMode': self.dataset_compression,
'dedupMode': self.dataset_deduplication,
}
self.nef.post(url, data)
volume['provider_location'] = '%s:/%s/%s' % (
self.nef_host, self.share, volume['name'])
try:
self._share_folder(fs, volume['name'])
self._ensure_share_mounted('%s:/%s/%s' % (
self.nef_host, self.share, volume['name']))
volume_size = volume['size']
if getattr(self.configuration,
self.driver_prefix + '_sparsed_volumes'):
self._create_sparsed_file(self.local_path(volume), volume_size)
else:
url = 'storage/pools/%s/filesystems/%s' % (
pool, '%2F'.join([fs, volume['name']]))
compression = self.nef.get(url).get('compressionMode')
if compression != 'off':
                    # Disable compression, because otherwise the file will not
                    # use space on disk.
self.nef.put(url, {'compressionMode': 'off'})
try:
self._create_regular_file(
self.local_path(volume), volume_size)
finally:
if compression != 'off':
                            # Restore the default compression value if it was changed.
self.nef.put(url, {'compressionMode': compression})
except exception.NexentaException:
try:
url = 'storage/pools/%s/filesystems/%s' % (
pool, '%2F'.join([fs, volume['name']]))
self.nef.delete(url)
except exception.NexentaException:
LOG.warning(_LW("Cannot destroy created folder: "
"%(vol)s/%(folder)s"),
{'vol': pool, 'folder': '/'.join(
[fs, volume['name']])})
raise
def delete_volume(self, volume):
"""Deletes a logical volume.
:param volume: volume reference
"""
pool, fs = self._get_share_datasets(self.share)
url = ('storage/pools/%(pool)s/filesystems/%(fs)s') % {
'pool': pool,
'fs': '%2F'.join([fs, volume['name']])
}
origin = self.nef.get(url).get('originalSnapshot')
url = ('storage/pools/%(pool)s/filesystems/'
'%(fs)s?snapshots=true') % {
'pool': pool,
'fs': '%2F'.join([fs, volume['name']])
}
try:
self.nef.delete(url)
except exception.NexentaException as exc:
if 'Failed to destroy snapshot' in exc.args[0]:
LOG.debug('Snapshot has dependent clones, skipping')
else:
raise
try:
if origin and self._is_clone_snapshot_name(origin):
path, snap = origin.split('@')
pool, fs = path.split('/', 1)
snap_url = ('storage/pools/%(pool)s/'
'filesystems/%(fs)s/snapshots/%(snap)s') % {
'pool': pool,
'fs': fs,
'snap': snap
}
self.nef.delete(snap_url)
except exception.NexentaException as exc:
if 'does not exist' in exc.args[0]:
LOG.debug(
'Volume %s does not exist on appliance', '/'.join(
[pool, fs]))
def create_snapshot(self, snapshot):
"""Creates a snapshot.
:param snapshot: snapshot reference
"""
volume = self._get_snapshot_volume(snapshot)
pool, fs = self._get_share_datasets(self.share)
url = 'storage/pools/%(pool)s/filesystems/%(fs)s/snapshots' % {
'pool': pool,
'fs': '%2F'.join([fs, volume['name']]),
}
data = {'name': snapshot['name']}
self.nef.post(url, data)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot.
:param snapshot: snapshot reference
"""
volume = self._get_snapshot_volume(snapshot)
pool, fs = self._get_share_datasets(self.share)
url = ('storage/pools/%(pool)s/'
'filesystems/%(fs)s/snapshots/%(snap)s') % {
'pool': pool,
'fs': '%2F'.join([fs, volume['name']]),
'snap': snapshot['name']
}
try:
self.nef.delete(url)
except exception.NexentaException as exc:
            if 'EBUSY' in exc.args[0]:
LOG.warning(_LW(
'Could not delete snapshot %s - it has dependencies'),
snapshot['name'])
def create_volume_from_snapshot(self, volume, snapshot):
"""Create new volume from other's snapshot on appliance.
:param volume: reference of volume to be created
:param snapshot: reference of source snapshot
"""
snapshot_vol = self._get_snapshot_volume(snapshot)
volume['provider_location'] = snapshot_vol['provider_location']
pool, fs = self._get_share_datasets(self.share)
dataset_path = '%s/%s' % (pool, fs)
url = ('storage/pools/%(pool)s/'
'filesystems/%(fs)s/snapshots/%(snap)s/clone') % {
'pool': pool,
'fs': '%2F'.join([fs, snapshot_vol['name']]),
'snap': snapshot['name']
}
path = '/'.join([pool, fs, volume['name']])
data = {'targetPath': path}
self.nef.post(url, data)
path = '%2F'.join([pool, fs, volume['name']])
url = 'storage/filesystems/%s/promote' % path
self.nef.post(url)
try:
self._share_folder(fs, volume['name'])
except exception.NexentaException:
try:
url = ('storage/pools/%(pool)s/'
'filesystems/%(fs)s') % {
'pool': pool,
'fs': volume['name']
}
self.nef.delete(url)
except exception.NexentaException:
LOG.warning(_LW("Cannot destroy cloned filesystem: "
"%(vol)s/%(filesystem)s"),
{'vol': dataset_path,
'filesystem': volume['name']})
raise
return {'provider_location': volume['provider_location']}
def create_cloned_volume(self, volume, src_vref):
"""Creates a clone of the specified volume.
:param volume: new volume reference
:param src_vref: source volume reference
"""
LOG.info(_LI('Creating clone of volume: %s'), src_vref['id'])
snapshot = {'volume_name': src_vref['name'],
'volume_id': src_vref['id'],
'name': self._get_clone_snapshot_name(volume)}
self.create_snapshot(snapshot)
try:
return self.create_volume_from_snapshot(volume, snapshot)
except exception.NexentaException:
LOG.error(_LE('Volume creation failed, deleting created snapshot '
'%(volume_name)s@%(name)s'), snapshot)
try:
self.delete_snapshot(snapshot)
except (exception.NexentaException, exception.SnapshotIsBusy):
LOG.warning(_LW('Failed to delete zfs snapshot '
'%(volume_name)s@%(name)s'), snapshot)
raise
self.delete_snapshot(snapshot)
def local_path(self, volume):
"""Get volume path (mounted locally fs path) for given volume.
:param volume: volume reference
"""
nfs_share = volume['provider_location']
return os.path.join(self._get_mount_point_for_share(nfs_share),
'volume')
def _get_mount_point_for_share(self, nfs_share):
"""Returns path to mount point NFS share.
:param nfs_share: example 172.18.194.100:/var/nfs
"""
nfs_share = nfs_share.encode('utf-8')
return os.path.join(self.configuration.nexenta_mount_point_base,
hashlib.md5(nfs_share).hexdigest())
def _share_folder(self, path, filesystem):
"""Share NFS filesystem on NexentaStor Appliance.
:param nef: nef object
:param path: path to parent filesystem
:param filesystem: filesystem that needs to be shared
"""
pool = self.share.split('/')[0]
LOG.debug(
'Creating ACL for filesystem %s on Nexenta Store', filesystem)
url = 'storage/pools/%s/filesystems/%s/acl' % (
pool, '%2F'.join([path.replace('/', '%2F'), filesystem]))
data = {
"type": "allow",
"principal": "everyone@",
"permissions": [
"list_directory",
"read_data",
"add_file",
"write_data",
"add_subdirectory",
"append_data",
"read_xattr",
"write_xattr",
"execute",
"delete_child",
"read_attributes",
"write_attributes",
"delete",
"read_acl",
"write_acl",
"write_owner",
"synchronize"
],
"flags": [
"file_inherit",
"dir_inherit"
]
}
self.nef.post(url, data)
LOG.debug(
'Successfully shared filesystem %s', '/'.join(
[path, filesystem]))
def _get_capacity_info(self, path):
"""Calculate available space on the NFS share.
:param path: example pool/nfs
"""
pool, fs = self._get_share_datasets(path)
url = 'storage/pools/%s/filesystems/%s' % (
pool, fs)
data = self.nef.get(url)
total = utils.str2size(data['bytesAvailable'])
allocated = utils.str2size(data['bytesUsed'])
free = total - allocated
return total, free, allocated
def _get_snapshot_volume(self, snapshot):
ctxt = context.get_admin_context()
return db.volume_get(ctxt, snapshot['volume_id'])
def _get_share_datasets(self, nfs_share):
pool_name, fs = nfs_share.split('/', 1)
return pool_name, fs
def _get_clone_snapshot_name(self, volume):
"""Return name for snapshot that will be used to clone the volume."""
return 'cinder-clone-snapshot-%(id)s' % volume
def _is_clone_snapshot_name(self, snapshot):
"""Check if snapshot is created for cloning."""
name = snapshot.split('@')[-1]
return name.startswith('cinder-clone-snapshot-')
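    # Illustration of the naming round trip used above (hypothetical values):
    #   _get_clone_snapshot_name({'id': 'abc'}) -> 'cinder-clone-snapshot-abc'
    #   _is_clone_snapshot_name('pool/fs@cinder-clone-snapshot-abc') -> True
    #   _is_clone_snapshot_name('pool/fs@manual-snap') -> False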
def _update_volume_stats(self):
"""Retrieve stats info for NexentaStor appliance."""
LOG.debug('Updating volume stats')
share = ':/'.join([self.nef_host, self.share])
total, free, allocated = self._get_capacity_info(self.share)
total_space = utils.str2gib_size(total)
free_space = utils.str2gib_size(free)
location_info = '%(driver)s:%(share)s' % {
'driver': self.__class__.__name__,
'share': share
}
self._stats = {
'vendor_name': 'Nexenta',
'dedup': self.dataset_deduplication,
'compression': self.dataset_compression,
'description': self.dataset_description,
'nef_url': self.nef_host,
'driver_version': self.VERSION,
'storage_protocol': 'NFS',
'total_capacity_gb': total_space,
'free_capacity_gb': free_space,
'reserved_percentage': self.configuration.reserved_percentage,
'QoS_support': False,
'location_info': location_info,
'volume_backend_name': self.backend_name,
'nfs_mount_point_base': self.nfs_mount_point_base
}
|
|
import os
import logging
import threading
import base64
import pkgutil
import time
import re
import deluge.configmanager
import deluge.component as component
from deluge.common import is_url
from deluge.event import DelugeEvent
from deluge.core.rpcserver import export
from deluge.plugins.pluginbase import CorePluginBase
from twisted.internet.task import LoopingCall
# Line below is required to import tracker handler on fly.
import updatorr.tracker_handlers
from updatorr.utils import *
import sys
import traceback
log = logging.getLogger(__name__)
# Import tracker handlers on fly.
# It is an .egg-friendly alternative to os.listdir() walking.
for mloader, pname, ispkg in pkgutil.iter_modules(updatorr.tracker_handlers.__path__):
log.info('Updatorr Importing tracker handler file %s' % pname)
__import__('updatorr.tracker_handlers.%s' % pname)
# Default plugin preferences.
DEFAULT_PREFS = {
'last_walk': 0,
'walk_period': 24,
'trackers_settings': {},
    'torrents_to_update': []  # This could have been a set(), but sets do not serialize into the config.
}
# This regex is used to get hyperlink from torrent comment.
RE_LINK = re.compile(r'(?P<url>https?://[^\s]+)')
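# Illustration of how RE_LINK is applied below (hypothetical comment text):
#   RE_LINK.search('See http://tracker.example/t/123 for details').group('url')
#   -> 'http://tracker.example/t/123'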
class UpdatorrUpdateDoneEvent(DelugeEvent):
"""This event fires up when a torrent is updated."""
def __init__(self, torrent_id):
self._args = [torrent_id]
class UpdatorrErrorEvent(DelugeEvent):
"""This event fires up when an error occures in tracker handler."""
def __init__(self, torrent_id, error_text):
self._args = [torrent_id, error_text]
class UpdatorrUpdatesCheckStartedEvent(DelugeEvent):
"""This event fires up when torrent updates check is started."""
pass
class UpdatorrUpdatesCheckFinishedEvent(DelugeEvent):
"""This event fires up when torrent updates check is finished."""
pass
class Core(CorePluginBase):
walking = False
plugin_id = 'Updatorr'
def enable(self):
"""This one fires when plugin is enabled."""
self.plugin = component.get('CorePluginManager')
self.plugin.register_status_field(self.plugin_id, self.get_status_label)
self.core = component.get('Core')
self.torrents = self.core.torrentmanager.torrents
self.config = deluge.configmanager.ConfigManager('updatorr.conf', DEFAULT_PREFS)
self.torrents_to_update = self.config['torrents_to_update']
self.walk_period = self.config['walk_period']
self.last_walk = self.config['last_walk']
self.trackers_settings = self.config['trackers_settings']
self.update_trackers_settings()
self.filter_manager = component.get('FilterManager')
self.filter_manager.register_tree_field(self.plugin_id, self.get_filters_initial)
# We will check whether it's time to go for updates every 60 seconds.
self.walk_torrents_timer = LoopingCall(self.run_walker)
self.walk_torrents_timer.start(60)
def disable(self):
"""That one fires when plugin is disabled."""
self.walk_torrents_timer.stop()
self.filter_manager.deregister_tree_field(self.plugin_id)
self.plugin.deregister_status_field(self.plugin_id)
self.save_config()
def update(self):
"""This one fires every second while plugin is enabled."""
pass
UPDATE_STATES = {True: 'On', False: 'Off'}
def get_status_label(self, torrent_id):
"""This one is to update filter tree numbers.
It is called every time torrent status is changed."""
return self.UPDATE_STATES[self.check_is_to_update(torrent_id)]
def get_filters_initial(self):
"""That are initial filter tree values."""
return {'On': 0, 'Off': 0, 'All': len(self.torrents.keys())}
def update_trackers_settings(self):
"""Returns registered handlers dictionary."""
for domain, handler in get_registered_handlers().items():
if domain not in self.trackers_settings:
domain_dict = {
'login_required': handler.login_required,
'login': '',
'password': '',
'cookies': None
}
self.trackers_settings[domain] = domain_dict
else:
self.trackers_settings[domain].update({'login_required': handler.login_required})
@export
def get_status(self):
"""Returns tuple with Updatorr status data:
last walk time, walk period in hours, is currently walking."""
return self.last_walk, self.walk_period, self.walking
@export
def test_login(self, domain, login, password):
"""Launches login procedure for tracker domain.
        Returns True on success, otherwise False."""
        handler = get_tracker_handler({'comment': domain}, log)
        if handler is not None:
            handler.set_settings(self.trackers_settings.get(domain))
            return handler.login(login=login, password=password)
        return None
@export
def is_walking(self):
"""Returns boolean to identify whether update
proccess is on the run."""
return self.walking
@export
def run_walker(self, force=False):
"""Runs update process in a separate thread
if it is a hight time for it and it's not already started."""
if not force:
now = time.time()
next_walk = int(self.last_walk) + (int(self.walk_period) * 3600)
log.debug('Updatorr run walker: walking=%s; next_walk=%s; now=%s' % (self.walking, next_walk, now))
if self.walking:
return False
if next_walk > now:
return False
threading.Thread(target=self.walk, kwargs={'force': force}).start()
return True
def walk(self, force=False):
"""Implemets automatic torrent updates process.
Automatic update is available for torrents selected by user
and having tracker's page URL in torrent's `comment` field.
        Besides that, a tracker handler class should be
        associated with the domain from the URL mentioned above.
        If `force` is set to a list of torrent IDs, only those
        torrents will be checked for updates.
        If `force` is False, every torrent scheduled for updates
        by the user will be checked.
"""
        # To prevent possible concurrent runs.
self.walking = True
try:
log.info('Updatorr walking...')
component.get('EventManager').emit(UpdatorrUpdatesCheckStartedEvent())
allow_last_walk_update = False
if isinstance(force, list):
torrents_list = force
else:
torrents_list = self.torrents_to_update
for torrent_id in torrents_list:
try:
torrent_data = self.core.get_torrent_status(torrent_id, [])
log.info('Updatorr Processing %s ...' % torrent_data['name'])
except KeyError:
log.debug('Updatorr \tSKIPPED No torrent with id %s listed [yet]' % torrent_id)
continue
                # Extract the URL from the comment, dropping any other data.
                url_match = RE_LINK.search(torrent_data['comment'])
                if url_match is None or not is_url(url_match.group('url')):
                    log.info('Updatorr \tSKIPPED No URL found in torrent comment')
                    continue
                torrent_data['comment'] = url_match.group('url')
# From now on we consider that update took its place.
# If only this update is not forced.
if not force:
allow_last_walk_update = True
tracker_handler = get_tracker_handler(torrent_data, log)
if tracker_handler is None:
self.dump_error(torrent_id, 'Unable to find tracker handler for %s' % torrent_data['comment'])
continue
tracker_handler.set_settings(self.trackers_settings.get(tracker_handler.tracker_host))
new_torrent_filepath = tracker_handler.get_torrent_file()
if new_torrent_filepath is None:
self.dump_error(torrent_id, 'Error in tracker handling: %s' % tracker_handler.get_error_text())
continue
                # Let's store cookies from that tracker to enter without logins in future sessions.
self.trackers_settings[tracker_handler.tracker_host]['cookies'] = tracker_handler.get_cookies(as_dict=True)
new_torrent_contents = read_torrent_file(new_torrent_filepath)
new_torrent_info = read_torrent_info(new_torrent_contents)
if torrent_data['hash'] == new_torrent_info['hash']:
log.info('Updatorr \tSKIPPED Torrent is up-to-date')
continue
log.info('Updatorr \tTorrent update is available')
new_torrent_prefs = get_new_prefs(torrent_data, new_torrent_info)
added_torrent_id = self.core.add_torrent_file(None, base64.encodestring(new_torrent_contents), new_torrent_prefs)
if added_torrent_id is not None:
self.core.remove_torrent(torrent_id, False)
log.info('Updatorr \tTorrent is updated')
# Fire up update finished event.
component.get('EventManager').emit(UpdatorrUpdateDoneEvent(new_torrent_info['hash']))
# Add new torrent hash to continue autoupdates.
self.set_items_to_update(new_torrent_info['hash'], True)
# Remove old torrent from autoupdates list.
self.set_items_to_update(torrent_id, False)
else:
self.dump_error(torrent_id, 'Unable to replace current torrent with a new one')
# No littering, remove temporary .torrent file.
os.remove(new_torrent_filepath)
if allow_last_walk_update:
                # Remember the last run time.
self.last_walk = time.time()
log.info('Updatorr walk is finished')
component.get('EventManager').emit(UpdatorrUpdatesCheckFinishedEvent())
except:
log.error(traceback.format_exc())
finally:
self.walking = False
def dump_error(self, torrent_id, text):
"""Logs error and fires error event."""
log.info('Updatorr \tSKIPPED %s' % text)
component.get('EventManager').emit(UpdatorrErrorEvent(torrent_id, text))
@export
def set_items_to_update(self, torrent_id, do_update):
"""Adds or removes given torrent to the `torrents-to-update list`."""
if do_update:
if torrent_id not in self.torrents_to_update:
self.torrents_to_update.append(torrent_id)
elif torrent_id in self.torrents_to_update:
self.torrents_to_update.remove(torrent_id)
self.save_config()
@export
def check_is_to_update(self, torrent_id):
"""Checks whether given torrent is set to update. Returns boolean."""
return torrent_id in self.torrents_to_update
@export
def get_items_to_update(self):
"""Retunt a lis of to"""
return self.torrents_to_update
@export
def set_config(self, config=None):
"""Sets the config dictionary of torrent IDs to update."""
log.debug('Updatorr sets config')
if config is not None:
self.walk_period = config['walk_period']
self.trackers_settings = config['trackers_settings']
self.save_config()
@export
def get_config(self):
"""Returns the config dictionary"""
log.debug('Updatorr gets config')
return self.config.config
def save_config(self):
"""Dumps configuration file to file system ~/.config/deluge/updatorr.conf."""
# Going through every name to be sure...
self.update_trackers_settings()
self.config['walk_period'] = int(self.walk_period)
self.config['last_walk'] = int(self.last_walk)
self.config['torrents_to_update'] = self.torrents_to_update
self.config['trackers_settings'] = self.trackers_settings
self.config.save()
|
|
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
import six
from sahara.plugins.ambari import common
from sahara.plugins import provisioning
from sahara.plugins import utils
from sahara.swift import swift_helper
from sahara.utils import files
configs = {}
obj_configs = {}
cfg_process_map = {
"admin-properties": common.RANGER_SERVICE,
"ams-env": common.AMBARI_SERVICE,
"ams-hbase-env": common.AMBARI_SERVICE,
"ams-hbase-policy": common.AMBARI_SERVICE,
"ams-hbase-security-site": common.AMBARI_SERVICE,
"ams-hbase-site": common.AMBARI_SERVICE,
"ams-site": common.AMBARI_SERVICE,
"capacity-scheduler": common.YARN_SERVICE,
"cluster-env": "general",
"core-site": common.HDFS_SERVICE,
"falcon-env": common.FALCON_SERVICE,
"falcon-runtime.properties": common.FALCON_SERVICE,
"falcon-startup.properties": common.FALCON_SERVICE,
"flume-env": common.FLUME_SERVICE,
"gateway-site": common.KNOX_SERVICE,
"hadoop-env": common.HDFS_SERVICE,
"hadoop-policy": common.HDFS_SERVICE,
"hbase-env": common.HBASE_SERVICE,
"hbase-policy": common.HBASE_SERVICE,
"hbase-site": common.HBASE_SERVICE,
"hdfs-site": common.HDFS_SERVICE,
"hive-env": common.HIVE_SERVICE,
"hive-site": common.HIVE_SERVICE,
"hiveserver2-site": common.HIVE_SERVICE,
"kafka-broker": common.KAFKA_SERVICE,
"kafka-env": common.KAFKA_SERVICE,
"knox-env": common.KNOX_SERVICE,
"mapred-env": common.YARN_SERVICE,
"mapred-site": common.YARN_SERVICE,
"oozie-env": common.OOZIE_SERVICE,
"oozie-site": common.OOZIE_SERVICE,
"ranger-env": common.RANGER_SERVICE,
"ranger-hbase-plugin-properties": common.HBASE_SERVICE,
"ranger-hdfs-plugin-properties": common.HDFS_SERVICE,
"ranger-hive-plugin-properties": common.HIVE_SERVICE,
"ranger-knox-plugin-properties": common.KNOX_SERVICE,
"ranger-site": common.RANGER_SERVICE,
"ranger-storm-plugin-properties": common.STORM_SERVICE,
"spark-defaults": common.SPARK_SERVICE,
"spark-env": common.SPARK_SERVICE,
"sqoop-env": common.SQOOP_SERVICE,
"storm-env": common.STORM_SERVICE,
"storm-site": common.STORM_SERVICE,
"tez-site": common.OOZIE_SERVICE,
"usersync-properties": common.RANGER_SERVICE,
"yarn-env": common.YARN_SERVICE,
"yarn-site": common.YARN_SERVICE,
"zoo.cfg": common.ZOOKEEPER_SERVICE,
"zookeeper-env": common.ZOOKEEPER_SERVICE
}
ng_confs = [
"dfs.datanode.data.dir",
"dtnode_heapsize",
"mapreduce.map.java.opts",
"mapreduce.map.memory.mb",
"mapreduce.reduce.java.opts",
"mapreduce.reduce.memory.mb",
"mapreduce.task.io.sort.mb",
"nodemanager_heapsize",
"yarn.app.mapreduce.am.command-opts",
"yarn.app.mapreduce.am.resource.mb",
"yarn.nodemanager.resource.cpu-vcores",
"yarn.nodemanager.resource.memory-mb",
"yarn.scheduler.maximum-allocation-mb",
"yarn.scheduler.minimum-allocation-mb"
]
hdp_repo_cfg = provisioning.Config(
"HDP repo URL", "general", "cluster", priority=1, default_value="")
hdp_utils_repo_cfg = provisioning.Config(
"HDP-UTILS repo URL", "general", "cluster", priority=1, default_value="")
def _get_service_name(service):
return cfg_process_map.get(service, service)
def _get_config_group(group, param, plugin_version):
for section, process in six.iteritems(cfg_process_map):
if process == group and param in configs[plugin_version][section]:
return section
def _get_param_scope(param):
if param in ng_confs:
return "node"
else:
return "cluster"
def load_configs(version):
if obj_configs.get(version):
return obj_configs[version]
cfg_path = "plugins/ambari/resources/configs-%s.json" % version
vanilla_cfg = jsonutils.loads(files.get_file_text(cfg_path))
configs[version] = vanilla_cfg
sahara_cfg = [hdp_repo_cfg, hdp_utils_repo_cfg]
for service, confs in vanilla_cfg.items():
for k, v in confs.items():
sahara_cfg.append(provisioning.Config(
k, _get_service_name(service), _get_param_scope(k),
default_value=v))
obj_configs[version] = sahara_cfg
return sahara_cfg
def _get_config_value(cluster, key):
return cluster.cluster_configs.get("general", {}).get(key.name,
key.default_value)
def get_hdp_repo_url(cluster):
return _get_config_value(cluster, hdp_repo_cfg)
def get_hdp_utils_repo_url(cluster):
return _get_config_value(cluster, hdp_utils_repo_cfg)
def _serialize_ambari_configs(configs):
return list(map(lambda x: {x: configs[x]}, configs))
def _create_ambari_configs(sahara_configs, plugin_version):
configs = {}
for service, params in six.iteritems(sahara_configs):
for k, v in six.iteritems(params):
group = _get_config_group(service, k, plugin_version)
configs.setdefault(group, {})
configs[group].update({k: v})
return configs
def _make_paths(dirs, suffix):
return ",".join([d + suffix for d in dirs])
def get_ng_params(node_group):
configs = _create_ambari_configs(node_group.node_configs,
node_group.cluster.hadoop_version)
storage_paths = node_group.storage_paths()
configs.setdefault("hdfs-site", {})
configs["hdfs-site"]["dfs.datanode.data.dir"] = _make_paths(
storage_paths, "/hdfs/data")
configs["hdfs-site"]["dfs.journalnode.edits.dir"] = _make_paths(
storage_paths, "/hdfs/journalnode")
configs["hdfs-site"]["dfs.namenode.checkpoint.dir"] = _make_paths(
storage_paths, "/hdfs/namesecondary")
configs["hdfs-site"]["dfs.namenode.name.dir"] = _make_paths(
storage_paths, "/hdfs/namenode")
configs.setdefault("yarn-site", {})
configs["yarn-site"]["yarn.nodemanager.local-dirs"] = _make_paths(
storage_paths, "/yarn/local")
configs["yarn-site"]["yarn.nodemanager.log-dirs"] = _make_paths(
storage_paths, "/yarn/log")
configs["yarn-site"][
"yarn.timeline-service.leveldb-timeline-store.path"] = _make_paths(
storage_paths, "/yarn/timeline")
return _serialize_ambari_configs(configs)
def get_cluster_params(cluster):
configs = _create_ambari_configs(cluster.cluster_configs,
cluster.hadoop_version)
swift_configs = {x["name"]: x["value"]
for x in swift_helper.get_swift_configs()}
configs.setdefault("core-site", {})
configs["core-site"].update(swift_configs)
if utils.get_instance(cluster, common.RANGER_ADMIN):
configs.setdefault("admin-properties", {})
configs["admin-properties"]["db_root_password"] = (
cluster.extra["ranger_db_password"])
return _serialize_ambari_configs(configs)
|
|
"""
Game user interface.
"""
import urwid
from arthur.util import MultiDeferred
from zope import interface
DEFAULT_PALETTE = (
('header', 'black', 'dark green'),
('foreground', 'dark green', 'black'),
('background', 'dark gray', 'black'),
('alert', 'yellow', 'dark red')
)
BACKGROUND = urwid.AttrMap(urwid.SolidFill(u"\N{LIGHT SHADE}"), "background")
DIVIDER = urwid.Divider(u'\N{UPPER ONE EIGHTH BLOCK}')
class Workbench(object):
"""
A workbench, consisting of a top status bar and a background.
"""
def __init__(self):
self.header = Header()
self.widget = urwid.Frame(header=self.header.widget, body=BACKGROUND)
self._tools = []
def display(self, tool):
"""Displays the given tool above the current layer, and sets the
title to its name.
"""
self._tools.append(tool)
self._justDisplay(tool)
def _justDisplay(self, tool):
"""
Displays the given tool. Does not register it in the tools list.
"""
self.header.title.set_text(tool.name)
body, _options = self.widget.contents["body"]
overlay = urwid.Overlay(tool.widget, body, *tool.position)
self._surface = urwid.AttrMap(overlay, "foreground")
self.widget.contents["body"] = self._surface, None
def undisplay(self):
"""Undisplays the top tool.
This actually forces a complete re-render.
"""
self._tools.pop()
self._justClear()
for tool in self._tools:
self._justDisplay(tool)
def clear(self):
"""
Clears the workbench completely.
"""
self._tools = []
self._justClear()
def _justClear(self):
self.header.title.set_text(u"")
self.widget.contents["body"] = BACKGROUND, None
class Header(object):
"""
A header. Contains a title and an aside.
"""
def __init__(self):
self.title = urwid.Text(u"", align="left")
self.aside = urwid.Text(u"Press C-w to quit", align="right")
columns = urwid.Columns([self.title, self.aside])
self.widget = urwid.AttrMap(columns, "header")
def _unhandledInput(event, workbench, launcher):
"""Handles input events that weren't handled anywhere else.
"""
if event == "ctrl w":
raise urwid.ExitMainLoop()
elif event == "esc":
workbench.clear()
workbench.display(launcher)
return True
class ITool(interface.Interface):
"""
A tool, displayable by a workbench.
"""
name = interface.Attribute(
"""
The name of the tool, which will be used in the title.
""")
widget = interface.Attribute(
"""
The widget that will be displayed on the workbench.
""")
position = interface.Attribute(
"""
The position of the tool's widget on the workbench.
""")
@interface.implementer(ITool)
class Launcher(object):
"""The launcher.
The launcher is a tool that launches other tools. Since it has to
display other tools, it has a reference to the workbench.
"""
name = u"Launcher"
position = "center", 30, "middle", 10
def __init__(self, workbench, tools):
self.workbench = workbench
body = [urwid.Text(u"Select a tool to launch"), DIVIDER]
for tool in tools:
button = urwid.Button(tool.name)
urwid.connect_signal(button, 'click', self._launch, tool)
body.append(urwid.AttrMap(button, "foreground", focus_map="header"))
self.menu = urwid.ListBox(urwid.SimpleFocusListWalker(body))
self.widget = urwid.LineBox(self.menu)
def _launch(self, _button, tool):
"""Button callback to launch a tool.
Tells the workbench to display the given tool.
"""
self.workbench.display(tool)
@interface.implementer(ITool)
class _PopUp(object):
"""
A generic pop-up.
"""
position = "center", 50, "middle", 7
def __init__(self, name):
self.name = name
widgets = [urwid.Text(name), DIVIDER] + self._makeExtraWidgets()
self.listBox = urwid.ListBox(urwid.SimpleListWalker(widgets))
self.widget = urwid.LineBox(self.listBox)
def _makeExtraWidgets(self):
return []
class _Splash(_PopUp):
"""
A splash screen: like a notification, except you can't dismiss it.
"""
def __init__(self, name, text):
self.text = text
_PopUp.__init__(self, name)
def _makeExtraWidgets(self):
"""Makes a text widget.
"""
self.textWidget = urwid.Text(self.text)
return [self.textWidget]
class _ButtonPopUp(_PopUp):
"""A pop up with one or more buttons, and support for notification
when they've been clicked.
"""
def __init__(self, name):
_PopUp.__init__(self, name)
self._result = MultiDeferred()
def _makeExtraWidgets(self):
"""Makes the extra widgets.
This defers to the ``_make(TextWidgets|Buttons)`` methods, so
they can be overridden separately.
"""
return self._makeTextWidgets() + self._makeButtons()
def _makeTextWidgets(self):
"""Makes (optional) text widgets.
Override this in a subclass.
"""
return []
def _makeButtons(self):
"""Makes buttons and wires them up.
"""
self.button = button = urwid.Button(u"OK")
urwid.connect_signal(button, "click", self._completed)
return [self.button]
def notifyCompleted(self):
"""Request to be notified when this prompt is completed.
"""
return self._result.tee()
def _completed(self, _button=None):
"""Call the completion deferreds that have been handed out.
"""
self._result.callback(None)
class _Notification(_ButtonPopUp):
"""A generic notification, which can be clicked away.
"""
def __init__(self, name, text):
self.text = text
_ButtonPopUp.__init__(self, name)
def _makeTextWidgets(self):
"""Makes a text widget.
"""
self.textWidget = urwid.Text(self.text)
return [self.textWidget]
def notify(workbench, name, text):
"""Runs a notification.
"""
return _runPopUp(workbench, _Notification(name, text))
class _Alert(_Notification):
"""A notification in a scary-looking color.
"""
def __init__(self, *args, **kwargs):
_Notification.__init__(self, *args, **kwargs)
self.originalWidget = self.widget
self.widget = urwid.AttrMap(self.originalWidget, "alert")
def alert(workbench, name, text):
"""Runs an alert.
"""
return _runPopUp(workbench, _Alert(name, text))
class _Prompt(_ButtonPopUp):
"""
A generic prompt for a single string value.
"""
position = "center", 40, "middle", 6
def __init__(self, name, promptText):
self.promptText = promptText
_ButtonPopUp.__init__(self, name)
def _makeTextWidgets(self):
"""Makes an editable prompt widget.
"""
self.prompt = urwid.Edit(self.promptText, multiline=False)
return [self.prompt]
def _completed(self, _button=None):
"""The prompt was completed. Fire all waiting deferreds with the
prompt's edit text.
"""
self._result.callback(self.prompt.edit_text)
def prompt(workbench, name, promptText):
"""Runs a prompt.
"""
return _runPopUp(workbench, _Prompt(name, promptText))
def _runPopUp(workbench, popUp):
"""Displays the pop-up on the workbench and gets a completion
notification deferred. When that fires, undisplay the pop-up and
return the result of the notification deferred verbatim.
"""
workbench.display(popUp)
d = popUp.notifyCompleted()
d.addCallback(_popUpCompleted, workbench)
return d
def _popUpCompleted(result, workbench):
"""The popUp was completed; undisplay it and return the result.
"""
workbench.undisplay()
return result
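# Minimal wiring sketch (not part of the original module; assumptions noted):
# build a Workbench, show the Launcher, and run an urwid main loop that routes
# unhandled keys through _unhandledInput. The empty tool list is hypothetical;
# real tools must provide the ITool attributes (name, widget, position).
if __name__ == "__main__":
    workbench = Workbench()
    launcher = Launcher(workbench, tools=[])
    workbench.display(launcher)
    loop = urwid.MainLoop(
        workbench.widget, palette=DEFAULT_PALETTE,
        unhandled_input=lambda key: _unhandledInput(key, workbench, launcher))
    loop.run()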
|
|
from __future__ import generators, print_function
import numpy as np
from random import shuffle
from scipy.io import loadmat
from copy import deepcopy
import functools
import Queue
#from multiprocessing import Process, Queue, Manager, Pool
import threading
import time
from collections import defaultdict
def async_prefetch_wrapper(iterable, buffer=100):
"""
wraps an iterator so that it produces items in the background;
uses a bounded queue to limit memory consumption
"""
done = 'DONE'  # sentinel that marks the end of the stream
def worker(q, it):
for item in it:
q.put(item)
q.put(done)
# launch a thread to fetch the items in the background
queue = Queue.Queue(buffer)
#pool = Pool()
#m = Manager()
#queue = m.Queue()
it = iter(iterable)
#workers = pool.apply_async(worker, (queue, it))
thread = threading.Thread(target=worker, args=(queue, it))
#thread = Process(target=worker, args=(queue, it))
thread.daemon = True
thread.start()
# pull the items of the queue as requested
while True:
item = queue.get()
if item == done:
return
else:
yield item
#pool.close()
#pool.join()
def async_prefetch(func):
"""
decorator to make generator functions fetch items in the background
"""
@functools.wraps(func)
def wrapper(*args, **kwds):
return async_prefetch_wrapper(func(*args, **kwds))
return wrapper
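# Usage sketch (illustrative only): any generator function wrapped with
# @async_prefetch has its items produced by a background thread and handed
# over through the bounded queue above.
@async_prefetch
def _example_counter(n):
    for i in range(n):
        yield i
# consuming it, e.g. `for value in _example_counter(10): ...`, pulls
# already-prefetched items from the queue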
class DataSet(object):
def __init__(self, cfg):
"""Construct a DataSet.
"""
self.cfg = cfg
self.adj = self.get_adj(cfg.adj_dir)
#self.all_walks, self.node_seq = self.get_walks(cfg.walks_dir) # reverse the sequence
#self.node_seq = self.all_walks[:, -1] # index by ending node
self.all_labels = self.get_labels(cfg.label_dir)
self.all_features = self.get_features(cfg.features_dir)
#Increment the positions by 1 and mark the 0th one as False
self.train_nodes = np.concatenate(([False], np.load(cfg.label_fold_dir + 'train_ids.npy')))
self.val_nodes = np.concatenate(([False], np.load(cfg.label_fold_dir + 'val_ids.npy')))
self.test_nodes = np.concatenate(([False], np.load(cfg.label_fold_dir + 'test_ids.npy')))
# [IMP]Assert no overlap between test/val/train nodes
self.change = 0
self.label_cache, self.update_cache = {0:self.all_labels[0]}, {}
self.wce = self.get_wce()
def get_adj(self, path):
adj = loadmat(path)['adjmat'].toarray()
# Add dummy '0' nodes
temp = np.zeros((adj.shape[0] + 1, adj.shape[0] + 1), dtype=int)
temp[1:, 1:] = adj
#print('adj: ', np.sum(temp, 0), '\n', np.shape(adj), np.shape(temp))
return temp
def get_walks(self, path):
#Reverse sequences and padding in beginning
#return np.fliplr(np.loadtxt(path, dtype=np.int))
walks = np.fliplr(np.loadtxt(path, dtype=np.int)) # reverse the sequence
seq = deepcopy(walks[:,-1])
#rotate around the sequences, such that ends are padded with zeros
for i in range(np.shape(walks)[0]):
non_zeros = np.sum(walks[i] > 0)
walks[i] = np.roll(walks[i], non_zeros)
return walks, seq
def get_wce(self):
if self.cfg.solver.wce:
valid = self.train_nodes #+ self.val_nodes
tot = np.dot(valid, self.all_labels)
wce = 1/(len(tot) * (tot*1.0/np.sum(tot)))
else:
wce = [1]*self.all_labels.shape[1]
print("Cross-Entropy weights: ",wce)
return wce
def get_features(self, path):
# Serves 2 purpose:
# a) add feature for dummy node 0 a.k.a <EOS> and <unlabeled>
# b) increments index of all features by 1, thus aligning it with indices in walks
all_features = np.load(path)
all_features = all_features.astype(np.float32, copy=False) # Required conversion for Python3
all_features = np.concatenate(([np.zeros(all_features.shape[1])], all_features), 0)
if self.cfg.data_sets.add_degree:
all_features = np.concatenate((all_features, np.sum(self.adj, axis=0, keepdims=True).T), 1)
return all_features
def get_labels(self, path):
# Labels start with node '0'; Walks_data with node '1'
# To get corresponding mapping, increment the label node number by 1
# add label for dummy node 0 a.k.a <EOS> and <unlabeled>
all_labels = np.load(path)
all_labels = np.concatenate(([np.zeros(all_labels.shape[1])], all_labels), 0)
return all_labels
def get_update_cache(self):
updated = {}
for k,v in self.update_cache.items():
updated[k] = v[0]/v[1]
return updated
def accumulate_label_cache(self, labels, nodes):
#Aggregates all the labels for the corresponding nodes
#and tracks the count of updates made
default = (self.all_labels[0], 0) #Initial estimate -> all_zeros
if self.cfg.data_sets.binary_label_updates:
#Convert to binary and keep only the maximum value as 1
amax = np.argmax(labels, axis = 1)
labels = np.zeros(labels.shape)
for idx, pos in enumerate(amax):
labels[idx,pos] = 1
for idx, node in enumerate(nodes):
prv_label, prv_count = self.update_cache.get(node, default)
new_label = prv_label + labels[idx]
new_count = prv_count + 1
self.update_cache[node] = (new_label, new_count)
def update_label_cache(self):
#Average all the predictions made for the corresponding nodes and reset cache
alpha = self.cfg.solver.label_update_rate
if len(self.label_cache) <= 1: alpha = 1
for k, v in self.update_cache.items():
old = self.label_cache.get(k, self.label_cache[0])
new = (1-alpha)*old + alpha*(v[0]/v[1])
self.change += np.mean((new - old) **2)
self.label_cache[k] = new
print("\nChange in label: :", np.sqrt(self.change/self.cfg.data_sets._len_vocab)*100)
self.change = 0
self.update_cache = {}
def get_nodes(self, dataset):
nodes = []
if dataset == 'train':
nodes = self.train_nodes
elif dataset == 'val':
nodes = self.val_nodes
elif dataset == 'test':
nodes = self.test_nodes
elif dataset == 'all':
# Get all the nodes except the 0th node
nodes = [True]*len(self.train_nodes)
nodes[0] = False
else:
raise ValueError
return nodes
#@async_prefetch
def next_batch(self, dataset, batch_size, shuffle=True):
nodes = np.where(self.get_nodes(dataset))[0]
# Divide the nodes into buckets based on their number of neighbors
buckets = 3
tot_neigh = np.sum(self.adj[nodes], 1) #get neighbors of the nodes and compute individual sums
count = zip(nodes, tot_neigh) #zip nodes with their neighbor count
count = sorted(count, key = lambda item:item[1])
count = np.array(count)
buck_size = len(nodes)//buckets
if len(nodes)%buck_size != 0:
buckets += 1
grouped = {}
for i in range(buckets):
extra = max(0, (i+1)*buck_size - len(nodes)) #Increase the size of the last bucket to accommodate left-over nodes
temp = count[i*buck_size: (i+1)*buck_size + extra]
maxi = np.max(temp[:,1])
grouped[i] = [temp, maxi] #format -> ([..[node, neighbor_count]..], max_neighbor_count)
if shuffle:
for k,v in grouped.items():
indices = np.random.permutation(len(v[0]))
grouped[k] = [v[0][indices], v[1]]
tot = buck_size*buckets/batch_size #Total number of batches
for vertices, maxi in grouped.values():
#print("Vertices; ",vertices)
maxi += 1 #number of neighbors + itself
for idx in range(0, len(vertices), batch_size):
lengths = []
#additional dummy entries in the batch to make batch size constant
dummy = max(0, (idx + batch_size) -len(vertices))
mask = [1]*batch_size
if dummy: mask[-dummy:] = [0]*dummy
#print("mask: ",mask, dummy)
seq = vertices[idx: idx + batch_size - dummy, 0]
seq = np.concatenate((seq, [0]*dummy)).astype(int)
x = []
for n in seq:
x_ = np.where(self.adj[n])[0]
np.random.shuffle(x_) #shuffle neighbor nodes
x_ = list(x_)
x_.append(n) #append itself to the set of neighbors
lengths.append(len(x_))
pad = maxi - len(x_) #padding for each sequence
x_.extend([0]*pad)
#print(list(np.where(self.adj[n])[0]), x_)
x.append(x_)
#print("Shape for this batch: ",np.shape(x))
x = np.swapaxes(x, 0, 1) #convert from (batch x step) to (step x batch)
x_labels = [[self.label_cache.get(item, self.all_labels[0]) for item in row] for row in x]
x_feats = [[self.all_features[item] for item in row] for row in x]
y = [self.all_labels[item] for item in seq]
yield (x_feats, x_labels, seq, y, tot, lengths, mask)
"""
@async_prefetch
def next_batch(self, dataset, batch_size, shuffle=True):
nodes = self.get_nodes(dataset)
label_len = np.shape(self.all_labels)[1]
max_len = self.all_walks.shape[1]
# Get position of all walks ending with desired set of nodes
pos = []
seq = []
for node in np.where(nodes)[0]:
temp = np.where(self.node_seq == node)[0]
pos.extend(temp)
seq.extend([node]*len(temp))
pos = np.array(pos)
seq = np.array(seq)
if shuffle:
indices = np.random.permutation(len(pos))
pos = pos[indices]
seq = seq[indices]
if batch_size == -1:
batch_size = len(pos)
tot = len(pos)//batch_size
for i in range(0, len(pos), batch_size):
x = self.all_walks[pos[i: i + batch_size]]
#get number of nodes per path
lengths = np.sum(np.array(x)>0, axis=1)
x = np.swapaxes(x, 0, 1) # convert from (batch x step) to (step x batch)
# get labels for valid data points, for others: select the 0th label
x2 = [[self.label_cache.get(item, self.all_labels[0]) for item in row] for row in x]
#y = [self.all_labels[item] for item in x[-1]]
y = [self.all_labels[item] for item in seq[i: i+batch_size]]
# get features for all data points
x1 = [[self.all_features[item] for item in row] for row in x]
#print(x,y, lengths, seq[i: i + batch_size])
yield (x1, x2, seq[i: i + batch_size], y, tot, lengths)
"""
@async_prefetch
def next_batch_same(self, dataset, node_count=1):
nodes = self.get_nodes(dataset)
pos = []
counts = []
seq = []
for node in np.where(nodes)[0]:
temp = np.where(self.node_seq == node)[0]
counts.append(len(temp))
seq.append(node)
pos.extend(temp)
pos = np.array(pos)
start = 0
max_len = self.all_walks.shape[1]
# Get a batch of all walks for 'node_count' number of node
for idx in range(0, len(counts), node_count):
#print(idx)
stop = start + np.sum(counts[idx:idx+node_count]) #start + total number of walks to be considered this time
x = self.all_walks[pos[start:stop]] #get the walks corresponding to respective positions
temp = np.array(x)>0 #get locations of all zero inputs
lengths = np.sum(temp, axis=1)
x = np.swapaxes(x, 0, 1) # convert from (batch x step) to (step x batch)
#"""
#original
# get labels for valid data points, for others: select the 0th label
x2 = [[self.label_cache.get(item, self.all_labels[0]) for item in row] for row in x]
y = [self.all_labels[item] for item in x[-1,:]] #Not useful, only present for the sake of the placeholder
# get features for all data points
x1 = [[self.all_features[item] for item in row] for row in x]
#"""
"""
#Unique based
u, inv = np.unique(x, return_inverse=True)
u2, inv2 = np.unique(x[-1:], return_inverse=True)
x2 = np.array([self.label_cache.get(item, self.all_labels[0]) for item in u])[inv]#.reshape(x.shape)
x1 = np.array([self.all_features[item] for item in u])[inv]#.reshape(x.shape)
y = np.array([self.all_labels[item] for item in u2])[inv2]
"""
"""
# Vectorized
# get labels for valid data points, for others: select the 0th label
x2 = np.vectorize(self.label_cache.get)(x)
x1 = np.vectorize(self.all_features.__getitem__)(x)
y = np.vectorize(self.all_labels.__getitem__)(x[-1:])
"""
start = stop
yield (x, x1, x2, seq[idx:idx+node_count], counts[idx:idx+node_count], y, lengths)
def testPerformance(self):
start = time.time()
step =0
for a,b,c,d,e,f,g in self.next_batch_same('all'):
step += 1
if step%500 == 0: print(step)
print ('total time: ', time.time()-start)
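# Standalone sketch (hypothetical numbers, not part of the class): the
# exponential update applied per node in update_label_cache() above,
# new = (1 - alpha) * old + alpha * (accumulated / count).
if __name__ == '__main__':
    alpha = 0.5
    old = np.array([0.2, 0.8])
    accumulated, count = np.array([1.2, 0.8]), 2  # as stored in update_cache
    print((1 - alpha) * old + alpha * (accumulated / count))  # -> [0.4 0.6]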
|
|
import textwrap
from .core import ParseException, TreeNode, read_line, read_whitespace, peek_indentation
from .elements import read_element
from .filters import get_filter
XHTML_DOCTYPES = {
'1.1': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">', # noqa
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', # noqa
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">', # noqa
'mobile': '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">', # noqa
'rdfa': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML+RDFa 1.0//EN" "http://www.w3.org/MarkUp/DTD/xhtml-rdfa-1.dtd">', # noqa
'basic': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">', # noqa
'': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">', # noqa
}
HTML4_DOCTYPES = {
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">',
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Frameset//EN" "http://www.w3.org/TR/html4/frameset.dtd">',
'': '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">'
}
DOCTYPE_PREFIX = '!!!'
ELEMENT_PREFIXES = ('%', '#', '.')
HTML_COMMENT_PREFIX = '/'
CONDITIONAL_COMMENT_PREFIX = '/['
HAML_COMMENT_PREFIX = '-#'
VARIABLE_PREFIX = '='
TAG_PREFIX = '-'
FILTER_PREFIX = ':'
HAML_ESCAPE = '\\'
def read_node(stream, prev, compiler):
"""
Reads a node, returning either the node or None if we've reached the end of the input
"""
while True:
indent = read_whitespace(stream)
if stream.ptr >= stream.length:
return None
# convert indent to be all of the first character
if indent:
indent = indent[0] * len(indent)
# empty lines are recorded as newlines on previous node
if stream.text[stream.ptr] == '\n':
if prev:
prev.newlines += 1
stream.ptr += 1
continue
# parse filter node
if stream.text[stream.ptr] == FILTER_PREFIX:
return read_filter_node(stream, indent, compiler)
# peek ahead so we don't try to parse an element from a variable node starting #{ or a Django tag ending %}
if stream.text[stream.ptr] in ELEMENT_PREFIXES and stream.text[stream.ptr:stream.ptr+2] not in ('#{', '%}'):
element = read_element(stream, compiler)
return ElementNode(element, indent, compiler)
# all other nodes are single line
line = read_line(stream)
inline_var_regex, escaped_var_regex = compiler.inline_variable_regexes
if inline_var_regex.match(line) or escaped_var_regex.match(line):
return PlaintextNode(line, indent, compiler)
if line[0] == HAML_ESCAPE:
return PlaintextNode(line, indent, compiler)
if line.startswith(DOCTYPE_PREFIX):
return DoctypeNode(line, indent, compiler)
if line.startswith(CONDITIONAL_COMMENT_PREFIX):
return ConditionalCommentNode(line, indent, compiler)
if line[0] == HTML_COMMENT_PREFIX:
return CommentNode(line, indent, compiler)
if line.startswith(HAML_COMMENT_PREFIX):
return HamlCommentNode(line, indent, compiler)
if line[0] == VARIABLE_PREFIX:
return VariableNode(line, indent, compiler)
if line[0] == TAG_PREFIX:
return TagNode(line, indent, compiler)
return PlaintextNode(line, indent, compiler)
def read_filter_node(stream, indent, compiler):
"""
Reads a filter node including its indented content, e.g. :plain
"""
assert stream.text[stream.ptr] == FILTER_PREFIX
stream.ptr += 1 # consume the initial colon
name = read_line(stream)
content_lines = []
# read lines below with higher indentation as this filter's content
while stream.ptr < stream.length:
line_indentation = peek_indentation(stream)
if line_indentation is not None and line_indentation <= len(indent):
break
line = read_line(stream)
# don't preserve whitespace on empty lines
if line.isspace():
line = ''
content_lines.append(line)
return FilterNode(name.rstrip(), '\n'.join(content_lines), indent, compiler)
class Node(TreeNode):
"""
Base class of all nodes
"""
def __init__(self, indent, compiler):
super(Node, self).__init__()
if indent is not None:
self.indent = indent
self.indentation = len(indent)
else:
self.indent = None
self.indentation = -1
self.compiler = compiler
self.newlines = 0 # number of empty lines to render after node
self.before = '' # rendered text at start of node, e.g. "<p>\n"
self.after = '' # rendered text at end of node, e.g. "\n</p>"
@classmethod
def create_root(cls, compiler):
return cls(None, compiler)
def render(self):
# Render (sets self.before and self.after)
self._render_children()
# Post-render (nodes can modify the rendered text of other nodes)
self._post_render()
# Generate HTML
return self._generate_html()
def render_newlines(self):
return '\n' * (self.newlines + 1)
def _render_children(self):
for child in self.children:
child._render()
def _post_render(self):
for child in self.children:
child._post_render()
def _generate_html(self):
output = [self.before]
for child in self.children:
output.append(child.before)
output += [gc._generate_html() for gc in child.children]
output.append(child.after)
output.append(self.after)
return ''.join(output)
def replace_inline_variables(self, content):
inline_var_regex, escaped_var_regex = self.compiler.inline_variable_regexes
content = inline_var_regex.sub(r'{{ \2 }}', content)
content = escaped_var_regex.sub(r'\1', content)
return content
def add_node(self, node):
if self._should_go_inside_last_node(node):
self.children[-1].add_node(node)
else:
self.add_child(node)
def _should_go_inside_last_node(self, node):
return len(self.children) > 0 \
and (node.indentation > self.children[-1].indentation
or (node.indentation == self.children[-1].indentation and self.children[-1].should_contain(node)))
def should_contain(self, node):
return False
def debug_tree(self): # pragma: no cover
return '\n'.join(self._debug_tree([self]))
def _debug_tree(self, nodes): # pragma: no cover
output = []
for n in nodes:
output.append('%s%s' % (' ' * (n.indentation + 2), n))
if n.children:
output += self._debug_tree(n.children)
return output
def __repr__(self): # pragma: no cover
return '%s' % type(self).__name__
class LineNode(Node):
"""
Base class of nodes which are a single line of Haml
"""
def __init__(self, line, indent, compiler):
super(LineNode, self).__init__(indent, compiler)
self.haml = line.rstrip()
def __repr__(self): # pragma: no cover
return '%s(indent=%d, newlines=%d): %s' % (type(self).__name__, self.indentation, self.newlines, self.haml)
class PlaintextNode(LineNode):
"""
Node that is not modified or processed when rendering
"""
def _render(self):
text = self.replace_inline_variables(self.haml)
# remove escape character
if text and text[0] == HAML_ESCAPE:
text = text.replace(HAML_ESCAPE, '', 1)
self.before = '%s%s' % (self.indent, text)
if self.children:
self.before += self.render_newlines()
else:
self.after = self.render_newlines()
self._render_children()
class ElementNode(Node):
"""
An HTML tag node, e.g. %span
"""
def __init__(self, element, indent, compiler):
super(ElementNode, self).__init__(indent, compiler)
self.element = element
def _render(self):
self.before = self._render_before(self.element)
self.after = self._render_after(self.element)
self._render_children()
def _render_before(self, element):
"""
Render opening tag and inline content
"""
start = ["%s<%s" % (self.indent, element.tag)]
attributes = element.render_attributes(self.compiler.options)
if attributes:
start.append(' ' + self.replace_inline_variables(attributes))
content = self._render_inline_content(self.element.inline_content)
if element.nuke_inner_whitespace and content:
content = content.strip()
if element.self_close and not content:
start.append(">" if self.compiler.options.html else " />")
elif content:
start.append(">%s" % content)
elif self.children:
start.append(">%s" % (self.render_newlines()))
else:
start.append(">")
return ''.join(start)
def _render_after(self, element):
"""
Render closing tag
"""
if element.inline_content:
return "</%s>%s" % (element.tag, self.render_newlines())
elif element.self_close:
return self.render_newlines()
elif self.children:
return "%s</%s>\n" % (self.indent, element.tag)
else:
return "</%s>\n" % element.tag
def _post_render(self):
# inner whitespace removal
if self.element.nuke_inner_whitespace:
self.before = self.before.rstrip()
self.after = self.after.lstrip()
if self.children:
node = self
if node.children:
node.children[0].before = node.children[0].before.lstrip()
if node.children:
node.children[-1].after = node.children[-1].after.rstrip()
# outer whitespace removal
if self.element.nuke_outer_whitespace:
left_sibling = self.left_sibling()
if left_sibling:
# If node has left sibling, strip whitespace after left sibling
left_sibling.after = left_sibling.after.rstrip()
left_sibling.newlines = 0
else:
# If not, whitespace comes from its parent node,
# so strip whitespace before the node
self.parent.before = self.parent.before.rstrip()
self.parent.newlines = 0
self.before = self.before.lstrip()
self.after = self.after.rstrip()
right_sibling = self.right_sibling()
if right_sibling:
right_sibling.before = right_sibling.before.lstrip()
else:
self.parent.after = self.parent.after.lstrip()
self.parent.newlines = 0
super(ElementNode, self)._post_render()
def _render_inline_content(self, inline_content):
if inline_content is None or len(inline_content) == 0:
return None
if self.element.django_variable:
content = "{{ " + inline_content.strip() + " }}"
return content
else:
return self.replace_inline_variables(inline_content)
class CommentNode(LineNode):
"""
An HTML comment node, e.g. / This is a comment
"""
def _render(self):
self.after = "-->\n"
if self.children:
self.before = "<!-- %s" % (self.render_newlines())
self._render_children()
else:
self.before = "<!-- %s " % (self.haml.lstrip(HTML_COMMENT_PREFIX).strip())
class ConditionalCommentNode(LineNode):
"""
An HTML conditional comment node, e.g. /[if IE]
"""
def _render(self):
conditional = self.haml[1: self.haml.index(']') + 1]
if self.children:
self.before = "<!--%s>\n" % conditional
else:
content = self.haml[len(CONDITIONAL_COMMENT_PREFIX) + len(conditional) - 1:]
self.before = "<!--%s>%s" % (conditional, content)
self.after = "<![endif]-->\n"
self._render_children()
class DoctypeNode(LineNode):
"""
An XML doctype node, e.g. !!! 5
"""
def _render(self):
doctype = self.haml.lstrip(DOCTYPE_PREFIX).strip().lower()
self.before = self.get_header(doctype, self.compiler.options)
self.after = self.render_newlines()
def get_header(self, doctype, options):
if doctype.startswith('xml'):
if options.html:
return ''
parts = doctype.split()
encoding = parts[1] if len(parts) > 1 else 'utf-8'
return "<?xml version=%s1.0%s encoding=%s%s%s ?>" % (
options.attr_wrapper, options.attr_wrapper,
options.attr_wrapper, encoding, options.attr_wrapper,
)
elif options.html5:
return '<!DOCTYPE html>'
elif options.xhtml:
if doctype == "5":
return '<!DOCTYPE html>'
else:
return XHTML_DOCTYPES.get(doctype, XHTML_DOCTYPES[''])
else:
return HTML4_DOCTYPES.get(doctype, HTML4_DOCTYPES[''])
class HamlCommentNode(LineNode):
"""
A Haml comment node, e.g. -# This is a comment
"""
def _render(self):
self.after = self.render_newlines()[1:]
def _post_render(self):
pass
class VariableNode(LineNode):
"""
A Django variable node, e.g. =person.name
"""
def __init__(self, haml, indent, compiler):
super(VariableNode, self).__init__(haml, indent, compiler)
def _render(self):
tag_content = self.haml.lstrip(VARIABLE_PREFIX)
self.before = "%s{{ %s }}" % (self.indent, tag_content.strip())
self.after = self.render_newlines()
def _post_render(self):
pass
class TagNode(LineNode):
"""
A Django/Jinja server-side tag node, e.g. -block
"""
def __init__(self, haml, indent, compiler):
super(TagNode, self).__init__(haml, indent, compiler)
self.tag_statement = self.haml.lstrip(TAG_PREFIX).strip()
self.tag_name = self.tag_statement.split(' ')[0]
if self.tag_name in self.compiler.self_closing_tags.values():
raise ParseException("Unexpected closing tag for self-closing tag %s" % self.tag_name)
def _render(self):
self.before = "%s{%% %s %%}" % (self.indent, self.tag_statement)
closing_tag = self.compiler.self_closing_tags.get(self.tag_name)
if closing_tag:
self.before += self.render_newlines()
self.after = '%s{%% %s %%}%s' % (self.indent, closing_tag, self.render_newlines())
else:
if self.children:
self.before += self.render_newlines()
else:
self.after = self.render_newlines()
self._render_children()
def should_contain(self, node):
return isinstance(node, TagNode) and node.tag_name in self.compiler.tags_may_contain.get(self.tag_name, '')
class FilterNode(Node):
"""
A type filter, e.g. :javascript
"""
def __init__(self, filter_name, content, indent, compiler):
super(FilterNode, self).__init__(indent, compiler)
self.filter_name = filter_name
self.content = content
def _render(self):
content = textwrap.dedent(self.content)
filter_func = get_filter(self.filter_name)
content = filter_func(content, self.compiler.options)
content = self.indent + content.replace('\n', '\n' + self.indent)
self.before = content
self.after = self.render_newlines() if self.content else ''
def _post_render(self):
pass
def __repr__(self): # pragma: no cover
return '%s(indent=%d, newlines=%d, filter=%s): %s' \
% (type(self).__name__, self.indentation, self.newlines, self.filter_name, self.content)
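# Illustrative summary (not part of the compiler): the single-line prefixes
# dispatched in read_node() above and the node types they produce; element
# prefixes (%, #, .) and filter lines (:name) are handled separately.
if __name__ == '__main__':
    prefix_to_node = {
        DOCTYPE_PREFIX: DoctypeNode,
        CONDITIONAL_COMMENT_PREFIX: ConditionalCommentNode,
        HTML_COMMENT_PREFIX: CommentNode,
        HAML_COMMENT_PREFIX: HamlCommentNode,
        VARIABLE_PREFIX: VariableNode,
        TAG_PREFIX: TagNode,
    }
    for prefix, node_cls in sorted(prefix_to_node.items()):
        print('%-3s -> %s' % (prefix, node_cls.__name__))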
|
|
import distutils.version
import django
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from liquimigrate.settings import LIQUIBASE_JAR, LIQUIBASE_DRIVERS
try:
# Django 1.7 and newer
from django.core.management.sql import (
emit_pre_migrate_signal, emit_post_migrate_signal)
emit_post_sync_signal = None
except ImportError:
# Django 1.6 and older
from django.core.management.sql import emit_post_sync_signal
emit_pre_migrate_signal = None
emit_post_migrate_signal = None
django_19_or_newer = (
distutils.version.StrictVersion(django.__version__) >= '1.9')
try:
from django.db import connections
databases = connections.databases
except ImportError:
# django without multidb support
databases = {
'default': {
'ENGINE': settings.DATABASE_ENGINE,
'HOST': settings.DATABASE_HOST,
'PORT': settings.DATABASE_PORT,
'NAME': settings.DATABASE_NAME,
'USER': settings.DATABASE_USER,
'PASSWORD': settings.DATABASE_PASSWORD,
},
}
DB_DEFAULTS = {
'postgresql': {
'tag': 'postgresql',
'host': 'localhost',
'port': 5432,
},
'mysql': {
'tag': 'mysql',
'host': 'localhost',
'port': 3306,
},
}
class Command(BaseCommand):
help = "liquibase migrations"
def add_arguments(self, parser):
parser.add_argument('command', help='Liquibase command')
parser.add_argument(
'args', nargs='*', help='Liquibase positional arguments')
parser.add_argument(
'--changeLogFile', dest='changelog_file',
help='XML file with changelog')
parser.add_argument(
'--driver', dest='driver',
help='db driver')
parser.add_argument(
'--classpath', dest='classpath',
help='jdbc driver class path')
parser.add_argument(
'--username', dest='username',
help='db username')
parser.add_argument(
'--password', dest='password',
help='db password')
parser.add_argument('--url', dest='url', help='db url')
parser.add_argument(
'--database', dest='database', default='default',
help='django database connection name')
parser.add_argument(
'-n', '--nosignals', dest='no_signals', action='store_true',
default=False,
help='disable emitting pre- and post migration signals')
def handle(self, *args, **options):
"""
Handle liquibase command parameters
"""
database = getattr(
settings, 'LIQUIMIGRATE_DATABASE', options['database'])
try:
dbsettings = databases[database]
except KeyError:
raise CommandError("don't know such a connection: %s" % database)
verbosity = int(options.get('verbosity'))
# get driver
driver_class = (
options.get('driver')
or dbsettings.get('ENGINE').split('.')[-1])
dbtag, driver, classpath = LIQUIBASE_DRIVERS.get(
driver_class, (None, None, None))
classpath = options.get('classpath') or classpath
if driver is None:
raise CommandError(
"unsupported db driver '%s'\n"
"available drivers: %s" % (
driver_class, ' '.join(LIQUIBASE_DRIVERS.keys())))
# command options
changelog_file = (
options.get('changelog_file')
or _get_changelog_file(options['database']))
username = options.get('username') or dbsettings.get('USER') or ''
password = options.get('password') or dbsettings.get('PASSWORD') or ''
url = options.get('url') or _get_url_for_db(dbtag, dbsettings)
command = options['command']
cmdargs = {
'jar': LIQUIBASE_JAR,
'changelog_file': changelog_file,
'username': username,
'password': password,
'command': command,
'driver': driver,
'classpath': classpath,
'url': url,
'args': ' '.join(args),
}
cmdline = "java -jar %(jar)s --changeLogFile %(changelog_file)s \
--username=%(username)s --password=%(password)s \
--driver=%(driver)s --classpath=%(classpath)s --url=%(url)s \
%(command)s %(args)s" % (cmdargs)
if verbosity > 0:
print("changelog file: %s" % (changelog_file,))
print("executing: %s" % (cmdline,))
created_models = None  # we don't know it
if emit_pre_migrate_signal and not options.get('no_signals'):
if django_19_or_newer:
emit_pre_migrate_signal(
1, options.get('interactive'), database)
else:
emit_pre_migrate_signal(
created_models, 1, options.get('interactive'), database)
rc = os.system(cmdline)
if rc == 0:
try:
if not options.get('no_signals'):
if emit_post_migrate_signal:
if django_19_or_newer:
emit_post_migrate_signal(
0, options.get('interactive'), database)
else:
emit_post_migrate_signal(
created_models, 0,
options.get('interactive'), database)
elif emit_post_sync_signal:
emit_post_sync_signal(
created_models, 0,
options.get('interactive'), database)
if not django_19_or_newer:
call_command(
'loaddata', 'initial_data', verbosity=1,
database=database)
except TypeError:
# singledb (1.1 and older)
emit_post_sync_signal(
created_models, 0, options.get('interactive'))
call_command(
'loaddata', 'initial_data', verbosity=0)
else:
raise CommandError('Liquibase returned an error code %s' % rc)
def _get_url_for_db(tag, dbsettings):
pattern = "jdbc:%(tag)s://%(host)s:%(port)s/%(name)s"
options = dict(DB_DEFAULTS.get(tag))
settings_map = {
'NAME': 'name',
'HOST': 'host',
'PORT': 'port',
}
for key in settings_map:
value = dbsettings.get(key)
if value:
options[settings_map[key]] = value
return pattern % options
def _get_changelog_file(database):
try:
return settings.LIQUIMIGRATE_CHANGELOG_FILES[database]
except AttributeError:
if database == 'default':
try:
return settings.LIQUIMIGRATE_CHANGELOG_FILE
except AttributeError:
raise CommandError('no changelog file configured; set settings.LIQUIMIGRATE_CHANGELOG_FILE')
else:
raise CommandError('settings.LIQUIMIGRATE_CHANGELOG_FILES dict \
is needed due to multidb operation')
except KeyError:
raise CommandError(
"don't know changelog for connection: %s" % database)
|
|
# Copyright 2001 by Katharine Lindner. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
# standard library
import sys
import string
import copy
import array
import os
import re
import sgmllib
import urlparse
# XML from python 2.0
from xml.sax import handler
# Martel
import Martel
from Martel import RecordReader
from Bio.ParserSupport import EventGenerator
from Bio.ParserSupport import AbstractConsumer
from Bio import File
from Bio.Align.Generic import Alignment
import Bio.Alphabet
import ecell_format
import Record
"""
Ecell converts the ECell input from spreadsheet format to an intermediate format, described in
http://www.e-cell.org/manual/chapter2E.html#3.2. It provides an alternative to the perl script
supplied with the Ecell2 distribution at http://bioinformatics.org/project/?group_id=49.
ECell expects a spreadsheet exported in delimited text format. The file should be read with
FilteredReader using the default filter chain to remove extraneous characters.
"""
class Error( Exception ):
"""
"""
def __init__( self ):
pass
class ECellError( Error ):
"""
message - description of error
"""
def __init__( self, message ):
self.message = message
class Iterator:
"""Iterator interface to move over a file of ecell entries one at a time.
"""
def __init__(self, handle, parser = None):
"""Initialize the iterator.
Arguments:
o handle - A handle with ECell entries to iterate through.
o parser - An optional parser to pass the entries through before
returning them. If None, then the raw entry will be returned.
"""
self.handle = File.UndoHandle( handle )
self._reader = RecordReader.Everything( self.handle )
self._parser = parser
def next(self):
"""Return the next ecell record from the handle.
Will return None if we ran out of records.
"""
data = self._reader.next()
if self._parser is not None:
if data:
dumpfile = open( 'dump', 'w' )
dumpfile.write( data )
dumpfile.close()
return self._parser.parse(File.StringHandle(data))
return data
def __iter__(self):
return iter(self.next, None)
class _Scanner:
"""Start up Martel to do the scanning of the file.
This initializes the Martel-based parser and connects it to a handler
that will generate events for a Feature Consumer.
"""
def __init__(self, debug = 0):
"""Initialize the scanner by setting up our caches.
Creating the parser takes a long time, so we want to cache it
to reduce parsing time.
Arguments:
o debug - The level of debugging that the parser should
display. Level 0 is no debugging, Level 2 displays the most
debugging info (but is much slower). See Martel documentation
for more info on this.
"""
# a listing of all tags we are interested in scanning for
# in the MartelParser
self.interest_tags = [ 'header_line', 'system_line', 'substance_multiline', \
'reactor_multiline', 'include_line' ]
# make a parser that returns only the tags we are interested in
expression = Martel.select_names( ecell_format.ecell_record, self.interest_tags)
self._parser = expression.make_parser(debug_level = debug)
def feed(self, handle, consumer):
"""Feed a set of data into the scanner.
Arguments:
o handle - A handle with the information to parse.
o consumer - The consumer that should be informed of events.
"""
self._parser.setContentHandler( EventGenerator(consumer,
self.interest_tags))
# self._parser.setErrorHandler(handle.ErrorHandler())
self._parser.parseFile(handle)
class _RecordConsumer:
"""Create an ECell Record object from scanner generated information.
"""
def __init__(self):
self.data = Record.Record()
self._header = []
self._database = {}
self._state = ''
def include_line( self, line ):
self.data.include_buf = self.data.include_buf + line
def header_line( self, lines ):
for line in lines:
items = line.split( '\t')
items[ 0 ] = items[ 0 ].lower()
self._header = []
self._state = items[ 0 ]
for item in items:
item = item.strip()
self._header.append( item.lower() )
def system_line( self, lines ):
for line in lines:
line_dict = self._make_line_dict( line )
if( not self._check_missing_header( line_dict ) ):
raise ECellError( "invalid header" )
self.data.num_systems = self.data.num_systems + 1
_set_defaults( line_dict )
self._build_system_entry( line_dict )
def substance_multiline( self, multiline ):
for line in multiline:
self.parse_substance_lines( line )
def parse_substance_lines( self, multiline ):
lines = multiline.splitlines()
line_no = 0
for line in lines:
line_dict = self._make_line_dict( line )
try:
if( not _is_valid_substance( line_dict ) ):
raise ECellError( "quantity and concentration are mutually exclusive" )
except ECellError, e:
print >> sys.stderr, e.message
qty = Record.get_entry( line_dict, 'qty' )
conc = Record.get_entry( line_dict, 'conc' )
if( ( qty.lower() != 'fix' ) and ( conc.lower() != 'fix' ) ):
self.data.num_substances = self.data.num_substances + 1
else:
line_no = line_no + 1
if( line.lower().startswith( 'substance' ) ):
_set_defaults( line_dict )
self._convert_conc( line_dict )
self._build_substance_entry( line_dict, line_no )
def reactor_multiline( self, multiline ):
for line in multiline:
self.parse_reactor_lines( line )
def parse_reactor_lines( self, multiline ):
lines = multiline.splitlines()
for line in lines:
line_dict = self._make_line_dict( line )
if( line.lower().startswith( 'reactor' ) ):
if( not self._check_missing_header( line_dict ) ):
raise ECellError( "invalid header" )
try:
if( not is_only_digits( line_dict[ 's_coeff' ] ) ):
raise ECellError( 's_coeff must contain only digits' )
if( not is_only_digits( line_dict[ 'p_coeff' ] ) ):
raise ECellError( 'p_coeff must contain only digits' )
except KeyError:
pass
if( line.lower().startswith( 'reactor' ) ):
_set_reactor_defaults( line_dict )
line_dict = self._remove_if_inconsistent( line_dict )
if( line_dict.has_key( 'class' ) ):
self.data.num_reactors = self.data.num_reactors + 1
num_substrates = 0
num_products = 0
num_catalysts = 0
num_effectors = 0
num_options = 0
num_args = 0
if( line_dict.has_key( 's_id' ) ): num_substrates = num_substrates + 1
if( line_dict.has_key( 'p_id' ) ): num_products = num_products + 1
if( line_dict.has_key( 'c_id' ) ): num_catalysts = num_catalysts + 1
if( line_dict.has_key( 'e_id' ) ): num_effectors = num_effectors + 1
if( line_dict.has_key( 'o_type' ) ): num_options = num_options + 1
if( line_dict.has_key( 'arg_tag' ) ): num_args = num_args + 1
counter_dict = { \
's_' : num_substrates, \
'p_' : num_products, \
'c_' : num_catalysts, \
'e_' : num_effectors, \
'o_' : num_options, \
'arg_tag' : num_args
}
self._set_max( counter_dict )
self._build_reactor_entry( line_dict, counter_dict )
def _set_max( self, counter_dict ):
num_reactors = self.data.num_reactors
for key in counter_dict.keys():
composite_key = key + str( num_reactors )
self.data._max_dict[ composite_key ] = counter_dict[ key ]
def _build_system_entry( self, line_dict ):
for key in line_dict.keys():
item = line_dict[ key ]
composite_key = 'system' + str( self.data.num_systems ) + key + '0'
if( not self.data.cell_dict.has_key( composite_key ) ):
self.data.cell_dict[ composite_key ] = item
def _build_substance_entry( self, line_dict, line_no ):
for key in line_dict.keys():
item = line_dict[ key ]
composite_key = 'substance' + str( self.data.num_substances ) + key + \
str( line_no )
if( not self.data.cell_dict.has_key( composite_key ) ):
self.data.cell_dict[ composite_key ] = item
def _convert_conc( self, line_dict ):
if( line_dict.has_key( 'conc' ) ):
if( not line_dict.has_key( 'qty' ) ):
contents = 'QTY(%s,%s)' % ( line_dict[ 'conc' ], line_dict[ 'path' ] )
composite_key = 'substance' + str( self.data.num_substances ) + 'qty' + '0'
self.data.cell_dict[ composite_key ] = contents
self.data.contains_concentration = 1
def _build_reactor_entry( self, line_dict, counter_dict ):
for key in line_dict.keys():
item = line_dict[ key ]
prefix = key[ :2 ]
if( key.startswith( 'arg_' ) ):
index = counter_dict[ 'arg_tag' ]
elif( counter_dict.has_key( prefix ) ):
index = counter_dict[ prefix ]
else:
index = '0'
composite_key = 'reactor' + str( self.data.num_reactors ) + str( key ) + str( index )
if( not self.data.cell_dict.has_key( composite_key ) ):
self.data.cell_dict[ composite_key ] = item
def _check_missing_header( self, line_dict ):
ok = 1
items = [ 'id', 'path', 'class' ]
for item in items:
if( line_dict.has_key( item ) == 0 ):
others = copy.deepcopy( items )
others.remove( item )
for other in others:
if( line_dict.has_key( other ) ):
if( item.lower() != 'class' ):
ok = 0
break
return ok
def _remove_if_inconsistent( self, list_dict ):
valid_keys = list_dict.keys()
for label in [ 'id', 'path', 'type' ]:
for prefix in [ 's_', 'p_', 'c_', 'e_' ]:
node = prefix + label
valid_keys = self._consistency_filter( prefix, node, valid_keys )
for key in list_dict.keys():
if( not key in valid_keys ):
del list_dict[ key ]
return list_dict
def _consistency_filter( self, prefix, tag, valid_keys ):
block = []
for suffix in [ 'id', 'path', 'coeff', 'type' ]:
node = prefix + suffix
block.append( node )
for node in block:
if( ( not tag in valid_keys ) and ( node in valid_keys ) ):
if( ( prefix == 'o_' ) or ( not tag.endswith( 'type' ) ) ):
valid_keys.remove( node )
return valid_keys
def _make_line_dict( self, line ):
line_dict = {}
items = line.split( '\t' )
num = 0
for item in items:
item = item.strip()
if( item != '' ):
line_dict[ self._header[ num ] ] = item
num = num + 1
return line_dict
def _clear_bad_block( block, items ):
for label in block:
items.remove( label )
return items
def _is_valid_substance( line_dict ):
ok = 1
if( line_dict.has_key( 'qty' ) and line_dict.has_key( 'conc' ) ):
if( not ( line_dict[ 'qty' ] == 'QTY' ) ):
ok = 0
return ok
def is_only_digits( line ):
ok = 1
text = line.strip()
if( text != '' ):
if( not text.isdigit() ):
ok = 0
return ok
def _set_reactor_defaults( line_dict ):
line_dict = _set_defaults( line_dict )
for item in [ 's_', 'p_', 'c_', 'e_' ]:
id = item + 'id'
coeff = item + 'coeff'
path = item + 'path'
if( line_dict.has_key( id ) ):
if( not line_dict.has_key( coeff ) ):
line_dict[ coeff ] = 1
if( not line_dict.has_key( path ) ):
line_dict[ path ] = line_dict[ 'path' ]
return( line_dict )
def _set_defaults( line_dict ):
if( not line_dict.has_key( 'name' ) ):
line_dict[ 'name' ] = line_dict[ 'id' ]
if( line_dict.has_key( 'arg_tag' ) ):
if( not line_dict.has_key( 'arg_coeff' ) ):
line_dict[ 'arg_coeff' ] = 0
return( line_dict )
class RecordParser:
"""Parse ECell files into Record objects
"""
def __init__(self, debug_level = 0):
"""Initialize the parser.
Arguments:
o debug_level - An optional argument that specifies the amount of
debugging information Martel should spit out. By default we have
no debugging info (the fastest way to do things), but if you want
you can set this as high as two and see exactly where a parse fails.
"""
self._scanner = _Scanner(debug_level)
def parse(self, handle):
"""Parse the specified handle into an ECell record.
"""
self._consumer = _RecordConsumer()
self._scanner.feed(handle, self._consumer)
return self._consumer.data
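# Usage sketch (hypothetical file name, not part of the original module):
# iterate over ECell entries in a file, parsing each into a Record.
if __name__ == '__main__':
    handle = open( 'ecell_input.txt' )
    parser = RecordParser()
    for record in Iterator( handle, parser ):
        print record.num_systems, record.num_substances, record.num_reactors
    handle.close()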
|
|
import os
import sys
import errno
import random
import glob
import tkinter
from tkinter import filedialog
import pyautogui
import time
import configparser
root = tkinter.Tk()
root.withdraw()
#Setting current Dir
dir_path = os.path.dirname(os.path.realpath(__file__))
#Move mouse to upper left screen to kill in case of error
pyautogui.FAILSAFE = True
autokeytemplate = ("""import subprocess
import os
os.chdir("{0}")
subprocess.call("python randinterq.py {1}", shell=True)
""")
autokeyjsontemplate = ("""{{
"type": "script",
"description": "{0}",
"store": {{}},
"modes": [
1
],
"usageCount": 0,
"prompt": false,
"omitTrigger": false,
"showInTrayMenu": false,
"abbreviation": {{
"abbreviations": [
"{1}"
],
"backspace": true,
"ignoreCase": false,
"immediate": false,
"triggerInside": false,
"wordChars": "[^\\t]"
}},
"hotkey": {{
"modifiers": [],
"hotKey": null
}},
"filter": {{
"regex": null,
"isRecursive": false
}}
}}""")
ahktemplate = ("""
::{0}::
SetWorkingDir, {1}
Run %comspec% /c ""{2}" "{3}"",,hide
return
""")
config = configparser.ConfigParser()
ahkpath = 'none'
autokeypath = 'None'
qpath = dir_path
if os.path.isfile('config.ini'):
config.sections()
config.read('config.ini')
ahkpath = config['Default']['ahkdir']
autokeypath = config['Default']['autokeydir']
qpath = config['Default']['qdir']
def createdir():
numdir = int(input("Please enter the number of questions (directories) you would like: "))
a = 0
while a <= numdir:
dir_name = ("Question %s" % a)
try:
os.mkdir(dir_name)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
a = a + 1
passfail = input("Would you like to create the pass/fail directories? (y/n): ")
if passfail == 'y':
try:
os.mkdir("Question pass")
os.mkdir("Question fail")
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def writerandomq():
script, qnum = sys.argv
os.chdir(qpath)
#Create list of filenames
search_path = os.path.join('Question %s' % qnum, '*.txt')
filenames = glob.glob(search_path)
#Open Random file from list
selqfile = open(random.choice(filenames))
qcontent = selqfile.read()
#Write content of file
pyautogui.typewrite(qcontent)
def genautokey():
gen = input("\nDo you wish to generate the python autokey files? (y/n): ")
numq = None
if gen == 'y':
print("\nI recommend using question 0 as the intro of your interview script."
"\nIt will be created along with the other questions.")
numq = int(input("\nPlease enter the number of questions you have: "))
a = 0
os.chdir(autokeypath)
while a <= numq:
f = open("question%s.py" % a, "w")
f.write(autokeytemplate.format(dir_path, a))
a = a + 1
f.close()
f = open("pass.py", "w")
f.write(autokeytemplate.format(dir_path, 'pass'))
f.close()
f = open("fail.py", "w")
f.write(autokeytemplate.format(dir_path, 'fail'))
f.close()
gjson = input("Do you wish to generate the .json files as well? (y/n): ")
if gjson == 'y':
if numq == None:
numq = int(input("\nPlease enter the number of questions you have: "))
b = 0
os.chdir(autokeypath)
while b <= numq:
f = open(".question%s.json" % b, "w")
f.write(autokeyjsontemplate.format('Question %s' % b, 'q%s'% b))
f.close()
b = b + 1
f = open(".pass.json", "w")
f.write(autokeyjsontemplate.format('pass', 'pass'))
f.close()
f = open(".fail.json", "w")
f.write(autokeyjsontemplate.format('fail', 'fail'))
f.close()
leaving()
else:
leaving()
def genahk():
numq = None
print("\nI recommend using question 0 as the intro of your interview script."
"It will be created along with the other questions.")
numq = int(input("\nPlease enter the number of questions you have: "))
a = 0
os.chdir(ahkpath)
filename = os.path.splitext(os.path.basename(__file__))[0]
with open("randinterq.ahk", "w") as file:
file.write('#Hotstring EndChars `t')
while a <= numq:
file.write(ahktemplate.format('q%s' % a, dir_path, '%s.exe' % filename, a))
a = a + 1
file.write(ahktemplate.format('pass', dir_path, '%s.exe' % filename, 'pass'))
file.write(ahktemplate.format('fail', dir_path, '%s.exe' % filename, 'fail'))
leaving()
def leaving():
os.chdir(dir_path)
config['Default'] = {}
config['Default']['ahkdir'] = ahkpath
config['Default']['autokeydir'] = autokeypath
config['Default']['qdir'] = qpath
with open('config.ini', 'w') as configfile:
config.write(configfile)
print("\nFor normal use of this program, please pass the number of the question you would like to write")
print("For example: 'randinterq 11' will return a random selection from question 11")
print("Will exit in 5 seconds")
time.sleep(5)
exit()
if len(sys.argv) == 1:
print("\nWelcome to the Apollo.rip Interviewer Companion app!")
choosedir = input("\nWould you like to change the location of the question folders? (y/n): ")
if choosedir == 'y':
qpath = filedialog.askdirectory(initialdir='.')
makedir = input("Do you wish to make some directories to hold your question files? (y/n): ")
if makedir == 'y':
os.chdir(qpath)
createdir()
windows = input("Are you running windows and using autohotkey? (y/n): ")
if windows == 'y':
ahkchangedir = input("Do you wish to set/change where the ahk script is saved? (y/n): ")
if ahkchangedir == 'y':
ahkpath = filedialog.askdirectory(initialdir='.')
startgenahk = input("Do you wish to create the ahk script? (y/n): ")
if startgenahk == 'y':
genahk()
linux = input("Are you running linux and using AutoKey? (y/n): ")
if linux == 'y':
autochangedir = input("Do you wish to set/change the AutoKey directory? (y/n): ")
if autochangedir == 'y':
linuxrdy = input("\nPress y when you are ready to set the AutoKey directory \n \n"
"Make sure this folder was already created by AutoKey previously \n"
"otherwise press any other key to exit: ")
if linuxrdy == 'y':
autokeypath = filedialog.askdirectory(initialdir='.')
genautokey()
else:
leaving()
else:
genautokey()
# if linux == 'n':
# leaving()
else:
leaving()
else:
writerandomq()
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for using the TensorFlow C API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import api_def_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.client import pywrap_tf_session as c_api
from tensorflow.python.util import compat
from tensorflow.python.util import tf_contextlib
class ScopedTFStatus(object):
"""Wrapper around TF_Status that handles deletion."""
def __init__(self):
self.status = c_api.TF_NewStatus()
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteStatus is not None:
c_api.TF_DeleteStatus(self.status)
class ScopedTFGraph(object):
"""Wrapper around TF_Graph that handles deletion."""
def __init__(self):
self.graph = c_api.TF_NewGraph()
# Note: when we're destructing the global context (i.e when the process is
# terminating) we may have already deleted other modules. By capturing the
# DeleteGraph function here, we retain the ability to cleanly destroy the
# graph at shutdown, which satisfies leak checkers.
self.deleter = c_api.TF_DeleteGraph
def __del__(self):
self.deleter(self.graph)
class ScopedTFImportGraphDefOptions(object):
"""Wrapper around TF_ImportGraphDefOptions that handles deletion."""
def __init__(self):
self.options = c_api.TF_NewImportGraphDefOptions()
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteImportGraphDefOptions is not None:
c_api.TF_DeleteImportGraphDefOptions(self.options)
class ScopedTFImportGraphDefResults(object):
"""Wrapper around TF_ImportGraphDefOptions that handles deletion."""
def __init__(self, results):
self.results = results
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteImportGraphDefResults is not None:
c_api.TF_DeleteImportGraphDefResults(self.results)
class ScopedTFFunction(object):
"""Wrapper around TF_Function that handles deletion."""
def __init__(self, func):
self.func = func
# Note: when we're destructing the global context (i.e when the process is
# terminating) we may have already deleted other modules. By capturing the
# DeleteFunction function here, we retain the ability to cleanly destroy the
# Function at shutdown, which satisfies leak checkers.
self.deleter = c_api.TF_DeleteFunction
def __del__(self):
if self.func is not None:
self.deleter(self.func)
self.func = None
class ApiDefMap(object):
"""Wrapper around Tf_ApiDefMap that handles querying and deletion.
The OpDef protos are also stored in this class so that they could
be queried by op name.
"""
def __init__(self):
op_def_proto = op_def_pb2.OpList()
buf = c_api.TF_GetAllOpList()
try:
op_def_proto.ParseFromString(c_api.TF_GetBuffer(buf))
self._api_def_map = c_api.TF_NewApiDefMap(buf)
finally:
c_api.TF_DeleteBuffer(buf)
self._op_per_name = {}
for op in op_def_proto.op:
self._op_per_name[op.name] = op
def __del__(self):
# Note: when we're destructing the global context (i.e when the process is
# terminating) we can have already deleted other modules.
if c_api is not None and c_api.TF_DeleteApiDefMap is not None:
c_api.TF_DeleteApiDefMap(self._api_def_map)
def put_api_def(self, text):
c_api.TF_ApiDefMapPut(self._api_def_map, text, len(text))
def get_api_def(self, op_name):
api_def_proto = api_def_pb2.ApiDef()
buf = c_api.TF_ApiDefMapGet(self._api_def_map, op_name, len(op_name))
try:
api_def_proto.ParseFromString(c_api.TF_GetBuffer(buf))
finally:
c_api.TF_DeleteBuffer(buf)
return api_def_proto
def get_op_def(self, op_name):
if op_name in self._op_per_name:
return self._op_per_name[op_name]
raise ValueError("No entry found for " + op_name + ".")
def op_names(self):
return self._op_per_name.keys()
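# Hedged usage sketch (not part of the original module): query an op's OpDef and
# ApiDef by name through the ApiDefMap wrapper above. The op name "MatMul" is an
# assumption used purely for illustration.
def _api_def_map_example():
  api_defs = ApiDefMap()
  op_def = api_defs.get_op_def("MatMul")    # OpDef proto for the op
  api_def = api_defs.get_api_def("MatMul")  # ApiDef proto for the same op
  return op_def.name, api_def.graph_op_name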
@tf_contextlib.contextmanager
def tf_buffer(data=None):
"""Context manager that creates and deletes TF_Buffer.
Example usage:
with tf_buffer() as buf:
# get serialized graph def into buf
...
proto_data = c_api.TF_GetBuffer(buf)
graph_def.ParseFromString(compat.as_bytes(proto_data))
# buf has been deleted
with tf_buffer(some_string) as buf:
c_api.TF_SomeFunction(buf)
# buf has been deleted
Args:
data: An optional `bytes`, `str`, or `unicode` object. If not None, the
yielded buffer will contain this data.
Yields:
Created TF_Buffer
"""
if data:
buf = c_api.TF_NewBufferFromString(compat.as_bytes(data))
else:
buf = c_api.TF_NewBuffer()
try:
yield buf
finally:
c_api.TF_DeleteBuffer(buf)
def tf_output(c_op, index):
"""Returns a wrapped TF_Output with specified operation and index.
Args:
c_op: wrapped TF_Operation
index: integer
Returns:
Wrapped TF_Output
"""
ret = c_api.TF_Output()
ret.oper = c_op
ret.index = index
return ret
def tf_operations(graph):
"""Generator that yields every TF_Operation in `graph`.
Args:
graph: Graph
Yields:
wrapped TF_Operation
"""
# pylint: disable=protected-access
pos = 0
c_op, pos = c_api.TF_GraphNextOperation(graph._c_graph, pos)
while c_op is not None:
yield c_op
c_op, pos = c_api.TF_GraphNextOperation(graph._c_graph, pos)
# pylint: enable=protected-access
def new_tf_operations(graph):
"""Generator that yields newly-added TF_Operations in `graph`.
Specifically, yields TF_Operations that don't have associated Operations in
`graph`. This is useful for processing nodes added by the C API.
Args:
graph: Graph
Yields:
wrapped TF_Operation
"""
# TODO(b/69679162): do this more efficiently
for c_op in tf_operations(graph):
try:
graph._get_operation_by_tf_operation(c_op) # pylint: disable=protected-access
except KeyError:
yield c_op
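# Hedged usage sketch (not part of the original module): list the names of all
# operations in a graph via the generator above. Assumes `graph` exposes the
# `_c_graph` handle used by tf_operations(), and that the bound C API provides
# TF_OperationName.
def _operation_names(graph):
  return [c_api.TF_OperationName(c_op) for c_op in tf_operations(graph)]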
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import (
Flask, request,
render_template, session,
redirect, url_for,
flash, send_file, send_from_directory,
Response
)
from flask.ext.admin import Admin
from os import path
from os.path import isfile,isdir,dirname,abspath,basename,join
from lib.dataDB import directory_indexer, cleanDate, get_size
from flask.ext.admin.contrib.sqla import ModelView
from urllib2 import quote, unquote
from werkzeug.contrib.cache import SimpleCache
from rdflib import Graph
from lib import views
from lib import models
from lib import logon
from lib.dbShared import db
from StringIO import StringIO
MT_RDF_XML = 'application/rdf+xml'
APP_BASE_URI = 'https://data.kb.se/'
cache = SimpleCache()
app = Flask(__name__)
admin = Admin(app, base_template='admin/base_admin.html')
app.config.from_pyfile('settings.py')
db.init_app(app)
curDir = path.dirname(path.realpath(__file__))
secretsFile = path.join(curDir, 'secrets')
with open(secretsFile, 'r') as sfile:
app.secret_key = sfile.read()
salt = app.secret_key
with app.app_context():
dirIndex = directory_indexer()
index_dir = dirIndex.index_dir
class Models(ModelView):
def is_accessible(self):
if session.get('username', None) and session.get('is_admin', 'False') == 'True':
return True
else:
return False
def index(self):
return self.render('admindex.html')
def login_required(f):
def decorated_function(*args, **kwargs):
        if session.get('username') is None:
return redirect(url_for('login2', next=request.url))
return f(*args, **kwargs)
return decorated_function
def is_allowed(roles):
    print('Allowed: %s got %s' % (roles, session['role']))
    return session['role'] in roles
admin.add_view(views.UserView(db.session))
admin.add_view(Models(models.Role, db.session))
admin.add_view(Models(models.Provider, db.session))
admin.add_view(Models(models.License, db.session))
admin.add_view(Models(models.Format, db.session))
admin.add_view(views.DatasetsView(db.session))
admin.add_view(views.TorrentView(db.session))
admin.add_view(Models(models.Sameas, db.session))
def redirect_url(default='index'):
return request.args.get('next') or \
request.referrer or \
url_for(default)
@app.route('/')
def index():
accepts = request.accept_mimetypes
best = accepts.best_match([MT_RDF_XML, 'text/html'])
if best == MT_RDF_XML and accepts[best] > accepts['text/html']:
return index_rdf()
else:
return index_html()
@app.route('/index.html')
def index_html():
datasets = models.Datasets.query.options(db.lazyload('sameas')).all()
return render_template(
'index.html',
datasets=datasets,
datasetRoot=app.config['DATASET_ROOT']
)
@app.route('/index.rdf')
def index_rdf():
key = index_rdf.__name__
data = cache.get(key)
if data is None:
data = Graph().parse(
data=index_html(),
publicID=APP_BASE_URI,
format='rdfa',
media_type='text/html'
).serialize(format='pretty-xml')
cache.set(key, data, timeout=60 * 60)
return Response(data, mimetype=MT_RDF_XML)
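# Hedged usage sketch (not part of the original application): the index view above
# negotiates between HTML and RDF/XML on the Accept header, so a client can ask
# for the RDF serialization explicitly. The `requests` library is an assumption
# about the client environment, used only for illustration.
def _fetch_index_rdf_example():
    import requests
    return requests.get(APP_BASE_URI, headers={'Accept': MT_RDF_XML}).text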
@app.before_request
def log_request():
if app.config.get('LOG_REQUESTS'):
        app.logger.debug('%s %s', request.method, request.path)
@app.route('/datasets/<int:year>/<month>/<dataset>/<path:dpath>')
@app.route('/datasets/<int:year>/<month>/<dataset>/')
def viewDataset(year, month, dataset, dpath=None):
#print(year, month, dataset, dpath)
datasetName = dataset
datasetRoot = app.config['DATASET_ROOT']
datasetPath = path.join(str(year), str(month), dataset)
dataset = models.Datasets.query.filter(
models.Datasets.path == datasetPath
).first()
if dpath:
wholePath = abspath(join(datasetRoot, datasetPath, dpath or '')).encode('utf-8')
if isfile(wholePath):
return send_from_directory(dirname(wholePath), basename(wholePath))
elif isdir(wholePath) and dpath[-1] != '/':
print('redirecting to /datasets/%d/%s/%s/%s/' % (year, month, datasetName, quote(dpath)))
return redirect('/datasets/%d/%s/%s/%s/' % (year, month, datasetName, quote(dpath)))
if not dataset:
return(render_template("error.html",
message="Could not find dataset"))
dataset.cleanDate = cleanDate(dataset.updated_at)
pathDict = {}
if not dataset.url:
try:
pathDict, dirUp, metadata = index_dir(
dpath,
dataset
)
except Exception as e:
return(render_template("error.html",
message="Could not generate index %s" % e))
    if dataset.url:
        pathDict = None
        dirUp = None
        metadata = None
# should use @memoize instead
key=path.join(datasetRoot, datasetPath)
datasetSize = cache.get(key)
if datasetSize is None:
datasetSize = get_size(start_path=path.join(datasetRoot, datasetPath))
cache.set(key, datasetSize, 60*60)
return(
render_template(
'dataset.html',
datasetRoot=datasetRoot,
dataset=dataset,
pathDict=pathDict,
dirUp=dirUp,
quote=quote,
metadata=metadata,
unquote=unquote,
datasetID=dataset.datasetID,
datasetSize=datasetSize
)
)
@app.route('/torrent/<torrentID>')
def getTorrent(torrentID):
torrentFile = StringIO()
torrent = models.Torrent.query.filter(
models.Torrent.id == torrentID
).first()
dataset = models.Datasets.query.filter(
models.Datasets.datasetID == torrent.dataset
).first()
filename = '%s.torrent' % path.basename(dataset.path)
torrentFile.write(torrent.torrentData)
torrentFile.seek(0)
return send_file(
torrentFile,
as_attachment=True,
attachment_filename=filename,
mimetype='application/x-bittorrent'
)
@app.route('/datasets/<datasetName>')
def viewDatasetURL(datasetName):
dataset = models.Datasets.query.filter(
models.Datasets.name == datasetName
).first()
if not dataset:
return(render_template("error.html",
message="Could not find dataset"))
dataset.cleanDate = cleanDate(dataset.updated_at)
return(
render_template(
'dataset.html',
dataset=dataset,
dirUp=None,
pathDict=None
)
)
@app.route('/login2')
def login2():
return(logon.handle_logon())
@app.route('/logout')
def logout():
session.pop('logged_in', None)
session.pop('username', None)
return redirect(url_for('index'))
@app.route('/del/confirm/<int:datasetID>')
def confirmDel(datasetID):
    if session.get('logged_in'):
        dataset = models.Datasets.query.filter(
            models.Datasets.datasetID == datasetID
        ).first()
        return(render_template('confirm.html', dataset=dataset))
    return redirect(url_for('login2', next=request.url))
@app.route('/del/<int:datasetID>')
def delDataset(datasetID):
    if session.get('logged_in'):
        try:
            dataset = models.Datasets.query.filter(
                models.Datasets.datasetID == datasetID
            ).first()
            db.session.delete(dataset)
            db.session.commit()
            flash('Deleted!')
            return redirect(url_for('index'))
        except Exception as e:
            return(render_template('error.html', message=e))
    return redirect(url_for('login2', next=request.url))
@app.after_request
def add_ua_compat(response):
response.headers['X-UA-Compatible'] = 'IE=Edge'
return response
if __name__ == "__main__":
with app.app_context():
db.create_all()
models.buildDB()
app.run(host='0.0.0.0', port=8000)
|
|
# searchAgents.py
# ---------------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero ([email protected]) and Dan Klein ([email protected]).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
This file contains all of the agents that can be selected to
control Pacman. To select an agent, use the '-p' option
when running pacman.py. Arguments can be passed to your agent
using '-a'. For example, to load a SearchAgent that uses
depth first search (dfs), run the following command:
> python pacman.py -p SearchAgent -a fn=depthFirstSearch
Commands to invoke other search strategies can be found in the
project description.
Please only change the parts of the file you are asked to.
Look for the lines that say
"*** YOUR CODE HERE ***"
The parts you fill in start about 3/4 of the way down. Follow the
project description for details.
Good luck and happy searching!
"""
from existingSearchAgents import *
from existingSearchAgents import SearchAgent
'''#################################################
This file contains mostly functions that you
will write or complete. Be sure to check out
the file 'existingSearchAgents.py' as it
contains many classes that are used in this
file, with explanation in comments. It might
help you to understand what is happening!
Be sure to also read the file 'util.py', as
it contains a number of classes that will
prove useful when implementing the
solutions to the assignments.
#################################################'''
class CornersProblem(search.SearchProblem):
"""
This search problem finds paths through all four corners of a layout.
You must select a suitable state space and successor function
"""
def __init__(self, startingGameState):
"""
Stores the walls, pacman's starting position and corners.
"""
self.walls = startingGameState.getWalls()
self.startingPosition = startingGameState.getPacmanPosition()
top, right = self.walls.height-2, self.walls.width-2
self.corners = ((1,1), (1,top), (right, 1), (right, top))
for corner in self.corners:
if not startingGameState.hasFood(*corner):
print(('Warning: no food in corner ' + str(corner)))
self._expanded = 0 # Number of search nodes expanded
"*** YOUR CODE HERE ***"
def getStartState(self):
"Returns the start state (in your state space, not the full Pacman state space)"
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
def isGoalState(self, state):
"Returns whether this search state is a goal state of the problem"
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
def getSuccessors(self, state):
"""
Returns successor states, the actions they require, and a cost of 1.
As noted in search.py:
For a given state, this should return a list of triples,
(successor, action, stepCost), where 'successor' is a
successor to the current state, 'action' is the action
required to get there, and 'stepCost' is the incremental
cost of expanding to that successor
"""
successors = []
for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
# Add a successor state to the successor list if the action is legal
# Here's a code snippet for figuring out whether a new position hits a wall:
# x,y = currentPosition
# dx, dy = Actions.directionToVector(action)
# nextx, nexty = int(x + dx), int(y + dy)
# hitsWall = self.walls[nextx][nexty]
"*** YOUR CODE HERE ***"
self._expanded += 1
return successors
def getCostOfActions(self, actions):
"""
Returns the cost of a particular sequence of actions. If those actions
include an illegal move, return 999999. This is implemented for you.
"""
if actions == None: return 999999
x,y= self.startingPosition
for action in actions:
dx, dy = Actions.directionToVector(action)
x, y = int(x + dx), int(y + dy)
if self.walls[x][y]: return 999999
return len(actions)
def cornersHeuristic(state, problem):
"""
A heuristic for the CornersProblem that you defined.
state: The current search state
(a data structure you chose in your search problem)
problem: The CornersProblem instance for this layout.
This function should always return a number that is a lower bound
on the shortest path from the state to a goal of the problem; i.e.
it should be admissible. (You need not worry about consistency for
this heuristic to receive full credit.)
"""
corners = problem.corners # These are the corner coordinates
walls = problem.walls # These are the walls of the maze, as a Grid (game.py)
"*** YOUR CODE HERE ***"
return 0 # Default to trivial solution
def foodHeuristic(state, problem):
"""
Your heuristic for the FoodSearchProblem goes here.
This heuristic must be consistent to ensure correctness. First, try to come up
with an admissible heuristic; almost all admissible heuristics will be consistent
as well.
  If using A* ever finds a solution that is worse than what uniform cost search finds,
your heuristic is *not* consistent, and probably not admissible! On the other hand,
inadmissible or inconsistent heuristics may find optimal solutions, so be careful.
The state is a tuple ( pacmanPosition, foodGrid ) where foodGrid is a
Grid (see game.py) of either True or False. You can call foodGrid.asList()
to get a list of food coordinates instead.
If you want access to info like walls, capsules, etc., you can query the problem.
For example, problem.walls gives you a Grid of where the walls are.
If you want to *store* information to be reused in other calls to the heuristic,
there is a dictionary called problem.heuristicInfo that you can use. For example,
if you only want to count the walls once and store that value, try:
problem.heuristicInfo['wallCount'] = problem.walls.count()
Subsequent calls to this heuristic can access problem.heuristicInfo['wallCount']
"""
position, foodGrid = state
"*** YOUR CODE HERE ***"
return 0
class ClosestDotSearchAgent(SearchAgent):
"Search for all food using a sequence of searches"
def registerInitialState(self, state):
self.actions = []
currentState = state
while(currentState.getFood().count() > 0):
nextPathSegment = self.findPathToClosestDot(currentState) # The missing piece
self.actions += nextPathSegment
for action in nextPathSegment:
legal = currentState.getLegalActions()
if action not in legal:
t = (str(action), str(currentState))
raise Exception('findPathToClosestDot returned an illegal move: %s!\n%s' % t)
currentState = currentState.generateSuccessor(0, action)
self.actionIndex = 0
print(('Path found with cost %d.' % len(self.actions)))
def findPathToClosestDot(self, gameState):
"Returns a path (a list of actions) to the closest dot, starting from gameState"
# Here are some useful elements of the startState
startPosition = gameState.getPacmanPosition()
food = gameState.getFood()
walls = gameState.getWalls()
problem = AnyFoodSearchProblem(gameState)
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
class AnyFoodSearchProblem(PositionSearchProblem):
"""
A search problem for finding a path to any food.
This search problem is just like the PositionSearchProblem, but
has a different goal test, which you need to fill in below. The
state space and successor function do not need to be changed.
The class definition above, AnyFoodSearchProblem(PositionSearchProblem),
inherits the methods of the PositionSearchProblem.
You can use this search problem to help you fill in
the findPathToClosestDot method.
"""
def __init__(self, gameState):
"Stores information from the gameState. You don't need to change this."
# Store the food for later reference
self.food = gameState.getFood()
# Store info for the PositionSearchProblem (no need to change this)
self.walls = gameState.getWalls()
self.startState = gameState.getPacmanPosition()
self.costFn = lambda x: 1
self._visited, self._visitedlist, self._expanded = {}, [], 0
def isGoalState(self, state):
"""
The state is Pacman's position. Fill this in with a goal test
that will complete the problem definition.
"""
x,y = state
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
class CrossroadSearchAgent(SearchAgent):
def getSuccessors(self, state):
"""
Returns successor states, the actions they require, and a cost of 1.
As noted in search.py:
For a given state, this should return a list of triples,
(successor, action, stepCost), where 'successor' is a
successor to the current state, 'action' is the action
required to get there, and 'stepCost' is the incremental
cost of expanding to that successor
"""
successors = []
for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
# Add a successor state to the successor list if the action is legal
# Here's a code snippet for figuring out whether a new position hits a wall:
# x,y = currentPosition
# dx, dy = Actions.directionToVector(action)
# nextx, nexty = int(x + dx), int(y + dy)
# hitsWall = self.walls[nextx][nexty]
1
# Bookkeeping for display purposes
self._expanded += 1
"*** YOUR CODE HERE ***"
return successors
##################
# Mini-contest 1 #
##################
class ApproximateSearchAgent(Agent):
"Implement your contest entry here. Change anything but the class name."
def registerInitialState(self, state):
"This method is called before any moves are made."
"*** YOUR CODE HERE ***"
def getAction(self, state):
"""
From game.py:
The Agent will receive a GameState and must return an action from
Directions.{North, South, East, West, Stop}
"""
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
class AStarCornersAgent(SearchAgent):
"A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
def __init__(self):
self.searchFunction = lambda prob: search.aStarSearch(prob, cornersHeuristic)
self.searchType = CornersProblem
class AStarFoodSearchAgent(SearchAgent):
"A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
def __init__(self):
self.searchFunction = lambda prob: search.aStarSearch(prob, foodHeuristic)
self.searchType = FoodSearchProblem
|
|
"""
Helper functions for mapping model fields to a dictionary of default
keyword arguments that should be used for their equivalent serializer fields.
"""
from django.core import validators
from django.db import models
from django.utils.text import capfirst
from rest_framework.compat import clean_manytomany_helptext
from rest_framework.validators import UniqueValidator
import inspect
NUMERIC_FIELD_TYPES = (
models.IntegerField, models.FloatField, models.DecimalField
)
class ClassLookupDict(object):
"""
Takes a dictionary with classes as keys.
    Lookups against this object will traverse the object's inheritance
    hierarchy in method resolution order, returning the first matching value
    from the dictionary, or raising a KeyError if nothing matches.
"""
def __init__(self, mapping):
self.mapping = mapping
def __getitem__(self, key):
if hasattr(key, '_proxy_class'):
# Deal with proxy classes. Ie. BoundField behaves as if it
# is a Field instance when using ClassLookupDict.
base_class = key._proxy_class
else:
base_class = key.__class__
for cls in inspect.getmro(base_class):
if cls in self.mapping:
return self.mapping[cls]
        raise KeyError('Class %s not found in lookup.' % base_class.__name__)
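# Hedged usage sketch (not part of the original module): ClassLookupDict resolves
# a key by walking the MRO of its class, so a subclass falls back to its parent's
# entry. The mapping values below are placeholders used only for illustration.
def _class_lookup_example():
    field_lookup = ClassLookupDict({
        models.CharField: 'char-ish field',
        models.IntegerField: 'integer-ish field',
    })
    # SlugField subclasses CharField, so the CharField entry is returned.
    return field_lookup[models.SlugField()]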
def needs_label(model_field, field_name):
"""
Returns `True` if the label based on the model's verbose name
    is not equal to the default label it would have based on its field name.
"""
default_label = field_name.replace('_', ' ').capitalize()
return capfirst(model_field.verbose_name) != default_label
def get_detail_view_name(model):
"""
Given a model class, return the view name to use for URL relationships
that refer to instances of the model.
"""
return '%(model_name)s-detail' % {
'app_label': model._meta.app_label,
'model_name': model._meta.object_name.lower()
}
def get_field_kwargs(field_name, model_field):
"""
Creates a default instance of a basic non-relational field.
"""
kwargs = {}
validator_kwarg = list(model_field.validators)
# The following will only be used by ModelField classes.
# Gets removed for everything else.
kwargs['model_field'] = model_field
if model_field.verbose_name and needs_label(model_field, field_name):
kwargs['label'] = capfirst(model_field.verbose_name)
if model_field.help_text:
kwargs['help_text'] = model_field.help_text
max_digits = getattr(model_field, 'max_digits', None)
if max_digits is not None:
kwargs['max_digits'] = max_digits
decimal_places = getattr(model_field, 'decimal_places', None)
if decimal_places is not None:
kwargs['decimal_places'] = decimal_places
if isinstance(model_field, models.TextField):
kwargs['style'] = {'base_template': 'textarea.html'}
if isinstance(model_field, models.AutoField) or not model_field.editable:
# If this field is read-only, then return early.
# Further keyword arguments are not valid.
kwargs['read_only'] = True
return kwargs
if model_field.has_default() or model_field.blank or model_field.null:
kwargs['required'] = False
if model_field.null and not isinstance(model_field, models.NullBooleanField):
kwargs['allow_null'] = True
if model_field.blank:
kwargs['allow_blank'] = True
if model_field.flatchoices:
# If this model field contains choices, then return early.
# Further keyword arguments are not valid.
kwargs['choices'] = model_field.flatchoices
return kwargs
# Ensure that max_length is passed explicitly as a keyword arg,
# rather than as a validator.
max_length = getattr(model_field, 'max_length', None)
if max_length is not None and isinstance(model_field, models.CharField):
kwargs['max_length'] = max_length
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MaxLengthValidator)
]
# Ensure that min_length is passed explicitly as a keyword arg,
# rather than as a validator.
min_length = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MinLengthValidator)
), None)
if min_length is not None and isinstance(model_field, models.CharField):
kwargs['min_length'] = min_length
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MinLengthValidator)
]
# Ensure that max_value is passed explicitly as a keyword arg,
# rather than as a validator.
max_value = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MaxValueValidator)
), None)
if max_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
kwargs['max_value'] = max_value
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MaxValueValidator)
]
    # Ensure that min_value is passed explicitly as a keyword arg,
# rather than as a validator.
min_value = next((
validator.limit_value for validator in validator_kwarg
if isinstance(validator, validators.MinValueValidator)
), None)
if min_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
kwargs['min_value'] = min_value
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.MinValueValidator)
]
# URLField does not need to include the URLValidator argument,
# as it is explicitly added in.
if isinstance(model_field, models.URLField):
validator_kwarg = [
validator for validator in validator_kwarg
if not isinstance(validator, validators.URLValidator)
]
# EmailField does not need to include the validate_email argument,
# as it is explicitly added in.
if isinstance(model_field, models.EmailField):
validator_kwarg = [
validator for validator in validator_kwarg
if validator is not validators.validate_email
]
    # SlugField does not need to include the 'validate_slug' argument.
if isinstance(model_field, models.SlugField):
validator_kwarg = [
validator for validator in validator_kwarg
if validator is not validators.validate_slug
]
if getattr(model_field, 'unique', False):
validator = UniqueValidator(queryset=model_field.model._default_manager)
validator_kwarg.append(validator)
if validator_kwarg:
kwargs['validators'] = validator_kwarg
return kwargs
def get_relation_kwargs(field_name, relation_info):
"""
Creates a default instance of a flat relational field.
"""
model_field, related_model, to_many, has_through_model = relation_info
kwargs = {
'queryset': related_model._default_manager,
'view_name': get_detail_view_name(related_model)
}
if to_many:
kwargs['many'] = True
if has_through_model:
kwargs['read_only'] = True
kwargs.pop('queryset', None)
if model_field:
if model_field.verbose_name and needs_label(model_field, field_name):
kwargs['label'] = capfirst(model_field.verbose_name)
help_text = clean_manytomany_helptext(model_field.help_text)
if help_text:
kwargs['help_text'] = help_text
if not model_field.editable:
kwargs['read_only'] = True
kwargs.pop('queryset', None)
if kwargs.get('read_only', False):
# If this field is read-only, then return early.
# No further keyword arguments are valid.
return kwargs
if model_field.has_default() or model_field.null:
kwargs['required'] = False
if model_field.null:
kwargs['allow_null'] = True
if model_field.validators:
kwargs['validators'] = model_field.validators
if getattr(model_field, 'unique', False):
validator = UniqueValidator(queryset=model_field.model._default_manager)
kwargs['validators'] = kwargs.get('validators', []) + [validator]
return kwargs
def get_nested_relation_kwargs(relation_info):
kwargs = {'read_only': True}
if relation_info.to_many:
kwargs['many'] = True
return kwargs
def get_url_kwargs(model_field):
return {
'view_name': get_detail_view_name(model_field)
}
|
|
"""
Calculators for statistics and analysis related to food intake.
"""
import re
from copy import deepcopy
from simulation.food import proteins, carbs, fats, calories, alcoholics
class daily_intake_calculator(object):
"""
    Object that calculates various stats given a list of daily intake entries,
    each naming a food and its quantity in grams.
can calculate:
- carbs, fats, proteins, calories
- keto proportions
"""
def __init__(self, food_contents_dict):
self.daily_intake = food_contents_dict
self.carb_content = self.calculate_carbs()['total']
self.protein_content = self.calculate_proteins()['total']
self.alcohol_content = self.calculate_alcoholics()['total']
self.fat_content = self.calculate_fats()['total']
def normalized_pertentage(self, partial, total):
"""
calculate a percentage with 3 decimal digits
"""
return round(((partial / total) * 100), ndigits=3)
def calculate_calories_proportions(self):
"""
9 calories per gram of fat
4 calories per gram of proteins
4 calories per gram of carbs
7 calories per gram of alcohol
source: http://www.nutristrategy.com/nutrition/calories.htm
"""
carbs_calories = self.carb_content * 4
protein_calories = self.protein_content * 4
fat_calories = self.fat_content * 9
alcohol_calories = self.alcohol_content * 7
total_calories = carbs_calories + protein_calories + fat_calories + alcohol_calories
return {
'total_calories': total_calories,
'fat': (
fat_calories,
self.normalized_pertentage(fat_calories, total_calories)),
'carbs': (
carbs_calories,
self.normalized_pertentage(carbs_calories, total_calories)),
'protein': (
protein_calories,
self.normalized_pertentage(protein_calories, total_calories)),
'alcohol': (
alcohol_calories,
self.normalized_pertentage(alcohol_calories, total_calories)),
}
def complete_analysis(self):
"""
do complete analysis
"""
        # percentage analysis
admited_ranges = {
'carbs': (4.5, 7.5), # ideal: 5
'protein': (23, 28), # ideal: 25
'fat': (63, 77), # ideal: 70
}
report = self.calculate_calories_proportions()
for r in admited_ranges:
if report[r][1] < admited_ranges[r][0]:
# low
pass
elif report[r][1] > admited_ranges[r][1]:
# high
pass
else:
# ok
pass
# proportion analysis
protein_carbs_range = (4, 6) # ideal: 5
protein_carbs_ratio = report['protein'][1] / report['carbs'][1]
if protein_carbs_ratio <= protein_carbs_range[0]:
# more protein
pass
elif protein_carbs_ratio >= protein_carbs_range[1]:
# less protein
pass
# food analysis
pass
    def keto_analysis(self):  # TODO: refactor this
"""
do keto analysis
"""
# declare proportions bias
# source: https://www.ruled.me/guide-keto-diet/
protein_carbs_range = (4, 6) # ideal: 5
carbs_range = (4.5, 7.5)
protein_range = (23, 28)
fat_range = (63, 77) # ideal: 70
# get calories proportions
report = self.calculate_calories_proportions()
proportion = report['protein'][1] / report['carbs'][1]
# valorate carbs, fats and proteins
if proportion <= protein_carbs_range[0]:
# too much carbs
carbs_analysis = '[planned_{}%][real_{:.2f}%]: high'
protein_analysis = '[planned_{}%][real_{:.2f}%]: low'
elif proportion >= protein_carbs_range[1]:
# too many proteins
carbs_analysis = '[planned_{}%][real_{:.2f}%]: low'
protein_analysis = '[planned_{}%][real_{:.2f}%]: high'
else:
# proteins and carbs are ok
carbs_analysis = '[planned_{}%][real_{:.2f}%]: ok'
protein_analysis = '[planned_{}%][real_{:.2f}%]: ok'
if report['fat'][1] <= fat_range[0]:
# need more fat
fat_analysis = '[planned_{}%][real_{:.2f}%]: low'
elif report['fat'][1] >= fat_range[1]:
# need less fat
fat_analysis = '[planned_{}%][real_{:.2f}%]: high'
else:
# fat is ok
            fat_analysis = '[planned_{}%][real_{:.2f}%]: ok'
return {
'carbs': carbs_analysis.format(5, report['carbs'][1]),
'protein': protein_analysis.format(25, report['protein'][1]),
'fat': fat_analysis.format(70, report['fat'][1])
}
def tdee_analysis(self):
"""
do tdee analysis
"""
# calculate calories
# TODO: compare with tdee plan
pass
def do_calculation(self, food_contents_dict):
"""
calculate total and partial contents of any statistics in daily_intake
        given a dictionary with its contents per 100g
"""
ingredients = deepcopy(self.daily_intake)
adjusts = {}
total_content = 0
partial_contents = {}
for i in ingredients:
if 'g' in i[-1]:
adjust_weight_index = float(i[-1].replace('g', '')) / 100
else:
adjust_weight_index = float(i[-1])
adjusts[i[0]] = adjust_weight_index
for i in adjusts:
content = food_contents_dict[i] * adjusts[i]
partial_contents[i] = content
total_content += content
return total_content, partial_contents
def calculate_proteins(self):
"""calculate proteins in a list of things"""
total_content, partial_contents = self.do_calculation(proteins)
# print('partial proteins contents: {}'.format(partial_contents))
return {'total': total_content, 'partial': partial_contents}
def calculate_carbs(self):
"""calculate carbs in a list of things"""
total_content, partial_contents = self.do_calculation(carbs)
# print('partial carbs contents: {}'.format(partial_contents))
return {'total': total_content, 'partial': partial_contents}
def calculate_fats(self):
"""calculate fats in a list of things"""
total_content, partial_contents = self.do_calculation(fats)
# print('partial fat contents: {}'.format(partial_contents))
return {'total': total_content, 'partial': partial_contents}
def calculate_alcoholics(self):
"""calculate alcohol in a list of things"""
total_content, partial_contents = self.do_calculation(alcoholics)
# print('partial alcohol contents: {}'.format(partial_contents))
return {'total': total_content, 'partial': partial_contents}
def analyse_percentajes():
"""
        validate percentages of proteins, carbs and fat
"""
# CARBS
# less than 20 is too low
# between 20 and 40 is ok
# more than 40 is too high
# # FATS
# less than 60 is too low
# between 60 and 75 is ok
# more than 75 is too high
# # PROTEINS
# less than 20 is too low
# between 20 and 30 is ok
# more than 30 is too high
# # FAT-CARBS RATIO
# less than 4.5 is too much carbs
# between 4.5 and 5.5 is ok
# more than 5.5 is too much protein
pass
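# Hedged worked example (not part of the original module): illustrates the
# calorie-proportion rule used above (9/4/4/7 kcal per gram of fat, protein,
# carbs and alcohol). The gram figures are assumptions chosen purely for
# demonstration.
def _calorie_proportions_example():
    carbs_cal = 20 * 4        # 80 kcal from carbs
    protein_cal = 75 * 4      # 300 kcal from protein
    fat_cal = 150 * 9         # 1350 kcal from fat
    alcohol_cal = 0 * 7       # 0 kcal from alcohol
    total = carbs_cal + protein_cal + fat_cal + alcohol_cal   # 1730 kcal
    return round((fat_cal / total) * 100, ndigits=3)          # ~78.035 % of calories from fat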
|
|
from functools import partial
import numpy as np
from scipy import stats
import statsmodels.api as sm
from statsmodels.base.model import GenericLikelihoodModel
from statsmodels.tools.numdiff import approx_fprime, approx_hess
data = sm.datasets.spector.load()
data.exog = sm.add_constant(data.exog, prepend=False)
# in this dir
probit_mod = sm.Probit(data.endog, data.exog)
probit_res = probit_mod.fit()
loglike = probit_mod.loglike
score = probit_mod.score
mod = GenericLikelihoodModel(data.endog, data.exog*2, loglike, score)
res = mod.fit(method="nm", maxiter = 500)
def probitloglike(params, endog, exog):
"""
Log likelihood for the probit
"""
q = 2*endog - 1
X = exog
return np.add.reduce(stats.norm.logcdf(q*np.dot(X,params)))
model_loglike = partial(probitloglike, endog=data.endog, exog=data.exog)
mod = GenericLikelihoodModel(data.endog, data.exog, loglike=model_loglike)
res = mod.fit(method="nm", maxiter=500)
print(res)
np.allclose(res.params, probit_res.params, rtol=1e-4)
print(res.params, probit_res.params)
#datal = sm.datasets.longley.load()
datal = sm.datasets.ccard.load()
datal.exog = sm.add_constant(datal.exog, prepend=False)
# Instance of GenericLikelihood model does not work directly, because loglike
# cannot get access to data in self.endog, self.exog
nobs = 5000
rvs = np.random.randn(nobs,6)
datal.exog = rvs[:,:-1]
datal.exog = sm.add_constant(datal.exog, prepend=False)
datal.endog = 1 + rvs.sum(1)
show_error = False
show_error2 = True  # set to False to skip this variant
if show_error:
def loglike_norm_xb(self, params):
beta = params[:-1]
sigma = params[-1]
xb = np.dot(self.exog, beta)
return stats.norm.logpdf(self.endog, loc=xb, scale=sigma)
mod_norm = GenericLikelihoodModel(datal.endog, datal.exog, loglike_norm_xb)
res_norm = mod_norm.fit(method="nm", maxiter = 500)
print(res_norm.params)
if show_error2:
def loglike_norm_xb(params, endog, exog):
beta = params[:-1]
sigma = params[-1]
#print exog.shape, beta.shape
xb = np.dot(exog, beta)
#print xb.shape, stats.norm.logpdf(endog, loc=xb, scale=sigma).shape
return stats.norm.logpdf(endog, loc=xb, scale=sigma).sum()
model_loglike3 = partial(loglike_norm_xb,
endog=datal.endog, exog=datal.exog)
mod_norm = GenericLikelihoodModel(datal.endog, datal.exog, model_loglike3)
res_norm = mod_norm.fit(start_params=np.ones(datal.exog.shape[1]+1),
method="nm", maxiter = 5000)
print(res_norm.params)
class MygMLE(GenericLikelihoodModel):
# just for testing
def loglike(self, params):
beta = params[:-1]
sigma = params[-1]
xb = np.dot(self.exog, beta)
return stats.norm.logpdf(self.endog, loc=xb, scale=sigma).sum()
def loglikeobs(self, params):
beta = params[:-1]
sigma = params[-1]
xb = np.dot(self.exog, beta)
return stats.norm.logpdf(self.endog, loc=xb, scale=sigma)
mod_norm2 = MygMLE(datal.endog, datal.exog)
#res_norm = mod_norm.fit(start_params=np.ones(datal.exog.shape[1]+1), method="nm", maxiter = 500)
res_norm2 = mod_norm2.fit(start_params=[1.]*datal.exog.shape[1]+[1], method="nm", maxiter = 500)
np.allclose(res_norm.params, res_norm2.params)
print(res_norm2.params)
res2 = sm.OLS(datal.endog, datal.exog).fit()
start_params = np.hstack((res2.params, np.sqrt(res2.mse_resid)))
res_norm3 = mod_norm2.fit(start_params=start_params, method="nm", maxiter = 500,
retall=0)
print(start_params)
print(res_norm3.params)
print(res2.bse)
print(res_norm3.bse)
print('llf', res2.llf, res_norm3.llf)
bse = np.sqrt(np.diag(np.linalg.inv(res_norm3.model.hessian(res_norm3.params))))
res_norm3.model.score(res_norm3.params)
#fprime in fit option cannot be overwritten, set to None, when score is defined
# exception is fixed, but I do not think score was supposed to be called
res_bfgs = mod_norm2.fit(start_params=start_params, method="bfgs", fprime=None,
maxiter=500, retall=0)
hb=-approx_hess(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
hf=-approx_hess(res_norm3.params, mod_norm2.loglike, epsilon=1e-4)
hh = (hf+hb)/2.
print(np.linalg.eigh(hh))
grad = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
print(grad)
gradb = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
gradf = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=1e-4)
print((gradb+gradf)/2.)
print(res_norm3.model.score(res_norm3.params))
print(res_norm3.model.score(start_params))
mod_norm2.loglike(start_params/2.)
print(np.linalg.inv(-1*mod_norm2.hessian(res_norm3.params)))
print(np.sqrt(np.diag(res_bfgs.cov_params())))
print(res_norm3.bse)
print("MLE - OLS parameter estimates")
print(res_norm3.params[:-1] - res2.params)
print("bse diff in percent")
print((res_norm3.bse[:-1] / res2.bse)*100. - 100)
'''
Optimization terminated successfully.
Current function value: 12.818804
Iterations 6
Optimization terminated successfully.
Current function value: 12.818804
Iterations: 439
Function evaluations: 735
Optimization terminated successfully.
Current function value: 12.818804
Iterations: 439
Function evaluations: 735
<statsmodels.model.LikelihoodModelResults object at 0x02131290>
[ 1.6258006 0.05172931 1.42632252 -7.45229732] [ 1.62581004 0.05172895 1.42633234 -7.45231965]
Warning: Maximum number of function evaluations has been exceeded.
[ -1.18109149 246.94438535 -16.21235536 24.05282629 -324.80867176
274.07378453]
Warning: Maximum number of iterations has been exceeded
[ 17.57107 -149.87528787 19.89079376 -72.49810777 -50.06067953
306.14170418]
Optimization terminated successfully.
Current function value: 506.488765
Iterations: 339
Function evaluations: 550
[ -3.08181404 234.34702702 -14.99684418 27.94090839 -237.1465136
284.75079529]
[ -3.08181304 234.34701361 -14.99684381 27.94088692 -237.14649571
274.6857294 ]
[ 5.51471653 80.36595035 7.46933695 82.92232357 199.35166485]
llf -506.488764864 -506.488764864
Optimization terminated successfully.
Current function value: 506.488765
Iterations: 9
Function evaluations: 13
Gradient evaluations: 13
(array([ 2.41772580e-05, 1.62492628e-04, 2.79438138e-04,
1.90996240e-03, 2.07117946e-01, 1.28747174e+00]), array([[ 1.52225754e-02, 2.01838216e-02, 6.90127235e-02,
-2.57002471e-04, -5.25941060e-01, -8.47339404e-01],
[ 2.39797491e-01, -2.32325602e-01, -9.36235262e-01,
3.02434938e-03, 3.95614029e-02, -1.02035585e-01],
[ -2.11381471e-02, 3.01074776e-02, 7.97208277e-02,
-2.94955832e-04, 8.49402362e-01, -5.20391053e-01],
[ -1.55821981e-01, -9.66926643e-01, 2.01517298e-01,
1.52397702e-03, 4.13805882e-03, -1.19878714e-02],
[ -9.57881586e-01, 9.87911166e-02, -2.67819451e-01,
1.55192932e-03, -1.78717579e-02, -2.55757014e-02],
[ -9.96486655e-04, -2.03697290e-03, -2.98130314e-03,
-9.99992985e-01, -1.71500426e-05, 4.70854949e-06]]))
[[ -4.91007768e-05 -7.28732630e-07 -2.51941401e-05 -2.50111043e-08
-4.77484718e-08 -9.72022463e-08]]
[[ -1.64845915e-08 -2.87059265e-08 -2.88764568e-07 -6.82121026e-09
2.84217094e-10 -1.70530257e-09]]
[ -4.90678076e-05 -6.71320777e-07 -2.46166110e-05 -1.13686838e-08
-4.83169060e-08 -9.37916411e-08]
[ -4.56753924e-05 -6.50857146e-07 -2.31756303e-05 -1.70530257e-08
-4.43378667e-08 -1.75592936e-02]
[[ 2.99386348e+01 -1.24442928e+02 9.67254672e+00 -1.58968536e+02
-5.91960010e+02 -2.48738183e+00]
[ -1.24442928e+02 5.62972166e+03 -5.00079203e+02 -7.13057475e+02
-7.82440674e+03 -1.05126925e+01]
[ 9.67254672e+00 -5.00079203e+02 4.87472259e+01 3.37373299e+00
6.96960872e+02 7.69866589e-01]
[ -1.58968536e+02 -7.13057475e+02 3.37373299e+00 6.82417837e+03
4.84485862e+03 3.21440021e+01]
[ -5.91960010e+02 -7.82440674e+03 6.96960872e+02 4.84485862e+03
3.43753691e+04 9.37524459e+01]
[ -2.48738183e+00 -1.05126925e+01 7.69866589e-01 3.21440021e+01
9.37524459e+01 5.23915258e+02]]
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> print res_norm3.model.score(res_norm3.params)
[ -4.90678076e-05 -6.71320777e-07 -2.46166110e-05 -1.13686838e-08
-4.83169060e-08 -9.37916411e-08]
>>> print res_norm3.model.score(start_params)
[ -4.56753924e-05 -6.50857146e-07 -2.31756303e-05 -1.70530257e-08
-4.43378667e-08 -1.75592936e-02]
>>> mod_norm2.loglike(start_params/2.)
-598.56178102781314
>>> print np.linalg.inv(-1*mod_norm2.hessian(res_norm3.params))
[[ 2.99386348e+01 -1.24442928e+02 9.67254672e+00 -1.58968536e+02
-5.91960010e+02 -2.48738183e+00]
[ -1.24442928e+02 5.62972166e+03 -5.00079203e+02 -7.13057475e+02
-7.82440674e+03 -1.05126925e+01]
[ 9.67254672e+00 -5.00079203e+02 4.87472259e+01 3.37373299e+00
6.96960872e+02 7.69866589e-01]
[ -1.58968536e+02 -7.13057475e+02 3.37373299e+00 6.82417837e+03
4.84485862e+03 3.21440021e+01]
[ -5.91960010e+02 -7.82440674e+03 6.96960872e+02 4.84485862e+03
3.43753691e+04 9.37524459e+01]
[ -2.48738183e+00 -1.05126925e+01 7.69866589e-01 3.21440021e+01
9.37524459e+01 5.23915258e+02]]
>>> print np.sqrt(np.diag(res_bfgs.cov_params()))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
>>> print res_norm3.bse
[ 5.47162086 75.03147114 6.98192136 82.60858536 185.40595756
22.88919522]
>>> res_norm3.conf_int
<bound method LikelihoodModelResults.conf_int of <statsmodels.model.LikelihoodModelResults object at 0x021317F0>>
>>> res_norm3.conf_int()
array([[0.96421437, 1.01999835],
[0.99251725, 1.04863332],
[0.95721328, 1.01246222],
[0.97134549, 1.02695393],
[0.97050081, 1.02660988],
[0.97773434, 1.03290028],
[0.97529207, 1.01428874]])
>>> res_norm3.params
array([ -3.08181304, 234.34701361, -14.99684381, 27.94088692,
-237.14649571, 274.6857294 ])
>>> res2.params
array([ -3.08181404, 234.34702702, -14.99684418, 27.94090839,
-237.1465136 ])
>>>
>>> res_norm3.params - res2.params
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ValueError: shape mismatch: objects cannot be broadcast to a single shape
>>> res_norm3.params[:-1] - res2.params
array([ 9.96859735e-07, -1.34122981e-05, 3.72278400e-07,
-2.14645839e-05, 1.78919019e-05])
>>>
>>> res_norm3.bse[:-1] - res2.bse
array([ -0.04309567, -5.33447922, -0.48741559, -0.31373822, -13.94570729])
>>> (res_norm3.bse[:-1] / res2.bse) - 1
array([-0.00781467, -0.06637735, -0.06525554, -0.00378352, -0.06995531])
>>> (res_norm3.bse[:-1] / res2.bse)*100. - 100
array([-0.7814667 , -6.6377355 , -6.52555369, -0.37835193, -6.99553089])
>>> np.sqrt(np.diag(np.linalg.inv(res_norm3.model.hessian(res_bfgs.params))))
array([ NaN, NaN, NaN, NaN, NaN, NaN])
>>> np.sqrt(np.diag(np.linalg.inv(-res_norm3.model.hessian(res_bfgs.params))))
array([ 5.10032831, 74.34988912, 6.96522122, 76.7091604 ,
169.8117832 , 22.91695494])
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> res2.bse
array([ 5.51471653, 80.36595035, 7.46933695, 82.92232357,
199.35166485])
>>>
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(-res_norm3.model.hessian(res_bfgs.params))))
>>> (bse_bfgs[:-1] / res2.bse)*100. - 100
array([ -7.51422527, -7.4858335 , -6.74913633, -7.49275094, -14.8179759 ])
>>> hb=-approx_hess(res_bfgs.params, mod_norm2.loglike, epsilon=-1e-4)
>>> hf=-approx_hess(res_bfgs.params, mod_norm2.loglike, epsilon=1e-4)
>>> hh = (hf+hb)/2.
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(-hh)))
>>> bse_bfgs
array([ NaN, NaN, NaN, NaN, NaN, NaN])
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(hh)))
>>> np.diag(hh)
array([ 9.81680159e-01, 1.39920076e-02, 4.98101826e-01,
3.60955710e-04, 9.57811608e-04, 1.90709670e-03])
>>> np.diag(np.inv(hh))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'inv'
>>> np.diag(np.linalg.inv(hh))
array([ 2.64875153e+01, 5.91578496e+03, 5.13279911e+01,
6.11533345e+03, 3.33775960e+04, 5.24357391e+02])
>>> res2.bse**2
array([ 3.04120984e+01, 6.45868598e+03, 5.57909945e+01,
6.87611175e+03, 3.97410863e+04])
>>> bse_bfgs
array([ 5.14660231, 76.91414015, 7.1643556 , 78.20059751,
182.69536402, 22.89885131])
>>> bse_bfgs - res_norm3.bse
array([-0.32501855, 1.88266901, 0.18243424, -4.40798785, -2.71059354,
0.00965609])
>>> (bse_bfgs[:-1] / res2.bse)*100. - 100
array([-6.67512508, -4.29511526, -4.0831115 , -5.69415552, -8.35523538])
>>> (res_norm3.bse[:-1] / res2.bse)*100. - 100
array([-0.7814667 , -6.6377355 , -6.52555369, -0.37835193, -6.99553089])
>>> (bse_bfgs / res_norm3.bse)*100. - 100
array([-5.94007812, 2.50917247, 2.61295176, -5.33599242, -1.46197759,
0.04218624])
>>> bse_bfgs
array([ 5.14660231, 76.91414015, 7.1643556 , 78.20059751,
182.69536402, 22.89885131])
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> res2.bse
array([ 5.51471653, 80.36595035, 7.46933695, 82.92232357,
199.35166485])
>>> dir(res_bfgs)
['__class__', '__delattr__', '__dict__', '__doc__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__str__', '__weakref__', 'bse', 'conf_int', 'cov_params', 'f_test', 'initialize', 'llf', 'mle_retvals', 'mle_settings', 'model', 'normalized_cov_params', 'params', 'scale', 't', 't_test']
>>> res_bfgs.scale
1.0
>>> res2.scale
81083.015420213851
>>> res2.mse_resid
81083.015420213851
>>> print np.sqrt(np.diag(np.linalg.inv(-1*mod_norm2.hessian(res_bfgs.params))))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
>>> print np.sqrt(np.diag(np.linalg.inv(-1*res_bfgs.model.hessian(res_bfgs.params))))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
Is scale a misnomer, actually scale squared, i.e. variance of error term ?
'''
print(res_norm3.model.score_obs(res_norm3.params).shape)
jac = res_norm3.model.score_obs(res_norm3.params)
print(np.sqrt(np.diag(np.dot(jac.T, jac)))/start_params)
jac2 = res_norm3.model.score_obs(res_norm3.params, centered=True)
print(np.sqrt(np.diag(np.linalg.inv(np.dot(jac.T, jac)))))
print(res_norm3.bse)
print(res2.bse)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django.conf import settings
from django import http
from django.test.utils import override_settings
from mox import IsA
from novaclient.v1_1 import servers
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class ServerWrapperTests(test.TestCase):
def test_get_base_attribute(self):
server = api.nova.Server(self.servers.first(), self.request)
self.assertEqual(server.id, self.servers.first().id)
def test_image_name(self):
image = self.images.first()
self.mox.StubOutWithMock(api.glance, 'image_get')
api.glance.image_get(IsA(http.HttpRequest),
image.id).AndReturn(image)
self.mox.ReplayAll()
server = api.nova.Server(self.servers.first(), self.request)
self.assertEqual(server.image_name, image.name)
class ComputeApiTests(test.APITestCase):
def test_server_reboot(self):
server = self.servers.first()
HARDNESS = servers.REBOOT_HARD
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get(server.id).AndReturn(server)
novaclient.servers.reboot(server.id, HARDNESS)
self.mox.ReplayAll()
ret_val = api.nova.server_reboot(self.request, server.id)
self.assertIsNone(ret_val)
def test_server_soft_reboot(self):
server = self.servers.first()
HARDNESS = servers.REBOOT_SOFT
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get(server.id).AndReturn(server)
novaclient.servers.reboot(server.id, HARDNESS)
self.mox.ReplayAll()
ret_val = api.nova.server_reboot(self.request, server.id, HARDNESS)
self.assertIsNone(ret_val)
def test_server_vnc_console(self):
server = self.servers.first()
console = self.servers.vnc_console_data
console_type = console["console"]["type"]
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get_vnc_console(server.id,
console_type).AndReturn(console)
self.mox.ReplayAll()
ret_val = api.nova.server_vnc_console(self.request,
server.id,
console_type)
self.assertIsInstance(ret_val, api.nova.VNCConsole)
def test_server_spice_console(self):
server = self.servers.first()
console = self.servers.spice_console_data
console_type = console["console"]["type"]
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get_spice_console(server.id,
console_type).AndReturn(console)
self.mox.ReplayAll()
ret_val = api.nova.server_spice_console(self.request,
server.id,
console_type)
self.assertIsInstance(ret_val, api.nova.SPICEConsole)
def test_server_list(self):
servers = self.servers.list()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.list(True, {'all_tenants': True}).AndReturn(servers)
self.mox.ReplayAll()
ret_val, has_more = api.nova.server_list(self.request,
all_tenants=True)
for server in ret_val:
self.assertIsInstance(server, api.nova.Server)
def test_server_list_pagination(self):
page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 20)
servers = self.servers.list()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.list(True,
{'all_tenants': True,
'marker': None,
'limit': page_size + 1}).AndReturn(servers)
self.mox.ReplayAll()
ret_val, has_more = api.nova.server_list(self.request,
{'marker': None,
'paginate': True},
all_tenants=True)
for server in ret_val:
self.assertIsInstance(server, api.nova.Server)
self.assertFalse(has_more)
@override_settings(API_RESULT_PAGE_SIZE=1)
def test_server_list_pagination_more(self):
page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 1)
servers = self.servers.list()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.list(True,
{'all_tenants': True,
'marker': None,
'limit': page_size + 1}) \
.AndReturn(servers[:page_size + 1])
self.mox.ReplayAll()
ret_val, has_more = api.nova.server_list(self.request,
{'marker': None,
'paginate': True},
all_tenants=True)
for server in ret_val:
self.assertIsInstance(server, api.nova.Server)
self.assertEquals(page_size, len(ret_val))
self.assertTrue(has_more)
def test_usage_get(self):
novaclient = self.stub_novaclient()
novaclient.usage = self.mox.CreateMockAnything()
novaclient.usage.get(self.tenant.id,
'start',
'end').AndReturn(self.usages.first())
self.mox.ReplayAll()
ret_val = api.nova.usage_get(self.request, self.tenant.id,
'start', 'end')
self.assertIsInstance(ret_val, api.nova.NovaUsage)
def test_usage_list(self):
usages = self.usages.list()
novaclient = self.stub_novaclient()
novaclient.usage = self.mox.CreateMockAnything()
novaclient.usage.list('start', 'end', True).AndReturn(usages)
self.mox.ReplayAll()
ret_val = api.nova.usage_list(self.request, 'start', 'end')
for usage in ret_val:
self.assertIsInstance(usage, api.nova.NovaUsage)
def test_server_get(self):
server = self.servers.first()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get(server.id).AndReturn(server)
self.mox.ReplayAll()
ret_val = api.nova.server_get(self.request, server.id)
self.assertIsInstance(ret_val, api.nova.Server)
def test_absolute_limits_handle_unlimited(self):
values = {"maxTotalCores": -1, "maxTotalInstances": 10}
limits = self.mox.CreateMockAnything()
limits.absolute = []
for key, val in values.iteritems():
limit = self.mox.CreateMockAnything()
limit.name = key
limit.value = val
limits.absolute.append(limit)
novaclient = self.stub_novaclient()
novaclient.limits = self.mox.CreateMockAnything()
novaclient.limits.get(reserved=True).AndReturn(limits)
self.mox.ReplayAll()
ret_val = api.nova.tenant_absolute_limits(self.request, reserved=True)
expected_results = {"maxTotalCores": float("inf"),
"maxTotalInstances": 10}
for key in expected_results.keys():
self.assertEquals(ret_val[key], expected_results[key])
|
|
import sys
import io
import datetime
from pyroman.utils import getkey, varsub
from .generic import Generic
## @class Processor
#
# This class takes a filename, processes it using objects and splits out
# output.
class Processor:
## @fn __init__
#
# The initialiser for the Processor class
#
# @param _root The root folder where the file is
# @param _filename The filename part of the path
def __init__(self, _root, _filename, _is_main_file=True):
self.root = _root
self.filename = _filename
self.filepath = '' # calculated by get_line()
self.is_main_file = _is_main_file
self.globalvars = {'root': _root, 'filename': _filename} # A hash for storing document data while processing
self.finished_once = False # The processing needs to be done several times
self.objects = [] # The sequence of objects that makes the document
self.process_queue = [] # A list of objects that need to be processed again
self.document = 'unproduced'
self.lineno = 0 # line number in file, incremented by get_line()
# SyntaxSugar
self.globalvars['$SyntaxSugar'] = []
# Templates
self.globalvars['$Templates'] = {}
# Labels
self.globalvars['$Labels'] = {}
## @fn init_file
#
# Open the file and store the pointer to the object
#
# @return True on success, False on failure
def init_file(self):
self.first_lines = [] #list(getkey(first_lines, self.output, [])) if self.is_main_file else []
self.filepath = ''.join([self.root,self.filename])
self.lineno = 0
try:
self.file = io.open(self.filepath, 'r')
except IOError:
return False
else:
return True
## @fn get_line
#
# Get the next line
#
# @return The line as a string
def get_line(self):
if len(self.first_lines):
self.doing_first_lines = True
return self.first_lines.pop(0) # shift
if self.doing_first_lines:
self.first_object = True
self.doing_first_lines = False
try:
line = self.file.readline()
self.lineno += 1
except IOError:
return ''
else:
return line
## @fn close_file
#
# Close the file opened for process
#
# @return True on success, False on failure
def close_file(self):
try:
self.file.close()
except IOError:
return False
else:
return True
## @fn load_objects
#
# Reads file line by line and strips out objects.
def load_objects(self):
self.doing_first_lines = True
self.first_object = True # First object will be treated differently
last_line = False # A flag for last line (eof) detection
lines = [] # A object line buffer, cleared after each object stored
object_start_line = 0 # line number of object start
while (True):
next_line = self.get_line()
            # Detect last line before stripping away the \n
if len(next_line) == 0:
last_line = True
next_line = next_line.strip('\n\r')
# Check if it is an empty line, if so, finish object
if not next_line:
if len(lines):
# Create a Generic Object that can later be upgraded
obj = Generic(self.globalvars, lines, self.first_object,
self.is_main_file, _filepath=self.filepath,
_lineno=object_start_line)
# Prepare for next round and save the current object
lines = []
if not self.doing_first_lines:
self.first_object = False
if not obj.removed:
self.objects.append(obj)
object_start_line = self.lineno + 1
else:
lines.append(next_line)
#lines.append(next_line.strip())
if last_line:
return
## @fn preprocess_objects
#
    # Loops through the object list until there are no objects left to (re)process
# Takes care of Input and Use objects before general processing
# and puts all other objects in the process queue for later handling
def preprocess_objects(self):
rerun = True
turn_count = 0
while (rerun and turn_count < 100):
turn_count += 1
rerun = False
i = 0
o = len(self.objects)
while i < o:
obj = self.objects[i]
if obj.object_name in ['Input']:
obj.process()
s = len(obj.sub_objects)
if s > 0: # if there are subobjects in object
result = []
for j in range(0, i):
result.append(self.objects[j])
for j in range(0, s):
result.append(obj.sub_objects[j])
for j in range(i+1, o):
result.append(self.objects[j])
self.objects = result
i -= 1
o += s - 1
elif 'Wrapper' in obj.object_name:
pass # The wrapper is omitted in the process queue
else:
self.process_queue.append(obj)
i += 1
## @fn process_objects_for_syntax_sugar
#
    # Loops through the object list until there are no objects left to (re)process
# Takes care of Input and Use objects before general processing
# and puts all other objects in the process queue for later handling
def process_objects_for_syntax_sugar(self):
for obj in self.objects:
# SyntaxSugar translation (for paragraphs, which is the fallback object type)
if obj.object_name == "Paragraph" and len(obj.lines):
if '$SyntaxSugar' in self.globalvars:
for sugar in self.globalvars['$SyntaxSugar']:
if not sugar.broken:
if sugar.translate(obj.lines[0]):
obj.content = '' # clear object content from old syntax sugar
obj.lines.pop(0)
obj.lines[:0] = sugar.result
obj.transform() # reload object from new source
obj.process_inline()
# Look for %Inlineobject% things and add those at the end of the document
# This must also create a hash $InlineObjects with the keys as hashes of
# the inline call. Those hashes are later used for inline varsub
    # The process will be recursive since the Inline objects are added last
# to the object queue.
#
# This code must both look for correct inline object definitions as well
    # as the customizable short forms, which should be stored as SyntaxSugars in
# a special hash $InlineSyntaxSugar.
## @fn process_object_queue
#
    # Loops through the object list until there are no objects left to (re)process
def process_object_queue(self):
subprocessors = getkey(self.globalvars, '$Subprocessors', [])
while(len(self.process_queue)):
obj = self.process_queue.pop(0)
if not obj.removed:
for sp in subprocessors:
new_objects = sp.run(obj)
if len(new_objects):
for no in new_objects:
self.objects.append(no)
self.process_queue.append(no)
obj.needs_rerun = True
obj.process()
if obj.needs_rerun:
self.process_queue.append(obj)
## @fn generate
#
    # An aggregate wrapper that calls all functions needed to generate the output.
    # This is the interface to the user application.
#
# @return True on success, False on failure
def generate(self):
        # Store today's date
self.globalvars['today'] = str(datetime.date.today())
if not self.init_file():
return False
self.load_objects()
self.close_file()
self.preprocess_objects()
self.process_objects_for_syntax_sugar()
self.process_object_queue()
        #self.perform_wrapping()
        return True
    ## @fn get_objects_as_string
#
# Concatenates all objects' types and primaries and returns them
#
    # @return Text representation of all objects
def get_objects_as_string(self):
output = ''
for obj in self.objects:
output = '\n'.join([output, '============================================',obj.dump()])
return output
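# Hedged usage sketch (added for illustration): `doc` is assumed to be an
# instance of the document class defined above; its constructor is not shown
# in this excerpt, so only the methods called below are taken from the source.
def _example_generate(doc):
    """Drive the full pipeline and return the object dump, or None on failure."""
    if not doc.generate():  # init_file -> load_objects -> preprocess -> process queue
        return None
    return doc.get_objects_as_string()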
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.bigquery_connection_v1.services.connection_service import pagers
from google.cloud.bigquery_connection_v1.types import connection
from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
from .client import ConnectionServiceClient
class ConnectionServiceAsyncClient:
"""Manages external data source connections and credentials."""
_client: ConnectionServiceClient
DEFAULT_ENDPOINT = ConnectionServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT
connection_path = staticmethod(ConnectionServiceClient.connection_path)
parse_connection_path = staticmethod(ConnectionServiceClient.parse_connection_path)
common_billing_account_path = staticmethod(
ConnectionServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
ConnectionServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
ConnectionServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
ConnectionServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
ConnectionServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(ConnectionServiceClient.common_project_path)
parse_common_project_path = staticmethod(
ConnectionServiceClient.parse_common_project_path
)
common_location_path = staticmethod(ConnectionServiceClient.common_location_path)
parse_common_location_path = staticmethod(
ConnectionServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConnectionServiceAsyncClient: The constructed client.
"""
return ConnectionServiceClient.from_service_account_info.__func__(ConnectionServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConnectionServiceAsyncClient: The constructed client.
"""
return ConnectionServiceClient.from_service_account_file.__func__(ConnectionServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
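    # Usage sketch (illustrative; not part of the generated surface): both
    # factory classmethods above return a fully configured async client from a
    # service-account key, e.g.
    #
    #   client = ConnectionServiceAsyncClient.from_service_account_file("key.json")
    #
    # where "key.json" is a placeholder path.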
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return ConnectionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> ConnectionServiceTransport:
"""Returns the transport used by the client instance.
Returns:
ConnectionServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the connection service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ConnectionServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = ConnectionServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def create_connection(
self,
request: Union[gcbc_connection.CreateConnectionRequest, dict] = None,
*,
parent: str = None,
connection: gcbc_connection.Connection = None,
connection_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcbc_connection.Connection:
r"""Creates a new connection.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_create_connection():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.CreateConnectionRequest(
parent="parent_value",
)
# Make the request
response = client.create_connection(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.bigquery_connection_v1.types.CreateConnectionRequest, dict]):
The request object. The request for
[ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection].
parent (:class:`str`):
Required. Parent resource name. Must be in the format
``projects/{project_id}/locations/{location_id}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`):
Required. Connection to create.
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
connection_id (:class:`str`):
Optional. Connection id that should
be assigned to the created connection.
This corresponds to the ``connection_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, connection, connection_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcbc_connection.CreateConnectionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if connection is not None:
request.connection = connection
if connection_id is not None:
request.connection_id = connection_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_connection,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def get_connection(
self,
request: Union[connection.GetConnectionRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> connection.Connection:
r"""Returns specified connection.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_get_connection():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.GetConnectionRequest(
name="name_value",
)
# Make the request
response = client.get_connection(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.bigquery_connection_v1.types.GetConnectionRequest, dict]):
The request object. The request for
[ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection].
name (:class:`str`):
Required. Name of the requested connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = connection.GetConnectionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_connection,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_connections(
self,
request: Union[connection.ListConnectionsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListConnectionsAsyncPager:
r"""Returns a list of connections in the given project.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_list_connections():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.ListConnectionsRequest(
parent="parent_value",
page_size=951,
)
# Make the request
page_result = client.list_connections(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.bigquery_connection_v1.types.ListConnectionsRequest, dict]):
The request object. The request for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
parent (:class:`str`):
Required. Parent resource name. Must be in the form:
``projects/{project_id}/locations/{location_id}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsAsyncPager:
The response for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = connection.ListConnectionsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_connections,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListConnectionsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def update_connection(
self,
request: Union[gcbc_connection.UpdateConnectionRequest, dict] = None,
*,
name: str = None,
connection: gcbc_connection.Connection = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcbc_connection.Connection:
r"""Updates the specified connection. For security
reasons, also resets credential if connection properties
are in the update field mask.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_update_connection():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.UpdateConnectionRequest(
name="name_value",
)
# Make the request
response = client.update_connection(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest, dict]):
The request object. The request for
[ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection].
name (:class:`str`):
Required. Name of the connection to update, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`):
Required. Connection containing the
updated fields.
This corresponds to the ``connection`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. Update mask for the
connection fields to be updated.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.bigquery_connection_v1.types.Connection:
Configuration parameters to establish
connection with an external data source,
except the credential attributes.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, connection, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcbc_connection.UpdateConnectionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if connection is not None:
request.connection = connection
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_connection,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def delete_connection(
self,
request: Union[connection.DeleteConnectionRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes connection and associated credential.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_delete_connection():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.DeleteConnectionRequest(
name="name_value",
)
# Make the request
client.delete_connection(request=request)
Args:
request (Union[google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest, dict]):
The request object. The request for
[ConnectionService.DeleteConnectionRequest][].
name (:class:`str`):
Required. Name of the deleted connection, for example:
``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = connection.DeleteConnectionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_connection,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
async def get_iam_policy(
self,
request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None,
*,
resource: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Gets the access control policy for a resource.
Returns an empty policy if the resource exists and does
not have a policy set.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_get_iam_policy():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.GetIamPolicyRequest(
resource="resource_value",
)
# Make the request
response = client.get_iam_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]):
The request object. Request message for `GetIamPolicy`
method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy is being requested. See the
operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
{
"bindings": [
{
"role":
"roles/resourcemanager.organizationAdmin",
"members": [ "user:[email protected]",
"group:[email protected]",
"domain:google.com",
"serviceAccount:[email protected]"
]
}, { "role":
"roles/resourcemanager.organizationViewer",
"members": ["user:[email protected]"],
"condition": { "title": "expirable access",
"description": "Does not grant access after
Sep 2020", "expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')", } }
]
}
**YAML Example**
bindings: - members: - user:\ [email protected] -
group:\ [email protected] - domain:google.com -
serviceAccount:\ [email protected]
role: roles/resourcemanager.organizationAdmin -
members: - user:\ [email protected] role:
roles/resourcemanager.organizationViewer
condition: title: expirable access description:
Does not grant access after Sep 2020 expression:
request.time <
timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the
[IAM developer's
guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.GetIamPolicyRequest(**request)
elif not request:
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def set_iam_policy(
self,
request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None,
*,
resource: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Sets the access control policy on the specified resource.
Replaces any existing policy.
Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
``PERMISSION_DENIED`` errors.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_set_iam_policy():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.SetIamPolicyRequest(
resource="resource_value",
)
# Make the request
response = client.set_iam_policy(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
The request object. Request message for `SetIamPolicy`
method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy is being specified. See the
operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
{
"bindings": [
{
"role":
"roles/resourcemanager.organizationAdmin",
"members": [ "user:[email protected]",
"group:[email protected]",
"domain:google.com",
"serviceAccount:[email protected]"
]
}, { "role":
"roles/resourcemanager.organizationViewer",
"members": ["user:[email protected]"],
"condition": { "title": "expirable access",
"description": "Does not grant access after
Sep 2020", "expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')", } }
]
}
**YAML Example**
bindings: - members: - user:\ [email protected] -
group:\ [email protected] - domain:google.com -
serviceAccount:\ [email protected]
role: roles/resourcemanager.organizationAdmin -
members: - user:\ [email protected] role:
roles/resourcemanager.organizationViewer
condition: title: expirable access description:
Does not grant access after Sep 2020 expression:
request.time <
timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the
[IAM developer's
guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.SetIamPolicyRequest(**request)
elif not request:
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def test_iam_permissions(
self,
request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None,
*,
resource: str = None,
permissions: Sequence[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> iam_policy_pb2.TestIamPermissionsResponse:
r"""Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
permissions, not a ``NOT_FOUND`` error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for
authorization checking. This operation may "fail open" without
warning.
.. code-block:: python
from google.cloud import bigquery_connection_v1
def sample_test_iam_permissions():
# Create a client
client = bigquery_connection_v1.ConnectionServiceClient()
# Initialize request argument(s)
request = bigquery_connection_v1.TestIamPermissionsRequest(
resource="resource_value",
permissions=['permissions_value_1', 'permissions_value_2'],
)
# Make the request
response = client.test_iam_permissions(request=request)
# Handle the response
print(response)
Args:
request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
The request object. Request message for
`TestIamPermissions` method.
resource (:class:`str`):
REQUIRED: The resource for which the
policy detail is being requested. See
the operation documentation for the
appropriate value for this field.
This corresponds to the ``resource`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
permissions (:class:`Sequence[str]`):
The set of permissions to check for the ``resource``.
Permissions with wildcards (such as '*' or 'storage.*')
are not allowed. For more information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
This corresponds to the ``permissions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
Response message for TestIamPermissions method.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([resource, permissions])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = iam_policy_pb2.TestIamPermissionsRequest(**request)
elif not request:
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions,
)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
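# Hedged usage sketch (added for illustration): the samples embedded in the
# docstrings above use the synchronous client; with this class the same calls
# are awaited inside a coroutine. The default connection name below is a
# placeholder, and credentials are resolved from the environment as described
# in __init__.
async def _example_get_connection(
    name: str = "projects/my-project/locations/us/connections/my-connection",
) -> connection.Connection:
    async with ConnectionServiceAsyncClient() as client:
        request = connection.GetConnectionRequest(name=name)
        return await client.get_connection(request=request)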
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-bigquery-connection",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ConnectionServiceAsyncClient",)
|
|
'''
@author: Frank
'''
import zstacktestagent.testagent as testagent
import zstacklib.utils.shell as shell
import zstacklib.utils.jsonobject as jsonobject
import zstacklib.utils.http as http
import zstacklib.utils.linux as linux
import zstacklib.utils.ssh as ssh
import zstacklib.utils.log as log
import threading
import time
logger = log.get_logger(__name__)
class VmStatusCmd(testagent.AgentCommand):
def __init__(self):
super(VmStatusCmd, self).__init__()
self.vm_uuids = None
class VmDevIoStatusCmd(testagent.AgentCommand):
def __init__(self):
super(VmDevIoStatusCmd, self).__init__()
self.vm_uuid = None
self.vm_device = None
class VmStatusRsp(testagent.AgentResponse):
def __init__(self):
super(VmStatusRsp, self).__init__()
self.vm_status = {}
class VmDeviceIoRsp(testagent.AgentResponse):
def __init__(self):
super(VmDeviceIoRsp, self).__init__()
self.vm_device_io = None
class DeleteVmCmd(testagent.AgentCommand):
def __init__(self):
super(DeleteVmCmd, self).__init__()
self.vm_uuids = None
class SshInVmCmd(testagent.AgentCommand):
def __init__(self):
super(SshInVmCmd, self).__init__()
self.ip = None
self.username = None
self.password = None
self.port = 22
self.timeout = 180 # seconds
self.command = None
class SshInVmRsp(testagent.AgentResponse):
def __init__(self):
super(SshInVmRsp, self).__init__()
self.result = None
class ScpInVmCmd(testagent.AgentCommand):
def __init__(self):
super(ScpInVmCmd, self).__init__()
self.ip = None
self.username = None
self.password = None
self.port = 22
self.timeout = 180 # seconds
self.src_file = None
self.dst_file = None
class ScpInVmRsp(testagent.AgentResponse):
def __init__(self):
super(ScpInVmRsp, self).__init__()
self.result = None
IS_VM_STOPPED_PATH = '/vm/isvmstopped'
IS_VM_DESTROYED_PATH = '/vm/isvmdestroyed'
IS_VM_RUNNING_PATH = '/vm/isvmrunning'
DELETE_VM_PATH = '/vm/deletevm'
SSH_GUEST_VM_PATH = '/vm/sshguestvm'
SCP_GUEST_VM_PATH = '/vm/scpguestvm'
VM_STATUS = '/vm/vmstatus'
LIST_ALL_VM = '/vm/listallvm'
VM_BLK_STATUS = '/vm/vmblkstatus'
ECHO_PATH = '/host/echo'
VM_DEVICE_QOS = '/vm/deviceqos'
class VmAgent(testagent.TestAgent):
VM_STATUS_RUNNING = 'running'
VM_STATUS_STOPPED = 'shut off'
VM_STATUS_DESTROYED = None
VM_EXCEPTION_STATUS = 'EXCEPTION_STATUS'
def start(self):
testagent.TestAgentServer.http_server.register_sync_uri(IS_VM_RUNNING_PATH, self.is_vm_running)
        testagent.TestAgentServer.http_server.register_sync_uri(IS_VM_DESTROYED_PATH, self.is_vm_destroyed)
testagent.TestAgentServer.http_server.register_sync_uri(IS_VM_STOPPED_PATH, self.is_vm_stopped)
testagent.TestAgentServer.http_server.register_sync_uri(DELETE_VM_PATH, self.delete_vm)
testagent.TestAgentServer.http_server.register_sync_uri(SSH_GUEST_VM_PATH, self.ssh_in_guest_vm)
testagent.TestAgentServer.http_server.register_sync_uri(SCP_GUEST_VM_PATH, self.scp_in_guest_vm)
testagent.TestAgentServer.http_server.register_sync_uri(VM_STATUS, self.get_vm_status)
testagent.TestAgentServer.http_server.register_sync_uri(LIST_ALL_VM, self.list_all_vms)
testagent.TestAgentServer.http_server.register_sync_uri(VM_BLK_STATUS, self.get_vm_blk_status)
testagent.TestAgentServer.http_server.register_sync_uri(ECHO_PATH, self.echo)
testagent.TestAgentServer.http_server.register_sync_uri(VM_DEVICE_QOS, self.get_vm_disk_qos)
shell.logcmd = True
@testagent.replyerror
def echo(self, req):
logger.debug('echo ping')
return ''
def stop(self):
pass
    def get_vm_disk_qos(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
vm_uuid = cmd.vm_uuid
dev = cmd.vm_device
output = shell.call('virsh blkdeviotune %s %s' % (vm_uuid, dev))
rsp = VmDeviceIoRsp()
rsp.vm_device_io = output
return jsonobject.dumps(rsp)
def _list_all_vms(self):
output = shell.call('virsh list --all')
return output.split('\n')
def _is_vm_status(self, uuid, status):
curr_status = self._get_vm_status(uuid)
if status:
if status in curr_status:
return True
else:
if curr_status != self.VM_EXCEPTION_STATUS:
return True
logger.debug('[vm uuid: %s] does not have status: %s.' % (uuid, status))
return False
def _get_vm_status(self, uuid):
try:
output = shell.call('virsh domstate %s' % uuid)
except Exception as e:
logger.debug('Exception happened when trying to get [vm uuid: %s] status' % uuid)
return self.VM_EXCEPTION_STATUS
return output
def _vm_blk_status(self, uuid):
output = shell.call('virsh domblklist %s' % uuid).split('\n')
output = output[2:]
ret = {}
for item in output:
if item != '':
blk = item.split()
ret[blk[0]] = blk[1]
return ret
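    # Illustrative note (added): `virsh domblklist <uuid>` prints two header
    # lines followed by "Target  Source" pairs, e.g.
    #
    #   Target     Source
    #   ------------------------------------------------
    #   vda        /var/lib/libvirt/images/test.qcow2
    #
    # so _vm_blk_status() returns {'vda': '/var/lib/libvirt/images/test.qcow2'}.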
def _delete_vm(self, uuid):
shell.call('virsh undefine --managed-save %s' % uuid)
def _destroy_vm(self, uuid):
shell.call('virsh destroy %s' % uuid)
def _delete_all_vm(self):
output = self._list_all_vms()
output = filter(bool, output)
for o in output:
uuid = o.split()[1]
if self.VM_STATUS_RUNNING in o:
self._destroy_vm(uuid)
self._delete_vm(uuid)
else:
self._delete_vm(uuid)
@testagent.replyerror
def is_vm_running(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = VmStatusRsp()
for uuid in cmd.vm_uuids:
rsp.vm_status[uuid] = self._is_vm_status(uuid, self.VM_STATUS_RUNNING)
return jsonobject.dumps(rsp)
@testagent.replyerror
def is_vm_stopped(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = VmStatusRsp()
for uuid in cmd.vm_uuids:
rsp.vm_status[uuid] = self._is_vm_status(uuid, self.VM_STATUS_STOPPED)
return jsonobject.dumps(rsp)
@testagent.replyerror
def is_vm_destroyed(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = VmStatusRsp()
for uuid in cmd.vm_uuids:
rsp.vm_status[uuid] = self._is_vm_status(uuid, self.VM_STATUS_DESTROYED)
return jsonobject.dumps(rsp)
@testagent.replyerror
def get_vm_status(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = VmStatusRsp()
for uuid in cmd.vm_uuids:
rsp.vm_status[uuid] = self._get_vm_status(uuid)
logger.debug('[vm:%s status:] %s.' % (uuid, rsp.vm_status[uuid]))
return jsonobject.dumps(rsp)
@testagent.replyerror
def get_vm_blk_status(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = VmStatusRsp()
for uuid in cmd.vm_uuids:
rsp.vm_status[uuid] = self._vm_blk_status(uuid)
return jsonobject.dumps(rsp)
@testagent.replyerror
def list_all_vms(self, req):
rsp = VmStatusRsp()
rsp.vm_status['vms'] = self._list_all_vms()
return jsonobject.dumps(rsp)
@testagent.replyerror
def delete_vm(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
vmuuids = cmd.vm_uuids_
if not vmuuids:
self._delete_all_vm()
else:
for uuid in vmuuids:
if self._is_vm_status(uuid, self.VM_STATUS_RUNNING):
self._destroy_vm(uuid)
if (self._is_vm_status(uuid, self.VM_STATUS_STOPPED) or self._is_vm_status(uuid, self.VM_STATUS_DESTROYED)):
self._delete_vm(uuid)
return jsonobject.dumps(testagent.AgentResponse())
@testagent.replyerror
def ssh_in_guest_vm(self, req):
rsp = SshInVmRsp()
rsp_dict = {'error': None, 'completion': None}
cmd = jsonobject.loads(req[http.REQUEST_BODY])
def login_vm():
try:
ret, output, stderr = ssh.execute(cmd.command, cmd.ip, cmd.username, cmd.password, False)
if ret != 0:
rsp.success = False
rsp.error = '%s\n%s' % (output, stderr)
else:
rsp.result = output
rsp_dict['completion'] = True
return True
except Exception as e:
logger.debug('[SSH] unable to ssh in vm[ip:%s], assume its not ready. Exception: %s' % (cmd.ip, str(e)))
rsp_dict['error'] = True
rsp_dict['completion'] = True
return False
thread = threading.Thread(target = login_vm)
thread.start()
timeout = time.time() + cmd.timeout
while not rsp_dict['completion'] and time.time() < timeout:
time.sleep(0.5)
if rsp_dict['completion']:
if rsp_dict['error']:
rsp.success = False
rsp.error = 'ssh command:%s met exception.' % cmd.command
logger.debug('ssh command:%s met exception.' % cmd.command)
else:
logger.debug('[SSH] ssh in vm[%s] doing %s, timeout after %s seconds' % (cmd.ip, cmd.command, cmd.timeout))
rsp.success = False
rsp.error = 'ssh execution keeps failure, until timeout: %s' \
% cmd.timeout
logger.debug('[SSH] ssh in vm[%s] doing %s done. result is %s' % (cmd.ip, cmd.command, rsp.success))
return jsonobject.dumps(rsp)
@testagent.replyerror
def scp_in_guest_vm(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = ScpInVmRsp()
try:
ssh.scp_file(cmd.src_file, cmd.dst_file, cmd.ip, cmd.username, cmd.password, cmd.port)
rsp.success = True
rsp.output = '[SCP] Successfully scp %s to [vm:] %s %s' % \
(cmd.src_file, cmd.ip, cmd.dst_file)
except Exception as e:
logger.debug('[SCP] scp %s to vm[ip:%s] failed: %s.' % \
(cmd.src_file, cmd.ip, str(e)))
rsp.success = False
rsp.error = str(e)
return jsonobject.dumps(rsp)
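# Hedged client-side sketch (added for illustration; not part of the agent).
# A test driver would POST a JSON-serialized command to one of the URIs
# registered in VmAgent.start(). The agent URL is a placeholder, urllib2 is
# used here only to keep the sketch self-contained (this module is Python 2),
# and the real zstack-woodpecker driver uses its own http helpers instead.
def _example_is_vm_running(agent_url, vm_uuid):
    import urllib2
    cmd = VmStatusCmd()
    cmd.vm_uuids = [vm_uuid]
    req = urllib2.Request(agent_url + IS_VM_RUNNING_PATH,
                          data=jsonobject.dumps(cmd),
                          headers={'Content-Type': 'application/json'})
    # The agent answers with a JSON-encoded VmStatusRsp.
    return jsonobject.loads(urllib2.urlopen(req).read())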
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class IpGroupsOperations:
"""IpGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
ip_groups_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.IpGroup":
"""Gets the specified ipGroups.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_groups_name: The name of the ipGroups.
:type ip_groups_name: str
:param expand: Expands resourceIds (of Firewalls/Network Security Groups etc.) back referenced
by the IpGroups resource.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IpGroup, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.IpGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('IpGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
ip_groups_name: str,
parameters: "_models.IpGroup",
**kwargs: Any
) -> "_models.IpGroup":
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'IpGroup')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('IpGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('IpGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
ip_groups_name: str,
parameters: "_models.IpGroup",
**kwargs: Any
) -> AsyncLROPoller["_models.IpGroup"]:
"""Creates or updates an ipGroups in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_groups_name: The name of the ipGroups.
:type ip_groups_name: str
:param parameters: Parameters supplied to the create or update IpGroups operation.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.IpGroup
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either IpGroup or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.IpGroup]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroup"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
ip_groups_name=ip_groups_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('IpGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
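    # Illustrative usage sketch (not part of the generated code; the client and
    # resource names below are placeholders): with an async NetworkManagementClient
    # wired to these operations, the poller returned here is awaited for the final
    # IpGroup.
    #
    #   from azure.mgmt.network.v2020_03_01.models import IpGroup
    #   poller = await client.ip_groups.begin_create_or_update(
    #       "my-resource-group", "my-ip-group",
    #       IpGroup(location="westus", ip_addresses=["10.0.0.0/24"]))
    #   ip_group = await poller.result()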
async def update_groups(
self,
resource_group_name: str,
ip_groups_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.IpGroup":
"""Updates tags of an IpGroups resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_groups_name: The name of the ipGroups.
:type ip_groups_name: str
:param parameters: Parameters supplied to the update ipGroups operation.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IpGroup, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.IpGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_groups.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('IpGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
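    # Illustrative usage sketch (same hypothetical client as above): tags are sent
    # as a TagsObject and the patched IpGroup is returned directly.
    #
    #   from azure.mgmt.network.v2020_03_01.models import TagsObject
    #   ip_group = await client.ip_groups.update_groups(
    #       "my-resource-group", "my-ip-group", TagsObject(tags={"env": "test"}))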
async def _delete_initial(
self,
resource_group_name: str,
ip_groups_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
ip_groups_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified ipGroups.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ip_groups_name: The name of the ipGroups.
:type ip_groups_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
ip_groups_name=ip_groups_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ipGroupsName': self._serialize.url("ip_groups_name", ip_groups_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups/{ipGroupsName}'} # type: ignore
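    # Illustrative usage sketch: begin_delete returns an AsyncLROPoller[None], so
    # awaiting result() simply blocks until the service reports the deletion done.
    #
    #   poller = await client.ip_groups.begin_delete("my-resource-group", "my-ip-group")
    #   await poller.result()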
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.IpGroupListResult"]:
"""Gets all IpGroups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IpGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_03_01.models.IpGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('IpGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ipGroups'} # type: ignore
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.IpGroupListResult"]:
"""Gets all IpGroups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IpGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_03_01.models.IpGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IpGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('IpGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ipGroups'} # type: ignore
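# ---------------------------------------------------------------------------
# End-to-end usage sketch (illustrative only; client construction is outside this
# module and the subscription id is a placeholder). The AsyncItemPaged returned by
# list() is consumed with "async for"; following next_link is handled internally by
# get_next/extract_data above.
#
#   import asyncio
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.network.aio import NetworkManagementClient
#
#   async def print_ip_groups():
#       async with DefaultAzureCredential() as credential:
#           async with NetworkManagementClient(credential, "<subscription-id>") as client:
#               async for ip_group in client.ip_groups.list():
#                   print(ip_group.name, ip_group.ip_addresses)
#
#   asyncio.run(print_ip_groups())
# ---------------------------------------------------------------------------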
|
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61968.Common.Document import Document
class ErpInvoiceLineItem(Document):
"""An individual line item on an invoice.An individual line item on an invoice.
"""
def __init__(self, lineNumber='', glDateTime='', lineAmount=0.0, netAmount=0.0, kind="other", lineVersion='', glAccount='', previousAmount=0.0, ErpPayments=None, ContainerErpInvoiceLineItem=None, WorkBillingInfos=None, ErpQuoteLineItem=None, UserAttributes=None, ErpRecDelvLineItem=None, ErpPayableLineItem=None, ComponentErpInvoiceLineItems=None, billPeriod=None, ErpInvoice=None, CustomerBillingInfos=None, ErpRecLineItem=None, ErpJournalEntries=None, *args, **kw_args):
"""Initialises a new 'ErpInvoiceLineItem' instance.
@param lineNumber: Line item number on invoice statement.
@param glDateTime: Date and time line item will be posted to the General Ledger.
@param lineAmount: Amount due for this line item.
@param netAmount: Net line item charge amount.
@param kind: Kind of line item. Values are: "other", "recalculation", "initial"
@param lineVersion: Version number of the bill run.
@param glAccount: General Ledger account code, must be a valid combination.
@param previousAmount: Previous line item charge amount.
@param ErpPayments:
@param ContainerErpInvoiceLineItem:
@param WorkBillingInfos:
@param ErpQuoteLineItem:
@param UserAttributes:
@param ErpRecDelvLineItem:
@param ErpPayableLineItem:
@param ComponentErpInvoiceLineItems:
@param billPeriod: Bill period for the line item.
@param ErpInvoice:
@param CustomerBillingInfos: Customer billing for services rendered.
@param ErpRecLineItem:
@param ErpJournalEntries:
"""
#: Line item number on invoice statement.
self.lineNumber = lineNumber
#: Date and time line item will be posted to the General Ledger.
self.glDateTime = glDateTime
#: Amount due for this line item.
self.lineAmount = lineAmount
#: Net line item charge amount.
self.netAmount = netAmount
#: Kind of line item. Values are: "other", "recalculation", "initial"
self.kind = kind
#: Version number of the bill run.
self.lineVersion = lineVersion
#: General Ledger account code, must be a valid combination.
self.glAccount = glAccount
#: Previous line item charge amount.
self.previousAmount = previousAmount
self._ErpPayments = []
self.ErpPayments = [] if ErpPayments is None else ErpPayments
self._ContainerErpInvoiceLineItem = None
self.ContainerErpInvoiceLineItem = ContainerErpInvoiceLineItem
self._WorkBillingInfos = []
self.WorkBillingInfos = [] if WorkBillingInfos is None else WorkBillingInfos
self._ErpQuoteLineItem = None
self.ErpQuoteLineItem = ErpQuoteLineItem
self._UserAttributes = []
self.UserAttributes = [] if UserAttributes is None else UserAttributes
self._ErpRecDelvLineItem = None
self.ErpRecDelvLineItem = ErpRecDelvLineItem
self._ErpPayableLineItem = None
self.ErpPayableLineItem = ErpPayableLineItem
self._ComponentErpInvoiceLineItems = []
self.ComponentErpInvoiceLineItems = [] if ComponentErpInvoiceLineItems is None else ComponentErpInvoiceLineItems
self.billPeriod = billPeriod
self._ErpInvoice = None
self.ErpInvoice = ErpInvoice
self._CustomerBillingInfos = []
self.CustomerBillingInfos = [] if CustomerBillingInfos is None else CustomerBillingInfos
self._ErpRecLineItem = None
self.ErpRecLineItem = ErpRecLineItem
self._ErpJournalEntries = []
self.ErpJournalEntries = [] if ErpJournalEntries is None else ErpJournalEntries
super(ErpInvoiceLineItem, self).__init__(*args, **kw_args)
_attrs = ["lineNumber", "glDateTime", "lineAmount", "netAmount", "kind", "lineVersion", "glAccount", "previousAmount"]
_attr_types = {"lineNumber": str, "glDateTime": str, "lineAmount": float, "netAmount": float, "kind": str, "lineVersion": str, "glAccount": str, "previousAmount": float}
_defaults = {"lineNumber": '', "glDateTime": '', "lineAmount": 0.0, "netAmount": 0.0, "kind": "other", "lineVersion": '', "glAccount": '', "previousAmount": 0.0}
_enums = {"kind": "ErpInvoiceLineItemKind"}
_refs = ["ErpPayments", "ContainerErpInvoiceLineItem", "WorkBillingInfos", "ErpQuoteLineItem", "UserAttributes", "ErpRecDelvLineItem", "ErpPayableLineItem", "ComponentErpInvoiceLineItems", "billPeriod", "ErpInvoice", "CustomerBillingInfos", "ErpRecLineItem", "ErpJournalEntries"]
_many_refs = ["ErpPayments", "WorkBillingInfos", "UserAttributes", "ComponentErpInvoiceLineItems", "CustomerBillingInfos", "ErpJournalEntries"]
def getErpPayments(self):
return self._ErpPayments
def setErpPayments(self, value):
for p in self._ErpPayments:
filtered = [q for q in p.ErpInvoiceLineItems if q != self]
            p._ErpInvoiceLineItems = filtered
for r in value:
if self not in r._ErpInvoiceLineItems:
r._ErpInvoiceLineItems.append(self)
self._ErpPayments = value
ErpPayments = property(getErpPayments, setErpPayments)
def addErpPayments(self, *ErpPayments):
for obj in ErpPayments:
if self not in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.append(self)
self._ErpPayments.append(obj)
def removeErpPayments(self, *ErpPayments):
for obj in ErpPayments:
if self in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.remove(self)
self._ErpPayments.remove(obj)
def getContainerErpInvoiceLineItem(self):
return self._ContainerErpInvoiceLineItem
def setContainerErpInvoiceLineItem(self, value):
if self._ContainerErpInvoiceLineItem is not None:
filtered = [x for x in self.ContainerErpInvoiceLineItem.ComponentErpInvoiceLineItems if x != self]
self._ContainerErpInvoiceLineItem._ComponentErpInvoiceLineItems = filtered
self._ContainerErpInvoiceLineItem = value
if self._ContainerErpInvoiceLineItem is not None:
if self not in self._ContainerErpInvoiceLineItem._ComponentErpInvoiceLineItems:
self._ContainerErpInvoiceLineItem._ComponentErpInvoiceLineItems.append(self)
ContainerErpInvoiceLineItem = property(getContainerErpInvoiceLineItem, setContainerErpInvoiceLineItem)
def getWorkBillingInfos(self):
return self._WorkBillingInfos
def setWorkBillingInfos(self, value):
for p in self._WorkBillingInfos:
filtered = [q for q in p.ErpLineItems if q != self]
            p._ErpLineItems = filtered
for r in value:
if self not in r._ErpLineItems:
r._ErpLineItems.append(self)
self._WorkBillingInfos = value
WorkBillingInfos = property(getWorkBillingInfos, setWorkBillingInfos)
def addWorkBillingInfos(self, *WorkBillingInfos):
for obj in WorkBillingInfos:
if self not in obj._ErpLineItems:
obj._ErpLineItems.append(self)
self._WorkBillingInfos.append(obj)
def removeWorkBillingInfos(self, *WorkBillingInfos):
for obj in WorkBillingInfos:
if self in obj._ErpLineItems:
obj._ErpLineItems.remove(self)
self._WorkBillingInfos.remove(obj)
def getErpQuoteLineItem(self):
return self._ErpQuoteLineItem
def setErpQuoteLineItem(self, value):
if self._ErpQuoteLineItem is not None:
self._ErpQuoteLineItem._ErpInvoiceLineItem = None
self._ErpQuoteLineItem = value
if self._ErpQuoteLineItem is not None:
self._ErpQuoteLineItem.ErpInvoiceLineItem = None
self._ErpQuoteLineItem._ErpInvoiceLineItem = self
ErpQuoteLineItem = property(getErpQuoteLineItem, setErpQuoteLineItem)
def getUserAttributes(self):
return self._UserAttributes
def setUserAttributes(self, value):
for p in self._UserAttributes:
filtered = [q for q in p.ErpInvoiceLineItems if q != self]
            p._ErpInvoiceLineItems = filtered
for r in value:
if self not in r._ErpInvoiceLineItems:
r._ErpInvoiceLineItems.append(self)
self._UserAttributes = value
UserAttributes = property(getUserAttributes, setUserAttributes)
def addUserAttributes(self, *UserAttributes):
for obj in UserAttributes:
if self not in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.append(self)
self._UserAttributes.append(obj)
def removeUserAttributes(self, *UserAttributes):
for obj in UserAttributes:
if self in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.remove(self)
self._UserAttributes.remove(obj)
def getErpRecDelvLineItem(self):
return self._ErpRecDelvLineItem
def setErpRecDelvLineItem(self, value):
if self._ErpRecDelvLineItem is not None:
self._ErpRecDelvLineItem._ErpInvoiceLineItem = None
self._ErpRecDelvLineItem = value
if self._ErpRecDelvLineItem is not None:
self._ErpRecDelvLineItem.ErpInvoiceLineItem = None
self._ErpRecDelvLineItem._ErpInvoiceLineItem = self
ErpRecDelvLineItem = property(getErpRecDelvLineItem, setErpRecDelvLineItem)
def getErpPayableLineItem(self):
return self._ErpPayableLineItem
def setErpPayableLineItem(self, value):
if self._ErpPayableLineItem is not None:
self._ErpPayableLineItem._ErpInvoiceLineItem = None
self._ErpPayableLineItem = value
if self._ErpPayableLineItem is not None:
self._ErpPayableLineItem.ErpInvoiceLineItem = None
self._ErpPayableLineItem._ErpInvoiceLineItem = self
ErpPayableLineItem = property(getErpPayableLineItem, setErpPayableLineItem)
def getComponentErpInvoiceLineItems(self):
return self._ComponentErpInvoiceLineItems
def setComponentErpInvoiceLineItems(self, value):
for x in self._ComponentErpInvoiceLineItems:
x.ContainerErpInvoiceLineItem = None
for y in value:
y._ContainerErpInvoiceLineItem = self
self._ComponentErpInvoiceLineItems = value
ComponentErpInvoiceLineItems = property(getComponentErpInvoiceLineItems, setComponentErpInvoiceLineItems)
def addComponentErpInvoiceLineItems(self, *ComponentErpInvoiceLineItems):
for obj in ComponentErpInvoiceLineItems:
obj.ContainerErpInvoiceLineItem = self
def removeComponentErpInvoiceLineItems(self, *ComponentErpInvoiceLineItems):
for obj in ComponentErpInvoiceLineItems:
obj.ContainerErpInvoiceLineItem = None
# Bill period for the line item.
billPeriod = None
def getErpInvoice(self):
return self._ErpInvoice
def setErpInvoice(self, value):
if self._ErpInvoice is not None:
filtered = [x for x in self.ErpInvoice.ErpInvoiceLineItems if x != self]
self._ErpInvoice._ErpInvoiceLineItems = filtered
self._ErpInvoice = value
if self._ErpInvoice is not None:
if self not in self._ErpInvoice._ErpInvoiceLineItems:
self._ErpInvoice._ErpInvoiceLineItems.append(self)
ErpInvoice = property(getErpInvoice, setErpInvoice)
def getCustomerBillingInfos(self):
"""Customer billing for services rendered.
"""
return self._CustomerBillingInfos
def setCustomerBillingInfos(self, value):
for p in self._CustomerBillingInfos:
filtered = [q for q in p.ErpInvoiceLineItems if q != self]
            p._ErpInvoiceLineItems = filtered
for r in value:
if self not in r._ErpInvoiceLineItems:
r._ErpInvoiceLineItems.append(self)
self._CustomerBillingInfos = value
CustomerBillingInfos = property(getCustomerBillingInfos, setCustomerBillingInfos)
def addCustomerBillingInfos(self, *CustomerBillingInfos):
for obj in CustomerBillingInfos:
if self not in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.append(self)
self._CustomerBillingInfos.append(obj)
def removeCustomerBillingInfos(self, *CustomerBillingInfos):
for obj in CustomerBillingInfos:
if self in obj._ErpInvoiceLineItems:
obj._ErpInvoiceLineItems.remove(self)
self._CustomerBillingInfos.remove(obj)
def getErpRecLineItem(self):
return self._ErpRecLineItem
def setErpRecLineItem(self, value):
if self._ErpRecLineItem is not None:
self._ErpRecLineItem._ErpInvoiceLineItem = None
self._ErpRecLineItem = value
if self._ErpRecLineItem is not None:
self._ErpRecLineItem.ErpInvoiceLineItem = None
self._ErpRecLineItem._ErpInvoiceLineItem = self
ErpRecLineItem = property(getErpRecLineItem, setErpRecLineItem)
def getErpJournalEntries(self):
return self._ErpJournalEntries
def setErpJournalEntries(self, value):
for x in self._ErpJournalEntries:
x.ErpInvoiceLineItem = None
for y in value:
y._ErpInvoiceLineItem = self
self._ErpJournalEntries = value
ErpJournalEntries = property(getErpJournalEntries, setErpJournalEntries)
def addErpJournalEntries(self, *ErpJournalEntries):
for obj in ErpJournalEntries:
obj.ErpInvoiceLineItem = self
def removeErpJournalEntries(self, *ErpJournalEntries):
for obj in ErpJournalEntries:
obj.ErpInvoiceLineItem = None
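# Minimal usage sketch (not part of the CIM model; values are placeholders): the
# association setters keep both ends in sync, so adding a component line item also
# fills in its ContainerErpInvoiceLineItem back-reference.
#
#   parent = ErpInvoiceLineItem(lineNumber='1', lineAmount=100.0)
#   child = ErpInvoiceLineItem(lineNumber='1.1', lineAmount=40.0, netAmount=40.0)
#   parent.addComponentErpInvoiceLineItems(child)
#   assert child.ContainerErpInvoiceLineItem is parent
#   assert child in parent.ComponentErpInvoiceLineItems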
|
|
"""Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
#-# Modified by D.C.-G. for translation purposes
from OpenGL import GL
import numpy
import os
from albow import TableView, TableColumn, Label, Button, Column, CheckBox, AttrRef, Row, ask, alert, input_text_buttons, TabPanel
from albow.table_view import TableRowView
from albow.translate import _
from config import config
from editortools.editortool import EditorTool
from editortools.tooloptions import ToolOptions
from glbackground import Panel
from glutils import DisplayList
from mceutils import loadPNGTexture, alertException, drawTerrainCuttingWire, drawCube
from operation import Operation
import pymclevel
from pymclevel.box import BoundingBox, FloatBox
from pymclevel import nbt
import logging
import version_utils
from nbtexplorer import loadFile, saveFile, NBTExplorerToolPanel
import pygame
log = logging.getLogger(__name__)
class PlayerRemoveOperation(Operation):
undoTag = None
def __init__(self, tool, player="Player (Single Player)"):
super(PlayerRemoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.player = player
self.level = self.tool.editor.level
self.canUndo = False
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
if self.player == "Player (Single Player)":
answer = ask(_("Are you sure you want to delete the default player?"), ["Yes", "Cancel"])
if answer == "Cancel":
return
self.player = "Player"
if recordUndo:
self.undoTag = self.level.getPlayerTag(self.player)
self.level.players.remove(self.player)
if self.tool.panel:
if self.player != "Player":
#self.tool.panel.players.remove(version_utils.getPlayerNameFromUUID(self.player))
self.tool.panel.players.remove(version_utils.playercache.getPlayerFromUUID(self.player))
else:
self.tool.panel.players.remove("Player (Single Player)")
while self.tool.panel.table.index >= len(self.tool.panel.players):
self.tool.panel.table.index -= 1
if len(self.tool.panel.players) == 0:
self.tool.hidePanel()
self.tool.showPanel()
self.tool.markerList.invalidate()
self.tool.movingPlayer = None
pos = self.tool.revPlayerPos[self.player]
del self.tool.playerPos[pos]
if self.player != "Player":
del self.tool.playerTexture[self.player]
else:
del self.level.root_tag["Data"]["Player"]
del self.tool.revPlayerPos[self.player]
self.canUndo = True
def undo(self):
if not (self.undoTag is None):
if self.player != "Player":
self.level.playerTagCache[self.level.getPlayerPath(self.player)] = self.undoTag
else:
self.level.root_tag["Data"]["Player"] = self.undoTag
self.level.players.append(self.player)
if self.tool.panel:
if self.player != "Player":
self.tool.panel.players.append(version_utils.playercache.getPlayerFromUUID(self.player))
else:
self.tool.panel.players.append("Player (Single Player)")
if "[No players]" in self.tool.panel.players:
self.tool.panel.players.remove("[No players]")
self.tool.hidePanel()
self.tool.showPanel()
self.tool.markerList.invalidate()
def redo(self):
self.perform()
class PlayerAddOperation(Operation):
playerTag = None
def __init__(self, tool):
super(PlayerAddOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.level = self.tool.editor.level
self.canUndo = False
def perform(self, recordUndo=True):
initial = ""
allowed_chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
while True:
self.player = input_text_buttons("Enter a Player Name: ", 160, initial=initial, allowed_chars=allowed_chars)
if self.player is None:
return
elif len(self.player) > 16:
alert("Name too long. Maximum name length is 16.")
initial = self.player
elif len(self.player) < 1:
alert("Name too short. Minimum name length is 1.")
initial = self.player
else:
break
try:
'''
print "Player: \""+str(self.player)+"\""
self.uuid = version_utils.playercache.getPlayerFromPlayername(self.player)
print "UUID: \""+str(self.uuid)+"\""
self.player = version_utils.playercache.getPlayerFromUUID(self.uuid) #Case Corrected
'''
data = version_utils.playercache.getPlayerInfo(self.player, force=True)
if isinstance(data, tuple):
self.uuid = data[0]
self.player = data[1]
else:
self.uuid = data
except:
action = ask("Could not get {}'s UUID. Please make sure that you are connected to the internet and that the player {} exists.".format(self.player, self.player), ["Enter UUID manually", "Cancel"])
if action != "Enter UUID manually":
return
self.uuid = input_text_buttons("Enter a Player UUID: ", 160)
if not self.uuid:
return
self.player = version_utils.playercache.getPlayerFromUUID(self.uuid)
if self.player == self.uuid.replace("-", ""):
if ask("UUID was not found. Continue anyways?") == "Cancel":
return
if self.uuid in self.level.players:
alert("Player already exists in this World.")
return
self.playerTag = self.newPlayer()
if self.tool.panel:
self.tool.panel.players.append(self.player)
if self.level.oldPlayerFolderFormat:
self.level.playerTagCache[self.level.getPlayerPath(self.player)] = self.playerTag
self.level.players.append(self.player)
if self.tool.panel:
self.tool.panel.player_UUID[self.player] = self.player
else:
self.level.playerTagCache[self.level.getPlayerPath(self.uuid)] = self.playerTag
self.level.players.append(self.uuid)
if self.tool.panel:
self.tool.panel.player_UUID[self.player] = self.uuid
self.tool.playerPos[(0,0,0)] = self.uuid
self.tool.revPlayerPos[self.uuid] = (0,0,0)
self.tool.playerTexture[self.uuid] = loadPNGTexture(version_utils.getPlayerSkin(self.uuid, force=False))
self.tool.markerList.invalidate()
self.tool.recordMove = False
self.tool.movingPlayer = self.uuid
if self.tool.panel:
self.tool.hidePanel()
self.tool.showPanel()
self.canUndo = True
self.playerTag.save(self.level.getPlayerPath(self.uuid))
self.tool.nonSavedPlayers.append(self.level.getPlayerPath(self.uuid))
self.tool.inOtherDimension[self.editor.level.dimNo].append(self.uuid)
def newPlayer(self):
playerTag = nbt.TAG_Compound()
playerTag['Air'] = nbt.TAG_Short(300)
playerTag['AttackTime'] = nbt.TAG_Short(0)
playerTag['DeathTime'] = nbt.TAG_Short(0)
playerTag['Fire'] = nbt.TAG_Short(-20)
playerTag['Health'] = nbt.TAG_Short(20)
playerTag['HurtTime'] = nbt.TAG_Short(0)
playerTag['Score'] = nbt.TAG_Int(0)
playerTag['FallDistance'] = nbt.TAG_Float(0)
playerTag['OnGround'] = nbt.TAG_Byte(0)
playerTag['Dimension'] = nbt.TAG_Int(self.editor.level.dimNo)
playerTag["Inventory"] = nbt.TAG_List()
playerTag['Motion'] = nbt.TAG_List([nbt.TAG_Double(0) for i in range(3)])
spawn = self.level.playerSpawnPosition()
spawnX = spawn[0]
spawnZ = spawn[2]
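        # Scan the spawn column from the top of the world downward for the first
        # non-air block; the new player is placed one block above it.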
blocks = [self.level.blockAt(spawnX, i, spawnZ) for i in range(self.level.Height)]
i = self.level.Height
done = False
for index, b in enumerate(reversed(blocks)):
if b != 0 and not done:
i = index
done = True
spawnY = self.level.Height - i
playerTag['Pos'] = nbt.TAG_List([nbt.TAG_Double([spawnX, spawnY, spawnZ][i]) for i in range(3)])
playerTag['Rotation'] = nbt.TAG_List([nbt.TAG_Float(0), nbt.TAG_Float(0)])
return playerTag
def undo(self):
self.level.players.remove(self.uuid)
self.tool.movingPlayer = None
if self.tool.panel:
self.tool.panel.players.remove(self.player)
self.tool.panel.player_UUID.pop(self.player)
del self.tool.playerPos[(0,0,0)]
del self.tool.revPlayerPos[self.uuid]
del self.tool.playerTexture[self.uuid]
os.remove(self.level.getPlayerPath(self.uuid))
self.tool.nonSavedPlayers.remove(self.level.getPlayerPath(self.uuid))
self.tool.markerList.invalidate()
def redo(self):
if not (self.playerTag is None):
self.level.playerTagCache[self.level.getPlayerPath(self.uuid)] = self.playerTag
self.level.players.append(self.uuid)
if self.tool.panel:
self.tool.panel.players.append(self.player)
self.tool.panel.player_UUID[self.player] = self.uuid
self.tool.playerTexture[self.uuid] = loadPNGTexture(version_utils.getPlayerSkin(self.uuid))
self.tool.playerPos[(0,0,0)] = self.uuid
self.tool.revPlayerPos[self.uuid] = (0,0,0)
self.playerTag.save(self.level.getPlayerPath(self.uuid))
self.tool.nonSavedPlayers.append(self.level.getPlayerPath(self.uuid))
self.tool.markerList.invalidate()
class PlayerMoveOperation(Operation):
undoPos = None
redoPos = None
def __init__(self, tool, pos, player="Player", yp=(None, None)):
super(PlayerMoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.canUndo = False
self.pos = pos
self.player = player
self.yp = yp
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
try:
level = self.tool.editor.level
try:
self.undoPos = level.getPlayerPosition(self.player)
self.undoDim = level.getPlayerDimension(self.player)
self.undoYP = level.getPlayerOrientation(self.player)
except Exception, e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
yaw, pitch = self.yp
if yaw is not None and pitch is not None:
level.setPlayerOrientation((yaw, pitch), self.player)
level.setPlayerPosition(self.pos, self.player)
level.setPlayerDimension(level.dimNo, self.player)
self.tool.markerList.invalidate()
self.canUndo = True
except pymclevel.PlayerNotFound, e:
print "Player move failed: ", e
def undo(self):
if not (self.undoPos is None):
level = self.tool.editor.level
try:
self.redoPos = level.getPlayerPosition(self.player)
self.redoDim = level.getPlayerDimension(self.player)
self.redoYP = level.getPlayerOrientation(self.player)
except Exception, e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
level.setPlayerPosition(self.undoPos, self.player)
level.setPlayerDimension(self.undoDim, self.player)
level.setPlayerOrientation(self.undoYP, self.player)
self.tool.markerList.invalidate()
def redo(self):
if not (self.redoPos is None):
level = self.tool.editor.level
try:
self.undoPos = level.getPlayerPosition(self.player)
self.undoDim = level.getPlayerDimension(self.player)
self.undoYP = level.getPlayerOrientation(self.player)
except Exception, e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
level.setPlayerPosition(self.redoPos, self.player)
level.setPlayerDimension(self.redoDim, self.player)
level.setPlayerOrientation(self.redoYP, self.player)
self.tool.markerList.invalidate()
@staticmethod
def bufferSize():
return 20
class SpawnPositionInvalid(Exception):
pass
def okayAt63(level, pos):
"""blocks 63 or 64 must be occupied"""
# return level.blockAt(pos[0], 63, pos[2]) != 0 or level.blockAt(pos[0], 64, pos[2]) != 0
return True
def okayAboveSpawn(level, pos):
"""3 blocks above spawn must be open"""
return not any([level.blockAt(pos[0], pos[1] + i, pos[2]) for i in range(1, 4)])
def positionValid(level, pos):
try:
return okayAt63(level, pos) and okayAboveSpawn(level, pos)
except EnvironmentError:
return False
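# Illustrative sketch (the level object is hypothetical): positionValid is what the
# spawn tool uses to decide whether a column would trigger Minecraft's spawn
# relocation, e.g. to tint the placement reticle red before committing a move.
#
#   if not positionValid(level, (x, y, z)):
#       print "Spawn point would be relocated by Minecraft"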
class PlayerSpawnMoveOperation(Operation):
undoPos = None
redoPos = None
def __init__(self, tool, pos):
super(PlayerSpawnMoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool, self.pos = tool, pos
self.canUndo = False
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
level = self.tool.editor.level
'''
if isinstance(level, pymclevel.MCInfdevOldLevel):
if not positionValid(level, self.pos):
if config.spawn.spawnProtection.get():
raise SpawnPositionInvalid(
"You cannot have two air blocks at Y=63 and Y=64 in your spawn point's column. Additionally, you cannot have a solid block in the three blocks above your spawn point. It's weird, I know.")
'''
self.undoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.pos)
self.tool.markerList.invalidate()
self.canUndo = True
def undo(self):
if self.undoPos is not None:
level = self.tool.editor.level
self.redoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.undoPos)
self.tool.markerList.invalidate()
def redo(self):
if self.redoPos is not None:
level = self.tool.editor.level
self.undoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.redoPos)
self.tool.markerList.invalidate()
class PlayerPositionPanel(Panel):
def __init__(self, tool):
Panel.__init__(self)
self.tool = tool
self.player_UUID = {}
self.level = tool.editor.level
if hasattr(self.level, 'players'):
players = self.level.players or ["[No players]"]
if not self.level.oldPlayerFolderFormat:
for player in players:
if player != "Player" and player != "[No players]":
if len(player) > 4 and player[4] == "-":
os.rename(os.path.join(self.level.worldFolder.getFolderPath("playerdata"), player+".dat"), os.path.join(self.level.worldFolder.getFolderPath("playerdata"), player.replace("-", "", 1)+".dat"))
player = player.replace("-", "", 1)
data = version_utils.playercache.getPlayerInfo(player)
if isinstance(data, tuple):
self.player_UUID[data[1]] = data[0]
else:
self.player_UUID[player] = data
if "Player" in players:
self.player_UUID["Player (Single Player)"] = "Player"
if "[No players]" not in players:
players = sorted(self.player_UUID.keys(), key=lambda x: False if x == "Player (Single Player)" else x)
else:
players = ["Player (Single Player)"]
self.players = players
self.pages = TabPanel()
tab_height = self.pages.tab_height
max_height = tab_height + self.tool.editor.mainViewport.height - self.tool.editor.toolbar.height - self.tool.editor.subwidgets[0].height - self.pages.margin * 2
#-# Uncomment the following line to have a maximum height for this panel.
# max_height = min(max_height, 500)
self.editNBTDataButton = Button("Edit NBT", action=self.editNBTData, tooltipText="Open the NBT Explorer to edit player's attributes and inventory")
addButton = Button("Add", action=self.tool.addPlayer)
removeButton = Button("Remove", action=self.tool.removePlayer)
gotoButton = Button("Goto", action=self.tool.gotoPlayer)
gotoCameraButton = Button("Goto View", action=self.tool.gotoPlayerCamera)
moveButton = Button("Move", action=self.tool.movePlayer)
moveToCameraButton = Button("Align to Camera", action=self.tool.movePlayerToCamera)
reloadSkin = Button("Reload Skins", action=self.tool.reloadSkins, tooltipText="This pulls skins from the online server, so this may take a while")
btns = Column([self.editNBTDataButton, addButton, removeButton, gotoButton, gotoCameraButton, moveButton, moveToCameraButton, reloadSkin], margin=0, spacing=2)
h = max_height - btns.height - self.pages.margin * 2 - 2 - self.font.get_linesize() * 2
col = Label('')
def close():
self.pages.show_page(col)
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject={}, height=max_height, \
close_text="Go Back", no_header=True, close_action=close,
load_text=None)
self.nbttree.shrink_wrap()
self.nbtpage = Column([self.nbttree,])
self.nbtpage.shrink_wrap()
self.pages.add_page("NBT Data", self.nbtpage)
self.pages.set_rect(map(lambda x:x+self.margin, self.nbttree._rect))
tableview = TableView(nrows=(h - (self.font.get_linesize() * 2.5)) / self.font.get_linesize(),
header_height=self.font.get_linesize(),
columns=[TableColumn("Player Name(s):", self.nbttree.width - (self.margin * 3)),],
)
tableview.index = 0
tableview.num_rows = lambda: len(players)
tableview.row_data = lambda i: (players[i],)
tableview.row_is_selected = lambda x: x == tableview.index
tableview.zebra_color = (0, 0, 0, 48)
def selectTableRow(i, evt):
tableview.index = i
tableview.click_row = selectTableRow
def mouse_down(e):
if e.button == 1 and e.num_clicks > 1:
self.editNBTData()
TableRowView.mouse_down(tableview.rows, e)
tableview.rows.mouse_down = mouse_down
tableview.rows.tooltipText = "Double-click or use the button below to edit the NBT Data."
self.table = tableview
col.set_parent(None)
self.col = col = Column([tableview, btns], spacing=2)
self.pages.add_page("Players", col, 0)
self.pages.shrink_wrap()
self.pages.show_page(col)
self.add(self.pages)
self.shrink_wrap()
self.max_height = max_height
def editNBTData(self):
player = self.selectedPlayer
if player == 'Player (Single Player)':
alert("Not yet implemented.\nUse the NBT Explorer to edit this player.")
elif player == '[No players]':
return
else:
path = os.path.join(os.path.split(self.level.filename)[0], 'playerdata')
if not os.path.exists(path):
path = os.path.join(os.path.split(self.level.filename)[0], 'players')
if player + '.dat' in os.listdir(path):
fName = os.path.join(path, player + '.dat')
nbtObject, dataKeyName, dontSaveRootTag, fn = loadFile(fName)
self.pages.remove_page(self.nbtpage)
def close():
self.pages.show_page(self.col)
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject=nbtObject, fileName=fName,
dontSaveRootTag=dontSaveRootTag, dataKeyName=dataKeyName,
height=self.max_height, no_header=True, close_text="Go Back",
close_action=close, load_text=None)
self.nbtpage = Column([self.nbttree,])
self.nbtpage.shrink_wrap()
self.pages.add_page("NBT Data", self.nbtpage)
self.pages.show_page(self.nbtpage)
#elif self.selectedPlayer.isNew:
else:
alert(_("Error while getting player file.\n%s not found.")%(player + '.dat'), doNotTranslate=True)
@property
def selectedPlayer(self):
if not self.level.oldPlayerFolderFormat:
player = self.players[self.table.index]
if player != "Player (Single Player)" and player != "[No players]":
return self.player_UUID[player]
else:
return player
else:
return self.players[self.table.index]
class PlayerPositionTool(EditorTool):
surfaceBuild = True
toolIconName = "player"
tooltipText = "Players"
movingPlayer = None
recordMove = True
def reloadTextures(self):
self.charTex = loadPNGTexture('char.png')
@alertException
def addPlayer(self):
op = PlayerAddOperation(self)
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
@alertException
def removePlayer(self):
player = self.panel.selectedPlayer
if player != "[No players]":
op = PlayerRemoveOperation(self, player)
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
@alertException
def movePlayer(self):
if self.panel.selectedPlayer != "[No players]":
self.movingPlayer = self.panel.selectedPlayer
if self.movingPlayer == "Player (Single Player)":
self.movingPlayer = "Player"
@alertException
def movePlayerToCamera(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
if player != "[No players]":
pos = self.editor.mainViewport.cameraPosition
y = self.editor.mainViewport.yaw
p = self.editor.mainViewport.pitch
op = PlayerMoveOperation(self, pos, player, (y, p))
self.movingPlayer = None
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
def delete_skin(self, uuid):
del self.playerTexture[uuid]
self.playerTexture[uuid] = loadPNGTexture('char.png')
@alertException
def reloadSkins(self):
#result = ask("This pulls skins from the online server, so this may take a while", ["Ok", "Cancel"])
#if result == "Ok":
try:
for player in self.editor.level.players:
if player != "Player" and player in self.playerTexture.keys():
del self.playerTexture[player]
self.playerTexture[player] = loadPNGTexture(version_utils.getPlayerSkin(player, force=True, instance=self))
except:
raise Exception("Could not connect to the skins server, please check your Internet connection and try again.")
def gotoPlayerCamera(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
try:
pos = self.editor.level.getPlayerPosition(player)
y, p = self.editor.level.getPlayerOrientation(player)
self.editor.gotoDimension(self.editor.level.getPlayerDimension(player))
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.yaw = y
self.editor.mainViewport.pitch = p
self.editor.mainViewport.stopMoving()
self.editor.mainViewport.invalidate()
except pymclevel.PlayerNotFound:
pass
def gotoPlayer(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
try:
if self.editor.mainViewport.pitch < 0:
self.editor.mainViewport.pitch = -self.editor.mainViewport.pitch
self.editor.mainViewport.cameraVector = self.editor.mainViewport._cameraVector()
cv = self.editor.mainViewport.cameraVector
pos = self.editor.level.getPlayerPosition(player)
pos = map(lambda p, c: p - c * 5, pos, cv)
self.editor.gotoDimension(self.editor.level.getPlayerDimension(player))
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.stopMoving()
except pymclevel.PlayerNotFound:
pass
def __init__(self, *args):
EditorTool.__init__(self, *args)
self.reloadTextures()
self.nonSavedPlayers = []
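        # UV rectangles, in skin-image pixel coordinates, for the six faces of the
        # player's head (textureVerticesHead) and the hat/overlay layer
        # (textureVerticesHat); both are reshaped and scaled below into the range
        # drawCube expects.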
textureVerticesHead = numpy.array(
(
# Backside of Head
24, 16, # Bottom Left
24, 8, # Top Left
32, 8, # Top Right
32, 16, # Bottom Right
# Front of Head
8, 16,
8, 8,
16, 8,
16, 16,
#
24, 0,
16, 0,
16, 8,
24, 8,
#
16, 0,
8, 0,
8, 8,
16, 8,
#
8, 8,
0, 8,
0, 16,
8, 16,
16, 16,
24, 16,
24, 8,
16, 8,
), dtype='f4')
textureVerticesHat = numpy.array(
(
56, 16,
56, 8,
64, 8,
64, 16,
48, 16,
48, 8,
40, 8,
40, 16,
56, 0,
48, 0,
48, 8,
56, 8,
48, 0,
40, 0,
40, 8,
48, 8,
40, 8,
32, 8,
32, 16,
40, 16,
48, 16,
56, 16,
56, 8,
48, 8,
), dtype='f4')
textureVerticesHead.shape = (24, 2)
textureVerticesHat.shape = (24, 2)
textureVerticesHead *= 4
textureVerticesHead[:, 1] *= 2
textureVerticesHat *= 4
textureVerticesHat[:, 1] *= 2
self.texVerts = (textureVerticesHead, textureVerticesHat)
self.playerPos = {0:{}, -1:{}, 1:{}}
self.playerTexture = {}
self.revPlayerPos = {0:{}, -1:{}, 1:{}}
self.inOtherDimension = {0: [], 1: [], -1: []}
self.markerList = DisplayList()
panel = None
def showPanel(self):
if not self.panel:
self.panel = PlayerPositionPanel(self)
self.panel.centery = (self.editor.mainViewport.height - self.editor.toolbar.height) / 2 + self.editor.subwidgets[0].height
self.panel.left = self.editor.left
self.editor.add(self.panel)
def hidePanel(self):
if self.panel and self.panel.parent:
self.panel.parent.remove(self.panel)
self.panel = None
def drawToolReticle(self):
if self.movingPlayer is None:
return
pos, direction = self.editor.blockFaceUnderCursor
dim = self.editor.level.getPlayerDimension(self.movingPlayer)
pos = (pos[0], pos[1] + 2, pos[2])
x, y, z = pos
# x,y,z=map(lambda p,d: p+d, pos, direction)
GL.glEnable(GL.GL_BLEND)
GL.glColor(1.0, 1.0, 1.0, 0.5)
self.drawCharacterHead(x + 0.5, y + 0.75, z + 0.5, self.revPlayerPos[dim][self.movingPlayer], dim)
GL.glDisable(GL.GL_BLEND)
GL.glEnable(GL.GL_DEPTH_TEST)
self.drawCharacterHead(x + 0.5, y + 0.75, z + 0.5, self.revPlayerPos[dim][self.movingPlayer], dim)
drawTerrainCuttingWire(BoundingBox((x, y, z), (1, 1, 1)))
drawTerrainCuttingWire(BoundingBox((x, y - 1, z), (1, 1, 1)))
#drawTerrainCuttingWire( BoundingBox((x,y-2,z), (1,1,1)) )
GL.glDisable(GL.GL_DEPTH_TEST)
markerLevel = None
def drawToolMarkers(self):
if not config.settings.drawPlayerHeads.get():
return
if self.markerLevel != self.editor.level:
self.markerList.invalidate()
self.markerLevel = self.editor.level
self.markerList.call(self._drawToolMarkers)
def _drawToolMarkers(self):
GL.glColor(1.0, 1.0, 1.0, 0.5)
GL.glEnable(GL.GL_DEPTH_TEST)
GL.glMatrixMode(GL.GL_MODELVIEW)
for player in self.editor.level.players:
try:
pos = self.editor.level.getPlayerPosition(player)
yaw, pitch = self.editor.level.getPlayerOrientation(player)
dim = self.editor.level.getPlayerDimension(player)
self.inOtherDimension[dim].append(player)
self.playerPos[dim][pos] = player
self.revPlayerPos[dim][player] = pos
if player != "Player" and config.settings.downloadPlayerSkins.get():
self.playerTexture[player] = loadPNGTexture(version_utils.getPlayerSkin(player, force=False))
else:
self.playerTexture[player] = self.charTex
if dim != self.editor.level.dimNo:
continue
x, y, z = pos
GL.glPushMatrix()
GL.glTranslate(x, y, z)
GL.glRotate(-yaw, 0, 1, 0)
GL.glRotate(pitch, 1, 0, 0)
GL.glColor(1, 1, 1, 1)
self.drawCharacterHead(0, 0, 0, (x,y,z), self.editor.level.dimNo)
GL.glPopMatrix()
# GL.glEnable(GL.GL_BLEND)
drawTerrainCuttingWire(FloatBox((x - .5, y - .5, z - .5), (1, 1, 1)),
c0=(0.3, 0.9, 0.7, 1.0),
c1=(0, 0, 0, 0),
)
#GL.glDisable(GL.GL_BLEND)
except Exception, e:
print repr(e)
continue
GL.glDisable(GL.GL_DEPTH_TEST)
def drawCharacterHead(self, x, y, z, realCoords=None, dim=0):
GL.glEnable(GL.GL_CULL_FACE)
origin = (x - 0.25, y - 0.25, z - 0.25)
size = (0.5, 0.5, 0.5)
box = FloatBox(origin, size)
hat_origin = (x - 0.275, y - 0.275, z - 0.275)
hat_size = (0.55, 0.55, 0.55)
hat_box = FloatBox(hat_origin, hat_size)
if realCoords is not None and self.playerPos[dim][realCoords] != "Player" and config.settings.downloadPlayerSkins.get():
drawCube(box,
texture=self.playerTexture[self.playerPos[dim][realCoords]], textureVertices=self.texVerts[0])
GL.glEnable(GL.GL_BLEND)
GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)
drawCube(hat_box,
texture=self.playerTexture[self.playerPos[dim][realCoords]], textureVertices=self.texVerts[1])
GL.glDisable(GL.GL_BLEND)
else:
drawCube(box,
texture=self.charTex, textureVertices=self.texVerts[0])
GL.glDisable(GL.GL_CULL_FACE)
#@property
#def statusText(self):
# if not self.panel:
# return ""
# player = self.panel.selectedPlayer
# if player == "Player":
# return "Click to move the player"
#
# return _("Click to move the player \"{0}\"").format(player)
@alertException
def mouseDown(self, evt, pos, direction):
if self.movingPlayer is None:
return
pos = (pos[0] + 0.5, pos[1] + 2.75, pos[2] + 0.5)
op = PlayerMoveOperation(self, pos, self.movingPlayer)
self.movingPlayer = None
if self.recordMove:
self.editor.addOperation(op)
addingMoving = False
else:
self.editor.performWithRetry(op) #Prevent recording of Undo when adding player
self.recordMove = True
addingMoving = True
if op.canUndo and not addingMoving:
self.editor.addUnsavedEdit()
def keyDown(self, evt):
if not self.recordMove:
if not pygame.key.get_focused():
return
keyname = evt.dict.get('keyname', None) or self.panel.get_root().getKey(evt)
if keyname == "Escape":
self.recordMove = True
def keyUp(self, evt):
pass
def levelChanged(self):
self.markerList.invalidate()
@alertException
def toolSelected(self):
self.showPanel()
self.movingPlayer = None
@alertException
def toolReselected(self):
if self.panel:
self.gotoPlayer()
class PlayerSpawnPositionOptions(ToolOptions):
def __init__(self, tool):
Panel.__init__(self)
self.tool = tool
self.spawnProtectionCheckBox = CheckBox(ref=AttrRef(tool, "spawnProtection"))
self.spawnProtectionLabel = Label("Spawn Position Safety")
self.spawnProtectionLabel.mouse_down = self.spawnProtectionCheckBox.mouse_down
tooltipText = "Minecraft will randomly move your spawn point if you try to respawn in a column where there are no blocks at Y=63 and Y=64. Only uncheck this box if Minecraft is changed."
self.spawnProtectionLabel.tooltipText = self.spawnProtectionCheckBox.tooltipText = tooltipText
row = Row((self.spawnProtectionCheckBox, self.spawnProtectionLabel))
col = Column((Label("Spawn Point Options"), row, Button("OK", action=self.dismiss)))
self.add(col)
self.shrink_wrap()
class PlayerSpawnPositionTool(PlayerPositionTool):
surfaceBuild = True
toolIconName = "playerspawn"
tooltipText = "Move Spawn Point\nRight-click for options"
def __init__(self, *args):
PlayerPositionTool.__init__(self, *args)
self.optionsPanel = PlayerSpawnPositionOptions(self)
def toolEnabled(self):
return self.editor.level.dimNo == 0
def showPanel(self):
self.panel = Panel()
button = Button("Goto Spawn", action=self.gotoSpawn)
self.panel.add(button)
self.panel.shrink_wrap()
self.panel.left = self.editor.left
self.panel.centery = self.editor.centery
self.editor.add(self.panel)
def gotoSpawn(self):
cv = self.editor.mainViewport.cameraVector
pos = self.editor.level.playerSpawnPosition()
pos = map(lambda p, c: p - c * 5, pos, cv)
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.stopMoving()
@property
def statusText(self):
return "Click to set the spawn position."
spawnProtection = config.spawn.spawnProtection.property()
def drawToolReticle(self):
pos, direction = self.editor.blockFaceUnderCursor
x, y, z = map(lambda p, d: p + d, pos, direction)
color = (1.0, 1.0, 1.0, 0.5)
if isinstance(self.editor.level, pymclevel.MCInfdevOldLevel) and self.spawnProtection:
if not positionValid(self.editor.level, (x, y, z)):
color = (1.0, 0.0, 0.0, 0.5)
GL.glColor(*color)
GL.glEnable(GL.GL_BLEND)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5, z + 0.5)
GL.glDisable(GL.GL_BLEND)
GL.glEnable(GL.GL_DEPTH_TEST)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5, z + 0.5)
color2 = map(lambda a: a * 0.4, color)
drawTerrainCuttingWire(BoundingBox((x, y, z), (1, 1, 1)), color2, color)
GL.glDisable(GL.GL_DEPTH_TEST)
def _drawToolMarkers(self):
x, y, z = self.editor.level.playerSpawnPosition()
GL.glColor(1.0, 1.0, 1.0, 1.0)
GL.glEnable(GL.GL_DEPTH_TEST)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5 + 0.125 * numpy.sin(self.editor.frames * 0.05), z + 0.5)
GL.glDisable(GL.GL_DEPTH_TEST)
def drawCage(self, x, y, z):
cageTexVerts = numpy.array(pymclevel.MCInfdevOldLevel.materials.blockTextures[52, 0])
pixelScale = 0.5 if self.editor.level.materials.name in ("Pocket", "Alpha") else 1.0
texSize = 16 * pixelScale
cageTexVerts *= pixelScale
cageTexVerts = numpy.array(
[((tx, ty), (tx + texSize, ty), (tx + texSize, ty + texSize), (tx, ty + texSize)) for (tx, ty) in
cageTexVerts], dtype='float32')
GL.glEnable(GL.GL_ALPHA_TEST)
drawCube(BoundingBox((x, y, z), (1, 1, 1)), texture=pymclevel.alphaMaterials.terrainTexture,
textureVertices=cageTexVerts)
GL.glDisable(GL.GL_ALPHA_TEST)
@alertException
def mouseDown(self, evt, pos, direction):
pos = map(lambda p, d: p + d, pos, direction)
op = PlayerSpawnMoveOperation(self, pos)
try:
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
self.markerList.invalidate()
except SpawnPositionInvalid, e:
if "Okay" != ask(str(e), responses=["Okay", "Fix it for me!"]):
level = self.editor.level
status = ""
if not okayAt63(level, pos):
level.setBlockAt(pos[0], 63, pos[2], 1)
status += _("Block added at y=63.\n")
if 59 < pos[1] < 63:
pos[1] = 63
status += _("Spawn point moved upward to y=63.\n")
if not okayAboveSpawn(level, pos):
if pos[1] > 63 or pos[1] < 59:
lpos = (pos[0], pos[1] - 1, pos[2])
if level.blockAt(*pos) == 0 and level.blockAt(*lpos) != 0 and okayAboveSpawn(level, lpos):
pos = lpos
status += _("Spawn point shifted down by one block.\n")
if not okayAboveSpawn(level, pos):
for i in range(1, 4):
level.setBlockAt(pos[0], pos[1] + i, pos[2], 0)
status += _("Blocks above spawn point cleared.\n")
self.editor.invalidateChunks([(pos[0] // 16, pos[2] // 16)])
op = PlayerSpawnMoveOperation(self, pos)
try:
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
self.markerList.invalidate()
except SpawnPositionInvalid, e:
alert(str(e))
return
if len(status):
alert(_("Spawn point fixed. Changes: \n\n") + status)
@alertException
def toolReselected(self):
self.gotoSpawn()
|
|
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging
import re
from datetime import datetime
from concurrent.futures import as_completed
from dateutil.tz import tzutc
from dateutil.parser import parse
from c7n.actions import ActionRegistry, BaseAction
from c7n.filters import FilterRegistry, AgeFilter, OPERATORS
import c7n.filters.vpc as net_filters
from c7n.manager import resources
from c7n.query import QueryResourceManager
from c7n import tags
from c7n.utils import (
local_session, get_account_id, generate_arn,
get_retry, chunks, snapshot_identifier, type_schema)
log = logging.getLogger('custodian.elasticache')
filters = FilterRegistry('elasticache.filters')
actions = ActionRegistry('elasticache.actions')
#registered marked-for-op filter
filters.register('marked-for-op', tags.TagActionFilter)
TTYPE = re.compile('cache.t')
@resources.register('cache-cluster')
class ElastiCacheCluster(QueryResourceManager):
resource_type = 'aws.elasticache.cluster'
filter_registry = filters
action_registry = actions
_generate_arn = _account_id = None
retry = staticmethod(get_retry(('Throttled',)))
@property
def account_id(self):
if self._account_id is None:
session = local_session(self.session_factory)
self._account_id = get_account_id(session)
return self._account_id
@property
def generate_arn(self):
if self._generate_arn is None:
self._generate_arn = functools.partial(
generate_arn,
'elasticache',
region=self.config.region,
account_id=self.account_id,
resource_type='cluster',
separator=':')
return self._generate_arn
def augment(self, clusters):
filter(None, _elasticache_cluster_tags(
self.get_model(),
clusters, self.session_factory, self.executor_factory,
self.generate_arn, self.retry))
return clusters
@filters.register('security-group')
class SecurityGroupFilter(net_filters.SecurityGroupFilter):
RelatedIdsExpression = "SecurityGroups[].SecurityGroupId"
@filters.register('subnet')
class SubnetFilter(net_filters.SubnetFilter):
RelatedIdsExpression = ""
def get_related_ids(self, resources):
group_ids = set()
for r in resources:
group_ids.update(
[s['SubnetIdentifier'] for s in
self.groups[r['CacheSubnetGroupName']]['Subnets']])
return group_ids
def process(self, resources, event=None):
self.groups = {
r['CacheSubnetGroupName']: r for r in
ElastiCacheSubnetGroup(self.manager.ctx, {}).resources()}
return super(SubnetFilter, self).process(resources, event)
# added mark-for-op
@actions.register('mark-for-op')
class TagDelayedAction(tags.TagDelayedAction):
batch_size = 1
def process_resource_set(self, clusters, tags):
client = local_session(self.manager.session_factory).client(
'elasticache')
for cluster in clusters:
arn = self.manager.generate_arn(cluster['CacheClusterId'])
client.add_tags_to_resource(ResourceName=arn, Tags=tags)
# added unmark
@actions.register('remove-tag')
@actions.register('unmark')
class RemoveTag(tags.RemoveTag):
concurrency = 2
batch_size = 5
def process_resource_set(self, clusters, tag_keys):
client = local_session(
self.manager.session_factory).client('elasticache')
for cluster in clusters:
arn = self.manager.generate_arn(cluster['CacheClusterId'])
client.remove_tags_from_resource(
ResourceName=arn, TagKeys=tag_keys)
@actions.register('delete')
class DeleteElastiCacheCluster(BaseAction):
schema = type_schema(
'delete', **{'skip-snapshot': {'type': 'boolean'}})
def process(self, clusters):
skip = self.data.get('skip-snapshot', False)
client = local_session(
self.manager.session_factory).client('elasticache')
clusters_to_delete = []
replication_groups_to_delete = set()
for cluster in clusters:
if cluster.get('ReplicationGroupId', ''):
replication_groups_to_delete.add(cluster['ReplicationGroupId'])
else:
clusters_to_delete.append(cluster)
# added if statement to handle differences in parameters if snapshot is skipped
for cluster in clusters_to_delete:
params = {'CacheClusterId': cluster['CacheClusterId']}
if _cluster_eligible_for_snapshot(cluster) and not skip:
params['FinalSnapshotIdentifier'] = snapshot_identifier(
'Final', cluster['CacheClusterId'])
self.log.debug(
"Taking final snapshot of %s", cluster['CacheClusterId'])
else:
self.log.debug(
"Skipping final snapshot of %s", cluster['CacheClusterId'])
client.delete_cache_cluster(**params)
self.log.info(
'Deleted ElastiCache cluster: %s',
cluster['CacheClusterId'])
for replication_group in replication_groups_to_delete:
params = {'ReplicationGroupId': replication_group,
'RetainPrimaryCluster': False}
if not skip:
params['FinalSnapshotIdentifier'] = snapshot_identifier(
'Final', replication_group)
client.delete_replication_group(**params)
self.log.info(
'Deleted ElastiCache replication group: %s',
replication_group)
@actions.register('snapshot')
class SnapshotElastiCacheCluster(BaseAction):
def process(self, clusters):
with self.executor_factory(max_workers=3) as w:
futures = []
for cluster in clusters:
if not _cluster_eligible_for_snapshot(cluster):
continue
futures.append(w.submit(
self.process_cluster_snapshot,
cluster))
for f in as_completed(futures):
if f.exception():
self.log.error(
"Exception creating cache cluster snapshot \n %s",
f.exception())
return clusters
def process_cluster_snapshot(self, cluster):
c = local_session(self.manager.session_factory).client('elasticache')
c.create_snapshot(
SnapshotName=snapshot_identifier(
'Backup',
cluster['CacheClusterId']),
CacheClusterId=cluster['CacheClusterId'])
@resources.register('cache-subnet-group')
class ElastiCacheSubnetGroup(QueryResourceManager):
resource_type = 'aws.elasticache.subnet-group'
@resources.register('cache-snapshot')
class ElastiCacheSnapshot(QueryResourceManager):
resource_type = 'aws.elasticache.snapshot'
filter_registry = FilterRegistry('elasticache-snapshot.filters')
action_registry = ActionRegistry('elasticache-snapshot.actions')
filter_registry.register('marked-for-op', tags.TagActionFilter)
_generate_arn = _account_id = None
retry = staticmethod(get_retry(('Throttled',)))
@property
def account_id(self):
if self._account_id is None:
session = local_session(self.session_factory)
self._account_id = get_account_id(session)
return self._account_id
@property
def generate_arn(self):
if self._generate_arn is None:
self._generate_arn = functools.partial(
generate_arn,
'elasticache',
region=self.config.region,
account_id=self.account_id,
resource_type='snapshot',
separator=':')
return self._generate_arn
def augment(self, clusters):
filter(None, _elasticache_snapshot_tags(
self.get_model(),
clusters, self.session_factory, self.executor_factory,
self.generate_arn, self.retry))
return clusters
@ElastiCacheSnapshot.filter_registry.register('age')
class ElastiCacheSnapshotAge(AgeFilter):
schema = type_schema(
'age', days={'type': 'number'},
op={'type': 'string', 'enum': OPERATORS.keys()})
date_attribute = 'dummy'
def get_resource_date(self, snapshot):
""" Override superclass method as there is no single snapshot date attribute.
"""
def to_datetime(v):
if not isinstance(v, datetime):
v = parse(v)
if not v.tzinfo:
v = v.replace(tzinfo=tzutc())
return v
# Return the earliest of the node snapshot creation times.
return min([to_datetime(ns['SnapshotCreateTime'])
for ns in snapshot['NodeSnapshots']])
@ElastiCacheSnapshot.action_registry.register('delete')
class DeleteElastiCacheSnapshot(BaseAction):
def process(self, snapshots):
log.info("Deleting %d ElastiCache snapshots", len(snapshots))
with self.executor_factory(max_workers=3) as w:
futures = []
for snapshot_set in chunks(reversed(snapshots), size=50):
futures.append(
w.submit(self.process_snapshot_set, snapshot_set))
for f in as_completed(futures):
if f.exception():
self.log.error(
"Exception deleting snapshot set \n %s",
f.exception())
return snapshots
def process_snapshot_set(self, snapshots_set):
c = local_session(self.manager.session_factory).client('elasticache')
for s in snapshots_set:
c.delete_snapshot(SnapshotName=s['SnapshotName'])
# added mark-for-op
@ElastiCacheSnapshot.action_registry.register('mark-for-op')
class ElastiCacheSnapshotTagDelayedAction(tags.TagDelayedAction):
batch_size = 1
def process_resource_set(self, snapshots, tags):
client = local_session(
self.manager.session_factory).client('elasticache')
for snapshot in snapshots:
arn = self.manager.generate_arn(snapshot['SnapshotName'])
client.add_tags_to_resource(ResourceName=arn, Tags=tags)
# added unmark
@ElastiCacheSnapshot.action_registry.register('remove-tag')
@ElastiCacheSnapshot.action_registry.register('unmark')
class ElastiCacheSnapshotRemoveTag(tags.RemoveTag):
concurrency = 2
batch_size = 5
def process_resource_set(self, snapshots, tag_keys):
client = local_session(
self.manager.session_factory).client('elasticache')
for snapshot in snapshots:
arn = self.manager.generate_arn(snapshot['SnapshotName'])
client.remove_tags_from_resource(
ResourceName=arn, TagKeys=tag_keys)
def _elasticache_cluster_tags(
model, clusters, session_factory, executor_factory, generator, retry):
""" Augment ElastiCache clusters with their respective tags
"""
def process_tags(cluster):
client = local_session(session_factory).client('elasticache')
arn = generator(cluster[model.id])
# added if statement to ensure the cluster is available before listing tags
if not cluster['CacheClusterStatus'] == 'available':
return
tag_list = retry(
client.list_tags_for_resource,
ResourceName=arn)['TagList']
cluster['Tags'] = tag_list or []
return cluster
with executor_factory(max_workers=2) as w:
return list(w.map(process_tags, clusters))
def _elasticache_snapshot_tags(
model, snapshots, session_factory, executor_factory, generator, retry):
""" Augment ElastiCache snapshots with their respective tags
"""
# added if statement to ensure snapshot is available in order to list tags
def process_tags(snapshot):
client = local_session(session_factory).client('elasticache')
arn = generator(snapshot[model.id])
if not snapshot['SnapshotStatus'] == 'available':
return
tag_list = retry(
client.list_tags_for_resource,
ResourceName=arn)['TagList']
snapshot['Tags'] = tag_list or []
return snapshot
with executor_factory(max_workers=2) as w:
return list(w.map(process_tags, snapshots))
def _cluster_eligible_for_snapshot(cluster):
# added regex search to filter unsupported cachenode types
return (
cluster['Engine'] != 'memcached' and not
TTYPE.match(cluster['CacheNodeType'])
)
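# Illustrative sketch (added comment, not in the original source): the helper
# above treats memcached engines and cache.t* node types as ineligible for
# snapshots, so the delete and snapshot actions skip the final/backup snapshot
# for them. The sample records below are hypothetical:
#
#   _cluster_eligible_for_snapshot(
#       {'Engine': 'redis', 'CacheNodeType': 'cache.m3.medium'})     # True
#   _cluster_eligible_for_snapshot(
#       {'Engine': 'redis', 'CacheNodeType': 'cache.t2.micro'})      # False
#   _cluster_eligible_for_snapshot(
#       {'Engine': 'memcached', 'CacheNodeType': 'cache.m3.medium'}) # False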
|
|
# [h] hTools2.modules.webfonts
'''
A collection of tools for working with webfonts.
Some functions in this module require external C libraries:
- [ttfautohint](http://freetype.org/ttfautohint/)
- [sfnt2woff](http://people.mozilla.org/~jkew/woff/)
- [ttf2eot](http://code.google.com/p/ttf2eot/)
- [woff2_compress](http://github.com/google/woff2)
'''
import os
import shutil
from base64 import b64encode
from hTools2.modules.ttx import otf2ttx, ttx2otf
from hTools2.modules.ttx import strip_names as ttx_strip_names
from hTools2.modules.sysutils import SuppressPrint
try:
from mojo.roboFont import OpenFont
from mojo.compile import executeCommand, hasTTFAutoHint
from lib.tools.bezierTools import curveConverter
except:
from robofab.world import OpenFont
#--------------------
# higher-level tools
#--------------------
def generate_webfont(otf_path, strip_names=False, woff=True, woff_path=None, woff2=False, woff2_path=None, clear_ttx=True, clear_otf_tmp=True):
'''Generate woff/woff2 webfont(s) from an otf/ttf input file.'''
file_name, extension = os.path.splitext(otf_path)
# strip font infos (webfont obfuscation)
if strip_names:
ttx_path = '%s.ttx' % file_name
otf2ttx(otf_path, ttx_path)
ttx_strip_names(ttx_path)
otf_path_tmp = '%s_tmp.otf' % file_name
ttx2otf(ttx_path, otf_path_tmp)
if clear_ttx:
os.remove(ttx_path)
otf_path = otf_path_tmp
# generate woff
if woff:
if woff_path is None:
woff_path = '%s.woff' % file_name
sfnt2woff(otf_path, woff_path)
# generate woff2
if woff2:
if woff2_path is None:
woff2_path = '%s.woff2' % file_name
woff2_compress(otf_path, woff2_path)
# clear temporary otf file
if clear_otf_tmp and strip_names:
os.remove(otf_path_tmp)
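# Illustrative usage sketch (added comment, not in the original source); the
# path is hypothetical. Generates name-stripped .woff and .woff2 files next to
# the source font:
#
#   generate_webfont('/fonts/MyFamily-Regular.otf',
#                    strip_names=True, woff=True, woff2=True)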
def subset_font(src_path, dst_path, enc_path, remove_features=True, remove_kerning=False, remove_hinting=False, strip_names=False, verbose=False):
'''Generate a subsetted copy of an .otf or .ttf font.'''
from fontTools import subset
# build subsetting command
command = [src_path]
command += ["--output-file=%s" % dst_path]
command += ["--glyphs-file=%s" % enc_path]
# name options
if strip_names:
command += ["--obfuscate-names"]
else:
command += ["--name-IDs=*"]
command += ["--name-languages=0,1033"]
command += ["--name-legacy"]
# kerning & features
if remove_features:
if not remove_kerning:
command += ["--legacy-kern"]
command += ["--layout-features='kern'"]
else:
command += ["--layout-features=''"]
# hinting
if remove_hinting:
command += ["--no-hinting"]
# run subsetting
if verbose:
command += ["--verbose"]
subset.main(command)
# done
return os.path.exists(dst_path)
#------------
# WOFF tools
#------------
def sfnt2woff(otf_path, woff_path=None):
'''
Generate a .woff file from an .otf or .ttf font.
Requires ``sfnt2woff`` installed on your system.
'''
command = ['sfnt2woff', "%s" % otf_path]
executeCommand(command, shell=True)
woff_path_temp = '%s.woff' % os.path.splitext(otf_path)[0]
if woff_path is not None and os.path.exists(woff_path_temp):
shutil.move(woff_path_temp, woff_path)
#-------------
# WOFF2 tools
#-------------
def woff2_compress(otf_path, woff_path=None):
'''
Generate a .woff2 file from an .otf or .ttf font.
Requires ``woff2_compress`` installed on your system.
'''
command = ['woff2_compress', "%s" % otf_path]
executeCommand(command, shell=True)
woff_path_temp = '%s.woff2' % os.path.splitext(otf_path)[0]
if woff_path is not None and os.path.exists(woff_path_temp):
shutil.move(woff_path_temp, woff_path)
#-----------
# TTF tools
#-----------
def otf2ttf(otf_path, ttf_path):
'''
Generate a .ttf font from an .otf source font.
Requires RoboFont.
'''
otf_font = OpenFont(otf_path, showUI=False)
### is this curve conversion really necessary?
### some scripts do just `font.generate('myfont.ttf', 'ttf')`
coreFont = otf_font.naked()
for glyph in coreFont:
curveConverter.bezier2quadratic(glyph)
coreFont.segmentType = glyph.segmentType
### end conversion
otf_font.generate(ttf_path, 'ttf')
return os.path.exists(ttf_path)
def autohint_ttf(ttf_path, ttfautohinted_path):
'''
Autohint a .ttf font.
Requires ``ttfautohint`` installed on your system.
'''
# if hasTTFAutoHint() is False:
# message('ERROR: ttfautohint is not installed.')
# return
ttfautohint_options = []
ttfautohint_command = ['ttfautohint'] + \
ttfautohint_options + [ttf_path, ttfautohinted_path]
executeCommand(ttfautohint_command, shell=True)
return os.path.exists(ttfautohinted_path)
def autohint_ttfs(folder_ttfs, folder_ttfs_autohint):
'''
Run ``ttfautohint`` on all .ttf fonts in a given folder, and save them in another folder.
'''
for file_ in os.listdir(folder_ttfs):
file_name, extension = os.path.splitext(file_)
if extension == '.ttf':
ttf_path = os.path.join(folder_ttfs, file_)
ttf_path_autohint = os.path.join(folder_ttfs_autohint, '%s.ttf' % file_name)
autohint_ttf(ttf_path, ttf_path_autohint)
#-----------
# EOT tools
#-----------
def ttf2eot(ttf_path, eot_path):
'''
Generate .eot font file from a .ttf font.
Needs ``ttf2eot`` installed on your system.
'''
eot_command = ['ttf2eot', '<', ttf_path, '>', eot_path]
executeCommand(eot_command, shell=True)
return os.path.exists(eot_path)
def generate_eots(folder_ttfs, folder_eots):
'''
Make .eot font files from all .ttf fonts in a given folder. Save the generated fonts in another folder.
'''
for file_ in os.listdir(folder_ttfs):
file_name, extension = os.path.splitext(file_)
if extension == '.ttf':
ttf_path = os.path.join(folder_ttfs, file_)
eot_path = os.path.join(folder_eots, '%s.eot' % file_name)
ttf2eot(ttf_path, eot_path)
#-----------
# SVG tools
#-----------
from ufo2svg import convertUFOToSVGFont
from defcon import Font
from extractor import extractUFO
def generate_svg(src_path, svg_path):
font = Font()
try:
extractUFO(src_path, font)
convertUFOToSVGFont(font, svg_path)
except:
print "Failed to generate SVG."
#--------------
# base64 tools
#--------------
def encode_base64(font_path):
'''
Convert a font at a given path to base64 encoding.
'''
font_file = open(font_path,'rb').read()
font_base64 = b64encode(font_file)
return font_base64
def make_base64_fontface_woff(font_name, base64_font):
'''
Generate a CSS ``@font-face`` declaration for a base64-encoded font with a given name.
'''
font_face = '''@font-face { font-family: '%s'; src:url(data:application/x-font-woff;charset=utf-8;base64,%s) format('woff') }''' % (font_name, base64_font)
return font_face
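# Illustrative usage sketch (added comment, not in the original source); the
# file and family names are hypothetical. Builds a CSS @font-face rule that
# embeds a woff file as base64 data:
#
#   b64 = encode_base64('MyFamily-Regular.woff')
#   css = make_base64_fontface_woff('MyFamily', b64)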
|
|
from __future__ import absolute_import
import logging
import os
import six
import subprocess
import tempfile
import time
from email.utils import parseaddr
from functools import partial
from operator import attrgetter
from random import randrange
import lxml
import toronado
from django.conf import settings
from django.core import mail
from django.core.mail import EmailMultiAlternatives
from django.core.mail.backends.base import BaseEmailBackend
from django.core.signing import BadSignature, Signer
from django.utils.crypto import constant_time_compare
from django.utils.encoding import force_bytes, force_str, force_text
from sentry import options
from sentry.logging import LoggingFormat
from sentry.models import Activity, Group, GroupEmailThread, Project, User, UserOption
from sentry.utils import metrics
from sentry.utils.safe import safe_execute
from sentry.utils.strings import is_valid_dot_atom
from sentry.web.helpers import render_to_string
from sentry.utils.compat import map
# The maximum amount of recipients to display in human format.
MAX_RECIPIENTS = 5
# The fake TLD used to construct email addresses when one is required,
# for example by automatically generated SSO accounts.
FAKE_EMAIL_TLD = ".sentry-fake"
logger = logging.getLogger("sentry.mail")
def inline_css(value):
tree = lxml.html.document_fromstring(value)
toronado.inline(tree)
# CSS media query support is inconsistent when the DOCTYPE declaration is
# missing, so we force it to HTML5 here.
return lxml.html.tostring(tree, doctype="<!DOCTYPE html>")
class _CaseInsensitiveSigner(Signer):
"""
Generate a signature that is comprised of only lowercase letters.
WARNING: Do not use this for anything that needs to be cryptographically
secure! This is losing entropy and has a much higher chance of collision
due to dropping to lowercase letters. For our purposes, this lack of entropy
is ok and doesn't pose a risk.
NOTE: This is needed strictly for signatures used in email addresses. Some
clients, coughAirmailcough, treat email addresses as being case-insensitive,
and send the value as all lowercase.
"""
def signature(self, value):
sig = super(_CaseInsensitiveSigner, self).signature(value)
return sig.lower()
def unsign(self, signed_value):
# This unsign is identical to the base implementation except for the lowercasing
# See: https://github.com/django/django/blob/1.6.11/django/core/signing.py#L165-L172
signed_value = force_str(signed_value)
if self.sep not in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
if constant_time_compare(sig.lower(), self.signature(value)):
return force_text(value)
raise BadSignature('Signature "%s" does not match' % sig)
signer = _CaseInsensitiveSigner()
def email_to_group_id(address):
"""
Email address should be in the form of:
{group_id}+{signature}@example.com
"""
address = address.split("@", 1)[0]
signed_data = address.replace("+", ":")
return int(force_bytes(signer.unsign(signed_data)))
def group_id_to_email(group_id):
signed_data = signer.sign(six.text_type(group_id))
return "@".join(
(
signed_data.replace(":", "+"),
options.get("mail.reply-hostname") or get_from_email_domain(),
)
)
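# Illustrative sketch (hypothetical helper, not part of the original module):
# demonstrates that group_id_to_email() and email_to_group_id() invert each
# other, assuming the "mail.from" / "mail.reply-hostname" options are set.
def _example_group_email_roundtrip(group_id=42):
    address = group_id_to_email(group_id)
    assert email_to_group_id(address) == group_id
    return address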
def domain_from_email(email):
email = parseaddr(email)[1]
try:
return email.split("@", 1)[1]
except IndexError:
# The email address is likely malformed or something
return email
# Slightly modified version of Django's
# `django.core.mail.message:make_msgid` because we need
# to override the domain. If we ever upgrade to
# django 1.8, we can/should replace this.
def make_msgid(domain):
"""Returns a string suitable for RFC 2822 compliant Message-ID, e.g:
<[email protected]>
Optional idstring if given is a string used to strengthen the
uniqueness of the message id. Optional domain if given provides the
portion of the message id after the '@'. It defaults to the locally
defined hostname.
"""
timeval = time.time()
utcdate = time.strftime("%Y%m%d%H%M%S", time.gmtime(timeval))
pid = os.getpid()
randint = randrange(100000)
msgid = "<%s.%s.%s@%s>" % (utcdate, pid, randint, domain)
return msgid
# cache the domain_from_email calculation
# This is just a tuple of (email, email-domain)
_from_email_domain_cache = (None, None)
def get_from_email_domain():
global _from_email_domain_cache
from_ = options.get("mail.from")
if not _from_email_domain_cache[0] == from_:
_from_email_domain_cache = (from_, domain_from_email(from_))
return _from_email_domain_cache[1]
def create_fake_email(unique_id, namespace):
"""
Generate a fake email of the form: {unique_id}@{namespace}{FAKE_EMAIL_TLD}
For example: [email protected]
"""
return u"{}@{}{}".format(unique_id, namespace, FAKE_EMAIL_TLD)
def is_fake_email(email):
"""
Returns True if the provided email matches the fake email pattern.
"""
return email.endswith(FAKE_EMAIL_TLD)
def get_email_addresses(user_ids, project=None):
pending = set(user_ids)
results = {}
if project:
queryset = UserOption.objects.filter(project=project, user__in=pending, key="mail:email")
for option in (o for o in queryset if o.value and not is_fake_email(o.value)):
results[option.user_id] = option.value
pending.discard(option.user_id)
if pending:
queryset = User.objects.filter(pk__in=pending, is_active=True)
for (user_id, email) in queryset.values_list("id", "email"):
if email and not is_fake_email(email):
results[user_id] = email
pending.discard(user_id)
if pending:
logger.warning(
"Could not resolve email addresses for user IDs in %r, discarding...", pending
)
return results
class ListResolver(object):
"""
Manages the generation of RFC 2919 compliant list-id strings from varying
object types.
"""
class UnregisteredTypeError(Exception):
"""
Error raised when attempting to build a list-id from an unregistered object type.
"""
def __init__(self, namespace, type_handlers):
assert is_valid_dot_atom(namespace)
# The list-id-namespace that will be used when generating the list-id
# string. This should be a domain name under the control of the
# generator (see RFC 2919.)
self.__namespace = namespace
# A mapping of classes to functions that accept an instance of that
# class, returning a tuple of values that will be used to generate the
# list label. Returned values must be valid RFC 2822 dot-atom-text
# values.
self.__type_handlers = type_handlers
def __call__(self, instance):
"""
Build a list-id string from an instance.
Raises ``UnregisteredTypeError`` if there is no registered handler for
the instance type. Raises ``AssertionError`` if a valid list-id string
cannot be generated from the values returned by the type handler.
"""
try:
handler = self.__type_handlers[type(instance)]
except KeyError:
raise self.UnregisteredTypeError(
u"Cannot generate mailing list identifier for {!r}".format(instance)
)
label = ".".join(map(six.text_type, handler(instance)))
assert is_valid_dot_atom(label)
return u"<{}.{}>".format(label, self.__namespace)
default_list_type_handlers = {
Activity: attrgetter("project.slug", "project.organization.slug"),
Project: attrgetter("slug", "organization.slug"),
Group: attrgetter("project.slug", "organization.slug"),
}
make_listid_from_instance = ListResolver(
options.get("mail.list-namespace"), default_list_type_handlers
)
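# Added note (not in the original source): for a hypothetical Project with
# slug "backend" in an organization with slug "acme", the resolver above
# yields a list-id of the form "<backend.acme.{mail.list-namespace}>".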
class MessageBuilder(object):
def __init__(
self,
subject,
context=None,
template=None,
html_template=None,
body="",
html_body=None,
headers=None,
reference=None,
reply_reference=None,
from_email=None,
type=None,
):
assert not (body and template)
assert not (html_body and html_template)
assert context or not (template or html_template)
if headers is None:
headers = {}
self.subject = subject
self.context = context or {}
self.template = template
self.html_template = html_template
self._txt_body = body
self._html_body = html_body
self.headers = headers
self.reference = reference # The object that generated this message
self.reply_reference = reply_reference # The object this message is replying about
self.from_email = from_email or options.get("mail.from")
self._send_to = set()
self.type = type if type else "generic"
if reference is not None and "List-Id" not in headers:
try:
headers["List-Id"] = make_listid_from_instance(reference)
except ListResolver.UnregisteredTypeError as error:
logger.debug(six.text_type(error))
except AssertionError as error:
logger.warning(six.text_type(error))
def __render_html_body(self):
html_body = None
if self.html_template:
html_body = render_to_string(self.html_template, self.context)
else:
html_body = self._html_body
if html_body is not None:
return inline_css(html_body)
def __render_text_body(self):
if self.template:
return render_to_string(self.template, self.context)
return self._txt_body
def add_users(self, user_ids, project=None):
self._send_to.update(list(get_email_addresses(user_ids, project).values()))
def build(self, to, reply_to=None, cc=None, bcc=None):
if self.headers is None:
headers = {}
else:
headers = self.headers.copy()
if options.get("mail.enable-replies") and "X-Sentry-Reply-To" in headers:
reply_to = headers["X-Sentry-Reply-To"]
else:
reply_to = set(reply_to or ())
reply_to.discard(to)
reply_to = ", ".join(reply_to)
if reply_to:
headers.setdefault("Reply-To", reply_to)
# Every message sent needs a unique message id
message_id = make_msgid(get_from_email_domain())
headers.setdefault("Message-Id", message_id)
subject = force_text(self.subject)
if self.reply_reference is not None:
reference = self.reply_reference
subject = "Re: %s" % subject
else:
reference = self.reference
if isinstance(reference, Group):
thread, created = GroupEmailThread.objects.get_or_create(
email=to,
group=reference,
defaults={"project": reference.project, "msgid": message_id},
)
if not created:
headers.setdefault("In-Reply-To", thread.msgid)
headers.setdefault("References", thread.msgid)
msg = EmailMultiAlternatives(
subject=subject.splitlines()[0],
body=self.__render_text_body(),
from_email=self.from_email,
to=(to,),
cc=cc or (),
bcc=bcc or (),
headers=headers,
)
html_body = self.__render_html_body()
if html_body:
msg.attach_alternative(html_body.decode("utf-8"), "text/html")
return msg
def get_built_messages(self, to=None, cc=None, bcc=None):
send_to = set(to or ())
send_to.update(self._send_to)
results = [
self.build(to=email, reply_to=send_to, cc=cc, bcc=bcc) for email in send_to if email
]
if not results:
logger.debug("Did not build any messages, no users to send to.")
return results
def format_to(self, to):
if not to:
return ""
if len(to) > MAX_RECIPIENTS:
to = to[:MAX_RECIPIENTS] + [u"and {} more.".format(len(to[MAX_RECIPIENTS:]))]
return ", ".join(to)
def send(self, to=None, cc=None, bcc=None, fail_silently=False):
return send_messages(
self.get_built_messages(to, cc=cc, bcc=bcc), fail_silently=fail_silently
)
def send_async(self, to=None, cc=None, bcc=None):
from sentry.tasks.email import send_email
fmt = options.get("system.logging-format")
messages = self.get_built_messages(to, cc=cc, bcc=bcc)
extra = {"message_type": self.type}
loggable = [v for k, v in six.iteritems(self.context) if hasattr(v, "id")]
for context in loggable:
extra["%s_id" % type(context).__name__.lower()] = context.id
log_mail_queued = partial(logger.info, "mail.queued", extra=extra)
for message in messages:
safe_execute(send_email.delay, message=message, _with_transaction=False)
extra["message_id"] = message.extra_headers["Message-Id"]
metrics.incr("email.queued", instance=self.type, skip_internal=False)
if fmt == LoggingFormat.HUMAN:
extra["message_to"] = (self.format_to(message.to),)
log_mail_queued()
elif fmt == LoggingFormat.MACHINE:
for recipient in message.to:
extra["message_to"] = recipient
log_mail_queued()
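# Illustrative usage sketch (added comment, not part of the original module);
# the template paths, context object and recipient are hypothetical:
#
#   msg = MessageBuilder(
#       subject="Test alert",
#       template="sentry/emails/example.txt",
#       html_template="sentry/emails/example.html",
#       context={"project": project},
#       type="test.example",
#   )
#   msg.send_async(to=[recipient_email])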
def send_messages(messages, fail_silently=False):
connection = get_connection(fail_silently=fail_silently)
sent = connection.send_messages(messages)
metrics.incr("email.sent", len(messages), skip_internal=False)
for message in messages:
extra = {
"message_id": message.extra_headers["Message-Id"],
"size": len(message.message().as_bytes()),
}
logger.info("mail.sent", extra=extra)
return sent
def get_mail_backend():
backend = options.get("mail.backend")
try:
return settings.SENTRY_EMAIL_BACKEND_ALIASES[backend]
except KeyError:
return backend
def get_connection(fail_silently=False):
"""
Gets an SMTP connection using our OptionsStore
"""
return mail.get_connection(
backend=get_mail_backend(),
host=options.get("mail.host"),
port=options.get("mail.port"),
username=options.get("mail.username"),
password=options.get("mail.password"),
use_tls=options.get("mail.use-tls"),
timeout=options.get("mail.timeout"),
fail_silently=fail_silently,
)
def send_mail(subject, message, from_email, recipient_list, fail_silently=False, **kwargs):
"""
Wrapper that forces sending mail through our connection.
Uses EmailMessage class which has more options than the simple send_mail
"""
email = mail.EmailMessage(
subject,
message,
from_email,
recipient_list,
connection=get_connection(fail_silently=fail_silently),
**kwargs
)
return email.send(fail_silently=fail_silently)
def is_smtp_enabled(backend=None):
"""
Check if the current backend is SMTP based.
"""
if backend is None:
backend = get_mail_backend()
return backend not in settings.SENTRY_SMTP_DISABLED_BACKENDS
class PreviewBackend(BaseEmailBackend):
"""
Email backend that can be used in local development to open messages in the
local mail client as they are sent.
Probably only works on OS X.
"""
def send_messages(self, email_messages):
for message in email_messages:
content = six.binary_type(message.message())
preview = tempfile.NamedTemporaryFile(
delete=False, prefix="sentry-email-preview-", suffix=".eml"
)
try:
preview.write(content)
preview.flush()
finally:
preview.close()
subprocess.check_call(("open", preview.name))
return len(email_messages)
|
|
"""
EVLANG
A mini-language for online coding of Evennia
Evennia contribution - Griatch 2012
WARNING:
Restricted python execution is a tricky art, and this module -is-
partly based on blacklisting techniques, which might be vulnerable to
new venues of attack opening up in the future (or existing ones we've
missed). Whereas I/we know of no obvious exploits to this, it is no
guarantee. If you are paranoid about security, consider also using
secondary defences on the OS level such as a jail and highly
restricted execution abilities for the twisted process. So in short,
this should work fine, but use it at your own risk. You have been
warned.
This module offers a highly restricted execution environment for users
to script objects in an almost-Python language. It's not really a true
sandbox but based on a very stunted version of Python. This not only
restricts obvious things like import statements and other builtins, but
also pre-parses the AST tree to completely kill whole families of
functionality. The result is a subset of Python that -should- keep an
untrusted, malicious user from doing bad things to the server.
An important limitation of this implementation is the lack of a
timeout check - inside Twisted (and in Python in general) it's very
hard to safely kill a thread with arbitrary code once it's running. So
instead we restrict the most common DOS-attack vectors, such as while
loops, huge power-law assignments as well as function definitions. A
better way would probably be to spawn the runner into a separate
process but that stunts much of the work a user might want to do with
objects (since the current in-memory state of an object has potential
importance in Evennia). If you want to try the subprocess route, you
might want to look into hacking the Evlang handler (below) onto code
from the pysandbox project (https://github.com/haypo/pysandbox). Note
however, that one would probably need to rewrite that to use Twisted's
non-blocking subprocess mechanisms instead.
The module holds the "Evlang" handler, which is intended to be the
entry point for adding scripting support anywhere in Evennia.
By default the execution environment makes the following objects
available (some or all of these may be None depending on how the
code was launched):
caller - a reference to the object triggering the code
scripter - the original creator of the code
self - the object on which the code is defined
here - shortcut to self.location, if applicable
There is finally a variable "evl" which is a holder object for safe
functions to execute. This object is initiated with the objects above,
to make sure the user does not try to forge the input arguments. See
below the default safe methods defined on it.
You can add new safe symbols to the execution context by adding
EVLANG_SAFE_CONTEXT to your settings file. This should be a dictionary
with {"name":object} pairs.
You can also add new safe methods to the evl object. You add them as a
dictionary on the same form to settings.EVLANG_SAFE_METHODS. Remember
that such methods must be defined properly to be a class method
(notably "self" must be the first argument on the argument list).
You can finally define settings.EVLANG_UNALLOWED_SYMBOLS as a list of
python symbol names you specifically want to lock. This will lock both
functions of that name as well as trying to access attributes on
objects with that name (note that these "attributes" have nothing to
do with Evennia's in-database "Attribute" system!).
"""
import sys, os, time
import __builtin__
import inspect, compiler.ast
from twisted.internet import reactor, threads, task
from twisted.internet.defer import inlineCallbacks
# set up django, if necessary
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from django.core.management import setup_environ
from game import settings
setup_environ(settings)
#from src.utils.utils import run_async as thread_run_async
_LOGGER = None
#------------------------------------------------------------
# Evennia-specific blocks
#------------------------------------------------------------
# specifically forbidden symbols
_EV_UNALLOWED_SYMBOLS = ["attr", "set_attribute", "delete"]
try: _EV_UNALLOWED_SYMBOLS.extend(settings.EVLANG_UNALLOWED_SYMBOLS)
except AttributeError: pass
# safe methods (including self in args) to make available on
# the evl object
_EV_SAFE_METHODS = {}
try: _EV_SAFE_METHODS.update(settings.EVLANG_SAFE_METHODS)
except AttributeError: pass
# symbols to make available directly in code
_EV_SAFE_CONTEXT = {"testvar": "This is a safe var!"}
try: _EV_SAFE_CONTEXT.update(settings.EVLANG_SAFE_CONTEXT)
except AttributeError: pass
#------------------------------------------------------------
# Holder object for "safe" function access
#------------------------------------------------------------
class Evl(object):
"""
This is a wrapper object for storing safe functions
in a secure way, while offering a few properties for
them to access. This will be made available as the
"evl" property in code.
"""
def __init__(self, obj=None, caller=None, scripter=None, **kwargs):
"Populate the object with safe properties"
self.obj = obj
self.caller = caller
self.scripter = scripter
self.location = None
if obj and hasattr(obj, "location"):
self.location = obj.location
for key, val in _EV_SAFE_METHODS.items():
setattr(self.__class__, key, val)
for key, val in kwargs.items():
setattr(self.__class__, key, val)
def list(self):
"""
list()
returns a string listing all methods on the evl object, including doc strings.
"""
# must do it this way since __dict__ is restricted
members = [mtup for mtup in inspect.getmembers(Evl, predicate=inspect.ismethod)
if not mtup[0].startswith("_")]
string = "\n".join(["{w%s{n\n %s" % (mtup[0], mtup[1].func_doc.strip()) for mtup in members])
return string
def msg(self, string, obj=None):
"""
msg(string, obj=None)
Sends message to obj, or to caller if obj is not defined.
"""
if not obj:
obj = self.caller
obj.msg(string)
return True
def msg_contents(self, string, obj=None):
"""
msg_contents(string, obj=None):
Sends message to the contents of obj, or to content of self if obj is not defined.
"""
if not obj:
obj = self.obj
obj.msg_contents(string, exclude=[obj])
return True
def msg_here(self, string, obj=None):
"""
msg_here(string, obj=None)
Sends to contents of obj.location, or to self.location if obj is not defined.
"""
if obj and hasattr(obj, "location"):
here = obj.location
else:
here = self.location
if here:
here.msg_contents(string)
def delay(self, seconds, function, *args, **kwargs):
"""
delay(seconds, function, *args, **kwargs):
Delay execution of function(*args, **kwargs) for up to 120 seconds.
Error messages are relayed to caller unless a specific keyword
'errobj' is supplied pointing to another object to receive errors.
"""
# handle the special error-reporting object
errobj = self.caller
if "errobj" in kwargs:
errobj = kwargs["errobj"]
del kwargs["errobj"]
# set up some callbacks for delayed execution
def errback(f, errobj):
if errobj:
try: f = f.getErrorMessage()
except: pass
errobj.msg("EVLANG delay error: " + str(f))
def runfunc(func, *args, **kwargs):
threads.deferToThread(func, *args, **kwargs).addErrback(errback, errobj)
# get things going
if seconds <= 120:
task.deferLater(reactor, seconds, runfunc, function, *args, **kwargs).addErrback(errback, errobj)
else:
raise EvlangError("delay() can only delay for a maximum of 120 seconds (got %ss)." % seconds )
return True
def attr(self, obj, attrname=None, value=None, delete=False):
"""
attr(obj, attrname=None, value=None, delete=False)
Access and edit database Attributes on obj. if only obj
is given, return list of Attributes on obj. If attrname
is given, return that Attribute's value only. If also
value is given, set the attribute to that value. The
delete flag will delete the given attrname from the object.
Access is checked for all operations. The method will return
the attribute value or True if the operation was a success,
None otherwise.
"""
if hasattr(obj, "secure_attr"):
return obj.secure_attr(self.caller, attrname, value, delete=delete,
default_access_read=True, default_access_edit=False,
default_access_create=True)
return False
#------------------------------------------------------------
# Evlang class handler
#------------------------------------------------------------
class EvlangError(Exception):
"Error for evlang handler"
pass
class Evlang(object):
"""
This is a handler for launching limited execution Python scripts.
Normally this handler is stored on an object and will then give
access to basic operations on the object. It can however also be
run stand-alone.
If running on an object, it should normally be initiated in the
object's at_server_start() method and assigned to a property
"evlang" (or similar) for easy access. It will then use the object
for storing a dictionary of available evlang scripts (default name
of this attribute is "evlang_scripts").
Note: This handler knows nothing about access control. To get that
one needs to append a LockHandler as "lockhandler" at creation
time, as well as arrange for commands to do access checks of
suitable type. Once methods on this handler are called, access is
assumed to be granted.
"""
def __init__(self, obj=None, scripts=None, storage_attr="evlang_scripts", safe_context=None, safe_timeout=2):
"""
Setup of the Evlang handler.
Input:
obj - a reference to the object this handler is defined on. If not set, handler will operate stand-alone.
scripts - dictionary {scriptname: (codestring, callerobj), ...} where callerobj can be None.
storage_attr - if obj is given, look for a dictionary {scriptname: (codestring, callerobj), ...}
stored in this attribute name on that object.
safe_context - dictionary of {name: obj, ...} to make available in the execution environment
safe_timeout - the time we let a script run. If it exceeds this time, it will be blocked from running again.
"""
self.obj = obj
self.evlang_scripts = {}
self.safe_timeout = safe_timeout
self.evlang_storage_attr = storage_attr
if scripts:
self.evlang_scripts.update(scripts)
if self.obj:
self.evlang_scripts.update(obj.attr(storage_attr) or {})
self.safe_context = _EV_SAFE_CONTEXT # set by default + settings
if safe_context:
self.safe_context.update(safe_context)
self.timedout_codestrings = []
def msg(self, string, scripter=None, caller=None):
"""
Try to send string to a receiver. Returns False
if no receiver was found.
"""
if scripter:
scripter.msg(string)
elif caller:
caller.msg(string)
elif self.obj:
self.obj.msg(string)
else:
return False
return True
def start_timer(self, timeout, codestring, caller, scripter):
"""
Start a timer to check how long an execution has lasted.
Returns a deferred, which should be cancelled when the
code does finish.
"""
def alarm(codestring):
"store the code of too-long-running scripts"
global _LOGGER
if not _LOGGER:
from src.utils import logger as _LOGGER
self.timedout_codestrings.append(codestring)
err = "Evlang code '%s' exceeded allowed execution time (>%ss)." % (codestring, timeout)
_LOGGER.log_errmsg("EVLANG time exceeded: caller: %s, scripter: %s, code: %s" % (caller, scripter, codestring))
if not self.msg(err, scripter, caller):
raise EvlangError(err)
def errback(f):
"We need an empty errback, to catch the traceback of defer.cancel()"
pass
return task.deferLater(reactor, timeout, alarm, codestring).addErrback(errback)
def stop_timer(self, _, deferred):
"Callback for stopping a previously started timer. Cancels the given deferred."
deferred.cancel()
@inlineCallbacks
def run(self, codestring, caller=None, scripter=None):
"""
run a given code string.
codestring - the actual code to execute.
scripter - the creator of the script. Preferentially sees error messages
caller - the object triggering the script - sees error messages if no scripter is given
"""
# catching previously detected long-running code
if codestring in self.timedout_codestrings:
err = "Code '%s' previously failed with a timeout. Please rewrite code." % codestring
if not self.msg(err, scripter, caller):
raise EvlangError(err)
return
# dynamically setup context, then overload with custom additions
location = None
if self.obj:
location = self.obj.location
context = {"self":self.obj,
"caller":caller,
"scripter": scripter,
"here": location,
"evl": Evl(self.obj, caller, scripter)}
context.update(self.safe_context)
# launch the runner in a separate thread, tracking how long it runs.
timer = self.start_timer(self.safe_timeout, codestring, caller, scripter)
try:
yield threads.deferToThread(limited_exec, codestring, context=context,
timeout_secs=self.safe_timeout).addCallback(self.stop_timer, timer)
except Exception, e:
self.stop_timer(None, timer)
if not self.msg(e, scripter, caller):
raise e
def run_by_name(self, scriptname, caller=None, quiet=True):
"""
Run a script previously stored on the handler, identified by scriptname.
scriptname - identifier of the stored script
caller - optional reference to the object triggering the script.
quiet - will not raise error if scriptname is not found.
All scripts run will have access to the self, caller and here variables.
"""
scripter = None
try:
codestring, scripter = self.evlang_scripts[scriptname]
except KeyError:
if quiet:
return
errmsg = "Found no script with the name '%s'." % scriptname
if not self.msg(errmsg, scripter=None, caller=caller):
raise EvlangError(errmsg)
return
# execute code
self.run(codestring, caller, scripter)
def add(self, scriptname, codestring, scripter=None):
"""
Add a new script to the handler. This will also save the
script properly. This is used also to update scripts when
debugging.
"""
self.evlang_scripts[scriptname] = (codestring, scripter)
if self.obj:
# save to database
self.obj.attr(self.evlang_storage_attr, self.evlang_scripts)
def delete(self, scriptname):
"""
Permanently remove script from object.
"""
if scriptname in self.evlang_scripts:
del self.evlang_scripts[scriptname]
if self.obj:
# update change to database
self.obj.attr(self.evlang_storage_attr, self.evlang_scripts)
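# Illustrative usage sketch (added comment, not part of the original module);
# the object, script name and script body are hypothetical. Typically the
# handler is created in an object's at_server_start() and triggered from a
# command or hook:
#
#   self.evlang = Evlang(obj=self)
#   self.evlang.add("greet", 'evl.msg_here("%s arrives." % caller.key)')
#   ...
#   self.evlang.run_by_name("greet", caller=visitor)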
#----------------------------------------------------------------------
# Limited Python evaluation.
# Based on PD recipe by Babar K. Zafar
# http://code.activestate.com/recipes/496746/
# Expanded specifically for Evennia by Griatch
# - some renaming/cleanup
# - limited size of power expressions
# - removed print (use msg() instead)
# - blocking certain function calls
# - removed assignment of properties - this is too big of a security risk.
# One needs to use a safe function to change properties.
# - removed thread-based check for execution time - it doesn't work
# embedded in twisted/python.
# - removed while, since it's nigh impossible to properly check execution
# time in an embedded Python thread (or rather, it's possible, but
# there is no way to cancel the thread anyway). while is an easy way
# to create an infinite loop.
#----------------------------------------------------------------------
#----------------------------------------------------------------------
# Module globals.
#----------------------------------------------------------------------
# Toggle module level debugging mode.
DEBUG = False
# List of all AST node classes in compiler/ast.py.
ALL_AST_NODES = \
set([name for (name, obj) in inspect.getmembers(compiler.ast)
if inspect.isclass(obj) and issubclass(obj, compiler.ast.Node)])
# List of all builtin functions and types (ignoring exception classes).
ALL_BUILTINS = \
set([name for (name, obj) in inspect.getmembers(__builtin__)
if (inspect.isbuiltin(obj) or name in ('True', 'False', 'None') or
(inspect.isclass(obj) and not issubclass(obj, BaseException)))])
#----------------------------------------------------------------------
# Utilities.
#----------------------------------------------------------------------
def classname(obj):
return obj.__class__.__name__
def is_valid_ast_node(name):
return name in ALL_AST_NODES
def is_valid_builtin(name):
return name in ALL_BUILTINS
def get_node_lineno(node):
return (node.lineno) and node.lineno or 0
#----------------------------------------------------------------------
# Restricted AST nodes & builtins.
#----------------------------------------------------------------------
# Deny evaluation of code if the AST contain any of the following nodes:
UNALLOWED_AST_NODES = set([
# 'Add', 'And',
'AssAttr',
# 'AssList',
# 'AssName',
# 'AssTuple',
# 'Assert', 'Assign', 'AugAssign',
'Backquote',
# 'Bitand', 'Bitor', 'Bitxor', 'Break',
# 'CallFunc', 'Class', 'Compare', 'Const', 'Continue',
# 'Decorators', 'Dict', 'Discard', 'Div',
# 'Ellipsis', 'EmptyNode',
'Exec',
# 'Expression', 'FloorDiv',
# 'For',
'From',
'Function',
# 'GenExpr', 'GenExprFor', 'GenExprIf', 'GenExprInner',
# 'Getattr', 'Global', 'If',
'Import',
# 'Invert',
# 'Keyword', 'Lambda', 'LeftShift',
# 'List', 'ListComp', 'ListCompFor', 'ListCompIf', 'Mod',
# 'Module',
# 'Mul', 'Name', 'Node', 'Not', 'Or', 'Pass', 'Power',
'Print', 'Printnl',
'Raise',
# 'Return', 'RightShift', 'Slice', 'Sliceobj',
# 'Stmt', 'Sub', 'Subscript',
'TryExcept', 'TryFinally',
# 'Tuple', 'UnaryAdd', 'UnarySub',
'While',
# 'Yield'
])
# Deny evaluation of code if it tries to access any of the following builtins:
UNALLOWED_BUILTINS = set([
'__import__',
# 'abs', 'apply', 'basestring', 'bool', 'buffer',
# 'callable', 'chr', 'classmethod', 'cmp', 'coerce',
'compile',
# 'complex',
'delattr',
# 'dict',
'dir',
# 'divmod', 'enumerate',
'eval', 'execfile', 'file',
# 'filter', 'float', 'frozenset',
'getattr', 'globals', 'hasattr',
# 'hash', 'hex', 'id',
'input',
# 'int',
'intern',
# 'isinstance', 'issubclass', 'iter',
# 'len', 'list',
'locals',
# 'long', 'map', 'max',
'memoryview',
# 'min', 'object', 'oct',
'open',
# 'ord', 'pow', 'property', 'range',
'raw_input',
# 'reduce',
'reload',
# 'repr', 'reversed', 'round', 'set',
'setattr',
# 'slice', 'sorted', 'staticmethod', 'str', 'sum',
'super',
# 'tuple',
'type',
# 'unichr', 'unicode',
'vars',
# 'xrange', 'zip'
])
# extra validation whitelist-style to avoid new versions of Python creeping
# in with new unsafe things
SAFE_BUILTINS = set([
'False', 'None', 'True', 'abs', 'all', 'any', 'apply', 'basestring',
'bin', 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
'cmp', 'coerce', 'complex', 'dict', 'divmod', 'enumerate', 'filter',
'float', 'format', 'frozenset', 'hash', 'hex', 'id', 'int',
'isinstance', 'issubclass', 'iter', 'len', 'list', 'long', 'map', 'max', 'min',
'next', 'object', 'oct', 'ord', 'pow', 'print', 'property', 'range', 'reduce',
'repr', 'reversed', 'round', 'set', 'slice', 'sorted', 'staticmethod', 'str',
'sum', 'tuple', 'unichr', 'unicode', 'xrange', 'zip' ])
for ast_name in UNALLOWED_AST_NODES:
assert(is_valid_ast_node(ast_name))
for name in UNALLOWED_BUILTINS:
assert(is_valid_builtin(name))
def _cross_match_whitelist():
"check the whitelist's completeness"
available = ALL_BUILTINS - UNALLOWED_BUILTINS
diff = available.difference(SAFE_BUILTINS)
assert not diff, diff # check so everything not disallowed is in safe
diff = SAFE_BUILTINS.difference(available)
assert not diff, diff # check so everything in safe is also not disallowed
_cross_match_whitelist()
def is_unallowed_ast_node(kind):
return kind in UNALLOWED_AST_NODES
def is_unallowed_builtin(name):
return name in UNALLOWED_BUILTINS
#----------------------------------------------------------------------
# Restricted attributes.
#----------------------------------------------------------------------
# In addition to these we deny access to all lowlevel attrs (__xxx__).
UNALLOWED_ATTR = [
'im_class', 'im_func', 'im_self',
'func_code', 'func_defaults', 'func_globals', 'func_name',
'tb_frame', 'tb_next',
'f_back', 'f_builtins', 'f_code', 'f_exc_traceback',
'f_exc_type', 'f_exc_value', 'f_globals', 'f_locals']
UNALLOWED_ATTR.extend(_EV_UNALLOWED_SYMBOLS)
def is_unallowed_attr(name):
return (name[:2] == '__' and name[-2:] == '__') or \
(name in UNALLOWED_ATTR)
#----------------------------------------------------------------------
# LimitedExecVisitor.
#----------------------------------------------------------------------
class LimitedExecError(object):
"""
Base class for all errors which occur while walking the AST.
Attributes:
errmsg = short description of the nature of the error
lineno = line offset to where the error occurred in the source code
"""
def __init__(self, errmsg, lineno):
self.errmsg, self.lineno = errmsg, lineno
def __str__(self):
return "line %d : %s" % (self.lineno, self.errmsg)
class LimitedExecASTNodeError(LimitedExecError):
"Expression/statement in AST evaluates to a restricted AST node type."
pass
class LimitedExecBuiltinError(LimitedExecError):
Expression/statement tried to access a restricted builtin.
pass
class LimitedExecAttrError(LimitedExecError):
Expression/statement tried to access a restricted attribute.
pass
class LimitedExecVisitor(object):
"""
Data-driven visitor which walks the AST for some code and makes
sure it doesn't contain any expression/statements which are
declared as restricted in 'UNALLOWED_AST_NODES'. We'll also make
sure that there aren't any attempts to access/lookup restricted
builtin declared in 'UNALLOWED_BUILTINS'. By default we also won't
allow access to lowlevel stuff which can be used to dynamically
access non-local environments.
Interface:
walk(ast) = validate AST and return True if AST is 'safe'
Attributes:
errors = list of LimitedExecError if walk() returned False
Implementation:
The visitor will automatically generate methods for all of the
available AST node types and redirect them to self.ok or self.fail
reflecting the configuration in 'UNALLOWED_AST_NODES'. While
walking the AST we simply forward the validating step to each of
node callbacks which take care of reporting errors.
"""
def __init__(self):
"Initialize visitor by generating callbacks for all AST node types."
self.errors = []
for ast_name in ALL_AST_NODES:
# Don't reset any overridden callbacks.
if getattr(self, 'visit' + ast_name, None):
continue
if is_unallowed_ast_node(ast_name):
setattr(self, 'visit' + ast_name, self.fail)
else:
setattr(self, 'visit' + ast_name, self.ok)
def walk(self, ast):
"Validate each node in AST and return True if AST is 'safe'."
self.visit(ast)
return self.errors == []
def visit(self, node, *args):
"Recursively validate node and all of its children."
fn = getattr(self, 'visit' + classname(node))
if DEBUG: self.trace(node)
fn(node, *args)
for child in node.getChildNodes():
self.visit(child, *args)
def visitName(self, node, *args):
"Disallow any attempts to access a restricted builtin/attr."
name = node.getChildren()[0]
lineno = get_node_lineno(node)
if is_unallowed_builtin(name):
self.errors.append(LimitedExecBuiltinError( \
"access to builtin '%s' is denied" % name, lineno))
elif is_unallowed_attr(name):
self.errors.append(LimitedExecAttrError( \
"access to attribute '%s' is denied" % name, lineno))
def visitGetattr(self, node, *args):
"Disallow any attempts to access a restricted attribute."
attrname = node.attrname
try:
name = node.getChildren()[0].name
except Exception:
name = ""
lineno = get_node_lineno(node)
if attrname == 'attr' and name =='evl':
pass
elif is_unallowed_attr(attrname):
self.errors.append(LimitedExecAttrError( \
"access to attribute '%s' is denied" % attrname, lineno))
def visitAssName(self, node, *args):
"Disallow attempts to delete an attribute or name"
if node.flags == 'OP_DELETE':
self.fail(node, *args)
def visitPower(self, node, *args):
"Make sure power-of operations don't get too big"
if node.left.value > 1000000 or node.right.value > 10:
lineno = get_node_lineno(node)
self.errors.append(LimitedExecAttrError( \
"power law solution too big - restricted", lineno))
def ok(self, node, *args):
"Default callback for 'harmless' AST nodes."
pass
def fail(self, node, *args):
"Default callback for unallowed AST nodes."
lineno = get_node_lineno(node)
self.errors.append(LimitedExecASTNodeError( \
"execution of '%s' statements is denied" % classname(node),
lineno))
def trace(self, node):
"Debugging utility for tracing the validation of AST nodes."
print classname(node)
for attr in dir(node):
if attr[:2] != '__':
print ' ' * 4, "%-15.15s" % attr, getattr(node, attr)
#----------------------------------------------------------------------
# Safe 'eval' replacement.
#----------------------------------------------------------------------
class LimitedExecException(Exception):
"Base class for all safe-eval related errors."
pass
class LimitedExecCodeException(LimitedExecException):
"""
Exception class for reporting all errors which occurred while
validating AST for source code in limited_exec().
Attributes:
code = raw source code which failed to validate
errors = list of LimitedExecError
"""
def __init__(self, code, errors):
self.code, self.errors = code, errors
def __str__(self):
return '\n'.join([str(err) for err in self.errors])
class LimitedExecContextException(LimitedExecException):
"""
Exception class for reporting unallowed objects found in the dict
intended to be used as the local environment in limited_exec().
Attributes:
keys = list of keys of the unallowed objects
errors = list of strings describing the nature of the error
for each key in 'keys'
"""
def __init__(self, keys, errors):
self.keys, self.errors = keys, errors
def __str__(self):
return '\n'.join([str(err) for err in self.errors])
class LimitedExecTimeoutException(LimitedExecException):
"""
Exception class for reporting that code evaluation exceeded
the given time limit.
Attributes:
timeout = time limit in seconds
"""
def __init__(self, timeout):
self.timeout = timeout
def __str__(self):
return "Timeout limit execeeded (%s secs) during exec" % self.timeout
def validate_context(context):
"Checks a supplied context for dangerous content"
ctx_errkeys, ctx_errors = [], []
for (key, obj) in context.items():
if inspect.isbuiltin(obj):
ctx_errkeys.append(key)
ctx_errors.append("key '%s' : unallowed builtin %s" % (key, obj))
if inspect.ismodule(obj):
ctx_errkeys.append(key)
ctx_errors.append("key '%s' : unallowed module %s" % (key, obj))
if ctx_errors:
raise LimitedExecContextException(ctx_errkeys, ctx_errors)
return True
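# For example (illustrative): validate_context({'x': 1}) returns True, while
# validate_context({'os': os}) raises LimitedExecContextException because
# modules and raw builtins are not allowed in the execution context.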
def validate_code(codestring):
"validate a code string"
# prepare the code tree for checking
ast = compiler.parse(codestring)
checker = LimitedExecVisitor()
# check code tree, then execute in a time-restricted environment
if not checker.walk(ast):
raise LimitedExecCodeException(codestring, checker.errors)
return True
def limited_exec(code, context=None, timeout_secs=2, retobj=None, procpool_async=None):
"""
Validate source code and make sure it contains no unauthorized
expression/statements as configured via 'UNALLOWED_AST_NODES' and
'UNALLOWED_BUILTINS'. By default this means that code is not
allowed to import modules or access dangerous builtins like 'open' or
'eval'.
code - code to execute. Will be evaluated for safety
context - if code is deemed safe, code will execute with this environment
timeout_secs - only used if procpool_async is given. Sets the timeout
for remote code execution
retobj - only used if procpool_async is also given. Defines an Object
(which must define a msg() method), for receiving returns from
the execution.
procpool_async - an alternative to the run_async function in src.utils.utils.
This must accept the keywords
proc_timeout (will be set to timeout_secs),
at_return (a callback) and
at_err (an errback).
If retobj is given, at_return/at_err will be created automatically
and will send returns and errors to that object's msg() method.
Raises:
LimitedExecContextException
LimitedExecCodeException
"""
if context is None: context = {}
if validate_context(context) and validate_code(code):
# run code only after validation has completed
if procpool_async:
# custom run_async
if retobj:
callback = lambda r: retobj.msg(r)
errback = lambda e: retobj.msg(e)
procpool_async(code, *context, proc_timeout=timeout_secs, at_return=callback, at_err=errback)
else:
procpool_async(code, *context, proc_timeout=timeout_secs)
else:
# run in-process
exec code in context
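# Usage sketch (illustrative, not part of the original module). Assumes the
# default UNALLOWED_AST_NODES / UNALLOWED_BUILTINS configuration described above:
#
#     result = {}
#     limited_exec("x = 1 + 1", result)       # validated, then executed in-process
#     assert result['x'] == 2
#
#     try:
#         limited_exec("open('secret.txt')")  # denied builtin
#     except LimitedExecCodeException, err:
#         print err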
#----------------------------------------------------------------------
# Basic tests.
#----------------------------------------------------------------------
import unittest
class TestLimitedExec(unittest.TestCase):
def test_builtin(self):
# attempt to access an unsafe builtin
self.assertRaises(LimitedExecException,
limited_exec, "open('test.txt', 'w')")
def test_getattr(self):
# attempt to get around direct attr access
self.assertRaises(LimitedExecException, \
limited_exec, "getattr(int, '__abs__')")
def test_func_globals(self):
# attempt to access the global environment where the function was defined
self.assertRaises(LimitedExecException, \
limited_exec, "def x(): pass; print x.func_globals")
def test_lowlevel(self):
# low-level tricks to access 'object'
self.assertRaises(LimitedExecException, \
limited_exec, "().__class__.mro()[1].__subclasses__()")
def test_timeout_ok(self):
# attempt to execute 'slow' code which finishes within the time limit
def test(): time.sleep(2)
env = {'test':test}
limited_exec("test()", env, timeout_secs = 5)
def test_timeout_exceed(self):
# attempt to execute code which never terminates
self.assertRaises(LimitedExecException, \
limited_exec, "while 1: pass")
def test_invalid_context(self):
# can't pass an environment with modules or builtins
env = {'f' : __builtins__.open, 'g' : time}
self.assertRaises(LimitedExecException, \
limited_exec, "print 1", env)
def test_callback(self):
# modify local variable via callback
self.value = 0
def test(): self.value = 1
env = {'test':test}
limited_exec("test()", env)
self.assertEqual(self.value, 1)
if __name__ == "__main__":
unittest.main()
|
|
from binascii import b2a_hex
try:
import cPickle as pickle
except ImportError:
import pickle
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType, OGRException,
OGRIndexError, SpatialReference, CoordTransform, GDAL_VERSION)
from django.contrib.gis.gdal.prototypes.geom import GEOJSON
from django.contrib.gis.geometry.test_data import TestDataMixin
from django.utils import unittest
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
def test00a_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
try:
g = OGRGeomType(1)
g = OGRGeomType(7)
g = OGRGeomType('point')
g = OGRGeomType('GeometrycollectioN')
g = OGRGeomType('LINearrING')
g = OGRGeomType('Unknown')
except:
self.fail('Could not create an OGRGeomType object!')
# Should raise OGRException on these invalid inputs.
self.assertRaises(OGRException, OGRGeomType, 23)
self.assertRaises(OGRException, OGRGeomType, 'fooD')
self.assertRaises(OGRException, OGRGeomType, 9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(True, OGRGeomType(1) == OGRGeomType(1))
self.assertEqual(True, OGRGeomType(7) == 'GeometryCollection')
self.assertEqual(True, OGRGeomType('point') == 'POINT')
self.assertEqual(False, OGRGeomType('point') == 2)
self.assertEqual(True, OGRGeomType('unknown') == 0)
self.assertEqual(True, OGRGeomType(6) == 'MULtiPolyGON')
self.assertEqual(False, OGRGeomType(1) != OGRGeomType('point'))
self.assertEqual(True, OGRGeomType('POINT') != OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertEqual(None, OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test00b_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertTrue(OGRGeomType(wkb25bit + 1) == 'Point25D')
self.assertTrue(OGRGeomType('MultiLineString25D') == (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test01a_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test01a_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
# Now test consumption with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test01b_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test01c_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex, geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test01d_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex)
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test01e_json(self):
"Testing GeoJSON input/output."
if not GEOJSON: return
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
self.assertEqual(g.json, geom.json)
self.assertEqual(g.json, geom.geojson)
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
def test02_points(self):
"Testing Point objects."
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test03_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
self.assertEqual(mgeom1, mgeom2) # they should be equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test04_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(True, linestr == OGRGeometry(ls.wkt))
self.assertEqual(True, linestr != prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test05_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(True, mlinestr == OGRGeometry(mls.wkt))
self.assertEqual(True, mlinestr != prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test06_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
#self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(True, lr == OGRGeometry(rr.wkt))
self.assertEqual(True, lr != prev)
prev = lr
def test07a_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180,-90,180,90)
p = OGRGeometry.from_bbox( bbox )
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(True, poly == OGRGeometry(p.wkt))
self.assertEqual(True, poly != prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test07b_closepolygons(self):
"Testing closing Polygon objects."
# Neither ring in this geometry is closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
print("\nBEGIN - expecting IllegalArgumentException; safe to ignore.\n")
try:
c = poly.centroid
except OGRException:
# Should raise an OGR exception, rings are not closed
pass
else:
self.fail('Should have raised an OGRException!')
print("\nEND - expecting IllegalArgumentException; safe to ignore.\n")
# Closing the rings -- doesn't work on GDAL versions 1.4.1 and below:
# http://trac.osgeo.org/gdal/ticket/1673
if GDAL_VERSION <= (1, 4, 1): return
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test08_multipolygons(self):
"Testing MultiPolygon objects."
prev = OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test09a_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
# Ensuring all children geometries (polygons and their rings) all
# return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
# Ensuring SRS propagate in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
# Iterating through the multipolygon after the spatial reference
# has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
def test09b_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using an srid, a SpatialReference object, and a CoordTransform object
# for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test09c_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test10_difference(self):
"Testing difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test11_intersection(self):
"Testing intersects() and intersection()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertEqual(True, a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test12_symdifference(self):
"Testing sym_difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test13_union(self):
"Testing union()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test14_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(OGRException, mp.add, pnt)
# GeometryCollection.add may take an OGRGeometry (if it is another collection
# of the same type, all child geometries will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3): self.assertEqual(mpoly, tmp)
def test15_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test16_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test17_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test18_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
def test19_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertNotEqual(None, OGRGeometry('POINT(0 0)'))
self.assertEqual(False, OGRGeometry('LINESTRING(0 0, 1 1)') == 3)
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(OGRGeomTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
|
import sys
import pytest
from desmod.config import (
ConfigError,
NamedManager,
_safe_eval,
apply_user_config,
apply_user_overrides,
factorial_config,
fuzzy_lookup,
parse_user_factor,
parse_user_factors,
)
@pytest.fixture
def config():
return {
'foo.bar.baz': 17,
'foo.bar.biz': 1.23,
'abc.def.baz': False,
'a.b.c': 'something',
'd.e.f': [3, 2, 1],
'g.h.i': {'a': 1, 'b': 2},
}
@pytest.fixture
def named_mgr():
return NamedManager()
def test_named_reuse(named_mgr):
named_mgr.name('xxx', [], {'x': 0})
with pytest.raises(ConfigError):
named_mgr.name('xxx', [], {'y': 1})
with pytest.raises(ConfigError):
named_mgr.resolve('yyy')
def test_named_resolve(named_mgr):
named_mgr.name('www', config={'w': 0})
named_mgr.name('xxx', [], {'x': 1}, category='thing', doc='documentation')
named_mgr.name('yyy', ['xxx', 'www'], {'y': 2})
named_mgr.name('zzz', depend=['yyy'], config={'z': 3})
named_mgr.name('qqq', ['zzz'])
assert named_mgr.resolve('qqq') == {'w': 0, 'x': 1, 'y': 2, 'z': 3}
assert set(nc.name for nc in named_mgr) == {'www', 'xxx', 'yyy', 'zzz', 'qqq'}
for nc in named_mgr:
if nc.name == 'xxx':
assert nc.category == 'thing' and nc.doc == 'documentation'
else:
assert not nc.category and not nc.doc
@pytest.mark.parametrize(
'fuzzy_key, expected',
[
('foo', ConfigError),
('b.foo', ('a.b.foo', 1)),
('d.foo', ('c.d.foo', 3)),
('bar', ('a.b.bar', 2)),
('o', ('e.f.o', 4)),
('.o', ('e.f.o', 4)),
('x.y.z', ('x.y.z', 5)),
('y.z', ConfigError),
],
)
def test_fuzzy_lookup(fuzzy_key, expected):
config = {
'a.b.foo': 1,
'a.b.bar': 2,
'c.d.foo': 3,
'e.f.o': 4,
'x.y.z': 5,
'w.x.y.z': 6,
}
if isinstance(expected, type) and issubclass(expected, Exception):
with pytest.raises(expected):
fuzzy_lookup(config, fuzzy_key)
else:
assert fuzzy_lookup(config, fuzzy_key) == expected
def test_user_override(config):
apply_user_overrides(
config, [('biz', '12'), ('e.f', 'range(4)'), ('g.h.i', 'zip("abc", range(3))')]
)
assert config['foo.bar.biz'] == 12.0
assert config['d.e.f'] == [0, 1, 2, 3]
assert config['g.h.i'] == {'a': 0, 'b': 1, 'c': 2}
def test_user_override_type_mismatch(config):
with pytest.raises(ConfigError):
apply_user_overrides(config, [('d.e.f', 'os.system("clear")')])
def test_user_override_invalid_value(config):
with pytest.raises(ConfigError):
apply_user_overrides(config, [('baz', '1')])
def test_user_override_invalid_key(config):
with pytest.raises(ConfigError):
apply_user_overrides(config, [('not.a.key', '1')])
def test_user_override_int(config):
apply_user_overrides(config, [('bar.baz', '18')])
assert config['foo.bar.baz'] == 18
def test_user_override_int_invalid(config):
with pytest.raises(ConfigError):
apply_user_overrides(config, [('bar.baz', 'eighteen')])
def test_user_override_bool(config):
apply_user_overrides(config, [('def.baz', '1')])
assert config['abc.def.baz'] is True
apply_user_overrides(config, [('def.baz', 'True')])
assert config['abc.def.baz'] is True
apply_user_overrides(config, [('def.baz', 'False')])
assert config['abc.def.baz'] is False
def test_user_override_str(config):
apply_user_overrides(config, [('a.b.c', 'just a string')])
assert config['a.b.c'] == 'just a string'
def test_user_override_str_int(config):
apply_user_overrides(config, [('a.b.c', '123')])
assert config['a.b.c'] == '123'
def test_user_config(config):
user_config = {
'foo.bar.baz': 99,
'g.h.i': {'c': 1, 'd': 2},
}
apply_user_config(config, user_config)
assert config['foo.bar.baz'] == 99
assert config['g.h.i'] == {'c': 1, 'd': 2}
def test_user_config_bad_key(config):
user_config = {'a.bad.key': 1}
with pytest.raises(ConfigError):
apply_user_config(config, user_config)
def test_user_config_bad_value(config):
user_config = {'foo.bar.baz': 'not an int'}
with pytest.raises(ConfigError):
apply_user_config(config, user_config)
@pytest.mark.skipif(
hasattr(sys, 'pypy_version_info'), reason="PyPy's eval() mishandles locals dict"
)
def test_safe_eval_str_builtin_alias():
assert _safe_eval('oct', str) == 'oct'
assert _safe_eval('oct') is oct
with pytest.raises(ConfigError):
_safe_eval('oct', eval_locals={})
assert _safe_eval('oct', str, {}) == 'oct'
def test_safe_eval_dict():
with pytest.raises(ConfigError):
_safe_eval('oct', coerce_type=dict)
@pytest.mark.parametrize(
'user_keys, user_exprs, expected',
[
('foo', '1,2,3', (['a.b.foo'], [[1], [2], [3]])),
('bar', '1.2, 3, 4.5', (['a.b.bar'], [[1.2], [3.0], [4.5]])),
('b.baz', '"abc"', (['a.b.baz'], [['a'], ['b'], ['c']])),
('b.baz', '"abc","def"', (['a.b.baz'], [['abc'], ['def']])),
('d.baz', '1, "y", 0', (['c.d.baz'], [[True], [True], [False]])),
('foo,bar', '(1,1),(2,2)', (['a.b.foo', 'a.b.bar'], [[1, 1.0], [2, 2.0]])),
],
)
def test_parse_user_factor(user_keys, user_exprs, expected):
config = {
'a.b.foo': 1,
'a.b.bar': 2.0,
'a.b.baz': 'three',
'c.d.baz': True,
}
factor = parse_user_factor(config, user_keys, user_exprs)
assert expected == factor
assert all(
isinstance(value, type(expected_value))
for value, expected_value in zip(factor[1], expected[1])
)
def test_parse_user_factors(config):
config = {
'a.b.foo': 1,
'a.b.bar': 2.0,
'a.b.baz': 'three',
'c.d.baz': True,
}
user_factors = [['foo', '1,2,3'], ['bar', '2.0, 4.0']]
factors = parse_user_factors(config, user_factors)
assert factors[0] == (['a.b.foo'], [[1], [2], [3]])
assert factors[1] == (['a.b.bar'], [[2.0], [4.0]])
@pytest.mark.parametrize(
'user_keys, user_exprs, err_str',
[
('baz', 'True, False', 'ambiguous'),
('foo', '"one", "two"', 'coerce'),
('foo', '1', 'sequence'),
],
)
def test_parse_user_factor_invalid(user_keys, user_exprs, err_str):
config = {
'a.b.foo': 1,
'a.b.bar': 2.0,
'a.b.baz': 'three',
'c.d.baz': True,
}
with pytest.raises(ConfigError) as e:
parse_user_factor(config, user_keys, user_exprs)
print(e)
assert err_str in str(e)
def test_factorial_config():
factors = [
(['k0', 'k1'], [[0, 1], [2, 3]]),
(['k2'], [[4], [5], [6]]),
]
expected = [
{'k0': 0, 'k1': 1, 'k2': 4},
{'k0': 0, 'k1': 1, 'k2': 5},
{'k0': 0, 'k1': 1, 'k2': 6},
{'k0': 2, 'k1': 3, 'k2': 4},
{'k0': 2, 'k1': 3, 'k2': 5},
{'k0': 2, 'k1': 3, 'k2': 6},
]
assert list(factorial_config({}, factors)) == expected
def test_factorial_config_special():
factors = [
(['k0', 'k1'], [[0, 1], [2, 3]]),
(['k2'], [[4], [5], [6]]),
]
expected = [
{'k0': 0, 'k1': 1, 'k2': 4, 'special': [('k0', 0), ('k1', 1), ('k2', 4)]},
{'k0': 0, 'k1': 1, 'k2': 5, 'special': [('k0', 0), ('k1', 1), ('k2', 5)]},
{'k0': 0, 'k1': 1, 'k2': 6, 'special': [('k0', 0), ('k1', 1), ('k2', 6)]},
{'k0': 2, 'k1': 3, 'k2': 4, 'special': [('k0', 2), ('k1', 3), ('k2', 4)]},
{'k0': 2, 'k1': 3, 'k2': 5, 'special': [('k0', 2), ('k1', 3), ('k2', 5)]},
{'k0': 2, 'k1': 3, 'k2': 6, 'special': [('k0', 2), ('k1', 3), ('k2', 6)]},
]
fc = factorial_config({}, factors, 'special')
assert list(fc) == expected
|
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from collections import OrderedDict
from fairseq import utils
from fairseq.models import (
FairseqMultiModel,
register_model,
register_model_architecture,
)
from fairseq.models.transformer import (
Embedding,
TransformerDecoder,
TransformerEncoder,
TransformerModel,
base_architecture,
)
from fairseq.utils import safe_hasattr
@register_model("multilingual_transformer")
class MultilingualTransformerModel(FairseqMultiModel):
"""Train Transformer models for multiple language pairs simultaneously.
Requires `--task multilingual_translation`.
We inherit all arguments from TransformerModel and assume that all language
pairs use a single Transformer architecture. In addition, we provide several
options that are specific to the multilingual setting.
Args:
--share-encoder-embeddings: share encoder embeddings across all source languages
--share-decoder-embeddings: share decoder embeddings across all target languages
--share-encoders: share all encoder params (incl. embeddings) across all source languages
--share-decoders: share all decoder params (incl. embeddings) across all target languages
"""
def __init__(self, encoders, decoders):
super().__init__(encoders, decoders)
@staticmethod
def add_args(parser):
"""Add model-specific arguments to the parser."""
TransformerModel.add_args(parser)
parser.add_argument(
"--share-encoder-embeddings",
action="store_true",
help="share encoder embeddings across languages",
)
parser.add_argument(
"--share-decoder-embeddings",
action="store_true",
help="share decoder embeddings across languages",
)
parser.add_argument(
"--share-encoders",
action="store_true",
help="share encoders across languages",
)
parser.add_argument(
"--share-decoders",
action="store_true",
help="share decoders across languages",
)
@classmethod
def build_model(cls, args, task):
"""Build a new model instance."""
from fairseq.tasks.multilingual_translation import MultilingualTranslationTask
assert isinstance(task, MultilingualTranslationTask)
# make sure all arguments are present in older models
base_multilingual_architecture(args)
if not safe_hasattr(args, "max_source_positions"):
args.max_source_positions = 1024
if not safe_hasattr(args, "max_target_positions"):
args.max_target_positions = 1024
src_langs = [lang_pair.split("-")[0] for lang_pair in task.model_lang_pairs]
tgt_langs = [lang_pair.split("-")[1] for lang_pair in task.model_lang_pairs]
if args.share_encoders:
args.share_encoder_embeddings = True
if args.share_decoders:
args.share_decoder_embeddings = True
def build_embedding(dictionary, embed_dim, path=None):
num_embeddings = len(dictionary)
padding_idx = dictionary.pad()
emb = Embedding(num_embeddings, embed_dim, padding_idx)
# if provided, load from preloaded dictionaries
if path:
embed_dict = utils.parse_embedding(path)
utils.load_embedding(embed_dict, dictionary, emb)
return emb
# build shared embeddings (if applicable)
shared_encoder_embed_tokens, shared_decoder_embed_tokens = None, None
if args.share_all_embeddings:
if args.encoder_embed_dim != args.decoder_embed_dim:
raise ValueError(
"--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim"
)
if args.decoder_embed_path and (
args.decoder_embed_path != args.encoder_embed_path
):
raise ValueError(
"--share-all-embeddings not compatible with --decoder-embed-path"
)
shared_encoder_embed_tokens = FairseqMultiModel.build_shared_embeddings(
dicts=task.dicts,
langs=task.langs,
embed_dim=args.encoder_embed_dim,
build_embedding=build_embedding,
pretrained_embed_path=args.encoder_embed_path,
)
shared_decoder_embed_tokens = shared_encoder_embed_tokens
args.share_decoder_input_output_embed = True
else:
if args.share_encoder_embeddings:
shared_encoder_embed_tokens = FairseqMultiModel.build_shared_embeddings(
dicts=task.dicts,
langs=src_langs,
embed_dim=args.encoder_embed_dim,
build_embedding=build_embedding,
pretrained_embed_path=args.encoder_embed_path,
)
if args.share_decoder_embeddings:
shared_decoder_embed_tokens = FairseqMultiModel.build_shared_embeddings(
dicts=task.dicts,
langs=tgt_langs,
embed_dim=args.decoder_embed_dim,
build_embedding=build_embedding,
pretrained_embed_path=args.decoder_embed_path,
)
# encoders/decoders for each language
lang_encoders, lang_decoders = {}, {}
def get_encoder(lang):
if lang not in lang_encoders:
if shared_encoder_embed_tokens is not None:
encoder_embed_tokens = shared_encoder_embed_tokens
else:
encoder_embed_tokens = build_embedding(
task.dicts[lang],
args.encoder_embed_dim,
args.encoder_embed_path,
)
lang_encoders[lang] = cls._get_module_class(
True, args, task.dicts[lang], encoder_embed_tokens, src_langs
)
return lang_encoders[lang]
def get_decoder(lang):
if lang not in lang_decoders:
if shared_decoder_embed_tokens is not None:
decoder_embed_tokens = shared_decoder_embed_tokens
else:
decoder_embed_tokens = build_embedding(
task.dicts[lang],
args.decoder_embed_dim,
args.decoder_embed_path,
)
lang_decoders[lang] = cls._get_module_class(
False, args, task.dicts[lang], decoder_embed_tokens, tgt_langs
)
return lang_decoders[lang]
# shared encoders/decoders (if applicable)
shared_encoder, shared_decoder = None, None
if args.share_encoders:
shared_encoder = get_encoder(src_langs[0])
if args.share_decoders:
shared_decoder = get_decoder(tgt_langs[0])
encoders, decoders = OrderedDict(), OrderedDict()
for lang_pair, src, tgt in zip(task.model_lang_pairs, src_langs, tgt_langs):
encoders[lang_pair] = (
shared_encoder if shared_encoder is not None else get_encoder(src)
)
decoders[lang_pair] = (
shared_decoder if shared_decoder is not None else get_decoder(tgt)
)
return MultilingualTransformerModel(encoders, decoders)
@classmethod
def _get_module_class(cls, is_encoder, args, lang_dict, embed_tokens, langs):
module_class = TransformerEncoder if is_encoder else TransformerDecoder
return module_class(args, lang_dict, embed_tokens)
def load_state_dict(self, state_dict, strict=True, model_cfg=None):
state_dict_subset = state_dict.copy()
for k, _ in state_dict.items():
assert k.startswith("models.")
lang_pair = k.split(".")[1]
if lang_pair not in self.models:
del state_dict_subset[k]
super().load_state_dict(state_dict_subset, strict=strict, model_cfg=model_cfg)
@register_model_architecture("multilingual_transformer", "multilingual_transformer")
def base_multilingual_architecture(args):
base_architecture(args)
args.share_encoder_embeddings = getattr(args, "share_encoder_embeddings", False)
args.share_decoder_embeddings = getattr(args, "share_decoder_embeddings", False)
args.share_encoders = getattr(args, "share_encoders", False)
args.share_decoders = getattr(args, "share_decoders", False)
@register_model_architecture(
"multilingual_transformer", "multilingual_transformer_iwslt_de_en"
)
def multilingual_transformer_iwslt_de_en(args):
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 1024)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 4)
args.encoder_layers = getattr(args, "encoder_layers", 6)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 512)
args.decoder_ffn_embed_dim = getattr(args, "decoder_ffn_embed_dim", 1024)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 4)
args.decoder_layers = getattr(args, "decoder_layers", 6)
base_multilingual_architecture(args)
|
|
# flake8: noqa
# Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Airflow documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 20:50:01 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
"""Configuration of Airflow Docs"""
import os
import sys
from glob import glob
from itertools import chain
from typing import Dict, List
import airflow
from airflow.configuration import default_config_yaml
try:
import sphinx_airflow_theme # pylint: disable=unused-import
airflow_theme_is_available = True
except ImportError:
airflow_theme_is_available = False
autodoc_mock_imports = [
'MySQLdb',
'adal',
'analytics',
'azure',
'azure.cosmos',
'azure.datalake',
'azure.kusto',
'azure.mgmt',
'boto3',
'botocore',
'bson',
'cassandra',
'celery',
'cloudant',
'cryptography',
'cx_Oracle',
'datadog',
'distributed',
'docker',
'google',
'google_auth_httplib2',
'googleapiclient',
'grpc',
'hdfs',
'httplib2',
'jaydebeapi',
'jenkins',
'jira',
'kubernetes',
'msrestazure',
'pandas',
'pandas_gbq',
'paramiko',
'pinotdb',
'psycopg2',
'pydruid',
'pyhive',
'pymongo',
'pymssql',
'pysftp',
'qds_sdk',
'redis',
'simple_salesforce',
'slackclient',
'smbclient',
'snowflake',
'sshtunnel',
'tenacity',
'vertica_python',
'winrm',
'zdesk',
]
# Hack to allow a piece of the code to behave differently while the docs
# are being built. The main objective was to alter the behavior of
# utils.apply_defaults, which was hiding function headers
os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.join(os.path.dirname(__file__), 'exts'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.graphviz',
'sphinxarg.ext',
'sphinxcontrib.httpdomain',
'sphinxcontrib.jinja',
'sphinx.ext.intersphinx',
'autoapi.extension',
'exampleinclude',
'docroles',
'removemarktransform',
]
autodoc_default_options = {
'show-inheritance': True,
'members': True
}
jinja_contexts = {
'config_ctx': {"configs": default_config_yaml()}
}
viewcode_follow_imported_members = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Airflow'
# copyright = ''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '1.0.0'
version = airflow.__version__
# The full version, including alpha/beta/rc tags.
# release = '1.0.0'
release = airflow.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns: List[str] = [
# We only link to selected subpackages.
'_api/airflow/index.rst',
# Required by airflow/contrib/plugins
'_api/main',
# We have custom page - operators-and-hooks-ref.rst
'_api/airflow/providers/index.rst',
# Packages with subpackages
"_api/airflow/providers/amazon/index.rst",
"_api/airflow/providers/microsoft/index.rst",
"_api/airflow/providers/google/index.rst",
"_api/airflow/providers/apache/index.rst",
"_api/airflow/providers/yandex/index.rst",
"_api/airflow/providers/cncf/index.rst",
# Utils for internal use
'_api/airflow/providers/google/cloud/utils',
# Templates or partials
'autoapi_templates',
'howto/operator/gcp/_partials',
]
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
# Generate top-level exclude patterns
for path in glob(f"{ROOT_DIR}/airflow/*"):
name = os.path.basename(path)
if os.path.isfile(path):
exclude_patterns.append(f"_api/airflow/{name.rpartition('.')[0]}")
browsable_packages = ["operators", "hooks", "sensors", "providers", "executors", "models"]
if os.path.isdir(path) and name not in browsable_packages:
exclude_patterns.append(f"_api/airflow/{name}")
# Generate list of provider package index pages
providers_packages_roots = {
name.rpartition("/")[0]
for entity in ["hooks", "operators", "sensors"]
for name in chain(glob(f"{ROOT_DIR}/airflow/providers/**/{entity}", recursive=True))
}
providers_package_indexes = {
f"_api/{os.path.relpath(name, ROOT_DIR)}/index.rst"
for name in providers_packages_roots
}
exclude_patterns.extend(providers_package_indexes)
# Generate list of example_dags
excluded_example_dags = (
f"_api/{os.path.relpath(name, ROOT_DIR)}"
for name in glob(f"{ROOT_DIR}/airflow/providers/**/example_dags", recursive=True)
)
exclude_patterns.extend(excluded_example_dags)
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
intersphinx_mapping = {
'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest/', None),
'mongodb': ('https://api.mongodb.com/python/current/', None),
'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None),
'python': ('https://docs.python.org/3/', None),
'requests': ('https://requests.readthedocs.io/en/master/', None),
'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest/', None),
'hdfs': ('https://hdfscli.readthedocs.io/en/latest/', None),
# google-cloud-python
'google-cloud-automl': ('https://googleapis.dev/python/automl/latest', None),
'google-cloud-bigquery': ('https://googleapis.dev/python/bigquery/latest', None),
'google-cloud-bigquery-datatransfer': ('https://googleapis.dev/python/bigquerydatatransfer/latest', None),
'google-cloud-bigquery-storage': ('https://googleapis.dev/python/bigquerystorage/latest', None),
'google-cloud-bigtable': ('https://googleapis.dev/python/bigtable/latest', None),
'google-cloud-container': ('https://googleapis.dev/python/container/latest', None),
'google-cloud-core': ('https://googleapis.dev/python/google-cloud-core/latest', None),
'google-cloud-datastore': ('https://googleapis.dev/python/datastore/latest', None),
'google-cloud-dlp': ('https://googleapis.dev/python/dlp/latest', None),
'google-cloud-kms': ('https://googleapis.dev/python/cloudkms/latest', None),
'google-cloud-language': ('https://googleapis.dev/python/language/latest', None),
'google-cloud-pubsub': ('https://googleapis.dev/python/pubsub/latest', None),
'google-cloud-redis': ('https://googleapis.dev/python/redis/latest', None),
'google-cloud-spanner': ('https://googleapis.dev/python/spanner/latest', None),
'google-cloud-speech': ('https://googleapis.dev/python/speech/latest', None),
'google-cloud-storage': ('https://googleapis.dev/python/storage/latest', None),
'google-cloud-tasks': ('https://googleapis.dev/python/cloudtasks/latest', None),
'google-cloud-texttospeech': ('https://googleapis.dev/python/texttospeech/latest', None),
'google-cloud-translate': ('https://googleapis.dev/python/translation/latest', None),
'google-cloud-videointelligence': ('https://googleapis.dev/python/videointelligence/latest', None),
'google-cloud-vision': ('https://googleapis.dev/python/vision/latest', None),
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
if airflow_theme_is_available:
html_theme = 'sphinx_airflow_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Airflow Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
html_favicon = "../airflow/www/static/pin_32.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# A list of JavaScript filename. The entry must be a filename string or a
# tuple containing the filename string and the attributes dictionary. The
# filename must be relative to the html_static_path, or a full URI with
# scheme like http://example.org/script.js.
html_js_files = ['jira-links.js']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
if airflow_theme_is_available:
html_sidebars = {
'**': [
'version-selector.html',
'searchbox.html',
'globaltoc.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Airflowdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
} # type: Dict[str,str]
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Airflow.tex', 'Airflow Documentation',
'Apache Airflow', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'airflow', 'Airflow Documentation',
['Apache Airflow'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [(
'index', 'Airflow', 'Airflow Documentation',
'Apache Airflow', 'Airflow',
'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
'Miscellaneous'
), ]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# sphinx-autoapi configuration
# See:
# https://sphinx-autoapi.readthedocs.io/en/latest/config.html
# Paths (relative or absolute) to the source code that you wish to generate
# your API documentation from.
autoapi_dirs = [
os.path.abspath('../airflow'),
]
# A directory that has user-defined templates to override our default templates.
autoapi_template_dir = 'autoapi_templates'
# A list of patterns to ignore when finding files
autoapi_ignore = [
'*/airflow/kubernetes/kubernetes_request_factory/*',
'*/airflow/contrib/sensors/*',
'*/airflow/contrib/hooks/*',
'*/airflow/contrib/operators/*',
'*/node_modules/*',
'*/migrations/*',
]
# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True
# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
autoapi_root = '_api'
# -- Options for example include ------------------------------------------
exampleinclude_sourceroot = os.path.abspath('..')
# -- Additional HTML Context variable
html_context = {
# Google Analytics ID.
# For more information look at:
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/layout.html#L222-L232
'theme_analytics_id': 'UA-140539454-1',
}
if airflow_theme_is_available:
html_context = {
# Variables used to build a button for editing the source code
#
# The path is created according to the following template:
#
# https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/
# {{ theme_vcs_pageview_mode|default("blob") }}/{{ github_version }}{{ conf_py_path }}
# {{ pagename }}{{ suffix }}
#
# More information:
# https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl#L100-L103
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/breadcrumbs.html#L45
# https://github.com/apache/airflow-site/blob/91f760c/sphinx_airflow_theme/sphinx_airflow_theme/suggest_change_button.html#L36-L40
#
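# With the values below, a page named "howto/operator" would (illustratively)
# resolve to:
# https://github.com/apache/airflow/edit/master/docs/howto/operator.rst
#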
'theme_vcs_pageview_mode': 'edit',
'conf_py_path': '/docs/',
'github_user': 'apache',
'github_repo': 'airflow',
'github_version': 'master',
'display_github': 'master',
'suffix': '.rst',
}
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest import mock
from google.cloud.dataproc_v1beta2.types import JobStatus # pylint: disable=no-name-in-module
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder
from airflow.version import version
AIRFLOW_VERSION = "v" + version.replace(".", "-").replace("+", "-")
JOB = {"job": "test-job"}
JOB_ID = "test-id"
TASK_ID = "test-task-id"
GCP_LOCATION = "global"
GCP_PROJECT = "test-project"
CLUSTER_CONFIG = {"test": "test"}
LABELS = {"test": "test"}
CLUSTER_NAME = "cluster-name"
CLUSTER = {
"cluster_name": CLUSTER_NAME,
"config": CLUSTER_CONFIG,
"labels": LABELS,
"project_id": GCP_PROJECT,
}
PARENT = "parent"
NAME = "name"
BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}"
DATAPROC_STRING = "airflow.providers.google.cloud.hooks.dataproc.{}"
def mock_init(*args, **kwargs):
pass
class TestDataprocHook(unittest.TestCase):
def setUp(self):
with mock.patch(BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_init):
self.hook = DataprocHook(gcp_conn_id="test")
@mock.patch(DATAPROC_STRING.format("DataprocHook._get_credentials"))
@mock.patch(DATAPROC_STRING.format("DataprocHook.client_info"), new_callable=mock.PropertyMock)
@mock.patch(DATAPROC_STRING.format("ClusterControllerClient"))
def test_get_cluster_client(self, mock_client, mock_client_info, mock_get_credentials):
self.hook.get_cluster_client(location=GCP_LOCATION)
mock_client.assert_called_once_with(
credentials=mock_get_credentials.return_value,
client_info=mock_client_info.return_value,
client_options={"api_endpoint": f"{GCP_LOCATION}-dataproc.googleapis.com:443"},
)
@mock.patch(DATAPROC_STRING.format("DataprocHook._get_credentials"))
@mock.patch(DATAPROC_STRING.format("DataprocHook.client_info"), new_callable=mock.PropertyMock)
@mock.patch(DATAPROC_STRING.format("WorkflowTemplateServiceClient"))
def test_get_template_client(self, mock_client, mock_client_info, mock_get_credentials):
_ = self.hook.get_template_client
mock_client.assert_called_once_with(
credentials=mock_get_credentials.return_value, client_info=mock_client_info.return_value
)
@mock.patch(DATAPROC_STRING.format("DataprocHook._get_credentials"))
@mock.patch(DATAPROC_STRING.format("DataprocHook.client_info"), new_callable=mock.PropertyMock)
@mock.patch(DATAPROC_STRING.format("JobControllerClient"))
def test_get_job_client(self, mock_client, mock_client_info, mock_get_credentials):
self.hook.get_job_client(location=GCP_LOCATION)
mock_client.assert_called_once_with(
credentials=mock_get_credentials.return_value,
client_info=mock_client_info.return_value,
client_options={"api_endpoint": f"{GCP_LOCATION}-dataproc.googleapis.com:443"},
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_create_cluster(self, mock_client):
self.hook.create_cluster(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster_name=CLUSTER_NAME,
cluster_config=CLUSTER_CONFIG,
labels=LABELS,
)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.create_cluster.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster=CLUSTER,
metadata=None,
request_id=None,
retry=None,
timeout=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_delete_cluster(self, mock_client):
self.hook.delete_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.delete_cluster.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster_name=CLUSTER_NAME,
cluster_uuid=None,
metadata=None,
request_id=None,
retry=None,
timeout=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_diagnose_cluster(self, mock_client):
self.hook.diagnose_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.diagnose_cluster.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster_name=CLUSTER_NAME,
metadata=None,
retry=None,
timeout=None,
)
mock_client.return_value.diagnose_cluster.return_value.result.assert_called_once_with()
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_get_cluster(self, mock_client):
self.hook.get_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.get_cluster.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster_name=CLUSTER_NAME,
metadata=None,
retry=None,
timeout=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_list_clusters(self, mock_client):
filter_ = "filter"
self.hook.list_clusters(project_id=GCP_PROJECT, region=GCP_LOCATION, filter_=filter_)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.list_clusters.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
filter_=filter_,
page_size=None,
metadata=None,
retry=None,
timeout=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client"))
def test_update_cluster(self, mock_client):
update_mask = "update-mask"
self.hook.update_cluster(
project_id=GCP_PROJECT,
location=GCP_LOCATION,
cluster=CLUSTER,
cluster_name=CLUSTER_NAME,
update_mask=update_mask,
)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.update_cluster.assert_called_once_with(
project_id=GCP_PROJECT,
region=GCP_LOCATION,
cluster=CLUSTER,
cluster_name=CLUSTER_NAME,
update_mask=update_mask,
graceful_decommission_timeout=None,
metadata=None,
request_id=None,
retry=None,
timeout=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
def test_create_workflow_template(self, mock_client):
template = {"test": "test"}
mock_client.region_path.return_value = PARENT
self.hook.create_workflow_template(location=GCP_LOCATION, template=template, project_id=GCP_PROJECT)
mock_client.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION)
mock_client.create_workflow_template.assert_called_once_with(
parent=PARENT, template=template, retry=None, timeout=None, metadata=None
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
def test_instantiate_workflow_template(self, mock_client):
template_name = "template_name"
mock_client.workflow_template_path.return_value = NAME
self.hook.instantiate_workflow_template(
location=GCP_LOCATION, template_name=template_name, project_id=GCP_PROJECT
)
mock_client.workflow_template_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION, template_name)
mock_client.instantiate_workflow_template.assert_called_once_with(
name=NAME, version=None, parameters=None, request_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client"))
def test_instantiate_inline_workflow_template(self, mock_client):
template = {"test": "test"}
mock_client.region_path.return_value = PARENT
self.hook.instantiate_inline_workflow_template(
location=GCP_LOCATION, template=template, project_id=GCP_PROJECT
)
mock_client.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION)
mock_client.instantiate_inline_workflow_template.assert_called_once_with(
parent=PARENT, template=template, request_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_job"))
def test_wait_for_job(self, mock_get_job):
mock_get_job.side_effect = [
mock.MagicMock(status=mock.MagicMock(state=JobStatus.RUNNING)),
mock.MagicMock(status=mock.MagicMock(state=JobStatus.ERROR)),
]
with self.assertRaises(AirflowException):
self.hook.wait_for_job(job_id=JOB_ID, location=GCP_LOCATION, project_id=GCP_PROJECT, wait_time=0)
calls = [
mock.call(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT),
mock.call(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT),
]
mock_get_job.assert_has_calls(calls)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
def test_get_job(self, mock_client):
self.hook.get_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.get_job.assert_called_once_with(
region=GCP_LOCATION,
job_id=JOB_ID,
project_id=GCP_PROJECT,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
def test_submit_job(self, mock_client):
self.hook.submit_job(location=GCP_LOCATION, job=JOB, project_id=GCP_PROJECT)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.submit_job.assert_called_once_with(
region=GCP_LOCATION,
job=JOB,
project_id=GCP_PROJECT,
request_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.wait_for_job"))
@mock.patch(DATAPROC_STRING.format("DataprocHook.submit_job"))
def test_submit(self, mock_submit_job, mock_wait_for_job):
mock_submit_job.return_value.reference.job_id = JOB_ID
with self.assertWarns(DeprecationWarning):
self.hook.submit(project_id=GCP_PROJECT, job=JOB, region=GCP_LOCATION)
mock_submit_job.assert_called_once_with(location=GCP_LOCATION, project_id=GCP_PROJECT, job=JOB)
mock_wait_for_job.assert_called_once_with(
location=GCP_LOCATION, project_id=GCP_PROJECT, job_id=JOB_ID
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
def test_cancel_job(self, mock_client):
self.hook.cancel_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT)
mock_client.assert_called_once_with(location=GCP_LOCATION)
mock_client.return_value.cancel_job.assert_called_once_with(
region=GCP_LOCATION,
job_id=JOB_ID,
project_id=GCP_PROJECT,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(DATAPROC_STRING.format("DataprocHook.get_job_client"))
def test_cancel_job_deprecation_warning(self, mock_client):
with self.assertWarns(DeprecationWarning):
self.hook.cancel_job(job_id=JOB_ID, project_id=GCP_PROJECT)
mock_client.assert_called_once_with(location='global')
mock_client.return_value.cancel_job.assert_called_once_with(
region='global',
job_id=JOB_ID,
project_id=GCP_PROJECT,
retry=None,
timeout=None,
metadata=None,
)
class TestDataProcJobBuilder(unittest.TestCase):
def setUp(self) -> None:
self.job_type = "test"
self.builder = DataProcJobBuilder(
project_id=GCP_PROJECT,
task_id=TASK_ID,
cluster_name=CLUSTER_NAME,
job_type=self.job_type,
properties={"test": "test"},
)
@mock.patch(DATAPROC_STRING.format("uuid.uuid4"))
def test_init(self, mock_uuid):
mock_uuid.return_value = "uuid"
properties = {"test": "test"}
job = {
"job": {
"labels": {"airflow-version": AIRFLOW_VERSION},
"placement": {"cluster_name": CLUSTER_NAME},
"reference": {"job_id": TASK_ID + "_uuid", "project_id": GCP_PROJECT},
"test": {"properties": properties},
}
}
builder = DataProcJobBuilder(
project_id=GCP_PROJECT,
task_id=TASK_ID,
cluster_name=CLUSTER_NAME,
job_type="test",
properties=properties,
)
self.assertDictEqual(job, builder.job)
def test_add_labels(self):
labels = {"key": "value"}
self.builder.add_labels(labels)
self.assertIn("key", self.builder.job["job"]["labels"])
self.assertEqual("value", self.builder.job["job"]["labels"]["key"])
def test_add_variables(self):
variables = ["variable"]
self.builder.add_variables(variables)
self.assertEqual(variables, self.builder.job["job"][self.job_type]["script_variables"])
def test_add_args(self):
args = ["args"]
self.builder.add_args(args)
self.assertEqual(args, self.builder.job["job"][self.job_type]["args"])
def test_add_query(self):
query = ["query"]
self.builder.add_query(query)
self.assertEqual({"queries": [query]}, self.builder.job["job"][self.job_type]["query_list"])
def test_add_query_uri(self):
query_uri = "query_uri"
self.builder.add_query_uri(query_uri)
self.assertEqual(query_uri, self.builder.job["job"][self.job_type]["query_file_uri"])
def test_add_jar_file_uris(self):
jar_file_uris = ["jar_file_uris"]
self.builder.add_jar_file_uris(jar_file_uris)
self.assertEqual(jar_file_uris, self.builder.job["job"][self.job_type]["jar_file_uris"])
def test_add_archive_uris(self):
archive_uris = ["archive_uris"]
self.builder.add_archive_uris(archive_uris)
self.assertEqual(archive_uris, self.builder.job["job"][self.job_type]["archive_uris"])
def test_add_file_uris(self):
file_uris = ["file_uris"]
self.builder.add_file_uris(file_uris)
self.assertEqual(file_uris, self.builder.job["job"][self.job_type]["file_uris"])
def test_add_python_file_uris(self):
python_file_uris = ["python_file_uris"]
self.builder.add_python_file_uris(python_file_uris)
self.assertEqual(python_file_uris, self.builder.job["job"][self.job_type]["python_file_uris"])
def test_set_main_error(self):
with self.assertRaises(Exception):
self.builder.set_main("test", "test")
def test_set_main_class(self):
main = "main"
self.builder.set_main(main_class=main, main_jar=None)
self.assertEqual(main, self.builder.job["job"][self.job_type]["main_class"])
def test_set_main_jar(self):
main = "main"
self.builder.set_main(main_class=None, main_jar=main)
self.assertEqual(main, self.builder.job["job"][self.job_type]["main_jar_file_uri"])
def test_set_python_main(self):
main = "main"
self.builder.set_python_main(main)
self.assertEqual(main, self.builder.job["job"][self.job_type]["main_python_file_uri"])
@mock.patch(DATAPROC_STRING.format("uuid.uuid4"))
def test_set_job_name(self, mock_uuid):
uuid = "test_uuid"
mock_uuid.return_value = uuid
name = "name"
self.builder.set_job_name(name)
name += "_" + uuid[:8]
self.assertEqual(name, self.builder.job["job"]["reference"]["job_id"])
def test_build(self):
self.assertEqual(self.builder.job, self.builder.build())
|
|
from __future__ import unicode_literals, division, absolute_import
import argparse
import contextlib
from datetime import datetime, timedelta
from sqlalchemy import func
from flexget import options, plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.tools import console
try:
from flexget.plugins.filter.series import (Series, Episode, Release, SeriesTask, forget_series,
forget_series_episode, set_series_begin, normalize_series_name,
new_eps_after, get_latest_release)
except ImportError:
raise plugin.DependencyError(issued_by='cli_series', missing='series',
message='Series commandline interface not loaded')
def do_cli(manager, options):
if options.series_action == 'list':
display_summary(options)
elif options.series_action == 'show':
display_details(options.series_name)
elif options.series_action == 'forget':
forget(manager, options)
elif options.series_action == 'begin':
begin(manager, options)
def display_summary(options):
"""
Display series summary.
:param options: argparse options from the CLI
"""
formatting = ' %-30s %-10s %-10s %-20s'
console(formatting % ('Name', 'Latest', 'Age', 'Downloaded'))
console('-' * 79)
session = Session()
try:
query = (session.query(Series).outerjoin(Series.episodes).outerjoin(Episode.releases).
outerjoin(Series.in_tasks).group_by(Series.id))
if options.configured == 'configured':
query = query.having(func.count(SeriesTask.id) >= 1)
elif options.configured == 'unconfigured':
query = query.having(func.count(SeriesTask.id) < 1)
if options.premieres:
query = (query.having(func.max(Episode.season) <= 1).having(func.max(Episode.number) <= 2).
having(func.count(SeriesTask.id) < 1)).filter(Release.downloaded == True)
if options.new:
query = query.having(func.max(Episode.first_seen) > datetime.now() - timedelta(days=options.new))
if options.stale:
query = query.having(func.max(Episode.first_seen) < datetime.now() - timedelta(days=options.stale))
for series in query.order_by(Series.name).yield_per(10):
series_name = series.name
if len(series_name) > 30:
series_name = series_name[:27] + '...'
new_ep = ' '
behind = 0
status = 'N/A'
age = 'N/A'
episode_id = 'N/A'
latest = get_latest_release(series)
if latest:
if latest.first_seen > datetime.now() - timedelta(days=2):
new_ep = '>'
behind = new_eps_after(latest)
status = get_latest_status(latest)
age = latest.age
episode_id = latest.identifier
if behind:
episode_id += ' +%s' % behind
console(new_ep + formatting[1:] % (series_name, episode_id, age, status))
if behind >= 3:
console(' ! Latest download is %d episodes behind, this may require '
'manual intervention' % behind)
console('-' * 79)
console(' > = new episode ')
console(' Use `flexget series show NAME` to get detailed information')
finally:
session.close()
def begin(manager, options):
series_name = options.series_name
ep_id = options.episode_id
session = Session()
try:
series = session.query(Series).filter(Series.name == series_name).first()
if not series:
console('Series not yet in database, adding `%s`' % series_name)
series = Series()
series.name = series_name
session.add(series)
try:
set_series_begin(series, ep_id)
except ValueError as e:
console(e)
else:
console('Episodes for `%s` will be accepted starting with `%s`' % (series.name, ep_id))
session.commit()
finally:
session.close()
manager.config_changed()
def forget(manager, options):
name = options.series_name
if options.episode_id:
# remove by id
identifier = options.episode_id
try:
forget_series_episode(name, identifier)
console('Removed episode `%s` from series `%s`.' % (identifier, name.capitalize()))
except ValueError:
# Try upper casing identifier if we fail at first
try:
forget_series_episode(name, identifier.upper())
console('Removed episode `%s` from series `%s`.' % (identifier, name.capitalize()))
except ValueError as e:
console(e.message)
else:
# remove whole series
try:
forget_series(name)
console('Removed series `%s` from database.' % name.capitalize())
except ValueError as e:
console(e.message)
manager.config_changed()
def get_latest_status(episode):
"""
:param episode: Instance of Episode
:return: Status string for given episode
"""
status = ''
for release in sorted(episode.releases, key=lambda r: r.quality):
if not release.downloaded:
continue
status += release.quality.name
if release.proper_count > 0:
status += '-proper'
if release.proper_count > 1:
status += str(release.proper_count)
status += ', '
return status.rstrip(', ') if status else None
def display_details(name):
"""Display detailed series information, ie. series show NAME"""
from flexget.manager import Session
with contextlib.closing(Session()) as session:
name = normalize_series_name(name)
# Sort by length of name, so that partial matches always show shortest matching title
matches = (session.query(Series).filter(Series._name_normalized.contains(name)).
order_by(func.char_length(Series.name)).all())
if not matches:
console('ERROR: Unknown series `%s`' % name)
return
# Pick the best matching series
series = matches[0]
console('Showing results for `%s`.' % series.name)
if len(matches) > 1:
console('WARNING: Multiple series match to `%s`.' % name)
console('Be more specific to see the results of other matches:')
for s in matches[1:]:
console(' - %s' % s.name)
console(' %-63s%-15s' % ('Identifier, Title', 'Quality'))
console('-' * 79)
# Query episodes in sane order instead of iterating from series.episodes
episodes = session.query(Episode).filter(Episode.series_id == series.id)
if series.identified_by == 'sequence':
episodes = episodes.order_by(Episode.number).all()
elif series.identified_by == 'ep':
episodes = episodes.order_by(Episode.season, Episode.number).all()
else:
episodes = episodes.order_by(Episode.identifier).all()
for episode in episodes:
if episode.identifier is None:
console(' None <--- Broken!')
else:
console(' %s (%s) - %s' % (episode.identifier, episode.identified_by or 'N/A', episode.age))
for release in episode.releases:
status = release.quality.name
title = release.title
if len(title) > 55:
title = title[:55] + '...'
if release.proper_count > 0:
status += '-proper'
if release.proper_count > 1:
status += str(release.proper_count)
if release.downloaded:
console(' * %-60s%-15s' % (title, status))
else:
console(' %-60s%-15s' % (title, status))
console('-' * 79)
console(' * = downloaded')
if not series.identified_by:
console('')
console(' Series plugin is still learning which episode numbering mode is ')
console(' correct for this series (identified_by: auto).')
console(' A few duplicate downloads can happen with different numbering schemes')
console(' during this time.')
else:
console(' Series uses `%s` mode to identify episode numbering (identified_by).' % series.identified_by)
console(' See option `identified_by` for more information.')
if series.begin:
console(' Begin episode for this series set to `%s`.' % series.begin.identifier)
@event('options.register')
def register_parser_arguments():
# Register the command
parser = options.register_command('series', do_cli, help='view and manipulate the series plugin database')
# Parent parser for subcommands that need a series name
series_parser = argparse.ArgumentParser(add_help=False)
series_parser.add_argument('series_name', help='the name of the series', metavar='<series name>')
# Set up our subparsers
subparsers = parser.add_subparsers(title='actions', metavar='<action>', dest='series_action')
list_parser = subparsers.add_parser('list', help='list a summary of the different series being tracked')
list_parser.add_argument('configured', nargs='?', choices=['configured', 'unconfigured', 'all'],
default='configured',
help='limit list to series that are currently in the config or not (default: %(default)s)')
list_parser.add_argument('--premieres', action='store_true',
help='limit list to series which only have episode 1 (and maybe also 2) downloaded')
list_parser.add_argument('--new', nargs='?', type=int, metavar='DAYS', const=7,
help='limit list to series with a release seen in last %(const)s days. number of days can '
'be overridden with %(metavar)s')
list_parser.add_argument('--stale', nargs='?', type=int, metavar='DAYS', const=365,
help='limit list to series which have not seen a release in %(const)s days. number of '
'days can be overridden with %(metavar)s')
show_parser = subparsers.add_parser('show', parents=[series_parser],
help='show the releases FlexGet has seen for a given series ')
begin_parser = subparsers.add_parser('begin', parents=[series_parser],
help='set the episode to start getting a series from')
begin_parser.add_argument('episode_id', metavar='<episode ID>',
help='episode ID to start getting the series from (e.g. S02E01, 2013-12-11, or 9, '
'depending on how the series is numbered)')
forget_parser = subparsers.add_parser('forget', parents=[series_parser],
help='removes episodes or whole series from the series database')
forget_parser.add_argument('episode_id', nargs='?', default=None, help='episode ID to forget (optional)')
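# For reference, the parsers registered above handle invocations such as the
# following (illustrative examples only; series names and IDs are made up):
#
#     flexget series list --new 14
#     flexget series show "Some Show"
#     flexget series begin "Some Show" S02E01
#     flexget series forget "Some Show" S01E05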
|
|
#!/usr/bin/python
import pyglet
import math
import sys
import random
from collections import namedtuple
window = pyglet.window.Window(1024, 768)  # or: pyglet.window.Window(fullscreen=True)
pyglet.resource.path.append('./images')
pyglet.resource.reindex()
key = pyglet.window.key
# Gravity constant - no relation to normal G
G = 15000
# Particles shouldn't go off the edge of the screen?
# OR: could attract them all towards the center
MAX_DIST = int(0.9 * min(window.height, window.width) / 2)
CENTER = (window.width / 2, window.height / 2)
center = namedtuple("Point", ['x','y'])(*CENTER)
print("MAX_DIST:", MAX_DIST)
print("CENTER:", CENTER)
print(center)
def center_anchor(img):
"""Center an image's 'anchor' in the middle of the image
(not top left or whatever it normally is"""
img.anchor_x = img.width // 2
img.anchor_y = img.height // 2
# Assume images are roughly circular: (width + height) / 2 is the average
# diameter, halving it gives the radius (/4 overall), and the sprite scale
# factor of 0.5 used below halves it again, giving /8 overall.
img.radius = int((img.width + img.height) / 8)
images = []
for i in range(1,4):
image = pyglet.resource.image('asteroid%d.png' % i)
center_anchor(image)
images.append(image)
class Particle(pyglet.sprite.Sprite):
def __init__(self, image, x=0, y=0, vx=0, vy=0, mass=1, batch=None):
super(Particle, self).__init__(image, x, y, batch=batch)
center_anchor(self.image)
self.x = x
self.y = y
self.vx = vx
self.vy = vy
self.mass = mass
self.scale = 0.5
self.opacity = random.randint(100,255)
def __str__(self):
return "<Particle, x=%d, y=%d, vx=%d, vy=%d>" % (self.x, self.y, self.vx, self.vy)
def dist_vec_to(self, target=None, x=0, y=0):
if target:
dx = self.x - target.x
dy = self.y - target.y
else:
dx = self.x - x
dy = self.y - y
sqr_distance = dx**2 + dy**2
distance = math.sqrt(sqr_distance)
# try and reduce weird non-linear behaviour
# when particles are close
if distance < self.image.radius:
distance = self.image.radius
angle = math.acos(float(dx) / distance)
if dy < 0:
angle = 2*math.pi - angle
return (distance, angle)
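# Worked example (a sketch): for a particle at (200, 100) and a target at
# (100, 100), dx = 100 and dy = 0, so dist_vec_to(target) returns (100.0, 0.0),
# assuming the separation exceeds the clamped image radius. The angle is the
# direction of the vector pointing from the target towards this particle.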
def force_from(self, target):
distance, angle = self.dist_vec_to(target)
if distance:
return ((G * target.mass) / (distance**2), angle)
else:
return (0, 0)
def update_vel(self, dt, particles):
# friction
self.vx *= 0.98
self.vy *= 0.98
ax = 0
ay = 0
for particle in particles:
if particle is self:
continue
force = self.force_from(particle)
if force[0] != 0:
force_x = force[0] * math.cos(force[1]) * dt
force_y = force[0] * math.sin(force[1]) * dt
ax += force_x
ay += force_y
# BUG: Should be divided by our mass... (?)
self.vx += ax
self.vy += ay
# enforce max speed
if abs(self.vx) > 50 or abs(self.vy) > 50: # optimisation
sqr_speed = self.vx ** 2 + self.vy ** 2
if sqr_speed > 50 ** 2: # speed = pixels per second
speed = math.sqrt(sqr_speed)
print(speed)
# scale both components by 50/speed so the overall speed is capped at 50
ratio = 50.0 / speed
self.vx = self.vx * ratio
self.vy = self.vy * ratio
def update(self, dt):
#self.enforce_max_speed()
self.x += self.vx * dt
self.y += self.vy * dt
# use constrain and/or constrain_center for different effects
self.constrain()
#self.constrain_center(dt)
# one jiggle per second
if random.random() < 0.0001: # 1 / (len(particles) * 60):
self.jiggle()
def enforce_max_speed(self):
"""This is a bit of a hack to try and work around a weird bug
with (I think) fast moving particles on the wall of the border.
UPDATE: I can see particles being speed checked doing 1500 px/sec
at high densities, so I'm pretty sure this is right :)"""
speed = math.sqrt(self.vx ** 2 + self.vy ** 2)
if speed > 50: # pixels per second?
print(speed, self.vx, self.vy, self.x, self.y)
ratio = 50 / speed
self.vx = self.vx * ratio
self.vy = self.vy * ratio
new_speed = math.sqrt(self.vx ** 2 + self.vy ** 2)
print(new_speed, self.vx, self.vy)
print()
def constrain(self):
"""Keep within a certain distance of the center of the screen.
If too far away, then move back towards the center."""
correction_vector = self.dist_vec_to(center)
distance = correction_vector[0]
if distance > MAX_DIST:
correction_vector = self.dist_vec_to(center)
correct_x = (distance - MAX_DIST) * math.cos(correction_vector[1])
correct_y = (distance - MAX_DIST) * math.sin(correction_vector[1])
self.x -= correct_x
self.y -= correct_y
self.vx = 0; self.vy = 0
self.opacity = 100 + 155 * (MAX_DIST - min(distance, MAX_DIST)) / MAX_DIST
def constrain_center(self, dt):
"""Apply a slight force towards the center of the screen,
growing larger the further the distance."""
center_d = self.dist_vec_to(target=center)
# reduce force by some amount
force = (-center_d[0] / 2, center_d[1])
force_x = force[0] * math.cos(force[1]) * dt
force_y = force[0] * math.sin(force[1]) * dt
ax = force_x
ay = force_y
# BUG: Should be divided by our mass... (?)
self.vx += ax
self.vy += ay
def jiggle(self):
"""Brownian motion, to try and find stable configurations."""
#self.x += random.choice((-2, -1, -1, 0, 1, 1, 2))
#self.y += random.choice((-2, -1, -1, 0, 1, 1, 2))
self.x += random.choice((-1, 0, 1))
self.y += random.choice((-1, 0, 1))
@window.event
def on_draw():
window.clear()
#label.draw()
particles_batch.draw()
def update(dt):
for particle in particles:
particle.update_vel(dt, particles)
for particle in particles:
particle.update(0.05)
# iterate over a copy so removals don't skip elements mid-iteration
for particle in particles[:]:
particle.collide_checked = False
if particle.mass == 0:
particles.remove(particle)
def make_particle(batch):
x = int(random.random() * window.width)
y = int(random.random() * window.height)
vx = random.random() * 6 - 3
vy = random.random() * 6 - 3
particle = Particle(images[0], x, y, vx, vy, mass=10, batch=batch)
return particle
def add_particle():
x = random.randint(center.x - 100, center.x + 100)
y = random.randint(center.y - 100, center.y + 100)
new_particle = Particle(images[0], x, y, 0, 0, mass=10, batch=particles_batch)
particles.append(new_particle)
def del_particle():
if particles:
pick = random.choice(particles)
pick.delete() # Sprite delete from batch
particles.remove(pick)
def distance(point1, point2):
return math.sqrt( (point1.x - point2.x) ** 2 +
(point1.y - point2.y) ** 2)
def del_wall_particle():
"""delete a 'wall' particle, ie. one close to MAX_DIST from the center"""
wall_particles = [p for p in particles
if distance(p, center) >= MAX_DIST - 2]
pick = random.choice(wall_particles)
pick.delete()
particles.remove(pick)
def stop_wall_particles():
"""zero out the motion of wall particles. Hack to fix a weird crowding bug"""
wall_particles = [p for p in particles
if distance(p, center) >= MAX_DIST - 2]
for p in wall_particles:
p.vx = 0; p.vy = 0
p.ax = 0; p.ay = 0
particles_batch = pyglet.graphics.Batch()
particles = []
for i in range(1):
particles.append(make_particle(particles_batch))
#for particle in particles:
# print particle
@window.event
def on_key_press(symbol, modifiers):
if symbol == key.SPACE:
if modifiers & key.MOD_SHIFT:
for i in range(10):
add_particle()
else:
add_particle()
elif symbol == key.MINUS:
del_particle()
elif symbol == key.UNDERSCORE:
for i in range(10):
del_particle()
elif symbol == key.BACKSPACE:
del_wall_particle()
elif symbol == key.S:
stop_wall_particles()
elif symbol == key.ESCAPE:
window.close()
@window.event
def on_mouse_press(x, y, button, modifiers):
print((x, y, button))
# Call update 60 times a second
pyglet.clock.schedule_interval(update, 1/60.0)
pyglet.app.run()
|
|
import copy
import re
import datetime
from taggit.forms import TagWidget
from modelcluster.forms import ClusterForm, ClusterFormMetaclass
from django.template.loader import render_to_string
from django.template.defaultfilters import addslashes
from django.utils.safestring import mark_safe
from django import forms
from django.db import models
from django.forms.models import fields_for_model
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ImproperlyConfigured, ValidationError
from django.core.urlresolvers import reverse
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.util import camelcase_to_underscore
from wagtail.wagtailcore.fields import RichTextArea
class FriendlyDateInput(forms.DateInput):
"""
A custom DateInput widget that formats dates as "05 Oct 2013"
and adds class="friendly_date" to be picked up by jquery datepicker.
"""
def __init__(self, attrs=None):
default_attrs = {'class': 'friendly_date'}
if attrs:
default_attrs.update(attrs)
super(FriendlyDateInput, self).__init__(attrs=default_attrs, format='%d %b %Y')
class FriendlyTimeInput(forms.TimeInput):
"""
A custom TimeInput widget that formats dates as "5.30pm"
and adds class="friendly_time" to be picked up by jquery timepicker.
"""
def __init__(self, attrs=None):
default_attrs = {'class': 'friendly_time'}
if attrs:
default_attrs.update(attrs)
super(FriendlyTimeInput, self).__init__(attrs=default_attrs, format='%I.%M%p')
class FriendlyTimeField(forms.CharField):
def to_python(self, time_string):
# Check if the string is blank
if not time_string:
return None
# Look for time in the string
expr = re.compile("^(?P<hour>\d+)(?:(?:.|:)(?P<minute>\d+))?(?P<am_pm>am|pm)")
match = expr.match(time_string.lower())
if match:
# Pull out values from string
hour_string, minute_string, am_pm = match.groups()
# Convert hours and minutes to integers
hour = int(hour_string)
if minute_string:
minute = int(minute_string)
else:
minute = 0
# Create python time
if am_pm == "pm" and hour < 12:
hour += 12
if am_pm == "am" and hour >= 12:
hour -= 12
return datetime.time(hour=hour, minute=minute)
else:
raise ValidationError(_("Please type a valid time"))
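# Worked examples for the parser above (a sketch, not executed by the module):
#
#     FriendlyTimeField().to_python('5.30pm')  # -> datetime.time(17, 30)
#     FriendlyTimeField().to_python('11am')    # -> datetime.time(11, 0)
#     FriendlyTimeField().to_python('')        # -> None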
class LocalizedDateInput(forms.DateInput):
"""
A custom DateInput widget that formats localized dates
and adds class="friendly_date" to be picked up by jquery datepicker.
"""
def __init__(self, attrs=None):
default_attrs = {'class': 'localized_date', 'localize':True}
if attrs:
default_attrs.update(attrs)
super(LocalizedDateInput, self).__init__(attrs=default_attrs)
class LocalizedTimeInput(forms.TimeInput):
"""
A custom TimeInput widget that formats dates as "5.30pm"
and adds class="friendly_time" to be picked up by jquery timepicker.
"""
def __init__(self, attrs=None):
default_attrs = {'class': 'localized_time'}
if attrs:
default_attrs.update(attrs)
# Just use 24-hour format
super(LocalizedTimeInput, self).__init__(attrs=default_attrs, format='%H:%M')
class LocalizedTimeField(forms.CharField):
def to_python(self, time_string):
# Check if the string is blank
if not time_string:
return None
# Look for time in the string
expr = re.compile("^(?P<hour>\d+)(?:(?:.|:)(?P<minute>\d+))?")
match = expr.match(time_string.lower())
if match:
# Pull out values from string
hour_string, minute_string= match.groups()
# Convert hours and minutes to integers
hour = int(hour_string)
if minute_string:
minute = int(minute_string)
else:
minute = 0
if hour>=24 or hour < 0 or minute >=60 or minute < 0:
raise ValidationError(_("Please type a valid time"))
return datetime.time(hour=hour, minute=minute)
else:
raise ValidationError(_("Please type a valid time") )
if hasattr(settings, 'USE_L10N') and settings.USE_L10N==True:
FORM_FIELD_OVERRIDES = {
models.DateField: {'widget': LocalizedDateInput},
models.TimeField: {'widget': LocalizedTimeInput, 'form_class': LocalizedTimeField},
}
else: # Fall back to friendly date/time
FORM_FIELD_OVERRIDES = {
models.DateField: {'widget': FriendlyDateInput},
models.TimeField: {'widget': FriendlyTimeInput, 'form_class': FriendlyTimeField},
}
WIDGET_JS = {
FriendlyDateInput: (lambda id: "initFriendlyDateChooser(fixPrefix('%s'));" % id),
FriendlyTimeInput: (lambda id: "initFriendlyTimeChooser(fixPrefix('%s'));" % id),
LocalizedDateInput: (lambda id: "initLocalizedDateChooser(fixPrefix('%s'));" % id),
LocalizedTimeInput: (lambda id: "initLocalizedTimeChooser(fixPrefix('%s'));" % id),
RichTextArea: (lambda id: "makeRichTextEditable(fixPrefix('%s'));" % id),
TagWidget: (
lambda id: "initTagField(fixPrefix('%s'), '%s');" % (
id, addslashes(reverse('wagtailadmin_tag_autocomplete'))
)
),
}
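# Each WIDGET_JS entry maps a widget class to a callable that returns the JS
# snippet used to initialise that widget once its HTML is in the DOM. A minimal
# sketch (the field id below is hypothetical):
#
#     js_func = WIDGET_JS[FriendlyDateInput]
#     js_func('id_go_live_at')
#     # -> "initFriendlyDateChooser(fixPrefix('id_go_live_at'));"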
# Callback to allow us to override the default form fields provided for each model field.
def formfield_for_dbfield(db_field, **kwargs):
# snarfed from django/contrib/admin/options.py
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in FORM_FIELD_OVERRIDES:
kwargs = dict(copy.deepcopy(FORM_FIELD_OVERRIDES[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
class WagtailAdminModelFormMetaclass(ClusterFormMetaclass):
# Override the behaviour of the regular ModelForm metaclass -
# which handles the translation of model fields to form fields -
# to use our own formfield_for_dbfield function to do that translation.
# This is done by sneaking a formfield_callback property into the class
# being defined (unless the class already provides a formfield_callback
# of its own).
# while we're at it, we'll also set extra_form_count to 0, as we're creating
# extra forms in JS
extra_form_count = 0
def __new__(cls, name, bases, attrs):
if 'formfield_callback' not in attrs or attrs['formfield_callback'] is None:
attrs['formfield_callback'] = formfield_for_dbfield
new_class = super(WagtailAdminModelFormMetaclass, cls).__new__(cls, name, bases, attrs)
return new_class
WagtailAdminModelForm = WagtailAdminModelFormMetaclass('WagtailAdminModelForm', (ClusterForm,), {})
# Now, any model forms built off WagtailAdminModelForm instead of ModelForm should pick up
# the nice form fields defined in FORM_FIELD_OVERRIDES.
def get_form_for_model(
model,
fields=None, exclude=None, formsets=None, exclude_formsets=None, widgets=None
):
# django's modelform_factory with a bit of custom behaviour
# (dealing with Treebeard's tree-related fields that really should have
# been editable=False)
attrs = {'model': model}
if fields is not None:
attrs['fields'] = fields
if exclude is not None:
attrs['exclude'] = exclude
if issubclass(model, Page):
attrs['exclude'] = attrs.get('exclude', []) + ['content_type', 'path', 'depth', 'numchild']
if widgets is not None:
attrs['widgets'] = widgets
if formsets is not None:
attrs['formsets'] = formsets
if exclude_formsets is not None:
attrs['exclude_formsets'] = exclude_formsets
# Give this new form class a reasonable name.
class_name = model.__name__ + str('Form')
form_class_attrs = {
'Meta': type('Meta', (object,), attrs)
}
return WagtailAdminModelFormMetaclass(class_name, (WagtailAdminModelForm,), form_class_attrs)
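# A minimal sketch of how get_form_for_model is meant to be used (BlogPage is a
# hypothetical Page subclass; the tree-related fields are excluded automatically
# as above):
#
#     BlogPageForm = get_form_for_model(BlogPage, fields=['title', 'slug'])
#     form = BlogPageForm(instance=some_blog_page)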
def extract_panel_definitions_from_model_class(model, exclude=None):
if hasattr(model, 'panels'):
return model.panels
panels = []
_exclude = []
if exclude:
_exclude.extend(exclude)
if issubclass(model, Page):
_exclude.extend(['content_type', 'path', 'depth', 'numchild'])
fields = fields_for_model(model, exclude=_exclude, formfield_callback=formfield_for_dbfield)
for field_name, field in fields.items():
try:
panel_class = field.widget.get_panel()
except AttributeError:
panel_class = FieldPanel
panel = panel_class(field_name)
panels.append(panel)
return panels
class EditHandler(object):
"""
Abstract class providing sensible default behaviours for objects implementing
the EditHandler API
"""
# return list of widget overrides that this EditHandler wants to be in place
# on the form it receives
@classmethod
def widget_overrides(cls):
return {}
# return list of formset names that this EditHandler requires to be present
# as children of the ClusterForm
@classmethod
def required_formsets(cls):
return []
# the top-level edit handler is responsible for providing a form class that can produce forms
# acceptable to the edit handler
_form_class = None
@classmethod
def get_form_class(cls, model):
if cls._form_class is None:
cls._form_class = get_form_for_model(
model,
formsets=cls.required_formsets(), widgets=cls.widget_overrides())
return cls._form_class
def __init__(self, instance=None, form=None):
if not instance:
raise ValueError("EditHandler did not receive an instance object")
self.instance = instance
if not form:
raise ValueError("EditHandler did not receive a form object")
self.form = form
# Heading / help text to display to the user
heading = ""
help_text = ""
def object_classnames(self):
"""
Additional classnames to add to the <li class="object"> when rendering this
within an ObjectList
"""
return ""
def field_classnames(self):
"""
Additional classnames to add to the <li> when rendering this within a
<ul class="fields">
"""
return ""
def field_type(self):
"""
The kind of field it is, e.g. boolean_field. Useful for better semantic markup of the field display based on its type
"""
return ""
def render_as_object(self):
"""
Render this object as it should appear within an ObjectList. Should not
include the <h2> heading or help text - ObjectList will supply those
"""
# by default, assume that the subclass provides a catch-all render() method
return self.render()
def render_as_field(self):
"""
Render this object as it should appear within a <ul class="fields"> list item
"""
# by default, assume that the subclass provides a catch-all render() method
return self.render()
def render_js(self):
"""
Render a snippet of Javascript code to be executed when this object's rendered
HTML is inserted into the DOM. (This won't necessarily happen on page load...)
"""
return ""
def rendered_fields(self):
"""
return a list of the fields of the passed form which are rendered by this
EditHandler.
"""
return []
def render_missing_fields(self):
"""
Helper function: render all of the fields of the form that are not accounted for
in rendered_fields
"""
rendered_fields = self.rendered_fields()
missing_fields_html = [
unicode(self.form[field_name])
for field_name in self.form.fields
if field_name not in rendered_fields
]
return mark_safe(u''.join(missing_fields_html))
def render_form_content(self):
"""
Render this as an 'object', along with any unaccounted-for fields to make this
a valid submittable form
"""
return mark_safe(self.render_as_object() + self.render_missing_fields())
class BaseCompositeEditHandler(EditHandler):
"""
Abstract class for EditHandlers that manage a set of sub-EditHandlers.
Concrete subclasses must attach a 'children' property
"""
_widget_overrides = None
def object_classnames(self):
try:
return "multi-field " + self.classname
except (AttributeError, TypeError):
return "multi-field"
@classmethod
def widget_overrides(cls):
if cls._widget_overrides is None:
# build a collated version of all its children's widget lists
widgets = {}
for handler_class in cls.children:
widgets.update(handler_class.widget_overrides())
cls._widget_overrides = widgets
return cls._widget_overrides
_required_formsets = None
@classmethod
def required_formsets(cls):
if cls._required_formsets is None:
formsets = []
for handler_class in cls.children:
formsets.extend(handler_class.required_formsets())
cls._required_formsets = formsets
return cls._required_formsets
def __init__(self, instance=None, form=None):
super(BaseCompositeEditHandler, self).__init__(instance=instance, form=form)
self.children = [
handler_class(instance=self.instance, form=self.form)
for handler_class in self.__class__.children
]
def render(self):
return mark_safe(render_to_string(self.template, {
'self': self
}))
def render_js(self):
return mark_safe(u'\n'.join([handler.render_js() for handler in self.children]))
def rendered_fields(self):
result = []
for handler in self.children:
result += handler.rendered_fields()
return result
class BaseTabbedInterface(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/tabbed_interface.html"
def TabbedInterface(children):
return type('_TabbedInterface', (BaseTabbedInterface,), {'children': children})
class BaseObjectList(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/object_list.html"
def ObjectList(children, heading=""):
return type('_ObjectList', (BaseObjectList,), {
'children': children,
'heading': heading,
})
class BaseMultiFieldPanel(BaseCompositeEditHandler):
template = "wagtailadmin/edit_handlers/multi_field_panel.html"
def MultiFieldPanel(children, heading="", classname=None):
return type('_MultiFieldPanel', (BaseMultiFieldPanel,), {
'children': children,
'heading': heading,
'classname': classname,
})
class BaseFieldPanel(EditHandler):
def __init__(self, instance=None, form=None):
super(BaseFieldPanel, self).__init__(instance=instance, form=form)
self.bound_field = self.form[self.field_name]
self.heading = self.bound_field.label
self.help_text = self.bound_field.help_text
def object_classnames(self):
try:
return "single-field " + self.classname
except (AttributeError, TypeError):
return "single-field"
def field_type(self):
return camelcase_to_underscore(self.bound_field.field.__class__.__name__)
def field_classnames(self):
classname = self.field_type()
if self.bound_field.field.required:
classname += " required"
if self.bound_field.errors:
classname += " error"
return classname
object_template = "wagtailadmin/edit_handlers/field_panel_object.html"
def render_as_object(self):
return mark_safe(render_to_string(self.object_template, {
'self': self,
'field_content': self.render_as_field(show_help_text=False),
}))
def render_js(self):
try:
# see if there's an entry for this widget type in WIDGET_JS
js_func = WIDGET_JS[self.bound_field.field.widget.__class__]
except KeyError:
return ''
return mark_safe(js_func(self.bound_field.id_for_label))
field_template = "wagtailadmin/edit_handlers/field_panel_field.html"
def render_as_field(self, show_help_text=True):
return mark_safe(render_to_string(self.field_template, {
'field': self.bound_field,
'field_type': self.field_type(),
'show_help_text': show_help_text,
}))
def rendered_fields(self):
return [self.field_name]
def FieldPanel(field_name, classname=None):
return type('_FieldPanel', (BaseFieldPanel,), {
'field_name': field_name,
'classname': classname,
})
class BaseRichTextFieldPanel(BaseFieldPanel):
def render_js(self):
return mark_safe("makeRichTextEditable(fixPrefix('%s'));" % self.bound_field.id_for_label)
def RichTextFieldPanel(field_name):
return type('_RichTextFieldPanel', (BaseRichTextFieldPanel,), {
'field_name': field_name,
})
class BaseChooserPanel(BaseFieldPanel):
"""
Abstract superclass for panels that provide a modal interface for choosing (or creating)
a database object such as an image, resulting in an ID that is used to populate
a hidden foreign key input.
Subclasses provide:
* field_template
* object_type_name - something like 'image' which will be used as the var name
for the object instance in the field_template
* js_function_name - a JS function responsible for the modal workflow; this receives
the ID of the hidden field as a parameter, and should ultimately populate that field
with the appropriate object ID. If the function requires any other parameters, the
subclass will need to override render_js instead.
"""
@classmethod
def widget_overrides(cls):
return {cls.field_name: forms.HiddenInput}
def get_chosen_item(self):
try:
return getattr(self.instance, self.field_name)
except ObjectDoesNotExist:
# if the ForeignKey is null=False, Django decides to raise
# a DoesNotExist exception here, rather than returning None
# like every other unpopulated field type. Yay consistency!
return None
def render_as_field(self, show_help_text=True):
instance_obj = self.get_chosen_item()
return mark_safe(render_to_string(self.field_template, {
'field': self.bound_field,
self.object_type_name: instance_obj,
'is_chosen': bool(instance_obj),
'show_help_text': show_help_text,
}))
def render_js(self):
return mark_safe("%s(fixPrefix('%s'));" % (self.js_function_name, self.bound_field.id_for_label))
class BasePageChooserPanel(BaseChooserPanel):
field_template = "wagtailadmin/edit_handlers/page_chooser_panel.html"
object_type_name = "page"
_target_content_type = None
@classmethod
def target_content_type(cls):
if cls._target_content_type is None:
if cls.page_type:
if isinstance(cls.page_type, basestring):
# translate the passed model name into an actual model class
from django.db.models import get_model
try:
app_label, model_name = cls.page_type.split('.')
except ValueError:
raise ImproperlyConfigured("The page_type passed to PageChooserPanel must be of the form 'app_label.model_name'")
page_type = get_model(app_label, model_name)
if page_type is None:
raise ImproperlyConfigured("PageChooserPanel refers to model '%s' that has not been installed" % cls.page_type)
else:
page_type = cls.page_type
cls._target_content_type = ContentType.objects.get_for_model(page_type)
else:
# TODO: infer the content type by introspection on the foreign key
cls._target_content_type = ContentType.objects.get_by_natural_key('wagtailcore', 'page')
return cls._target_content_type
def render_js(self):
page = self.get_chosen_item()
parent = page.get_parent() if page else None
content_type = self.__class__.target_content_type()
return mark_safe("createPageChooser(fixPrefix('%s'), '%s.%s', %s);" % (
self.bound_field.id_for_label,
content_type.app_label,
content_type.model,
(parent.id if parent else 'null'),
))
def PageChooserPanel(field_name, page_type=None):
return type('_PageChooserPanel', (BasePageChooserPanel,), {
'field_name': field_name,
'page_type': page_type,
})
class BaseInlinePanel(EditHandler):
@classmethod
def get_panel_definitions(cls):
# Look for a panels definition in the InlinePanel declaration
if cls.panels is not None:
return cls.panels
# Failing that, get it from the model
else:
return extract_panel_definitions_from_model_class(cls.related.model, exclude=[cls.related.field.name])
_child_edit_handler_class = None
@classmethod
def get_child_edit_handler_class(cls):
if cls._child_edit_handler_class is None:
panels = cls.get_panel_definitions()
cls._child_edit_handler_class = MultiFieldPanel(panels, heading=cls.heading)
return cls._child_edit_handler_class
@classmethod
def required_formsets(cls):
return [cls.relation_name]
@classmethod
def widget_overrides(cls):
overrides = cls.get_child_edit_handler_class().widget_overrides()
if overrides:
return {cls.relation_name: overrides}
else:
return {}
def __init__(self, instance=None, form=None):
super(BaseInlinePanel, self).__init__(instance=instance, form=form)
self.formset = form.formsets[self.__class__.relation_name]
child_edit_handler_class = self.__class__.get_child_edit_handler_class()
self.children = []
for subform in self.formset.forms:
# override the DELETE field to have a hidden input
subform.fields['DELETE'].widget = forms.HiddenInput()
# ditto for the ORDER field, if present
if self.formset.can_order:
subform.fields['ORDER'].widget = forms.HiddenInput()
self.children.append(
child_edit_handler_class(instance=subform.instance, form=subform)
)
empty_form = self.formset.empty_form
empty_form.fields['DELETE'].widget = forms.HiddenInput()
if self.formset.can_order:
empty_form.fields['ORDER'].widget = forms.HiddenInput()
self.empty_child = child_edit_handler_class(instance=empty_form.instance, form=empty_form)
template = "wagtailadmin/edit_handlers/inline_panel.html"
def render(self):
return mark_safe(render_to_string(self.template, {
'self': self,
'can_order': self.formset.can_order,
}))
js_template = "wagtailadmin/edit_handlers/inline_panel.js"
def render_js(self):
return mark_safe(render_to_string(self.js_template, {
'self': self,
'can_order': self.formset.can_order,
}))
def InlinePanel(base_model, relation_name, panels=None, label='', help_text=''):
rel = getattr(base_model, relation_name).related
return type('_InlinePanel', (BaseInlinePanel,), {
'relation_name': relation_name,
'related': rel,
'panels': panels,
'heading': label,
'help_text': help_text, # TODO: can we pick this out of the foreign key definition as an alternative? (with a bit of help from the inlineformset object, as we do for label/heading)
})
# Now that we've defined EditHandlers, we can set up wagtailcore.Page to have some.
Page.content_panels = [
FieldPanel('title', classname="full title"),
]
Page.promote_panels = [
MultiFieldPanel([
FieldPanel('slug'),
FieldPanel('seo_title'),
FieldPanel('show_in_menus'),
FieldPanel('search_description'),
], ugettext_lazy('Common page configuration')),
]
|
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import epdb
import errno
import fcntl
import logging
import os
import random
import signal
import time
from jobmaster.util import close_fds
log = logging.getLogger(__name__)
class LockError(RuntimeError):
pass
class LockTimeoutError(LockError):
pass
class Lockable(object):
_lockFile = None
_lockLevel = fcntl.LOCK_UN
_lockPath = None
@staticmethod
def _sleep():
time.sleep(random.uniform(0.1, 0.5))
def _lock(self, mode=fcntl.LOCK_SH):
assert self._lockPath
# Short-circuit if we already have the lock
if mode == self._lockLevel:
return True
if self._lockFile:
lockFile = self._lockFile
else:
lockFile = self._lockFile = open(self._lockPath, 'w')
try:
try:
fcntl.flock(self._lockFile.fileno(), mode | fcntl.LOCK_NB)
except IOError, err:
if err.errno in (errno.EACCES, errno.EAGAIN):
# Already locked, retry later.
raise LockError('Could not acquire lock')
raise
else:
self._lockFile = lockFile
self._lockLevel = mode
finally:
if mode == fcntl.LOCK_UN:
# If we don't have any lock at the moment then close the file
# so that if another process deletes the lockfile we don't end
# up locking the now-nameless file. The other process *must*
# hold an exclusive lock to delete the lockfile, so this
# assures lock safety.
self._lockFile.close()
self._lockFile = None
return True
def _lockWait(self, mode=fcntl.LOCK_SH, timeout=600.0, breakIf=None):
logged = False
runUntil = time.time() + timeout
while True:
# First, try to lock.
try:
return self._lock(mode)
except LockError:
pass
if breakIf and breakIf():
return False
if time.time() > runUntil:
raise LockTimeoutError('Timed out waiting for lock')
if not logged:
logged = True
log.debug("Waiting for lock")
self._sleep()
def _deleteLock(self):
self._lock(fcntl.LOCK_EX)
os.unlink(self._lockPath)
self._lock(fcntl.LOCK_UN)
def _close(self):
if self._lockFile:
self._lockFile.close()
self._lockFile = None
self._lockLevel = fcntl.LOCK_UN
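# A minimal sketch of how Lockable is intended to be used (the subclass and path
# below are hypothetical; real consumers live elsewhere in jobmaster):
#
#     class ScratchDir(Lockable):
#         def __init__(self, path):
#             self._lockPath = path + '.lock'
#
#     scratch = ScratchDir('/var/tmp/scratch')
#     scratch._lockWait(fcntl.LOCK_EX, timeout=30)  # block until exclusive lock
#     try:
#         pass  # ... work on the locked resource ...
#     finally:
#         scratch._lock(fcntl.LOCK_UN)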
class Pipe(object):
def __init__(self):
readFD, writeFD = os.pipe()
self.reader = os.fdopen(readFD, 'rb')
self.writer = os.fdopen(writeFD, 'wb')
def closeReader(self):
self.reader.close()
def closeWriter(self):
self.writer.close()
def close(self):
self.closeReader()
self.closeWriter()
def read(self):
return self.reader.read()
def write(self, data):
self.writer.write(data)
class Subprocess(object):
# Class settings
procName = "subprocess"
setsid = False
closefds = False
# Runtime variables
pid = None
exitStatus = exitPid = None
@property
def exitCode(self):
if self.exitStatus is None:
return -2
elif self.exitStatus < 0:
return self.exitStatus
elif os.WIFEXITED(self.exitStatus):
return os.WEXITSTATUS(self.exitStatus)
else:
return -2
def start(self):
self.exitStatus = self.exitPid = None
self.pid = os.fork()
if not self.pid:
#pylint: disable-msg=W0702,W0212
try:
try:
if self.setsid:
os.setsid()
if self.closefds:
self._close_fds()
ret = self.run()
if not isinstance(ret, (int, long)):
ret = bool(ret)
os._exit(ret)
except SystemExit, err:
os._exit(err.code)
except:
log.exception("Unhandled exception in %s:", self.procName)
finally:
os._exit(70)
return self.pid
def run(self):
raise NotImplementedError
def _subproc_wait(self, flags):
if not self.pid:
return False
while True:
try:
pid, status = os.waitpid(self.pid, flags)
except OSError, err:
if err.errno == errno.EINTR:
# Interrupted by signal so wait again.
continue
elif err.errno == errno.ECHILD:
# Process doesn't exist.
log.debug("Lost track of subprocess %d (%s)", self.pid,
self.procName)
self.exitPid, self.pid = self.pid, None
self.exitStatus = -1
return False
else:
raise
else:
if pid:
# Process exists and is no longer running.
log.debug("Reaped subprocess %d (%s) with status %s",
self.pid, self.procName, status)
self.exitPid, self.pid = self.pid, None
self.exitStatus = status
return False
else:
# Process exists and is still running.
return True
def _close_fds(self, keep=()):
# Collect a list of file descriptors we want to keep.
keep_fds = set([0, 1, 2])
for file in keep:
if hasattr(file, 'fileno'):
file = file.fileno()
if isinstance(file, (int, long)):
keep_fds.add(int(file))
else:
raise TypeError("Must pass a sequence of integers or of "
"objects with a fileno() method.")
for handler in logging.getLogger().handlers:
stream = getattr(handler, 'stream', None)
if stream:
keep_fds.add(stream.fileno())
close_fds(keep_fds)
def check(self):
"""
Return C{True} if the subprocess is running.
"""
return self._subproc_wait(os.WNOHANG)
def wait(self):
"""
Wait for the process to exit, then return. Returns the exit code if the
process exited normally, -2 if the process exited abnormally, or -1 if
the process does not exist.
"""
self._subproc_wait(0)
return self.exitCode
def kill(self, signum=signal.SIGTERM, timeout=5):
"""
Kill the subprocess and wait for it to exit.
"""
if not self.pid:
return
try:
os.kill(self.pid, signum)
except OSError, err:
if err.errno != errno.ESRCH:
raise
# Process doesn't exist (or is a zombie)
if timeout:
# If a timeout is given, wait that long for the process to
# terminate, then send a SIGKILL.
start = time.time()
while time.time() - start < timeout:
if not self.check():
break
time.sleep(0.1)
else:
# If it's still going, use SIGKILL and wait indefinitely.
os.kill(self.pid, signal.SIGKILL)
self.wait()
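# A minimal sketch of a Subprocess consumer (hypothetical subclass; run() executes
# in the forked child and its integer return value becomes the child's exit code):
#
#     class Sleeper(Subprocess):
#         procName = "sleeper"
#         def run(self):
#             time.sleep(1)
#             return 0
#
#     proc = Sleeper()
#     proc.start()        # fork; returns the child pid in the parent
#     code = proc.wait()  # reap the child and return its exit code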
def debugHook(signum, sigtb):
port = 8080
try:
log.error("Starting epdb session on port %d", port)
debugger = epdb.Epdb()
debugger._server = epdb.telnetserver.InvertedTelnetServer(('', port))
debugger._server.handle_request()
debugger._port = port
except:
log.exception("epdb session failed to start")
else:
debugger.set_trace(skip=1)
def setDebugHook():
signal.signal(signal.SIGUSR1, debugHook)
|
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import basename, join, splitext, getsize
from os import remove
from json import loads
from shutil import copy
from h5py import File
from gzip import open as gopen
from qiita_client import ArtifactInfo
from qiita_client.util import system_call
from qiita_files.util import open_file
from qiita_files.demux import to_hdf5, to_ascii_file
FILEPATH_TYPE_DICT = {
'SFF': ({'raw_sff'}, set()),
'FASTQ': ({'raw_forward_seqs', 'raw_barcodes'}, {'raw_reverse_seqs'}),
'FASTA': ({'raw_fasta'}, {'raw_qual'}),
'FASTA_Sanger': ({'raw_fasta'}, set()),
'FASTA_preprocessed': ({'preprocessed_fasta'}, set()),
}
MUST_GZ = {
# raw input files: FASTQ, per_sample_FASTQ
'raw_forward_seqs', 'raw_barcodes', 'raw_reverse_seqs', 'raw_fasta',
# preprocessed files: demultiplexed, trimmed
'preprocessed_fastq', 'preprocessed_fasta'}
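# FILEPATH_TYPE_DICT maps an artifact type to a (required, optional) pair of
# filepath-type sets, while MUST_GZ lists the filepath types that should always
# end up gzipped. For example, a 'FASTQ' artifact must provide 'raw_forward_seqs'
# and 'raw_barcodes', may provide 'raw_reverse_seqs', and all three will be
# gzipped if they are not already.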
def _gzip_file(filepath, test=False):
"""gzip the given filepath if needed
Parameters
----------
filepath : string
The filepath to verify or compress
test : boolean
If True do not compress but change the filename, used for unit testing
Returns
-------
str
the gz filepath; unchanged if the file was already compressed or if
compression failed
str
the error message, None if success
"""
error = None
return_fp = filepath
if test:
return_fp = '%s.gz' % filepath
else:
is_gzip = False
try:
with gopen(filepath, 'rb') as f:
f.read(1)
is_gzip = True
except (OSError, IOError):
pass
if not is_gzip:
gz_cmd = 'pigz -p 5 -c {0} > {0}.gz'.format(filepath)
std_out, std_err, return_value = system_call(gz_cmd)
if return_value != 0 and not test:
error = ("Std out: %s\nStd err: %s\n\nCommand run was:\n%s"
% (std_out, std_err, gz_cmd))
else:
# removing non gz file
remove(filepath)
return_fp = '%s.gz' % filepath
return return_fp, error
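# Example usage (a sketch; the path is hypothetical):
#
#     new_fp, error = _gzip_file('/qiita/uploads/seqs.fastq')
#     if error is not None:
#         pass  # report the failure instead of registering the file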
def _validate_multiple(qclient, job_id, prep_info, files, atype, test=False):
"""Validate and fix a new 'SFF', 'FASTQ', 'FASTA' or 'FASTA_Sanger' artifact
Parameters
----------
qclient : qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
prep_info : dict of {str: dict of {str: str}}
The prep information keyed by sample id
files : dict of {str: list of str}
The files to add to the new artifact, keyed by filepath type
atype: str
The type of the artifact
test: boolean, optional
If True this is being called by a test
Returns
-------
bool, list of qiita_client.ArtifactInfo or None, str
Whether the validation succeeded, the resulting artifact information
(None on failure) and the error message (empty string on success)
"""
qclient.update_job_step(job_id, "Step 2: Validating '%s' files" % atype)
req_fp_types, opt_fp_types = FILEPATH_TYPE_DICT[atype]
all_fp_types = req_fp_types | opt_fp_types
# Check if there is any filepath type that is not supported
unsupported_fp_types = set(files) - all_fp_types
if unsupported_fp_types:
error_msg = ("Filepath type(s) %s not supported by artifact "
"type %s. Supported filepath types: %s"
% (', '.join(unsupported_fp_types), atype,
', '.join(sorted(all_fp_types))))
return False, None, error_msg
# Check if the run_prefix column is present in the prep info
offending = {}
types_seen = set()
if 'run_prefix' in prep_info[next(iter(prep_info))]:
# We can potentially have more than one lane in the prep information
# so check that the provided files are prefixed with the values in
# the run_prefix column
run_prefixes = set(v['run_prefix'] for k, v in prep_info.items())
num_prefixes = len(run_prefixes)
# Check those filepath types that are required
for ftype, t_files in files.items():
            # SFF is a special case because we can have multiple files with
            # the same prefix
if num_prefixes != len(t_files) and atype != 'SFF':
offending[ftype] = (
"The number of provided files (%d) doesn't match the "
"number of run prefix values in the prep info (%d): %s"
% (len(t_files), num_prefixes,
', '.join(basename(f) for f in t_files)))
else:
rps = []
fps = []
for fp in t_files:
bn = basename(fp)
found = [rp for rp in run_prefixes if bn.startswith(rp)]
if found:
rps.extend(found)
else:
fps.append(bn)
if fps:
offending[ftype] = (
"The provided files do not match the run prefix "
"values in the prep information: %s" % ', '.join(fps))
else:
rps = run_prefixes - set(rps)
if rps:
offending[ftype] = (
"The following run prefixes in the prep "
"information file do not match any file: %s"
% ', '.join(rps))
types_seen.add(ftype)
else:
# If the run prefix column is not provided, we only allow a single
# lane, so check that we have a single file for each provided
# filepath type
for ftype, t_files in files.items():
if len(t_files) != 1:
offending[ftype] = (
"Only one file per type is allowed. Please provide the "
"column 'run_prefix' if you need more than one file per "
"type: %s" % ', '.join(basename(fp) for fp in t_files))
types_seen.add(ftype)
    # Check that all required filepath types were present
missing = req_fp_types - types_seen
if missing:
error_msg = ("Missing required filepath type(s): %s"
% ', '.join(missing))
return False, None, error_msg
# Check if there was any offending file
if offending:
error_list = ["%s: %s" % (k, v) for k, v in offending.items()]
error_msg = ("Error creating artifact. Offending files:\n%s"
% '\n'.join(error_list))
return False, None, error_msg
# Everything is ok
filepaths = []
for fps_type, fps in files.items():
for fp in fps:
if fps_type in MUST_GZ:
fp, error_msg = _gzip_file(fp, test)
if error_msg is not None:
return False, None, error_msg
filepaths.append((fp, fps_type))
return True, [ArtifactInfo(None, atype, filepaths)], ""
def _validate_per_sample_FASTQ(qclient, job_id, prep_info, files, test=False):
"""Validate and fix a new 'per_sample_FASTQ' artifact
Parameters
----------
qclient : qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
prep_info : dict of {str: dict of {str: str}}
The prep information keyed by sample id
files : dict of {str: list of str}
The files to add to the new artifact, keyed by filepath type
    test: boolean, optional
If True this is being called by a test
Returns
-------
    bool
        Whether the validation was successful
    list of qiita_client.ArtifactInfo or None
        The artifact information, if successful
    str
        The error message, if not successful
"""
qclient.update_job_step(
job_id, "Step 2: Validating 'per_sample_FASTQ' files")
samples = list(prep_info.keys())
samples_count = len(samples)
# Check if there is any filepath type that is not supported
unsupported_fp_types = set(files) - {'raw_forward_seqs',
'raw_reverse_seqs',
'preprocessed_fastq'}
if unsupported_fp_types:
error_msg = ("Filepath type(s) %s not supported by artifact "
"type per_sample_FASTQ. Supported filepath types: "
"raw_forward_seqs, raw_reverse_seqs, preprocessed_fastq"
% ', '.join(unsupported_fp_types))
return False, None, error_msg
if 'raw_forward_seqs' in files:
if 'preprocessed_fastq' in files:
error_msg = ("If raw_forward_seqs is provided, preprocessed_fastq "
"should not be provided")
return False, None, error_msg
read_files = files['raw_forward_seqs']
read_files_count = len(read_files)
counts_match = read_files_count == samples_count
elif 'preprocessed_fastq' in files:
if 'raw_reverse_seqs' in files:
error_msg = ("If preprocessed_fastq is provided, raw_reverse_seqs "
"should not be provided")
return False, None, error_msg
read_files = files['preprocessed_fastq']
read_files_count = len(read_files)
# In the preprocessed_fastq case, we either have 1 file per sample
# or 4 files per sample
counts_match = ((read_files_count == samples_count) or
(read_files_count == 4 * samples_count))
else:
error_msg = ("Missing required filepath type: raw_forward_seqs or "
"preprocessed_fastq")
return False, None, error_msg
    # Make sure that we have the same number of files as samples
if 'raw_reverse_seqs' in files:
rev_count = len(files['raw_reverse_seqs'])
counts_match = counts_match and (rev_count == samples_count)
else:
rev_count = 0
if not counts_match:
error_msg = ("The number of provided files doesn't match the "
"number of samples (%d): %d raw_forward_seqs, "
"%d raw_reverse_seqs (optional, 0 is ok)"
% (samples_count, read_files_count, rev_count))
return False, None, error_msg
def _check_files(run_prefixes, read_files, rev_count, files):
# Check that the provided files match the run prefixes
fwd_fail = [basename(fp) for fp in read_files
if not basename(fp).startswith(tuple(run_prefixes))]
if rev_count > 0:
rev_fail = [basename(fp) for fp in files['raw_reverse_seqs']
if not basename(fp).startswith(tuple(run_prefixes))]
else:
rev_fail = []
return fwd_fail, rev_fail
    # first let's check via the sample names
run_prefixes = [sid.split('.', 1)[1] for sid in samples]
fwd_fail, rev_fail = _check_files(run_prefixes, read_files,
rev_count, files)
# if that doesn't work, let's test via run_prefix
run_prefix_present = 'run_prefix' in prep_info[samples[0]]
if (fwd_fail or rev_fail) and run_prefix_present:
run_prefixes = [v['run_prefix'] for k, v in prep_info.items()]
if samples_count != len(set(run_prefixes)):
repeated = ["%s (%d)" % (p, run_prefixes.count(p))
for p in set(run_prefixes)
if run_prefixes.count(p) > 1]
error_msg = ("The values for the column 'run_prefix' are not "
"unique for each sample. Repeated values: %s"
% ', '.join(repeated))
return False, None, error_msg
fwd_fail, rev_fail = _check_files(run_prefixes, read_files,
rev_count, files)
if fwd_fail or rev_fail:
error_msg = "The provided files are not prefixed by sample id"
if run_prefix_present:
error_msg += (" or do not match the run prefix values in the "
"prep information.")
else:
error_msg += "."
error_msg += (" Offending files:\n raw_forward_seqs: %s\n"
"raw_reverse_seqs: %s" % (', '.join(fwd_fail),
', '.join(rev_fail)))
return False, None, error_msg
filepaths = []
empty_files = []
for fps_type, fps in files.items():
for fp in fps:
try:
fp_size = getsize(fp)
except OSError:
fp_size = 0
            # the empty gzip files we generate are ~62 bytes, so anything at
            # or below 100 bytes is treated as empty
if fp_size <= 100:
empty_files.append(basename(fp))
if fps_type in MUST_GZ:
fp, error_msg = _gzip_file(fp, test)
if error_msg is not None:
return False, None, error_msg
filepaths.append((fp, fps_type))
if empty_files:
error_msg = "Some of the files are empty: %s" % ', '.join(empty_files)
return False, None, error_msg
return True, [ArtifactInfo(None, 'per_sample_FASTQ', filepaths)], ""
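# Illustrative sketch (not part of the original module): for a two-sample prep,
# a valid `files` argument for _validate_per_sample_FASTQ would look like
# (filenames hypothetical):
#
#   {'raw_forward_seqs': ['sample1_R1.fastq.gz', 'sample2_R1.fastq.gz'],
#    'raw_reverse_seqs': ['sample1_R2.fastq.gz', 'sample2_R2.fastq.gz']}
#
# i.e. one forward (and optionally one reverse) file per sample, each prefixed
# either by the sample id without the study prefix or by its run_prefix value.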
def _validate_demux_file(qclient, job_id, prep_info, out_dir, demux_fp,
fastq_fp=None, fasta_fp=None, log_fp=None):
"""Validate and fix a 'demux' file and regenerate fastq and fasta files
Parameters
----------
qclient : qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
prep_info : dict of {str: dict of {str: str}}
The prep information keyed by sample id
out_dir : str
The output directory
demux_fp : str
The demux file path
fastq_fp : str, optional
The original fastq filepath. If demux is correct, it will not be
regenerated
fasta_fp : str, optional
        The original fasta filepath. If demux is correct, it will not be
regenerated
log_fp : str, optional
The original log filepath
Returns
-------
    bool
        Whether the validation was successful
    list of qiita_client.ArtifactInfo or None
        The artifact information, if successful
    str
        The error message, if not successful
"""
pt_sample_ids = set(prep_info)
with open_file(demux_fp) as f:
demux_sample_ids = set(f.keys())
if not pt_sample_ids.issuperset(demux_sample_ids):
# The demux sample ids are different from the ones in the prep template
qclient.update_job_step(job_id, "Step 3: Fixing sample ids")
        # Attempt 1: the user provided the run prefix column - in this case the
# run prefix column holds the sample ids present in the demux file
if 'run_prefix' in prep_info[next(iter(pt_sample_ids))]:
id_map = {v['run_prefix']: k for k, v in prep_info.items()}
if not set(id_map).issuperset(demux_sample_ids):
error_msg = ('The sample ids in the "run_prefix" columns '
'from the prep information do not match the '
'ones in the demux file. Please, correct the '
'column "run_prefix" in the prep information to '
'map the existing sample ids to the prep '
'information sample ids.')
return False, None, error_msg
else:
# Attempt 2: the sample ids in the demux table are the same that
# in the prep template but without the prefix
prefix = next(iter(pt_sample_ids)).split('.', 1)[0]
prefixed = set("%s.%s" % (prefix, s) for s in demux_sample_ids)
if pt_sample_ids.issuperset(prefixed):
id_map = {s: "%s.%s" % (prefix, s) for s in demux_sample_ids}
else:
# There is nothing we can do. The samples in the demux file do
# not match the ones in the prep template and we can't fix it
error_msg = ('The sample ids in the demultiplexed files do '
'not match the ones in the prep information. '
'Please, provide the column "run_prefix" in '
'the prep information to map the existing sample'
' ids to the prep information sample ids.')
return False, None, error_msg
# Fix the sample ids
# Do not modify the original demux file, copy it to a new location
new_demux_fp = join(out_dir, basename(demux_fp))
# this if is important so we don't regenerate the demux file if the
# user uploads fastq or fna
if demux_fp != new_demux_fp:
copy(demux_fp, new_demux_fp)
demux_fp = new_demux_fp
with open_file(demux_fp, 'r+') as f:
for old in f:
f.move(old, id_map[old])
# When we fix, we always generate the FASTQ and FASTA file
# By setting them to None, below will be generated
fastq_fp = None
fasta_fp = None
# If we didn't fix anything, we only generate the files if they don't
    # already exist
name = splitext(basename(demux_fp))[0]
if not fastq_fp:
fastq_fp = join(out_dir, "%s.fastq" % name)
to_ascii_file(demux_fp, fastq_fp, out_format='fastq')
fastq_fp, error_msg = _gzip_file(fastq_fp)
if error_msg is not None:
return False, None, error_msg
if not fasta_fp:
fasta_fp = join(out_dir, "%s.fasta" % name)
to_ascii_file(demux_fp, fasta_fp, out_format='fasta')
fasta_fp, error_msg = _gzip_file(fasta_fp)
if error_msg is not None:
return False, None, error_msg
filepaths = [(fastq_fp, 'preprocessed_fastq'),
(fasta_fp, 'preprocessed_fasta'),
(demux_fp, 'preprocessed_demux')]
if log_fp:
filepaths.append((log_fp, 'log'))
return True, [ArtifactInfo(None, 'Demultiplexed', filepaths)], ""
def _validate_demultiplexed(qclient, job_id, prep_info, files, out_dir):
"""Validate and fix a new 'Demultiplexed' artifact
Parameters
----------
qclient : qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
prep_info : dict of {str: dict of {str: str}}
The prep information keyed by sample id
files : dict of {str: list of str}
The files to add to the new artifact, keyed by filepath type
out_dir : str
The output directory
Returns
-------
    bool
        Whether the validation was successful
    list of qiita_client.ArtifactInfo or None
        The artifact information, if successful
    str
        The error message, if not successful
"""
qclient.update_job_step(job_id, "Step 2: Validating 'Demultiplexed' files")
supported_fp_types = {'preprocessed_fasta', 'preprocessed_fastq',
'preprocessed_demux', 'log'}
unsupported_fp_types = set(files) - supported_fp_types
if unsupported_fp_types:
error_msg = ("Filepath type(s) %s not supported by artifact type "
"Demultiplexed. Supported filepath types: %s"
% (', '.join(unsupported_fp_types),
', '.join(sorted(supported_fp_types))))
return False, None, error_msg
# At most one file of each type can be provided
offending = set(fp_t for fp_t, fps in files.items() if len(fps) > 1)
if offending:
errors = ["%s (%d): %s"
% (fp_t, len(files[fp_t]), ', '.join(files[fp_t]))
for fp_t in sorted(offending)]
error_msg = ("Only one filepath of each file type is supported, "
"offending types:\n%s" % "; ".join(errors))
return False, None, error_msg
# Check which files we have available:
fasta = (files['preprocessed_fasta'][0]
if 'preprocessed_fasta' in files else None)
fastq = (files['preprocessed_fastq'][0]
if 'preprocessed_fastq' in files else None)
demux = (files['preprocessed_demux'][0]
if 'preprocessed_demux' in files else None)
log = (files['log'][0] if 'log' in files else None)
if demux:
# If demux is available, use that one to perform the validation and
# generate the fasta and fastq from it
success, a_info, error_msg = _validate_demux_file(
qclient, job_id, prep_info, out_dir, demux, log_fp=log)
elif fastq:
# Generate the demux file from the fastq
demux = join(out_dir, "%s.demux" % splitext(basename(fastq))[0])
with File(demux, 'w') as f:
# to_hdf5 expects a list
to_hdf5([fastq], f)
# Validate the demux, providing the original fastq
success, a_info, error_msg = _validate_demux_file(
qclient, job_id, prep_info, out_dir, demux, fastq_fp=fastq,
log_fp=log)
elif fasta:
# Generate the demux file from the fasta
demux = join(out_dir, "%s.demux" % splitext(basename(fasta))[0])
with File(demux, 'w') as f:
# to_hdf5 expects a list
to_hdf5([fasta], f)
# Validate the demux, providing the original fasta
success, a_info, error_msg = _validate_demux_file(
qclient, job_id, prep_info, out_dir, demux, fasta_fp=fasta,
log_fp=log)
else:
error_msg = ("Either a 'preprocessed_demux', 'preprocessed_fastq' or "
"'preprocessed_fasta' file should be provided.")
return False, None, error_msg
return success, a_info, error_msg
def validate(qclient, job_id, parameters, out_dir):
"""Validae and fix a new artifact
Parameters
----------
qclient : qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
parameters : dict
The parameter values to validate and create the artifact
out_dir : str
The path to the job's output directory
Returns
-------
    bool
        Whether the validation was successful
    list of qiita_client.ArtifactInfo or None
        The artifact information, if successful
    str
        The error message, if not successful
Raises
------
ValueError
If there is any error gathering the information from the server
"""
prep_id = parameters['template']
files = loads(parameters['files'])
a_type = parameters['artifact_type']
qclient.update_job_step(job_id, "Step 1: Collecting prep information")
prep_info = qclient.get("/qiita_db/prep_template/%s/data/" % prep_id)
prep_info = prep_info['data']
_vm = ['SFF', 'FASTQ', 'FASTA', 'FASTA_Sanger', 'FASTA_preprocessed']
if a_type in _vm:
return _validate_multiple(qclient, job_id, prep_info, files, a_type)
elif a_type == 'per_sample_FASTQ':
return _validate_per_sample_FASTQ(qclient, job_id, prep_info, files)
elif a_type == 'Demultiplexed':
return _validate_demultiplexed(qclient, job_id, prep_info, files,
out_dir)
else:
error_msg = ("Unknown artifact_type %s. Supported types: 'SFF', "
"'FASTQ', 'FASTA', 'FASTA_Sanger', 'per_sample_FASTQ', "
"'FASTA_preprocessed', 'Demultiplexed'" % a_type)
return False, None, error_msg
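# Illustrative sketch (not part of the original module): the plugin framework
# is expected to call validate() with a parameters dict along these lines (all
# values hypothetical); note that 'files' is a JSON-encoded string:
#
#   parameters = {'template': 1,
#                 'files': '{"raw_forward_seqs": ["run1_R1.fastq.gz"],'
#                          ' "raw_barcodes": ["run1_I1.fastq.gz"]}',
#                 'artifact_type': 'FASTQ'}
#   success, artifacts_info, error_msg = validate(qclient, job_id,
#                                                 parameters, out_dir)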
|
|
""" pyKML Utility Module
The pykml.utility module provides utility functions that operate on KML
documents
"""
import re
def clean_xml_string(input_string):
'''removes invalid characters from an XML string'''
from curses import ascii
return ''.join(c for c in input_string if ascii.isascii(c))
def format_xml_with_cdata(
obj,
cdata_elements = ['description', 'text', 'linkDescription', 'displayName']
):
from lxml import etree
# Convert Objectify document to lxml.etree (is there a better way?)
root = etree.fromstring(etree.tostring(etree.ElementTree(obj)))
#Create an xpath expression to search for all desired cdata elements
xpath = '|'.join(map(lambda tag: '//kml:' + tag, cdata_elements))
results = root.xpath(
xpath,
namespaces = {'kml': 'http://www.opengis.net/kml/2.2'}
)
for element in results:
element.text = etree.CDATA(element.text)
return root
def count_elements(doc):
"Counts the number of times each element is used in a document"
summary = {}
for el in doc.iter():
try:
namespace, element_name = re.search('^{(.+)}(.+)$', el.tag).groups()
except:
namespace = None
element_name = el.tag
        if namespace not in summary:
            summary[namespace] = {}
        if element_name not in summary[namespace]:
            summary[namespace][element_name] = 1
        else:
            summary[namespace][element_name] += 1
return summary
def wrap_angle180(angle):
    # returns an angle in the half-open range [-180, 180)
try:
# if angle is a sequence
return [((a+180) % 360 ) - 180 for a in angle]
except TypeError:
return ((angle+180) % 360 ) - 180
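# Illustrative sketch (not part of the original module): wrap_angle180() maps
# angles into [-180, 180) and accepts sequences as well as scalars.
def _wrap_angle180_example():
    assert wrap_angle180(270) == -90
    assert wrap_angle180(-190) == 170
    assert wrap_angle180([0, 90, 180, 360]) == [0, 90, -180, 0]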
def to_wkt_list(doc):
    '''converts all geometries to Well Known Text (WKT) format'''
from lxml import etree
def ring_coords_to_wkt(ring):
'''converts LinearRing coordinates to WKT style coordinates'''
return(
(
ring.coordinates.text.strip()
).replace(' ','@@').replace(',',' ').replace('@@',', ')
)
ring_wkt_list = []
context = etree.iterwalk(
doc,
events=("start",),
tag="{http://www.opengis.net/kml/2.2}*",
)
for action, elem in context:
if elem.tag in ['{http://www.opengis.net/kml/2.2}Polygon',
'{http://www.opengis.net/kml/2.2}MultiPolygon']:
#print("%s: %s" % (action, elem.tag))
if elem.tag == '{http://www.opengis.net/kml/2.2}Polygon':
# outer boundary
ringlist = [
'({0})'.format(
ring_coords_to_wkt(elem.outerBoundaryIs.LinearRing)
)
]
for obj in elem.findall('{http://www.opengis.net/kml/2.2}innerBoundaryIs'):
ringlist.append(
'({0})'.format(
ring_coords_to_wkt(obj.LinearRing)
)
)
wkt = 'POLYGON ({rings})'.format(rings=', '.join(ringlist))
ring_wkt_list.append(wkt)
return(ring_wkt_list)
def convert_csv_to_kml(
fileObj,
latitude_field='latitude',
longitude_field='longitude',
altitude_field='altitude',
name_field='name',
description_field='description',
snippet_field='snippet',
):
'''Reads a CSV document from a file-like object and converts it to KML'''
import csv
#import urllib2
from pykml.factory import KML_ElementMaker as KML
# create a basic KML document
kmldoc = KML.kml(KML.Document(
KML.Folder(
KML.name("KmlFile"))
)
)
csvdoc = csv.DictReader(fileObj)
# if field is not found, check for other common field names
if latitude_field not in csvdoc.fieldnames:
match_field = None
for name in ['latitude','lat']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
latitude_field = match_field
if longitude_field not in csvdoc.fieldnames:
match_field = None
for name in ['longitude','lon','long']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
longitude_field = match_field
if altitude_field not in csvdoc.fieldnames:
match_field = None
for name in ['altitude','alt']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
altitude_field = match_field
if name_field not in csvdoc.fieldnames:
match_field = None
for name in ['name']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
name_field = match_field
if snippet_field not in csvdoc.fieldnames:
match_field = None
for name in ['snippet']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
snippet_field = match_field
if description_field not in csvdoc.fieldnames:
match_field = None
for name in ['description','desc']:
try:
match_field = csvdoc.fieldnames[[s.lower() for s in csvdoc.fieldnames].index(name)]
break
except:
pass
if match_field is not None:
description_field = match_field
# check that latitude and longitude columns can be found
if latitude_field not in csvdoc.fieldnames:
raise KeyError(
'Latitude field ({0}) was not found in the CSV file '
'column names {1}'.format(latitude_field,csvdoc.fieldnames)
)
if longitude_field not in csvdoc.fieldnames:
raise KeyError(
'Longitude field ({0}) was not found in the CSV file '
'column names {1}'.format(longitude_field,csvdoc.fieldnames)
)
    for row in csvdoc:
        pm = KML.Placemark()
        if name_field in row:
            pm.append(
                KML.name(clean_xml_string(row[name_field]))
            )
        if snippet_field in row:
            pm.append(
                KML.Snippet(clean_xml_string(row[snippet_field]), maxLines="2")
            )
        if description_field in row:
            pm.append(
                KML.description(clean_xml_string(row[description_field]))
            )
        else:
            # no description column: build an HTML table from all fields
            desc = '<table border="1">'
            for key, val in row.items():
                desc += '<tr><th>{0}</th><td>{1}</td></tr>'.format(key, val)
            desc += '</table>'
            pm.append(KML.description(clean_xml_string(desc)))
        coord_list = [row[longitude_field], row[latitude_field]]
        if altitude_field in row:
            coord_list += [row[altitude_field]]
        pm.append(
            KML.Point(
                KML.coordinates(','.join(coord_list))
            )
        )
        kmldoc.Document.Folder.append(pm)
return kmldoc
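# Illustrative sketch (not part of the original module): converting a small
# in-memory CSV document. Uses Python 2 StringIO to match the rest of this
# module; the place name and coordinates are arbitrary example values.
def _convert_csv_to_kml_example():
    from StringIO import StringIO
    from lxml import etree
    csv_data = StringIO(
        'name,latitude,longitude\n'
        'Greenwich,51.4769,0.0005\n'
    )
    kmldoc = convert_csv_to_kml(csv_data)
    return etree.tostring(kmldoc, pretty_print=True)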
def csv2kml():
"""Parse a CSV file and generates a KML document
Example: csv2kml test.csv
"""
import sys
import urllib2
from optparse import OptionParser
from lxml import etree
parser = OptionParser(
usage="usage: %prog FILENAME_or_URL",
version="%prog 0.1",
)
parser.add_option("--longitude_field", dest="longitude_field",
help="name of the column that contains longitude data")
parser.add_option("--latitude_field", dest="latitude_field",
help="name of the column that contains latitude data")
parser.add_option("--altitude_field", dest="altitude_field",
help="name of the column that contains altitude data")
parser.add_option("--name_field", dest="name_field",
help="name of the column used for the placemark name")
parser.add_option("--description_field", dest="description_field",
help="name of the column used for the placemark description")
parser.add_option("--snippet_field", dest="snippet_field",
help="name of the column used for the placemark snippet text")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("wrong number of arguments")
else:
uri = args[0]
# try to open the URI as both a local file and a remote URL
try:
f = open(uri)
except IOError:
try:
f = urllib2.urlopen(uri)
except ValueError:
raise ValueError('unable to load URI {0}'.format(uri))
except:
raise
kmldoc = convert_csv_to_kml(f,
latitude_field = options.latitude_field,
longitude_field = options.longitude_field,
altitude_field = options.altitude_field,
name_field = options.name_field,
description_field = options.description_field,
snippet_field = options.snippet_field,
)
    # close the file object, if needed
    try:
        f
    except NameError:
        pass  # variable was not defined
    else:
        f.close()
    # format_as_cdata() does not exist in this module; format_xml_with_cdata()
    # (defined above) wraps the description-like elements in CDATA sections
    kmlstr = etree.tostring(format_xml_with_cdata(kmldoc), pretty_print=True)
    sys.stdout.write(kmlstr)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# filename: client.py
'''
Client module for Fastdfs 3.08
author: scott yuan [email protected]
date: 2012-06-21
'''
import os, sys
from fdfs_client.utils import *
from fdfs_client.tracker_client import *
from fdfs_client.storage_client import *
from fdfs_client.exceptions import *
def get_tracker_conf(conf_path = 'client.conf'):
cf = Fdfs_ConfigParser()
tracker = {}
try:
cf.read(conf_path)
timeout = cf.getint('__config__', 'connect_timeout')
tracker_list = cf.get('__config__', 'tracker_server')
if isinstance(tracker_list, str):
tracker_list = [tracker_list]
tracker_ip_list = []
for tr in tracker_list:
tracker_ip, tracker_port = tr.split(':')
tracker_ip_list.append((tracker_ip, tracker_port))
tracker['host_tuple'] = tuple(tracker_ip_list)
tracker['timeout'] = timeout
tracker['name'] = 'Tracker Pool'
except:
raise
return tracker
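# Illustrative sketch (not part of the original module): get_tracker_conf()
# only reads the 'connect_timeout' and 'tracker_server' options, so a minimal
# client.conf could presumably look like:
#
#   connect_timeout = 30
#   tracker_server = 192.168.0.1:22122
#
# Fdfs_ConfigParser (from fdfs_client.utils) appears to expose these fdfs-style
# options under the '__config__' section queried above; repeated tracker_server
# entries would yield the list handled by the isinstance() check.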
class Fdfs_client(object):
    '''
    Class Fdfs_client implements the FastDFS client protocol, version 3.08.
    It is used to upload, download and delete files to/from an fdfs server,
    and uses a connection pool to manage connections to the server.
    '''
def __init__(self, conf_path = '/etc/fdfs/client.conf', \
poolclass =ConnectionPool):
self.trackers = get_tracker_conf(conf_path)
self.tracker_pool = poolclass(**self.trackers)
self.timeout = self.trackers['timeout']
self.storages = {}
return None
    def __del__(self):
        try:
            # the pool attribute is tracker_pool (self.pool is never defined)
            self.tracker_pool.destroy()
            self.tracker_pool = None
        except:
            pass
def get_storage(self, store_serv):
store = self.storages.get((store_serv.ip_addr, store_serv.port), None)
if store is None:
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
self.storages[(store_serv.ip_addr, store_serv.port)] = store
return store
def upload_by_filename(self, filename, meta_dict = None):
'''
Upload a file to Storage server.
arguments:
@filename: string, name of file that will be uploaded
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
} meta_dict can be null
@return dict {
'Group name' : group_name,
'Remote file_id' : remote_file_id,
'Status' : 'Upload successed.',
'Local file name' : local_file_name,
'Uploaded size' : upload_size,
'Storage IP' : storage_ip
} if success else None
'''
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(uploading)')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
return self.get_storage(store_serv).storage_upload_by_filename(tc, store_serv, filename, meta_dict)
def upload_by_file(self, filename, meta_dict = None):
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(uploading)')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
return self.get_storage(store_serv).storage_upload_by_file(tc, store_serv, filename, meta_dict)
def upload_by_buffer(self, filebuffer, file_ext_name = None, meta_dict = None):
'''
Upload a buffer to Storage server.
arguments:
@filebuffer: string, buffer
@file_ext_name: string, file extend name
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
}
@return dict {
'Group name' : group_name,
'Remote file_id' : remote_file_id,
'Status' : 'Upload successed.',
'Local file name' : '',
'Uploaded size' : upload_size,
'Storage IP' : storage_ip
} if success else None
'''
if not filebuffer:
raise DataError('[-] Error: argument filebuffer can not be null.')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
return self.get_storage(store_serv).storage_upload_by_buffer(tc, store_serv, filebuffer, \
file_ext_name, meta_dict)
def upload_slave_by_filename(self, filename, remote_file_id, prefix_name, \
meta_dict = None):
'''
Upload slave file to Storage server.
arguments:
@filename: string, local file name
@remote_file_id: string, remote file id
@prefix_name: string
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
}
@return dictionary {
'Status' : 'Upload slave successed.',
'Local file name' : local_filename,
'Uploaded size' : upload_size,
'Remote file id' : remote_file_id,
'Storage IP' : storage_ip
}
'''
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(uploading slave)')
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
if not prefix_name:
raise DataError('[-] Error: prefix_name can not be null.')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_with_group(group_name)
store = self.get_storage(store_serv)
try:
ret_dict = store.storage_upload_slave_by_filename(tc, store_serv, filename, \
prefix_name, remote_filename, \
meta_dict = None)
except:
raise
ret_dict['Status'] = 'Upload slave file successed.'
return ret_dict
def upload_slave_by_file(self, filename, remote_file_id, prefix_name, \
meta_dict = None):
'''
Upload slave file to Storage server.
arguments:
@filename: string, local file name
@remote_file_id: string, remote file id
@prefix_name: string
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
}
@return dictionary {
'Status' : 'Upload slave successed.',
'Local file name' : local_filename,
'Uploaded size' : upload_size,
'Remote file id' : remote_file_id,
'Storage IP' : storage_ip
}
'''
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(uploading slave)')
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
if not prefix_name:
raise DataError('[-] Error: prefix_name can not be null.')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_with_group(group_name)
store = self.get_storage(store_serv)
try:
ret_dict = store.storage_upload_slave_by_file(tc, store_serv, filename, \
prefix_name, remote_filename, \
meta_dict = None)
except:
raise
ret_dict['Status'] = 'Upload slave file successed.'
return ret_dict
def upload_slave_by_buffer(self, filebuffer, remote_file_id, \
meta_dict = None, file_ext_name = None):
'''
Upload slave file by buffer
arguments:
@filebuffer: string
@remote_file_id: string
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
}
@return dictionary {
'Status' : 'Upload slave successed.',
'Local file name' : local_filename,
'Uploaded size' : upload_size,
'Remote file id' : remote_file_id,
'Storage IP' : storage_ip
}
'''
if not filebuffer:
raise DataError('[-] Error: argument filebuffer can not be null.')
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
store = self.get_storage(store_serv)
return store.storage_upload_slave_by_buffer(tc, store_serv, filebuffer, \
remote_filename, meta_dict, \
file_ext_name)
def upload_appender_by_filename(self, local_filename, meta_dict = None):
'''
Upload an appender file by filename.
arguments:
@local_filename: string
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
} Notice: it can be null
@return dict {
'Group name' : group_name,
'Remote file_id' : remote_file_id,
'Status' : 'Upload successed.',
'Local file name' : '',
'Uploaded size' : upload_size,
'Storage IP' : storage_ip
} if success else None
'''
isfile, errmsg = fdfs_check_file(local_filename)
if not isfile:
raise DataError(errmsg + '(uploading appender)')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
store = self.get_storage(store_serv)
return store.storage_upload_appender_by_filename(tc, store_serv, \
local_filename, meta_dict)
def upload_appender_by_file(self, local_filename, meta_dict = None):
'''
Upload an appender file by file.
arguments:
@local_filename: string
@meta_dict: dictionary e.g.:{
'ext_name' : 'jpg',
'file_size' : '10240B',
'width' : '160px',
            'height'    : '80px'
} Notice: it can be null
@return dict {
'Group name' : group_name,
'Remote file_id' : remote_file_id,
'Status' : 'Upload successed.',
'Local file name' : '',
'Uploaded size' : upload_size,
'Storage IP' : storage_ip
} if success else None
'''
isfile, errmsg = fdfs_check_file(local_filename)
if not isfile:
raise DataError(errmsg + '(uploading appender)')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
store = self.get_storage(store_serv)
return store.storage_upload_appender_by_file(tc, store_serv, \
local_filename, meta_dict)
def upload_appender_by_buffer(self, filebuffer, file_ext_name = None, meta_dict = None):
'''
Upload a buffer to Storage server.
arguments:
@filebuffer: string
@file_ext_name: string, can be null
@meta_dict: dictionary, can be null
@return dict {
'Group name' : group_name,
'Remote file_id' : remote_file_id,
'Status' : 'Upload successed.',
'Local file name' : '',
'Uploaded size' : upload_size,
'Storage IP' : storage_ip
} if success else None
'''
if not filebuffer:
raise DataError('[-] Error: argument filebuffer can not be null.')
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_stor_without_group()
store = self.get_storage(store_serv)
return store.storage_upload_appender_by_buffer(tc, store_serv, \
filebuffer, meta_dict, \
file_ext_name)
def delete_file(self, remote_file_id):
'''
Delete a file from Storage server.
arguments:
@remote_file_id: string, file_id of file that is on storage server
@return tuple ('Delete file successed.', remote_file_id, storage_ip)
'''
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(in delete file)')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
store = self.get_storage(store_serv)
return store.storage_delete_file(tc, store_serv, remote_filename)
def download_to_file(self, local_filename, remote_file_id, offset = 0, down_bytes = 0):
'''
Download a file from Storage server.
arguments:
@local_filename: string, local name of file
@remote_file_id: string, file_id of file that is on storage server
@offset: long
@downbytes: long
@return dict {
'Remote file_id' : remote_file_id,
'Content' : local_filename,
'Download size' : downloaded_size,
'Storage IP' : storage_ip
}
'''
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(in download file)')
group_name, remote_filename = tmp
        # normalize the offset and length arguments so they are always defined
        file_offset = long(offset)
        download_bytes = long(down_bytes)
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_fetch(group_name, remote_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_download_to_file(tc, store_serv, local_filename, \
file_offset, download_bytes, \
remote_filename)
def download_to_buffer(self, remote_file_id, offset = 0, down_bytes = 0):
'''
Download a file from Storage server and store in buffer.
arguments:
@remote_file_id: string, file_id of file that is on storage server
@offset: long
@down_bytes: long
@return dict {
'Remote file_id' : remote_file_id,
'Content' : file_buffer,
'Download size' : downloaded_size,
'Storage IP' : storage_ip
}
'''
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(in download file)')
group_name, remote_filename = tmp
        # normalize the offset and length arguments so they are always defined
        file_offset = long(offset)
        download_bytes = long(down_bytes)
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_fetch(group_name, remote_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
file_buffer = None
return store.storage_download_to_buffer(tc, store_serv, file_buffer, \
file_offset, download_bytes, \
remote_filename)
def list_one_group(self, group_name):
'''
List one group information.
arguments:
@group_name: string, group name will be list
@return Group_info, instance
'''
tc = Tracker_client(self.tracker_pool)
return tc.tracker_list_one_group(group_name)
def list_servers(self, group_name, storage_ip = None):
'''
List all storage servers information in a group
arguments:
@group_name: string
@return dictionary {
'Group name' : group_name,
'Servers' : server list,
}
'''
tc = Tracker_client(self.tracker_pool)
return tc.tracker_list_servers(group_name, storage_ip)
def list_all_groups(self):
'''
List all group information.
@return dictionary {
'Groups count' : group_count,
'Groups' : list of groups
}
'''
tc = Tracker_client(self.tracker_pool)
return tc.tracker_list_all_groups()
def get_meta_data(self, remote_file_id):
'''
Get meta data of remote file.
arguments:
@remote_fileid: string, remote file id
@return dictionary, meta data
'''
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(in get meta data)')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_get_metadata(tc, store_serv, remote_filename)
def set_meta_data(self, remote_file_id, \
meta_dict, op_flag = STORAGE_SET_METADATA_FLAG_OVERWRITE):
'''
Set meta data of remote file.
arguments:
@remote_file_id: string
@meta_dict: dictionary
@op_flag: char, 'O' for overwrite, 'M' for merge
@return dictionary {
'Status' : status,
'Storage IP' : storage_ip
}
'''
tmp = split_remote_fileid(remote_file_id)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(in set meta data)')
group_name, remote_filename = tmp
tc = Tracker_client(self.tracker_pool)
try:
store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
status = store.storage_set_metadata(tc, store_serv, \
remote_filename, meta_dict)
except (ConnectionError, ResponseError, DataError):
raise
#if status == 2:
# raise DataError('[-] Error: remote file %s is not exist.' % remote_file_id)
if status != 0:
            raise DataError('[-] Error: %d, %s' % (status, os.strerror(status)))
ret_dict = {}
ret_dict['Status'] = 'Set meta data success.'
ret_dict['Storage IP'] = store_serv.ip_addr
return ret_dict
def append_by_filename(self, local_filename, remote_fileid):
isfile, errmsg = fdfs_check_file(local_filename)
if not isfile:
raise DataError(errmsg + '(append)')
tmp = split_remote_fileid(remote_fileid)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(append)')
group_name, appended_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_append_by_filename(tc, store_serv, local_filename, \
appended_filename)
def append_by_file(self, local_filename, remote_fileid):
isfile, errmsg = fdfs_check_file(local_filename)
if not isfile:
raise DataError(errmsg + '(append)')
tmp = split_remote_fileid(remote_fileid)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(append)')
group_name, appended_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_append_by_file(tc, store_serv, local_filename, \
appended_filename)
def append_by_buffer(self, file_buffer, remote_fileid):
if not file_buffer:
raise DataError('[-] Error: file_buffer can not be null.')
tmp = split_remote_fileid(remote_fileid)
if not tmp:
raise DataError('[-] Error: remote_file_id is invalid.(append)')
group_name, appended_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_append_by_buffer(tc, store_serv, file_buffer, \
appended_filename)
def truncate_file(self, truncated_filesize, appender_fileid):
'''
Truncate file in Storage server.
arguments:
@truncated_filesize: long
@appender_fileid: remote_fileid
@return: dictionary {
'Status' : 'Truncate successed.',
'Storage IP' : storage_ip
}
'''
trunc_filesize = long(truncated_filesize)
tmp = split_remote_fileid(appender_fileid)
if not tmp:
raise DataError('[-] Error: appender_fileid is invalid.(truncate)')
group_name, appender_filename = tmp
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_truncate_file(tc, store_serv, trunc_filesize, \
appender_filename)
def modify_by_filename(self, filename, appender_fileid, offset = 0):
'''
Modify a file in Storage server by file.
arguments:
@filename: string, local file name
@offset: long, file offset
@appender_fileid: string, remote file id
@return: dictionary {
'Status' : 'Modify successed.',
'Storage IP' : storage_ip
}
'''
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(modify)')
filesize = os.stat(filename).st_size
tmp = split_remote_fileid(appender_fileid)
if not tmp:
raise DataError('[-] Error: remote_fileid is invalid.(modify)')
group_name, appender_filename = tmp
        # use the requested offset (as a long); the default is 0
        file_offset = long(offset)
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_modify_by_filename(tc, store_serv, filename, file_offset, \
filesize, appender_filename)
def modify_by_file(self, filename, appender_fileid, offset = 0):
'''
Modify a file in Storage server by file.
arguments:
@filename: string, local file name
@offset: long, file offset
@appender_fileid: string, remote file id
@return: dictionary {
'Status' : 'Modify successed.',
'Storage IP' : storage_ip
}
'''
isfile, errmsg = fdfs_check_file(filename)
if not isfile:
raise DataError(errmsg + '(modify)')
filesize = os.stat(filename).st_size
tmp = split_remote_fileid(appender_fileid)
if not tmp:
raise DataError('[-] Error: remote_fileid is invalid.(modify)')
group_name, appender_filename = tmp
        # use the requested offset (as a long); the default is 0
        file_offset = long(offset)
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_modify_by_file(tc, store_serv, filename, file_offset, \
filesize, appender_filename)
def modify_by_buffer(self, filebuffer, appender_fileid, offset = 0):
'''
Modify a file in Storage server by buffer.
arguments:
@filebuffer: string, file buffer
@offset: long, file offset
@appender_fileid: string, remote file id
@return: dictionary {
'Status' : 'Modify successed.',
'Storage IP' : storage_ip
}
'''
if not filebuffer:
raise DataError('[-] Error: filebuffer can not be null.(modify)')
filesize = len(filebuffer)
tmp = split_remote_fileid(appender_fileid)
if not tmp:
raise DataError('[-] Error: remote_fileid is invalid.(modify)')
group_name, appender_filename = tmp
        # use the requested offset (as a long); the default is 0
        file_offset = long(offset)
tc = Tracker_client(self.tracker_pool)
store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
return store.storage_modify_by_buffer(tc, store_serv, filebuffer, file_offset, \
filesize, appender_filename)
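# Illustrative usage sketch (not part of the original module); the paths and
# the returned remote file id are hypothetical:
#
#   client = Fdfs_client('/etc/fdfs/client.conf')
#   ret = client.upload_by_filename('/tmp/example.jpg')
#   remote_id = ret['Remote file_id']
#   client.download_to_file('/tmp/example_copy.jpg', remote_id)
#   client.delete_file(remote_id)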
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations to emit summaries."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import functools
import getpass
import os
import re
import threading
import time
import six
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import summary_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import profiler as _profiler
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import smart_cond
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_summary_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import summary_op_util
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training_util
from tensorflow.python.util import deprecation
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# Name for graph collection of summary writer init ops, which is only exposed
# as a legacy API for tf.contrib.summary in TF 1.x.
_SUMMARY_WRITER_INIT_COLLECTION_NAME = "_SUMMARY_WRITER_V2"
_EXPERIMENT_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,256}$")
_RUN_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,512}$")
_USER_NAME_PATTERNS = re.compile(r"^[a-z]([-a-z0-9]{0,29}[a-z0-9])?$", re.I)
def _should_record_summaries_internal(default_state):
"""Returns boolean Tensor if summaries should/shouldn't be recorded.
Now the summary condition is decided by logical "and" of two conditions:
ctx.summary_recording and ctx.summary_recording_distribution_strategy. The
former one is usually set by user, and the latter one is controlled by
DistributionStrategy (tf.distribute.ReplicaContext).
Args:
default_state: can be True or False. The default summary behavior when user
does not specify ctx.summary_recording and
ctx.summary_recording_distribution_strategy is True.
"""
ctx = context.context()
resolve = lambda x: x() if callable(x) else x
cond_distributed = resolve(ctx.summary_recording_distribution_strategy)
cond = resolve(ctx.summary_recording)
if cond is None:
cond = default_state
return math_ops.logical_and(cond_distributed, cond)
def _should_record_summaries_v2():
"""Returns boolean Tensor which is true if summaries should be recorded.
If no recording status has been set, this defaults to True, unlike the public
should_record_summaries().
"""
return _should_record_summaries_internal(default_state=True)
def should_record_summaries():
"""Returns boolean Tensor which is true if summaries should be recorded."""
return _should_record_summaries_internal(default_state=False)
@tf_export("summary.record_if", v1=[])
@tf_contextlib.contextmanager
def record_if(condition):
"""Sets summary recording on or off per the provided boolean value.
  The provided value can be a python boolean, a scalar boolean Tensor, or
  a callable providing such a value; if a callable is passed it will be
invoked on-demand to determine whether summary writing will occur.
Args:
condition: can be True, False, a bool Tensor, or a callable providing such.
Yields:
Returns a context manager that sets this value on enter and restores the
previous value on exit.
"""
old = context.context().summary_recording
try:
context.context().summary_recording = condition
yield
finally:
context.context().summary_recording = old
# TODO(apassos) consider how to handle local step here.
def record_summaries_every_n_global_steps(n, global_step=None):
"""Sets the should_record_summaries Tensor to true if global_step % n == 0."""
if global_step is None:
global_step = training_util.get_or_create_global_step()
with ops.device("cpu:0"):
should = lambda: math_ops.equal(global_step % n, 0)
if not context.executing_eagerly():
should = should()
return record_if(should)
def always_record_summaries():
"""Sets the should_record_summaries Tensor to always true."""
return record_if(True)
def never_record_summaries():
"""Sets the should_record_summaries Tensor to always false."""
return record_if(False)
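# Illustrative note (not part of the original module): the three helpers above
# are thin wrappers around record_if(); e.g. recording every n global steps is
# roughly equivalent to:
#
#   step = training_util.get_or_create_global_step()
#   with record_if(lambda: math_ops.equal(step % n, 0)):
#     ...  # summary ops created here only record on matching steps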
@tf_export("summary.experimental.get_step", v1=[])
def get_step():
"""Returns the default summary step for the current thread.
Returns:
The step set by `tf.summary.experimental.set_step()` if one has been set,
otherwise None.
"""
return context.context().summary_step
@tf_export("summary.experimental.set_step", v1=[])
def set_step(step):
"""Sets the default summary step for the current thread.
For convenience, this function sets a default value for the `step` parameter
used in summary-writing functions elsewhere in the API so that it need not
be explicitly passed in every such invocation. The value can be a constant
or a variable, and can be retrieved via `tf.summary.experimental.get_step()`.
Note: when using this with @tf.functions, the step value will be captured at
the time the function is traced, so changes to the step outside the function
will not be reflected inside the function unless using a `tf.Variable` step.
Args:
step: An `int64`-castable default step value, or None to unset.
"""
context.context().summary_step = step
@tf_export("summary.SummaryWriter", v1=[])
@six.add_metaclass(abc.ABCMeta)
class SummaryWriter(object):
"""Interface representing a stateful summary writer object."""
@abc.abstractmethod
def set_as_default(self):
"""Enables this summary writer for the current thread."""
raise NotImplementedError()
@abc.abstractmethod
@tf_contextlib.contextmanager
def as_default(self):
"""Returns a context manager that enables summary writing."""
raise NotImplementedError()
def init(self):
"""Initializes the summary writer."""
raise NotImplementedError()
def flush(self):
"""Flushes any buffered data."""
raise NotImplementedError()
def close(self):
"""Flushes and closes the summary writer."""
raise NotImplementedError()
class ResourceSummaryWriter(SummaryWriter):
"""Implementation of SummaryWriter using a SummaryWriterInterface resource."""
def __init__(self, shared_name, init_op_fn, name=None, v2=False):
self._resource = gen_summary_ops.summary_writer(
shared_name=shared_name, name=name)
# TODO(nickfelt): cache other constructed ops in graph mode
self._init_op_fn = init_op_fn
self._init_op = init_op_fn(self._resource)
self._v2 = v2
self._closed = False
if context.executing_eagerly():
self._resource_deleter = resource_variable_ops.EagerResourceDeleter(
handle=self._resource, handle_device="cpu:0")
else:
ops.add_to_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME, self._init_op)
def set_as_default(self):
"""Enables this summary writer for the current thread."""
if self._v2 and context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
context.context().summary_writer = self
@tf_contextlib.contextmanager
def as_default(self):
"""Returns a context manager that enables summary writing."""
if self._v2 and context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
old = context.context().summary_writer
try:
context.context().summary_writer = self
yield self
# Flushes the summary writer in eager mode or in graph functions, but
# not in legacy graph mode (you're on your own there).
self.flush()
finally:
context.context().summary_writer = old
def init(self):
"""Initializes the summary writer."""
if self._v2:
if context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
return self._init_op
# Legacy behavior allows re-initializing the resource.
return self._init_op_fn(self._resource)
def flush(self):
"""Flushes any buffered data."""
if self._v2 and context.executing_eagerly() and self._closed:
return
return _flush_fn(writer=self)
def close(self):
"""Flushes and closes the summary writer."""
if self._v2 and context.executing_eagerly() and self._closed:
return
try:
with ops.control_dependencies([self.flush()]):
with ops.device("cpu:0"):
return gen_summary_ops.close_summary_writer(self._resource)
finally:
if self._v2 and context.executing_eagerly():
self._closed = True
class NoopSummaryWriter(SummaryWriter):
"""A summary writer that does nothing, for create_noop_writer()."""
def set_as_default(self):
pass
@tf_contextlib.contextmanager
def as_default(self):
yield
def init(self):
pass
def flush(self):
pass
def close(self):
pass
@tf_export(v1=["summary.initialize"])
def initialize(
graph=None, # pylint: disable=redefined-outer-name
session=None):
"""Initializes summary writing for graph execution mode.
This operation is a no-op when executing eagerly.
This helper method provides a higher-level alternative to using
`tf.contrib.summary.summary_writer_initializer_op` and
`tf.contrib.summary.graph`.
Most users will also want to call `tf.compat.v1.train.create_global_step`
which can happen before or after this function is called.
Args:
graph: A `tf.Graph` or `tf.GraphDef` to output to the writer.
This function will not write the default graph by default. When
writing to an event log file, the associated step will be zero.
session: So this method can call `tf.Session.run`. This defaults
to `tf.get_default_session`.
Raises:
RuntimeError: If the current thread has no default
`tf.contrib.summary.SummaryWriter`.
ValueError: If session wasn't passed and no default session.
"""
if context.executing_eagerly():
return
if context.context().summary_writer is None:
raise RuntimeError("No default tf.contrib.summary.SummaryWriter found")
if session is None:
session = ops.get_default_session()
if session is None:
raise ValueError("session must be passed if no default session exists")
session.run(summary_writer_initializer_op())
if graph is not None:
data = _serialize_graph(graph)
x = array_ops.placeholder(dtypes.string)
session.run(_graph(x, 0), feed_dict={x: data})
@tf_export("summary.create_file_writer", v1=[])
def create_file_writer_v2(logdir,
max_queue=None,
flush_millis=None,
filename_suffix=None,
name=None):
"""Creates a summary file writer for the given log directory.
Args:
logdir: a string specifying the directory in which to write an event file.
max_queue: the largest number of summaries to keep in a queue; will
flush once the queue gets bigger than this. Defaults to 10.
flush_millis: the largest interval between flushes. Defaults to 120,000.
filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
name: a name for the op that creates the writer.
Returns:
A SummaryWriter object.
"""
if logdir is None:
raise ValueError("logdir cannot be None")
inside_function = ops.inside_function()
with ops.name_scope(name, "create_file_writer") as scope, ops.device("cpu:0"):
# Run init inside an init_scope() to hoist it out of tf.functions.
with ops.init_scope():
if context.executing_eagerly():
_check_create_file_writer_args(
inside_function,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix)
logdir = ops.convert_to_tensor(logdir, dtype=dtypes.string)
if max_queue is None:
max_queue = constant_op.constant(10)
if flush_millis is None:
flush_millis = constant_op.constant(2 * 60 * 1000)
if filename_suffix is None:
filename_suffix = constant_op.constant(".v2")
# Prepend the PID and a process-local UID to the filename suffix to avoid
# filename collisions within the machine (the filename already contains
# the hostname to avoid cross-machine collisions).
unique_prefix = constant_op.constant(".%s.%s" % (os.getpid(), ops.uid()))
filename_suffix = unique_prefix + filename_suffix
# Use a unique shared_name to prevent resource sharing.
if context.executing_eagerly():
shared_name = context.shared_name()
else:
shared_name = ops.name_from_scope_name(scope) # pylint: disable=protected-access
return ResourceSummaryWriter(
shared_name=shared_name,
init_op_fn=functools.partial(
gen_summary_ops.create_summary_file_writer,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix),
name=name,
v2=True)
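# Illustrative usage sketch (not part of this module), assuming the exported
# tf.summary v2 API; the logdir and the scalar summary call are hypothetical:
#
#   writer = tf.summary.create_file_writer('/tmp/logs')
#   with writer.as_default():
#     tf.summary.scalar('loss', 0.5, step=1)
#   writer.flush()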
def create_file_writer(logdir,
max_queue=None,
flush_millis=None,
filename_suffix=None,
name=None):
"""Creates a summary file writer in the current context under the given name.
Args:
logdir: a string, or None. If a string, creates a summary file writer
which writes to the directory named by the string. If None, returns
a mock object which acts like a summary writer but does nothing,
useful to use as a context manager.
max_queue: the largest number of summaries to keep in a queue; will
flush once the queue gets bigger than this. Defaults to 10.
flush_millis: the largest interval between flushes. Defaults to 120,000.
filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
name: Shared name for this SummaryWriter resource stored to default
Graph. Defaults to the provided logdir prefixed with `logdir:`. Note: if a
summary writer resource with this shared name already exists, the returned
SummaryWriter wraps that resource and the other arguments have no effect.
Returns:
Either a summary writer or an empty object which can be used as a
summary writer.
"""
if logdir is None:
return NoopSummaryWriter()
logdir = str(logdir)
with ops.device("cpu:0"):
if max_queue is None:
max_queue = constant_op.constant(10)
if flush_millis is None:
flush_millis = constant_op.constant(2 * 60 * 1000)
if filename_suffix is None:
filename_suffix = constant_op.constant(".v2")
if name is None:
name = "logdir:" + logdir
return ResourceSummaryWriter(
shared_name=name,
init_op_fn=functools.partial(
gen_summary_ops.create_summary_file_writer,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix))
def create_db_writer(db_uri,
experiment_name=None,
run_name=None,
user_name=None,
name=None):
"""Creates a summary database writer in the current context.
This can be used to write tensors from the execution graph directly
to a database. Only SQLite is supported right now. This function
will create the schema if it doesn't exist. Entries in the Users,
Experiments, and Runs tables will be created automatically if they
don't already exist.
Args:
db_uri: For example "file:/tmp/foo.sqlite".
experiment_name: Defaults to YYYY-MM-DD in local time if None.
Empty string means the Run will not be associated with an
Experiment. Can't contain ASCII control characters or <>. Case
sensitive.
run_name: Defaults to HH:MM:SS in local time if None. Empty string
means a Tag will not be associated with any Run. Can't contain
ASCII control characters or <>. Case sensitive.
user_name: Defaults to system username if None. Empty means the
Experiment will not be associated with a User. Must be valid as
both a DNS label and Linux username.
name: Shared name for this SummaryWriter resource stored to default
`tf.Graph`.
Returns:
A `tf.summary.SummaryWriter` instance.
"""
with ops.device("cpu:0"):
if experiment_name is None:
experiment_name = time.strftime("%Y-%m-%d", time.localtime(time.time()))
if run_name is None:
run_name = time.strftime("%H:%M:%S", time.localtime(time.time()))
if user_name is None:
user_name = getpass.getuser()
experiment_name = _cleanse_string(
"experiment_name", _EXPERIMENT_NAME_PATTERNS, experiment_name)
run_name = _cleanse_string("run_name", _RUN_NAME_PATTERNS, run_name)
user_name = _cleanse_string("user_name", _USER_NAME_PATTERNS, user_name)
return ResourceSummaryWriter(
shared_name=name,
init_op_fn=functools.partial(
gen_summary_ops.create_summary_db_writer,
db_uri=db_uri,
experiment_name=experiment_name,
run_name=run_name,
user_name=user_name))
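# Sketch of how the database writer above might be used (hedged: `as_default()`
# comes from the SummaryWriter base class defined earlier in this module):
#
#   writer = create_db_writer("file:/tmp/foo.sqlite", experiment_name="mnist")
#   with writer.as_default():
#     scalar("accuracy", 0.9, step=10)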
@tf_export("summary.create_noop_writer", v1=[])
def create_noop_writer():
"""Returns a summary writer that does nothing.
This is useful as a placeholder in code that expects a context manager.
"""
return NoopSummaryWriter()
def _cleanse_string(name, pattern, value):
if isinstance(value, six.string_types) and pattern.search(value) is None:
raise ValueError("%s (%s) must match %s" % (name, value, pattern.pattern))
return ops.convert_to_tensor(value, dtypes.string)
def _nothing():
"""Convenient else branch for when summaries do not record."""
return constant_op.constant(False)
def all_summary_ops():
"""Graph-mode only. Returns all summary ops.
Please note this excludes `tf.summary.graph` ops.
Returns:
The summary ops.
"""
if context.executing_eagerly():
return None
return ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
def summary_writer_initializer_op():
"""Graph-mode only. Returns the list of ops to create all summary writers.
Returns:
The initializer ops.
Raises:
RuntimeError: If in Eager mode.
"""
if context.executing_eagerly():
raise RuntimeError(
"tf.contrib.summary.summary_writer_initializer_op is only "
"supported in graph mode.")
return ops.get_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME)
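# Graph-mode usage sketch for the two helpers above (hedged: `tf.Session` is the
# TF1-style API assumed here; the writer/record helpers are from this module):
#
#   writer = create_file_writer(logdir)
#   with writer.as_default(), always_record_summaries():
#     scalar("loss", loss_tensor, step=global_step)
#   with tf.Session() as sess:
#     sess.run(summary_writer_initializer_op())
#     sess.run(all_summary_ops())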
_INVALID_SCOPE_CHARACTERS = re.compile(r"[^-_/.A-Za-z0-9]")
@tf_export("summary.summary_scope", v1=[])
@tf_contextlib.contextmanager
def summary_scope(name, default_name="summary", values=None):
"""A context manager for use when defining a custom summary op.
This behaves similarly to `tf.name_scope`, except that it returns a generated
summary tag in addition to the scope name. The tag is structurally similar to
the scope name - derived from the user-provided name, prefixed with enclosing
name scopes if any - but we relax the constraint that it be uniquified, as
well as the character set limitation (so the user-provided name can contain
characters not legal for scope names; in the scope name these are removed).
This makes the summary tag more predictable and consistent for the user.
For example, to define a new summary op called `my_op`:
```python
def my_op(name, my_value, step):
with tf.summary.summary_scope(name, "MyOp", [my_value]) as (tag, scope):
my_value = tf.convert_to_tensor(my_value)
return tf.summary.write(tag, my_value, step=step)
```
Args:
name: string name for the summary.
default_name: Optional; if provided, used as default name of the summary.
values: Optional; passed as `values` parameter to name_scope.
Yields:
A tuple `(tag, scope)` as described above.
"""
name = name or default_name
current_scope = ops.get_name_scope()
tag = current_scope + "/" + name if current_scope else name
# Strip illegal characters from the scope name, and if that leaves nothing,
# use None instead so we pick up the default name.
name = _INVALID_SCOPE_CHARACTERS.sub("", name) or None
with ops.name_scope(name, default_name, values) as scope:
yield tag, scope
@tf_export("summary.write", v1=[])
def write(tag, tensor, step=None, metadata=None, name=None):
"""Writes a generic summary to the default SummaryWriter if one exists.
This exists primarily to support the definition of type-specific summary ops
like scalar() and image(), and is not intended for direct use unless defining
a new type-specific summary op.
Args:
tag: string tag used to identify the summary (e.g. in TensorBoard), usually
generated with `tf.summary.summary_scope`
tensor: the Tensor holding the summary data to write
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
metadata: Optional SummaryMetadata, as a proto or serialized bytes
name: Optional string name for this op.
Returns:
True on success, or false if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
with ops.name_scope(name, "write_summary") as scope:
if context.context().summary_writer is None:
return constant_op.constant(False)
if step is None:
step = get_step()
if step is None:
raise ValueError("No step set via 'step' argument or "
"tf.summary.experimental.set_step()")
if metadata is None:
serialized_metadata = b""
elif hasattr(metadata, "SerializeToString"):
serialized_metadata = metadata.SerializeToString()
else:
serialized_metadata = metadata
def record():
"""Record the actual summary and return True."""
# Note the identity to move the tensor to the CPU.
with ops.device("cpu:0"):
write_summary_op = gen_summary_ops.write_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
step,
array_ops.identity(tensor),
tag,
serialized_metadata,
name=scope)
with ops.control_dependencies([write_summary_op]):
return constant_op.constant(True)
return smart_cond.smart_cond(
_should_record_summaries_v2(), record, _nothing, name="summary_cond")
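# Quick sketch of calling write() directly (hedged: assumes a default writer has
# been set via SummaryWriter.as_default() and that set_step()/get_step() are the
# step fallback mentioned in the docstring above):
#
#   set_step(100)
#   with create_file_writer("/tmp/logs").as_default():
#     write("my_metric", 0.25)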
def summary_writer_function(name, tensor, function, family=None):
"""Helper function to write summaries.
Args:
name: name of the summary
tensor: main tensor to form the summary
function: function taking a tag and a scope which writes the summary
family: optional, the summary's family
Returns:
The result of writing the summary.
"""
name_scope = ops.get_name_scope()
if name_scope:
# Add a slash to allow reentering the name scope.
name_scope += "/"
def record():
with ops.name_scope(name_scope), summary_op_util.summary_scope(
name, family, values=[tensor]) as (tag, scope):
with ops.control_dependencies([function(tag, scope)]):
return constant_op.constant(True)
if context.context().summary_writer is None:
return control_flow_ops.no_op()
with ops.device("cpu:0"):
op = smart_cond.smart_cond(
should_record_summaries(), record, _nothing, name="")
if not context.executing_eagerly():
ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access
return op
def generic(name, tensor, metadata=None, family=None, step=None):
"""Writes a tensor summary if possible."""
def function(tag, scope):
if metadata is None:
serialized_metadata = constant_op.constant("")
elif hasattr(metadata, "SerializeToString"):
serialized_metadata = constant_op.constant(metadata.SerializeToString())
else:
serialized_metadata = metadata
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
_choose_step(step),
array_ops.identity(tensor),
tag,
serialized_metadata,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def scalar(name, tensor, family=None, step=None):
"""Writes a scalar summary if possible.
Unlike `tf.contrib.summary.generic` this op may change the dtype
depending on the writer, for both practical and efficiency concerns.
Args:
name: An arbitrary name for this summary.
tensor: A `tf.Tensor`. Must be one of the following types:
`float32`, `float64`, `int32`, `int64`, `uint8`, `int16`,
`int8`, `uint16`, `half`, `uint32`, `uint64`.
family: Optional, the summary's family.
step: The `int64` monotonic step variable, which defaults
to `tf.train.get_global_step`.
Returns:
The created `tf.Operation` or a `tf.no_op` if summary writing has
not been enabled for this context.
"""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_scalar_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def histogram(name, tensor, family=None, step=None):
"""Writes a histogram summary if possible."""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_histogram_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def image(name, tensor, bad_color=None, max_images=3, family=None, step=None):
"""Writes an image summary if possible."""
def function(tag, scope):
bad_color_ = (constant_op.constant([255, 0, 0, 255], dtype=dtypes.uint8)
if bad_color is None else bad_color)
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_image_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
bad_color_,
max_images,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def audio(name, tensor, sample_rate, max_outputs, family=None, step=None):
"""Writes an audio summary if possible."""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_audio_summary(
context.context().summary_writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
sample_rate=sample_rate,
max_outputs=max_outputs,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def graph(param, step=None, name=None):
"""Writes a TensorFlow graph to the summary interface.
The graph summary is, strictly speaking, not a summary. Conditions
like `tf.summary.should_record_summaries` do not apply. Only
a single graph can be associated with a particular run. If multiple
graphs are written, then only the last one will be considered by
TensorBoard.
When not using eager execution mode, the user should consider passing
the `graph` parameter to `tf.contrib.summary.initialize` instead of
calling this function. Otherwise, special care needs to be taken when
recording the graph from within graph mode (for example, feeding a
serialized graph proto through a `tf.placeholder`).
Args:
param: A `tf.Tensor` containing a serialized graph proto. When
eager execution is enabled, this function will automatically
coerce `tf.Graph`, `tf.GraphDef`, and string types.
step: The global step variable. This doesn't have useful semantics
for graph summaries, but is used anyway, due to the structure of
event log files. This defaults to the global step.
name: A name for the operation (optional).
Returns:
The created `tf.Operation` or a `tf.no_op` if summary writing has
not been enabled for this context.
Raises:
TypeError: If `param` isn't already a `tf.Tensor` in graph mode.
"""
if not context.executing_eagerly() and not isinstance(param, ops.Tensor):
raise TypeError("graph() needs a tf.Tensor (e.g. tf.placeholder) in graph "
"mode, but was: %s" % type(param))
writer = context.context().summary_writer
if writer is None:
return control_flow_ops.no_op()
with ops.device("cpu:0"):
if isinstance(param, (ops.Graph, graph_pb2.GraphDef)):
tensor = ops.convert_to_tensor(_serialize_graph(param), dtypes.string)
else:
tensor = array_ops.identity(param)
return gen_summary_ops.write_graph_summary(
writer._resource, _choose_step(step), tensor, name=name) # pylint: disable=protected-access
_graph = graph # for functions with a graph parameter
@tf_export("summary.import_event", v1=[])
def import_event(tensor, name=None):
"""Writes a `tf.Event` binary proto.
This can be used to import existing event logs into a new summary writer sink.
Please note that this is lower level than the other summary functions and
will ignore the `tf.summary.should_record_summaries` setting.
Args:
tensor: A `tf.Tensor` of type `string` containing a serialized
`tf.Event` proto.
name: A name for the operation (optional).
Returns:
The created `tf.Operation`.
"""
return gen_summary_ops.import_event(
context.context().summary_writer._resource, tensor, name=name) # pylint: disable=protected-access
@tf_export("summary.flush", v1=[])
def flush(writer=None, name=None):
"""Forces summary writer to send any buffered data to storage.
This operation blocks until that finishes.
Args:
writer: The `tf.summary.SummaryWriter` resource to flush. If this parameter
is None, the thread-default writer is used; if no default writer exists
either, a `tf.no_op` is returned.
name: A name for the operation (optional).
Returns:
The created `tf.Operation`.
"""
if writer is None:
writer = context.context().summary_writer
if writer is None:
return control_flow_ops.no_op()
if isinstance(writer, ResourceSummaryWriter):
resource = writer._resource # pylint: disable=protected-access
else:
# Assume we were passed a raw resource tensor.
resource = writer
with ops.device("cpu:0"):
return gen_summary_ops.flush_summary_writer(resource, name=name)
_flush_fn = flush # for within SummaryWriter.flush()
def eval_dir(model_dir, name=None):
"""Construct a logdir for an eval summary writer."""
return os.path.join(model_dir, "eval" if not name else "eval_" + name)
@deprecation.deprecated(date=None,
instructions="Renamed to create_file_writer().")
def create_summary_file_writer(*args, **kwargs):
"""Please use `tf.contrib.summary.create_file_writer`."""
logging.warning("Deprecation Warning: create_summary_file_writer was renamed "
"to create_file_writer")
return create_file_writer(*args, **kwargs)
def _serialize_graph(arbitrary_graph):
if isinstance(arbitrary_graph, ops.Graph):
return arbitrary_graph.as_graph_def(add_shapes=True).SerializeToString()
else:
return arbitrary_graph.SerializeToString()
def _choose_step(step):
if step is None:
return training_util.get_or_create_global_step()
if not isinstance(step, ops.Tensor):
return ops.convert_to_tensor(step, dtypes.int64)
return step
def _check_create_file_writer_args(inside_function, **kwargs):
"""Helper to check the validity of arguments to a create_file_writer() call.
Args:
inside_function: whether the create_file_writer() call is in a tf.function
**kwargs: the arguments to check, as kwargs to give them names.
Raises:
ValueError: if the arguments are graph tensors.
"""
for arg_name, arg in kwargs.items():
if not isinstance(arg, ops.EagerTensor) and tensor_util.is_tensor(arg):
if inside_function:
raise ValueError(
"Invalid graph Tensor argument \"%s=%s\" to create_file_writer() "
"inside an @tf.function. The create call will be lifted into the "
"outer eager execution context, so it cannot consume graph tensors "
"defined inside the function body." % (arg_name, arg))
else:
raise ValueError(
"Invalid graph Tensor argument \"%s=%s\" to eagerly executed "
"create_file_writer()." % (arg_name, arg))
def run_metadata(name, data, step=None):
"""Writes entire RunMetadata summary.
A RunMetadata can contain DeviceStats, partition graphs, and function graphs.
Please refer to the proto for definition of each field.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A RunMetadata proto to write.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
Returns:
True on success, or false if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
summary_metadata = summary_pb2.SummaryMetadata()
# Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
# the rationale.
summary_metadata.plugin_data.plugin_name = "graph_run_metadata"
# version number = 1
summary_metadata.plugin_data.content = b"1"
with summary_scope(name,
"graph_run_metadata_summary",
[data, step]) as (tag, _):
return write(
tag=tag,
tensor=constant_op.constant(
data.SerializeToString(), dtype=dtypes.string),
step=step,
metadata=summary_metadata)
def run_metadata_graphs(name, data, step=None):
"""Writes graphs from a RunMetadata summary.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A RunMetadata proto to write.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
Returns:
True on success, or false if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
summary_metadata = summary_pb2.SummaryMetadata()
# Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
# the rationale.
summary_metadata.plugin_data.plugin_name = "graph_run_metadata_graph"
# version number = 1
summary_metadata.plugin_data.content = b"1"
data = config_pb2.RunMetadata(
function_graphs=data.function_graphs,
partition_graphs=data.partition_graphs)
with summary_scope(name,
"graph_run_metadata_graph_summary",
[data, step]) as (tag, _):
return write(
tag=tag,
tensor=constant_op.constant(
data.SerializeToString(), dtype=dtypes.string),
step=step,
metadata=summary_metadata)
def keras_model(name, data, step=None):
"""Writes a Keras model as JSON to as a Summary.
Writing the Keras model configuration allows the TensorBoard graph plugin to
render a conceptual graph, as opposed to graph of ops. In case the model fails
to serialze as JSON, it ignores and returns False.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A Keras Model to write.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
Returns:
True on success, or False if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
summary_metadata = summary_pb2.SummaryMetadata()
# Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
# the rationale.
summary_metadata.plugin_data.plugin_name = "graph_keras_model"
# version number = 1
summary_metadata.plugin_data.content = b"1"
try:
json_string = data.to_json()
except Exception as exc: # pylint: disable=broad-except
# An exception here should not break the caller's model code.
logging.warn("Model failed to serialize as JSON. Ignoring... %s" % exc)
return False
with summary_scope(name, "graph_keras_model", [data, step]) as (tag, _):
return write(
tag=tag,
tensor=constant_op.constant(json_string, dtype=dtypes.string),
step=step,
metadata=summary_metadata)
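# Usage sketch for keras_model() above (hedged: assumes a default writer has been
# set and a step is provided or set beforehand):
#
#   model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
#   with create_file_writer("/tmp/logs").as_default():
#     keras_model("my_model", model, step=0)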
_TraceContext = collections.namedtuple("TraceContext", ("graph", "profiler"))
_current_trace_context_lock = threading.Lock()
_current_trace_context = None
@tf_export("summary.trace_on", v1=[])
def trace_on(graph=True, profiler=False): # pylint: disable=redefined-outer-name
"""Starts a trace to record computation graphs and profiling information.
Must be invoked in eager mode.
When enabled, the TensorFlow runtime will collect information that can later be
exported and consumed by TensorBoard. The trace is activated across the entire
TensorFlow runtime and affects all threads of execution.
To stop the trace and export the collected information, use
`tf.summary.trace_export`. To stop the trace without exporting, use
`tf.summary.trace_off`.
Args:
graph: If True, enables collection of executed graphs. This includes graphs
from tf.function invocations as well as graphs from legacy graph mode. The
default is True.
profiler: If True, enables the advanced profiler. Enabling profiler
implicitly enables the graph collection. The profiler may incur a high
memory overhead. The default is False.
"""
if ops.inside_function():
logging.warn("Cannot enable trace inside a tf.function.")
return
if not context.context().executing_eagerly():
logging.warn("Must enable trace in eager mode.")
return
global _current_trace_context
with _current_trace_context_lock:
if _current_trace_context:
logging.warn("Trace already enabled")
return
if graph and not profiler:
context.context().enable_graph_collection()
if profiler:
context.context().enable_run_metadata()
_profiler.start()
_current_trace_context = _TraceContext(graph=graph, profiler=profiler)
@tf_export("summary.trace_export", v1=[])
def trace_export(name, step=None, profiler_outdir=None):
"""Stops and exports the active trace as a Summary and/or profile file.
Stops the trace and exports all metadata collected during the trace to the
default SummaryWriter, if one has been set.
Args:
name: A name for the summary to be written.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
profiler_outdir: Output directory for the profiler. It is required if the
profiler was enabled when the trace was started. Otherwise, it is ignored.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
# TODO(stephanlee): See if we can remove profiler_outdir and infer it from
# the SummaryWriter's logdir.
global _current_trace_context
if ops.inside_function():
logging.warn("Cannot export trace inside a tf.function.")
return
if not context.context().executing_eagerly():
logging.warn("Can only export trace while executing eagerly.")
return
with _current_trace_context_lock:
if _current_trace_context is None:
raise ValueError("Must enable trace before export.")
graph, profiler = _current_trace_context # pylint: disable=redefined-outer-name
if profiler and profiler_outdir is None:
raise ValueError("Required profiler_outdir is not specified")
run_meta = context.context().export_run_metadata()
if graph and not profiler:
run_metadata_graphs(name, run_meta, step)
else:
run_metadata(name, run_meta, step)
if profiler:
_profiler.save(profiler_outdir, _profiler.stop())
trace_off()
@tf_export("summary.trace_off", v1=[])
def trace_off():
"""Stops the current trace and discards any collected information."""
global _current_trace_context
with _current_trace_context_lock:
_current_trace_context = None
# Disabling run_metadata disables graph collection as well.
context.context().disable_run_metadata()
# The profiler only has start and stop. It needs to be stopped in order to
# export, and stopping it when it is not running raises an error.
try:
_profiler.stop()
except _profiler.ProfilerNotRunningError:
pass
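# End-to-end sketch of the trace workflow defined above (hedged: a default summary
# writer is assumed to be set; profiler_outdir is only needed when profiler=True):
#
#   trace_on(graph=True, profiler=False)
#   # ... run the eager / tf.function code to be traced ...
#   with create_file_writer("/tmp/logs").as_default():
#     trace_export(name="my_trace", step=0)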
|
|
from project.utils import auth
from flask import Blueprint, request, redirect
from project import db, session
from project.models import Task
from project.utils import utils
from sqlalchemy import text
from datetime import datetime
import json
tasks = Blueprint('tasks', __name__)
@tasks.route('/retrieve', methods=['POST'])
@auth.login_required
def retrieve():
"""Swap to a post request because you are sending data"""
# Support for the reverse query here
todo_list = utils.retrieve_tasks_helper()
# Must generate the initial task
if len(todo_list) == 0:
task = Task(
content="Edit your first task",
user_id=session['user_id'],
due_date=None
)
db.session.add(task)
db.session.commit()
todo_list = utils.retrieve_tasks_helper()
return json.dumps(todo_list)
@tasks.route('/retrieve_tasks', methods=['POST'])
@auth.login_required
def retrieve_tasks():
user_id = session['user_id']
# find all root nodes
roots = []
root1 = Task.query.filter(Task.user_id == user_id).filter(Task.lft == 0).first()
if not root1:
task = Task(
content="Edit your first task",
user_id=session['user_id'],
due_date=None
)
db.session.add(task)
db.session.commit()
trees = utils.retrieve_tasks_helper()
return json.dumps(trees)
roots.append(root1)
rgt = root1.rgt
lft = root1.lft
while True:
r = Task.query.filter(Task.user_id == user_id, Task.lft == rgt + 1).first()
if not r:
break
else:
roots.append(r)
rgt = r.rgt
lft = r.lft
trees = []
for root in roots:
trees.append(utils.get_tree(root))
# Recurse through the trees to extract every task
output = []
group = 0
for tree in trees:
output += utils.in_order_traverse(tree, [], 0, tree['id'])
return json.dumps(output)
@tasks.route('/add', methods=['POST'])
@auth.login_required
def add_task():
data = request.json['content']
# if not data:
# return redirect('/')
dt, content = utils.extract_datetime_from_text(data)
user_id = request.json['user_id']
prev_task = Task.query.get(request.json['prev_task'])
my_right = prev_task.rgt
task = Task(
content=content,
user_id=user_id,
due_date=dt,
my_right=my_right,
parent_id=prev_task.parent_id
)
# Use bound parameters (like the other routes) to avoid SQL injection.
# Technically this should be wrapped in a transaction.
cmd = "UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :my_right"
db.engine.execute(text(cmd), {'user_id': str(user_id), 'my_right': str(my_right)})
cmd2 = "UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :my_right"
db.engine.execute(text(cmd2), {'user_id': str(user_id), 'my_right': str(my_right)})
db.session.add(task)
db.session.commit()
return json.dumps(utils.task_to_dictionary(task))
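# Worked example of the nested-set bookkeeping above (illustrative only; it assumes
# the Task model derives the new node's bounds from my_right as lft = my_right + 1
# and rgt = my_right + 2): if the previous task has rgt = 6, every node with rgt > 6
# gets rgt += 2 and every node with lft > 6 gets lft += 2, which opens the gap
# (7, 8) for the new sibling.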
@tasks.route('/make_subtask', methods=['POST'])
@auth.login_required
def make_subtask():
user_id = request.json['user_id']
id = request.json['subtask_id']
current_task = Task.query.filter_by(id=id).first()
content = current_task.content
# user_id = current_task.user_id
due_date = current_task.due_date
parent_id = request.json['prev_task_id']
parent_task = Task.query.filter_by(id=parent_id).first()
sub_tasks = utils.get_subtasks(parent_task)
if not sub_tasks:
# adding a child to a node with no existing children
parent_left = parent_task.lft
cmd = "UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :parent_left"
cmd2 = "UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :parent_left"
db.engine.execute(cmd, {'user_id': str(user_id), 'parent_left': str(parent_left)})
db.engine.execute(cmd2, {'user_id': str(user_id), 'parent_left': str(parent_left)})
utils.delete_task_helper(current_task)
task = Task(
content=content,
user_id=user_id,
due_date=due_date,
parent_id=parent_id,
my_right=parent_left
)
db.session.add(task)
db.session.commit()
else:
sub_tasks.sort(key=lambda x: x.rgt)
prev_right = sub_tasks[-1].rgt
# Technically this should be wrapped in a transaction
cmd = "UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :prev_right"
db.engine.execute(cmd, {'user_id': str(user_id), 'prev_right': str(prev_right)})
cmd2 = "UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :prev_right"
db.engine.execute(cmd2, {'user_id': str(user_id), 'prev_right': str(prev_right)})
utils.delete_task_helper(current_task)
task = Task(
content=content,
user_id=user_id,
due_date=due_date,
parent_id=parent_id,
my_right=prev_right
)
db.session.add(task)
db.session.commit()
return json.dumps(utils.task_to_dictionary(task))
@tasks.route('/add_subtask', methods=['POST'])
@auth.login_required
def add_subtask():
user_id = request.json['user_id']
parent_id = request.json['parent_id']
task_content = request.json['task_content']
parent_task = Task.query.filter_by(id=parent_id).first()
sub_tasks = utils.get_subtasks(parent_task)
if not sub_tasks:
# adding a child to a node with no existing children
parent_left = parent_task.lft
cmd = "UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :parent_left"
cmd2 = "UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :parent_left"
db.engine.execute(cmd, {'user_id': str(user_id), 'parent_left': str(parent_left)})
db.engine.execute(cmd2, {'user_id': str(user_id), 'parent_left': str(parent_left)})
task = Task(
content=task_content,
user_id=user_id,
parent_id=parent_id,
my_right=parent_left
)
db.session.add(task)
db.session.commit()
else:
sub_tasks.sort(key=lambda x: x.rgt)
prev_right = sub_tasks[-1].rgt
# Technically this should be wrapped in a transaction
cmd = "UPDATE tasks SET rgt = rgt + 2 WHERE user_id = :user_id AND rgt > :prev_right"
db.engine.execute(cmd, {'user_id': str(user_id), 'prev_right': str(prev_right)})
cmd2 = "UPDATE tasks SET lft = lft + 2 WHERE user_id = :user_id AND lft > :prev_right"
db.engine.execute(cmd2, {'user_id': str(user_id), 'prev_right': str(prev_right)})
task = Task(
content=task_content,
user_id=user_id,
parent_id=parent_id,
my_right=prev_right
)
db.session.add(task)
db.session.commit()
return json.dumps(utils.task_to_dictionary(task))
@tasks.route('/get_prev_sibling', methods=['POST'])
@auth.login_required
def get_prev_sibling():
task_id = request.json['task_id']
task = Task.query.filter(Task.id == task_id).first()
parent_id = task.parent_id
user_id = task.user_id
left = task.lft
prev_sibling = Task.query.filter(Task.parent_id == parent_id, Task.user_id == user_id, Task.rgt == left - 1).first()
return json.dumps(utils.task_to_dictionary(prev_sibling))
@tasks.route('/markdone', methods=['POST'])
@auth.login_required
def mark_as_done():
uid = request.json['id']
if not uid:
return redirect('/')
current_task = Task.query.filter_by(id=uid).first()
if current_task.done:
current_task.done = False
db.session.commit()
return json.dumps({'done': False})
else:
current_task.done = True
db.session.commit()
return json.dumps({'done': True})
@tasks.route('/edit_task', methods=['POST'])
@auth.login_required
def edit_task():
uid = request.json['id']
content = request.json['content']
current_task = Task.query.filter_by(id=uid).first()
current_task.content = content
db.session.commit()
return 'OK'
@tasks.route('/edit_date', methods=['POST'])
@auth.login_required
def edit_date():
uid = request.json['id']
new_date = request.json['date']
new_date = datetime.strptime(new_date, '%a %b %d %Y %H:%M:%S GMT%z (%Z)')
current_task = Task.query.filter_by(id=uid).first()
current_task.due_date = new_date
db.session.commit()
return new_date.strftime("%Y/%m/%d %H:%M:%S")
@tasks.route('/remove_date', methods=['POST'])
@auth.login_required
def remove_date():
uid = request.json['id']
current_task = Task.query.filter_by(id=uid).first()
current_task.due_date = None
db.session.commit()
return 'OK'
@tasks.route('/parse_task', methods=['POST'])
@auth.login_required
def parse_task():
uid = request.json['id']
my_text = request.json['content']
dt, content = utils.extract_datetime_from_text(my_text)
current_task = Task.query.filter_by(id=uid).first()
current_task.content = content
current_task.due_date = dt
db.session.commit()
return 'OK'
@tasks.route('/delete_task', methods=['POST'])
@auth.login_required
def delete_task():
task_id = request.json['id']
# user_id = request.json['user_id']
current_task = db.session.query(Task).get(task_id)
# get all subordinates of task
# deleting a parent task deletes all subordinates
tree = Task.query.filter(Task.lft >= current_task.lft, Task.lft < current_task.rgt).all()
for task in tree:
utils.delete_task_helper(task)
return 'OK'
@tasks.route('/get_direct_subtasks', methods=['POST'])
@auth.login_required
def get_direct_subtasks():
task_id = request.json['id']
parent = db.session.query(Task).get(task_id)
print(parent.content)
subtasks = utils.get_subtasks(parent)
subtasks_list = []
for subtask in subtasks:
subtask_dict = utils.task_to_dictionary(subtask)
subtasks_list.append(subtask_dict)
return json.dumps(subtasks_list)
|
|
# -*- coding: utf-8 -*-
import random
import collections
import warnings
import numpy as np
from uncertainties import ufloat
from future.utils import with_metaclass
from abc import ABCMeta, abstractmethod, abstractproperty
from ..patch.pint import ureg
MAXDEPTH = 4
MAXITEMS = 5
NTRIES = 1000
TYPES = "numpy", "uncertainties", "pint"
try:
unicode
except NameError:
unicode = str
try:
unichr
except NameError:
unichr = chr
def try_unique_range(name, seq):
if NTRIES:
for i in range(NTRIES):
yield i
else:
raise RuntimeError(
"Could not find unique {} in {} tries:\n {}".format(name, NTRIES, seq)
)
else:
i = 0
while True:
yield i
i += 1
class DataBase(with_metaclass(ABCMeta)):
def __init__(self, **kwargs):
self.generate()
@abstractmethod
def generate(self):
pass
def __eq__(self, other):
if not self.isinstance(other):
return False
return self.baseinstance.data == other.baseinstance.data
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return self.baseinstance.__class__.__name__
@property
def baseinstance(self):
return self
def isinstance(self, other, class_=None):
if class_ is None:
class_ = self.baseinstance.__class__
return isinstance(other.baseinstance, class_)
# ====== Native misc ======
class RandomNone(DataBase):
def generate(self):
self.data = None
# ====== Native numbers ======
def numequal(a, b):
# Just to make sure nan == nan.
#
# Prevent the magnitude from being converted to an ndarray by np.isclose;
# the unit is always dimensionless.
try:
a = a.magnitude
except AttributeError:
pass
try:
b = b.magnitude
except AttributeError:
pass
try:
if a == b:
return True
except:
pass
try:
return np.isclose(a, b, rtol=0, atol=0, equal_nan=True)
except TypeError:
return False
class NumberBase(DataBase):
def __eq__(self, other):
if not self.isinstance(other, NumberBase):
return False
a, b = self.data, other.data
return numequal(a, b)
class RandomBool(NumberBase):
def generate(self):
self.data = bool(random.getrandbits(1))
class RandomInt(NumberBase):
def generate(self):
self.data = random.randint(-100, 100)
def randomfloat(finite=False):
if finite:
return random.random()
else:
return random.choice([random.random()] * 5 + [float("nan"), float("inf")])
class RandomFloat(NumberBase):
def generate(self):
self.data = randomfloat()
class RandomComplex(NumberBase):
def generate(self):
self.data = complex(randomfloat(), randomfloat())
# ====== Native strings ======
class StringBase(DataBase):
ALPHABET = None
CLASS = None
def __init__(self, maxitems=MAXITEMS, **kwargs):
self.n = random.randint(0, maxitems)
super(StringBase, self).__init__(maxitems=maxitems, **kwargs)
def generate(self):
self.data = self._join(random.choice(self.ALPHABET) for _ in range(self.n))
@classmethod
@abstractmethod
def _join(cls):
pass
def __eq__(self, other):
if not self.isinstance(other, StringBase):
return False
a, b = self.data, other.data
abytes = isinstance(a, bytes)
bbytes = isinstance(b, bytes)
if not (abytes ^ bbytes):
if not abytes:
a = a.encode("utf-8")
if not bbytes:
b = b.encode("utf-8")
return a == b
def __repr__(self):
s = super(StringBase, self).__repr__()
return "{}_{}".format(s, self.n)
def alphabet_bytes(encoding="ascii"):
include_ranges = [(0x0021, 0x0021), (0x0023, 0x0026), (0x0028, 0x007E)]
if encoding != "ascii":
include_ranges += [(0x00A1, 0x00AC), (0x00AE, 0x00FF)]
alphabet = [
unichr(code_point).encode(encoding)
for current_range in include_ranges
for code_point in range(current_range[0], current_range[1] + 1)
]
return alphabet
class RandomBytes(StringBase):
CLASS = bytes
@classmethod
def _join(cls, iterable):
return cls.CLASS(b"".join(iterable))
class RandomBytesAscii(RandomBytes):
ALPHABET = alphabet_bytes(encoding="ascii")
class RandomBytesExt(RandomBytes):
ALPHABET = alphabet_bytes(encoding="latin1")
def alphabet_unicode():
include_ranges = [
(0x0021, 0x0021),
(0x0023, 0x0026),
(0x0028, 0x007E),
(0x00A1, 0x00AC),
(0x00AE, 0x00FF),
(0x0100, 0x017F),
(0x0180, 0x024F),
(0x2C60, 0x2C7F),
(0x16A0, 0x16F0),
(0x0370, 0x0377),
(0x037A, 0x037E),
(0x0384, 0x038A),
(0x038C, 0x038C),
]
alphabet = [
unichr(code_point)
for current_range in include_ranges
for code_point in range(current_range[0], current_range[1] + 1)
]
return alphabet
class RandomUnicode(StringBase):
ALPHABET = alphabet_unicode()
CLASS = unicode
@classmethod
def _join(cls, iterable):
return cls.CLASS(u"".join(iterable))
# ====== Native sequences ======
def init_sequence(
seq_types,
unique=False,
_depth=0,
nitems=None,
maxdepth=MAXDEPTH,
maxitems=MAXITEMS,
**kwargs
):
"""
Generate a sequence of random length and random item types.
Args:
seq_types: possible item types
unique: whether the sequence should have unique values
nitems: exact number of items (chosen at random when None)
maxdepth: maximal sequence depth
maxitems: maximal number of items
"""
if nitems is None:
if _depth == maxdepth:
n = 0
else:
n = random.randint(0, maxitems)
else:
n = nitems
kwargs = kwargs.copy()
kwargs["_depth"] = _depth + 1
kwargs["maxdepth"] = maxdepth
kwargs["maxitems"] = maxitems
if unique:
# Make sure the chosen item types allow the sequence to have unique values
# (e.g. at most two booleans, at most one empty string).
seq = []
reprs = collections.Counter()
reprmax = {"bool": 2, "emptystring": 1}
reprmap = {
"RandomBool": "bool",
"RandomNumpyBool": "bool",
"RandomBytesAscii_0": "emptystring",
"RandomBytesExt_0": "emptystring",
"RandomUnicode_0": "emptystring",
}
for _ in try_unique_range("sequence", seq):
if len(seq) == n:
break
item = random.choice(seq_types)(**kwargs)
repritem = repr(item)
repritem = reprmap.get(repritem, repritem)
nrepr = reprmax.get(repritem, None)
if item not in seq and not reprs[repritem] == nrepr:
seq.append(item)
reprs[repritem] += 1
else:
seq = [random.choice(seq_types)(**kwargs) for _ in range(n)]
return seq
def generate_sequence(seq, unique=False):
if unique:
for _ in try_unique_range("generate_sequence", seq):
for item in seq:
item.generate()
useq = []
for item in seq:
if item in useq:
break
useq.append(item)
else:
break
else:
for item in seq:
item.generate()
class SequenceBase(DataBase):
CLASS = None
def __init__(self, **kwargs):
if not hasattr(self, "_values"):
self._values = init_sequence(self.seq_types(), unique=False, **kwargs)
super(SequenceBase, self).__init__(**kwargs)
@classmethod
def seq_types(cls):
return (RandomData,)
def __eq__(self, other):
if not self.isinstance(other):
return False
return self._values == other._values
@property
def data(self):
return self.CLASS(v.data for v in self._values)
def generate(self):
generate_sequence(self._values, unique=False)
class HashableSequenceBase(SequenceBase):
@classmethod
def seq_types(cls):
return (RandomHashable,)
class RandomTuple(SequenceBase):
CLASS = tuple
class RandomHashableTuple(HashableSequenceBase):
CLASS = tuple
class RandomList(SequenceBase):
CLASS = list
# ====== Native sets ======
def sort_equal(a, b):
if len(a) != len(b):
return False
for v in a:
if v not in b:
return False
return True
class SetBase(HashableSequenceBase):
def __init__(self, **kwargs):
self._values = init_sequence(self.seq_types(), unique=True, **kwargs)
super(SetBase, self).__init__(**kwargs)
def __eq__(self, other):
if not self.isinstance(other):
return False
return sort_equal(self._values, other._values)
@property
def data(self):
values = [v.data for v in self._values]
if values:
random.shuffle(values)
return self.CLASS(values)
def generate(self):
generate_sequence(self._values, unique=True)
class RandomSet(SetBase):
CLASS = set
class RandomFrozenSet(SetBase):
CLASS = frozenset
# ====== Native mappings ======
class OrderedMappingBase(DataBase):
CLASS = None
def __init__(self, **kwargs):
keys = init_sequence((RandomHashable,), unique=True, **kwargs)
self._values = init_sequence(
(RandomData,), unique=False, nitems=len(keys), **kwargs
)
self._keys = keys
def __eq__(self, other):
if not self.isinstance(other):
return False
return self._keys == other._keys and self._values == other._values
def generate(self):
generate_sequence(self._keys, unique=True)
generate_sequence(self._values, unique=False)
def data_items(self):
keys = [k.data for k in self._keys]
values = [v.data for v in self._values]
return zip(keys, values)
@property
def data(self):
return self.CLASS(self.data_items())
class UnorderedMappingBase(OrderedMappingBase):
def __eq__(self, other):
if not self.isinstance(other):
return False
if not sort_equal(self._keys, other._keys):
return False
for k, v in zip(self._keys, self._values):
i = other._keys.index(k)
if v != other._values[i]:
return False
return True
def data_items(self):
keys = [k.data for k in self._keys]
values = [v.data for v in self._values]
items = list(zip(keys, values))
random.shuffle(items)
return items
class RandomDict(UnorderedMappingBase):
CLASS = dict
class RandomOrderedDict(OrderedMappingBase):
CLASS = collections.OrderedDict
# ====== Numpy ======
class NumpyScalarBase(NumberBase):
def __init__(self, **kwargs):
self._class = random.choice(self.NPTYPES)
super(NumpyScalarBase, self).__init__(**kwargs)
@classmethod
@abstractmethod
def datagen(cls):
pass
def generate(self):
self.data = self._class(*self.datagen())
class RandomNumpyBool(NumpyScalarBase):
NPTYPES = (np.bool,)
@classmethod
def datagen(cls):
return (random.getrandbits(1),)
class RandomNumpyInt(NumpyScalarBase):
NPTYPES = (
np.byte,
np.short,
np.int,
np.longlong,
np.int8,
np.int16,
np.int32,
np.int64,
)
@classmethod
def datagen(cls):
return (random.randint(-100, 100),)
class RandomNumpyUInt(NumpyScalarBase):
NPTYPES = (
np.ubyte,
np.ushort,
np.uint,
np.ulonglong,
np.uint8,
np.uint16,
np.uint32,
np.uint64,
)
@classmethod
def datagen(cls):
return (random.randint(0, 100),)
class RandomNumpyFloat(NumpyScalarBase):
NPTYPES = (np.single, np.double, np.float, np.float16, np.float32, np.float64)
@classmethod
def datagen(cls):
return (randomfloat(),)
class RandomNumpyComplex(NumpyScalarBase):
NPTYPES = (np.complex,)
@classmethod
def datagen(cls):
return randomfloat(), randomfloat()
class RandomNumpyArray(SequenceBase):
CLASS = np.array
@classmethod
def seq_types(cls):
return (RandomNumber,)
@property
def data(self):
return self.CLASS([v.data for v in self._values])
class RandomNumpyArray0(RandomNumpyArray):
def __init__(self, **kwargs):
super(RandomNumpyArray0, self).__init__(nitems=1, **kwargs)
@property
def data(self):
return self.CLASS(self._values[0].data)
# ====== Uncertainties ======
class RandomUNumber(NumpyScalarBase):
NPTYPES = (ufloat,)
@classmethod
def datagen(cls):
return randomfloat(finite=True), randomfloat(finite=True)
# ====== Pint ======
class RandomPintNumber(NumberBase):
UNIT = ureg.dimensionless
def __init__(self, **kwargs):
self._magnitude = RandomPintMagnitude(**kwargs)
super(RandomPintNumber, self).__init__(**kwargs)
def generate(self):
self._magnitude.generate()
@property
def data(self):
return ureg.Quantity(self._magnitude.data, self.UNIT)
class RandomPintArray(SequenceBase):
CLASS = ureg.Quantity
UNIT = ureg.dimensionless
@classmethod
def seq_types(cls):
return (RandomPintMagnitude,)
@property
def data(self):
magnitudes = [v.data for v in self._values]
return self.CLASS(magnitudes, self.UNIT)
class RandomPintArray0(RandomPintArray):
def __init__(self, **kwargs):
super(RandomPintArray0, self).__init__(nitems=1, **kwargs)
@property
def data(self):
magnitude = self._values[0].data
return self.CLASS(np.array(magnitude), self.UNIT)
# ====== Choices ======
class ChoiceBase(DataBase):
def __init__(self, **kwargs):
self.data = random.choice(self.choices(**kwargs))(**kwargs)
super(ChoiceBase, self).__init__(**kwargs)
@classmethod
@abstractmethod
def choices(cls, **kwargs):
pass
def __eq__(self, other):
return self.baseinstance == other.baseinstance
@property
def data(self):
return self._choice.data
@data.setter
def data(self, value):
self._choice = value
def generate(self):
self._choice.generate()
def __repr__(self):
return repr(self._choice)
@property
def baseinstance(self):
return self._choice.baseinstance
class RandomNativeNumber(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return (RandomBool, RandomInt, RandomFloat, RandomComplex)
class RandomNumpyNumber(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return (RandomNumpyBool, RandomNumpyInt, RandomNumpyFloat, RandomNumpyComplex)
class RandomHashableNumber(ChoiceBase):
@classmethod
def choices(cls, types=TYPES, **kwargs):
ret = [RandomNativeNumber]
if "numpy" in types:
ret.append(RandomNumpyNumber)
return tuple(ret)
class RandomNumber(ChoiceBase):
@classmethod
def choices(cls, types=TYPES, **kwargs):
ret = [RandomNativeNumber]
if "numpy" in types:
ret.append(RandomNumpyNumber)
if "uncertainties" in types:
ret.append(RandomUNumber)
if "pint" in types:
ret.append(RandomPintNumber)
return tuple(ret)
class RandomPintMagnitude(ChoiceBase):
@classmethod
def choices(cls, types=TYPES, **kwargs):
ret = RandomInt, RandomFloat
if "numpy" in types:
ret += RandomNumpyInt, RandomNumpyFloat
if "uncertainties" in types:
ret += (RandomUNumber,)
return ret
class RandomString(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomBytesAscii, RandomBytesExt, RandomUnicode
class RandomAtom(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return (
(RandomNone,)
+ RandomNumber.choices(**kwargs)
+ RandomString.choices(**kwargs)
)
class RandomHashableAtom(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return (
(RandomNone,)
+ RandomHashableNumber.choices(**kwargs)
+ RandomString.choices(**kwargs)
)
class RandomMutableSequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomList, RandomSet
class RandomImmutableSequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomTuple, RandomFrozenSet
class RandomNumpySequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomNumpyArray, RandomNumpyArray0
class RandomPintSequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomPintArray, RandomPintArray0
class RandomSequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return (
RandomMutableSequence,
RandomImmutableSequence,
RandomNumpySequence,
RandomPintSequence,
)
class RandomHashableSequence(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomHashableTuple, RandomFrozenSet
class RandomHashable(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomHashableAtom.choices(**kwargs) + RandomHashableSequence.choices(
**kwargs
)
class RandomMapping(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomDict, RandomOrderedDict
class RandomData(ChoiceBase):
@classmethod
def choices(cls, **kwargs):
return RandomAtom, RandomSequence, RandomMapping
def factory(**kwargs):
return RandomData(**kwargs)
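# Usage sketch for the factory above (hedged: `types` controls which optional
# backends are exercised, per the choices() methods defined in this module):
#
#   item = factory(maxdepth=2, maxitems=3, types=("numpy",))
#   value = item.data        # a randomly generated (possibly nested) data structure
#   item.generate()          # regenerate a new value with the same structure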
|
|
{
# Journey Page - journey tab
"uidJourneyTabJourneyPanel": {
W3Const.w3PropType: W3Const.w3TypePanel,
W3Const.w3PropSubUI: [
"uidJourneyTabJourneyQueryPanel",
"uidJourneyTabJourneyAddPanel"
]
},
# Query
"uidJourneyTabJourneyQueryPanel": {
W3Const.w3PropType: W3Const.w3TypePanel,
W3Const.w3PropSubUI: [
"uidJourneyFilterTable",
"uidJourneyTable",
"uidJourneyToMapPanel"
]
},
# Query: Jump to Map
"uidJourneyToMapPanel": {
W3Const.w3PropType: W3Const.w3TypePanel,
W3Const.w3PropSubUI: [
"uidJourneyGotoMapButton"
],
W3Const.w3PropCSS: {
"clear": "both",
"padding-top": "5px",
"float": "left"
}
},
"uidJourneyGotoMapButton": {
W3Const.w3PropType: W3Const.w3TypeButton,
W3Const.w3PropString: "sidGotoMap",
W3Const.w3PropEvent: {
W3Const.w3EventClick: [
"EJDisplaySelectedJourneyOnMap()"
]
},
W3Const.w3PropAttr: {
W3Const.w3AttrDisabled: "true"
}
},
# Query: Display
"uidJourneyTable": {
W3Const.w3PropType: W3Const.w3TypeTable,
W3Const.w3PropSubUI: [
["uidNullLabel", "uidColumnName", "uidTableHeaderDatetime", "uidColumnTraveler", "uidColumnEvent", "uidTableHeaderBalance", "uidColumnNote", "uidTableHeaderInvisibleData"]
],
W3Const.w3PropCSS: {
"border": "1px solid"
},
W3Const.w3PropSinkApi: {
W3Const.w3ApiID: "aidJourney",
W3Const.w3SinkRow: [
{
# Column 1: not mapped to an API field; value generated by a function
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeFunc,
W3Const.w3ApiDataValue: "EJCreateJourneySelectBox(w3PlaceHolder_1, w3PlaceHolder_2)"
},
{
# Column 2 map to API result field "name"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
W3Const.w3ApiDataValue: "name"
},
{
# Column 3 map to API result field "datetime"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
W3Const.w3ApiDataValue: "datetime"
},
{
# Column 4 map to API result field "traveler"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
W3Const.w3ApiDataValue: "traveler"
},
{
# Column 5 map to API result field "event"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
W3Const.w3ApiDataValue: "event"
},
{
# Column 6 map to API result field "balance"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeNum,
W3Const.w3ApiDataValue: "balance"
},
{
# Column 7 map to API result field "note"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeString,
W3Const.w3ApiDataValue: "note"
},
{
# Column 8 map to API result field "id"
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeNum,
W3Const.w3ApiDataValue: "id"
}]
}
},
# Query: Filter
"uidJourneyFilterTable": {
W3Const.w3PropType: W3Const.w3TypeTable,
W3Const.w3PropSubUI: [
[], # No header
["uidFromLabel", "uidJourneyFilterFromDatePicker", "uidToLabel", "uidJourneyFilterToDatePicker", "uidJourneyFilterGetButton", "uidJourneyFilterAddButton"]
]
},
"uidJourneyFilterFromDatePicker": {
W3Const.w3PropType: W3Const.w3TypeDatePicker,
},
"uidJourneyFilterToDatePicker": {
W3Const.w3PropType: W3Const.w3TypeDatePicker,
},
"uidJourneyFilterGetButton": {
W3Const.w3PropType: W3Const.w3TypeButton,
W3Const.w3PropString: "sidGet",
W3Const.w3PropTriggerApi: [
{
W3Const.w3ApiID: "aidJourney",
W3Const.w3ApiParams: [
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyFilterFromDatePicker"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyFilterToDatePicker"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeVar,
W3Const.w3ApiDataValue: W3Const.w3Session
}]
}],
W3Const.w3PropEvent: {
W3Const.w3EventClick: [
W3Const.w3PlaceHolder_1
]
}
},
"uidJourneyFilterAddButton": {
W3Const.w3PropType: W3Const.w3TypeButton,
W3Const.w3PropString: "sidAdd",
W3Const.w3PropEvent: {
W3Const.w3EventClick: [
"W3HideUI('uidJourneyTabJourneyQueryPanel')",
"W3DisplayUI('uidJourneyTabJourneyAddPanel')"
]
}
},
# Add
"uidJourneyTabJourneyAddPanel": {
W3Const.w3PropType: W3Const.w3TypePanel,
W3Const.w3PropSubUI: [
"uidJourneyAddTable",
"uidJourneyAddOperationPanel"
],
W3Const.w3PropCSS: {
"display": "none"
}
},
"uidJourneyAddOperationPanel": {
W3Const.w3PropType: W3Const.w3TypePanel,
W3Const.w3PropSubUI: [
"uidJourneyAddOperationTable"
]
},
"uidJourneyAddOperationTable": {
W3Const.w3PropType: W3Const.w3TypeTable,
W3Const.w3PropSubUI: [
[], # No header
["uidJourneyAddSubmitButton", "uidJourneyAddCancelButton"]
],
W3Const.w3PropCSS: {
"float": "right"
}
},
"uidJourneyAddSubmitButton": {
W3Const.w3PropType: W3Const.w3TypeButton,
W3Const.w3PropString: "sidSubmit",
W3Const.w3PropTriggerApi: [
{
W3Const.w3ApiID: "aidAddJourney",
W3Const.w3ApiParams: [
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyAddName"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyAddDatetime"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyAddTraveler"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyAddEvent"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeUID,
W3Const.w3ApiDataValue: "uidJourneyAddNote"
},
{
W3Const.w3ApiDataType: W3Const.w3ApiDataTypeVar,
W3Const.w3ApiDataValue: W3Const.w3Session
}]
}],
W3Const.w3PropEvent: {
W3Const.w3EventClick: [
W3Const.w3PlaceHolder_1
]
}
},
"uidJourneyAddCancelButton": {
W3Const.w3PropType: W3Const.w3TypeButton,
W3Const.w3PropString: "sidCancel",
W3Const.w3PropEvent: {
W3Const.w3EventClick: [
"W3HideUI('uidJourneyTabJourneyAddPanel')",
"W3DisplayUI('uidJourneyTabJourneyQueryPanel')"
]
}
},
"uidJourneyAddTable": {
W3Const.w3PropType: W3Const.w3TypeTable,
W3Const.w3PropSubUI: [
[], # No header
["uidNameLabel", "uidJourneyAddName"],
["uidDatetimeLabel", "uidJourneyAddDatetime"],
["uidTravelerLabel", "uidJourneyAddTraveler"],
["uidEventLabel", "uidJourneyAddEvent"],
["uidNoteLabel", "uidJourneyAddNote"]
]
},
"uidJourneyAddDatetime": {
W3Const.w3PropType: W3Const.w3TypeDatePicker
},
"uidJourneyAddName": {
W3Const.w3PropType: W3Const.w3TypeText
},
"uidJourneyAddNote": {
W3Const.w3PropType: W3Const.w3TypeText
},
"uidJourneyAddTraveler": {
W3Const.w3PropType: W3Const.w3TypeText
},
"uidJourneyAddEvent": {
W3Const.w3PropType: W3Const.w3TypeText
}
}
|
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from oauthlib import oauth2
from oauthlib.oauth2.rfc6749.endpoints import base
from pyramid.response import Response
from pyramid.compat import native_
log = logging.getLogger(__name__)
OAUTH_PARAMS = (
'access_token',
'client_id',
'client_secret',
'code',
'grant_type',
'response_type',
'redirect_uri',
'scope',
'state',
)
class Server(
oauth2.AuthorizationEndpoint,
oauth2.ResourceEndpoint,
oauth2.RevocationEndpoint,
oauth2.TokenEndpoint,
base.BaseEndpoint,
):
def __init__(self):
base.BaseEndpoint.__init__(self)
# For grants and responses these are string keys.
self._default_grant_type = ''
self._default_response_type = ''
self._default_token = ''
self._grant_types = {}
self._response_types = {}
self._tokens = {}
@property
def default_token_type(self):
return self.tokens.get('')
@base.catch_errors_and_unavailability
def create_authorization_response(self, request,
scopes=None, credentials=None):
request.scopes = scopes
for k, v in (credentials or {}).items():
setattr(request, k, v)
handler = self.response_types.get(
request.response_type,
self.default_response_type_handler,
)
token = self.default_token_type
if token is None:
raise AttributeError('No default token type registered.')
return handler.create_authorization_response(request, token)
@base.catch_errors_and_unavailability
def create_revocation_response(self, request):
pass
@base.catch_errors_and_unavailability
def create_token_response(self, request, credentials=None):
request.scopes = None
request.extra_credentials = credentials
handler = self.grant_types.get(
request.grant_type,
self.default_grant_type_handler,
)
token = self.default_token_type
if token is None:
raise AttributeError('No default token type registered.')
return handler.create_token_response(request, token)
@base.catch_errors_and_unavailability
def validate_authorization_request(self, request):
request.scopes = None
handler = self.response_types.get(
request.response_type,
self.default_response_type_handler,
)
return handler.validate_authorization_request(request)
@base.catch_errors_and_unavailability
def verify_request(self, request, scopes=None):
request.scopes = scopes
request.token_type = self.find_token_type(request)
handler = self.tokens.get(
request.token_type,
self.default_token_type_handler,
)
return handler.validate_request(request)
def add_grant_type(config, grant_type, name='', **kwargs):
grant_type = config.maybe_dotted(grant_type)(**kwargs)
def register():
config.registry.oauth.grant_types[name] = grant_type
intr = config.introspectable(
category_name='oauth handlers',
discriminator=('grant', name),
title=name or '<default>',
type_name='grant',
)
intr['value'] = grant_type
config.action(('oauth grant type', name), register,
introspectables=(intr,), order=1)
def add_response_type(config, response_type, name='', **kwargs):
response_type = config.maybe_dotted(response_type)(**kwargs)
def register():
config.registry.oauth.response_types[name] = response_type
intr = config.introspectable(
category_name='oauth handlers',
discriminator=('response', name),
title=name or '<default>',
type_name='response',
)
intr['value'] = response_type
config.action(('oauth response type', name), register,
introspectables=(intr,), order=1)
def add_token_type(config, token_type, name='', **kwargs):
token_type = config.maybe_dotted(token_type)(**kwargs)
def register():
config.registry.oauth.tokens[name] = token_type
intr = config.introspectable(
category_name='oauth handlers',
discriminator=('token', name),
title=name or '<default>',
type_name='token',
)
intr['value'] = token_type
config.action(('oauth token type', name), register,
introspectables=(intr,), order=1)
def add_oauth_param(config, name):
def getter(request):
return request.params.get(name)
config.add_request_method(getter, str(name), reify=True)
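# Illustration (not part of the original module): after
# config.add_oauth_param('code'), every request gains a reified
# request.code property equal to request.params.get('code').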
def duplicate_params(request):
keys = list(request.params)
return [k for k in keys if keys.count(k) > 1]
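# Illustration (not part of the original module): for a query string such as
# '?a=1&a=2&b=3', list(request.params) yields ['a', 'a', 'b'], so
# duplicate_params(request) returns ['a', 'a'].  OAuth 2.0 forbids repeated
# parameters, so a non-empty result lets a view reject the request early.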
def oauth_response(result):
headers, body, status = result
return Response(body=body, status=status, headers={
native_(name, encoding='latin-1'): native_(value, encoding='latin-1')
for name, value
in headers.items()
})
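# Illustration (not part of the original module): the oauthlib endpoints used
# below return a (headers, body, status) triple, for example
#     ({'Content-Type': 'application/json'}, '{"error": "invalid_request"}', 400)
# oauth_response() turns such a triple into a pyramid Response, coercing the
# header names and values to native strings as WebOb expects.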
def register(config, server):
config.registry.oauth = server
def includeme(config):
server = Server()
intr = config.introspectable(
category_name='oauth servers',
discriminator='server',
title='<default>',
type_name='server',
)
intr['value'] = server
config.action('oauth server', register,
introspectables=(intr,), args=(config, server))
config.add_directive('add_grant_type', add_grant_type, True)
config.add_directive('add_response_type', add_response_type, True)
config.add_directive('add_token_type', add_token_type, True)
config.add_directive('add_oauth_param', add_oauth_param)
config.add_request_method(
lambda request, scopes=None, credentials=None:
oauth_response(
server.create_authorization_response(
request,
scopes=scopes,
credentials=credentials
)
),
str('create_authorization_response'))
config.add_request_method(
lambda request:
server.create_revocation_response(request),
str('create_revocation_response'))
config.add_request_method(
lambda request, credentials=None:
oauth_response(
server.create_token_response(request, credentials=credentials)
),
str('create_token_response'))
config.add_request_method(
lambda request:
server.validate_authorization_request(request),
str('validate_authorization_request'))
config.add_request_method(
lambda request, scopes=None:
server.verify_request(request, scopes=scopes),
str('verify_request'))
config.set_request_property(duplicate_params, str('duplicate_params'))
for name in OAUTH_PARAMS:
config.add_oauth_param(str(name))
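# --- Usage sketch (illustration only; not part of the module above) ---------
# A minimal example of how an application *might* wire this integration up,
# assuming the module is importable under the hypothetical name
# 'pyramid_oauthlib' and that `validator_factory` returns an
# application-defined oauthlib RequestValidator.  Never called from this file.
def _example_oauth_setup(settings, validator_factory):
    from pyramid.config import Configurator
    from oauthlib.oauth2 import AuthorizationCodeGrant, BearerToken
    config = Configurator(settings=settings)
    config.include('pyramid_oauthlib')  # hypothetical distribution name
    validator = validator_factory()
    # named registration: looked up via request.grant_type at token time
    config.add_grant_type(AuthorizationCodeGrant, 'authorization_code',
                          request_validator=validator)
    # registered under the default '' key, so it becomes default_token_type
    config.add_token_type(BearerToken, request_validator=validator)
    config.add_route('token', '/token')
    # a view attached to the 'token' route can then simply do:
    #     return request.create_token_response()
    return config.make_wsgi_app()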
|
|
#!/usr/bin/env python3
# Copyright (c) 2013-2014, Ruslan Baratov
# All rights reserved.
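# This script walks every example directory below the current one and, for
# each CMakeLists.txt that declares a project(), configures and builds the
# example with every generator in `configs` (Unix Makefiles always, Xcode on
# OS X).  gtest is fetched and installed into third_party/_install first so
# that the gtest-based examples can link against it.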
help_wiki = "https://github.com/ruslo/sugar/wiki/Examples-testing"
import os
import argparse
import re
import sys
import subprocess
import copy
import detail.trash
import detail.os_detect
import detail.argparse
top_dir = os.getcwd()
parser = argparse.ArgumentParser()
parser.add_argument(
'--include',
type=detail.argparse.is_dir,
nargs='*',
help="include this directory patterns (low priority)"
)
parser.add_argument(
'--exclude',
type=detail.argparse.is_dir,
nargs='*',
help="exclude this directory patterns (high priority)"
)
parser.add_argument(
'--libcxx',
action='store_true',
    help='compile and link with the libc++ library'
)
parser.add_argument(
'--sim',
action='store_true',
    help='build for the iOS simulator (Xcode only)'
)
args = parser.parse_args()
if args.exclude:
print('exclude directories: {}'.format(args.exclude))
if args.include:
print('include directories: {}'.format(args.include))
# test:
# Unix Makefiles _builds/make-debug
# Xcode _builds/xcode
# Visual Studio 11 _builds/msvc
class Config:
def __init__(self, generator, generator_params, directory, build):
self.generator = generator
self.generator_params = generator_params
self.directory = directory
self.build = build
def info(self, build_dir):
info = '[{}] [{}'.format(build_dir, self.generator)
if self.generator_params:
info += ' + {}'.format(self.generator_params)
info += '] [{}]'.format(self.build)
return info
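# Illustration (not part of the original script): a debug Makefile entry such
# as Config('Unix Makefiles', '-DCMAKE_BUILD_TYPE=Debug', 'make-debug', 'make')
# reports itself via info('_builds/make-debug') as
#     [_builds/make-debug] [Unix Makefiles + -DCMAKE_BUILD_TYPE=Debug] [make]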
configs = []
if detail.os_detect.windows:
sys.exit("Not tested (see {})".format(help_wiki))
    configs.append(Config('Visual Studio', '', 'msvc', 'nmake'))  # placeholder; unreachable while the sys.exit() above is in place
else:
if args.libcxx:
stdlib_flag = "'-stdlib=libc++'"
libcxx_flag = "-DCMAKE_CXX_FLAGS={}".format(stdlib_flag)
else:
libcxx_flag = ''
debug_opt = '-DCMAKE_BUILD_TYPE=Debug {}'.format(libcxx_flag)
release_opt = '-DCMAKE_BUILD_TYPE=Release {}'.format(libcxx_flag)
default_opt = '{}'.format(libcxx_flag)
configs.append(Config('Unix Makefiles', default_opt, 'make-default', 'make'))
configs.append(Config(
'Unix Makefiles', release_opt, 'make-release', 'make'
))
configs.append(Config(
'Unix Makefiles', debug_opt, 'make-debug', 'make'
))
if detail.os_detect.macosx:
if args.libcxx:
params = " -DCMAKE_EXE_LINKER_FLAGS='-stdlib=libc++' {}".format(libcxx_flag)
else:
params = ''
configs.append(Config('Xcode', params, 'xcode', 'xcodebuild'))
gtest_version = '1.7.0-hunter-6'
gtest_result = 'gtest-' + gtest_version
gtest_tar_gz = 'v{}.tar.gz'.format(gtest_version)
gtest_src = 'https://github.com/hunter-packages/gtest/archive/' + gtest_tar_gz
os.chdir('third_party')
if not os.path.exists(gtest_tar_gz):
subprocess.check_call(['wget', gtest_src])
subprocess.check_call(['tar', '-xf', gtest_tar_gz])
os.chdir(gtest_result)
install_prefix = os.path.join(top_dir, 'third_party', '_install', 'native')
install_prefix = '-DCMAKE_INSTALL_PREFIX={}'.format(install_prefix)
if args.libcxx:
lib_flags = '-DCMAKE_CXX_FLAGS=-stdlib=libc++'
else:
lib_flags = ''
# drop empty flag entries so cmake is not handed an empty argument
subprocess.check_call(
    [arg for arg in ['cmake', install_prefix, lib_flags, '-H.', '-B_builds/native'] if arg]
)
subprocess.check_call(
['cmake', '--build', '_builds/native', '--target', 'install', '--config', 'Release']
)
if detail.os_detect.macosx:
toolchain = '-DCMAKE_TOOLCHAIN_FILE={}/cmake/iOS.cmake'.format(os.getcwd())
install_prefix = os.path.join(top_dir, 'third_party', '_install', 'ios')
install_prefix = '-DCMAKE_INSTALL_PREFIX={}'.format(install_prefix)
subprocess.check_call(
['cmake', '-H.', '-B_builds/ios', '-GXcode', toolchain, install_prefix]
)
subprocess.check_call(
['cmake', '--build', '_builds/ios', '--target', 'install', '--config', 'Release']
)
subprocess.check_call(
['cmake', '--build', '_builds/ios', '--target', 'install', '--config', 'Debug']
)
os.chdir(top_dir)
done_list = []
def run_cmake_test(root, config_in):
config = copy.deepcopy(config_in)
library_install = False
if re.match('./06-ios/_universal_library', root):
library_install = True
if config.generator == 'Xcode':
if re.match('./00-detect', root):
config.generator_params = '' # remove warning
# skip Xcode specific
if re.match('./07-cocoa-application', root) and config.generator != 'Xcode':
print("{}: skip (Xcode only)".format(config.generator))
return
check_simulator = False
if re.match('./03-ios-gtest', root):
check_simulator = True
if config.generator != 'Xcode':
print("{}: skip (Xcode only)".format(config.generator))
return
if re.match('./04-gtest-universal', root):
check_simulator = True
if re.match('./06-ios', root):
check_simulator = True
if config.generator != 'Xcode':
print("{}: skip (Xcode only)".format(config.generator))
return
if check_simulator and config.generator == 'Xcode':
if args.sim:
build_sdk = 'iphonesimulator -arch i386'
else:
build_sdk = 'iphoneos'
config.build += ' -sdk {}'.format(build_sdk)
    build_dir = os.path.join(root, '_builds', config.directory)
detail.trash.trash(build_dir, ignore_not_exist=True)
os.makedirs(build_dir)
os.chdir(build_dir)
config_info = config.info(build_dir)
try:
print('##### {}'.format(config_info))
command = ['cmake', '-G', '{}'.format(config.generator)]
command += config.generator_params.split()
if library_install:
command.append(
'-DCMAKE_INSTALL_PREFIX={}/../../install'.format(os.getcwd())
)
command.append('../..')
subprocess.check_call(command)
print('build...')
if config.generator == 'Xcode':
build_release = '{} -configuration Release'.format(config.build)
build_debug = '{} -configuration Debug'.format(config.build)
subprocess.check_call(build_release.split())
subprocess.check_call(build_debug.split())
else:
subprocess.check_call(config.build.split())
if library_install:
# additional install step
subprocess.check_call(
['xcodebuild', '-target', 'install', '-configuration', 'Release']
)
subprocess.check_call(
['xcodebuild', '-target', 'install', '-configuration', 'Debug']
)
print('done')
except subprocess.CalledProcessError:
sys.exit('run failed in "{}" directory'.format(root))
done_list.append(config_info)
os.chdir(top_dir)
# check library installed (xcodebuild may exit 0 even if build failed)
if library_install:
install_base = os.path.join(root, 'install', 'lib')
lib1 = os.path.join(install_base, 'libfoo.a')
if not os.path.exists(lib1):
sys.exit("{} not found".format(lib1))
lib2 = os.path.join(install_base, 'libfood.a')
if not os.path.exists(lib2):
sys.exit("{} not found".format(lib2))
def hit_regex(root, pattern_list):
if not pattern_list:
return False
for pattern_entry in pattern_list:
if pattern_entry and re.match(pattern_entry, root):
return True
return False
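# Note (added for clarity): re.match anchors at the start of the path and
# os.walk('./') yields roots like './03-ios-gtest', so --include/--exclude
# patterns are expected to start with './', e.g. --exclude './06-ios'.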
for root, dirs, files in os.walk('./'):
for filename in files:
if filename != 'CMakeLists.txt':
continue
if hit_regex(root, args.exclude):
print("skip (exclude list): '{}'".format(root))
continue
if args.include and not hit_regex(root, args.include):
print("skip (not in include list): '{}'".format(root))
continue
if re.search(r'/{}'.format(gtest_result), root):
print("skip service temporary project: {}".format(root))
continue
file_path = os.path.join(root, filename)
print('check file = {}'.format(file_path))
        with open(file_path) as file_id:
            content = file_id.read()
        # only build examples whose CMakeLists.txt declares a project(...)
        if not re.search(r'\nproject\s*\(.*\)\n', content):
            continue
detail.trash.trash(os.path.join(root, 'install'), ignore_not_exist=True)
for config in configs:
run_cmake_test(root, config)
print('DONE LIST:')
for x in done_list:
print(x)
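# Typical invocations (illustrative; the actual script file name is assumed):
#     ./test_examples.py                      # build every example with every config
#     ./test_examples.py --libcxx             # compile and link against libc++
#     ./test_examples.py --sim --include './03-ios-gtest'   # gtest example, simulator SDK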
|