code | repo_name | path | language | license | size
---|---|---|---|---|---|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq update interface --machine`."""
from aquilon.exceptions_ import ArgumentError, AquilonError
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.dbwrappers.interface import (verify_port_group,
choose_port_group,
assign_address,
rename_interface)
from aquilon.worker.locks import lock_queue
from aquilon.worker.templates.machine import PlenaryMachineInfo
from aquilon.worker.processes import DSDBRunner
from aquilon.aqdb.model import Machine, Interface, Model
from aquilon.utils import first_of
class CommandUpdateInterfaceMachine(BrokerCommand):
required_parameters = ["interface", "machine"]
def render(self, session, logger, interface, machine, mac, model, vendor,
boot, pg, autopg, comments, master, clear_master, default_route,
rename_to, **arguments):
"""This command expects to locate an interface based only on name
and machine - all other fields, if specified, are meant as updates.
If the machine has a host, dsdb may need to be updated.
The boot flag can *only* be set to true. This is mostly technical,
as at this point in the interface it is difficult to tell if the
flag was unset or set to false. However, it also vastly simplifies
the dsdb logic - we never have to worry about a user trying to
remove the boot flag from a host in dsdb.
"""
audit_results = []
dbhw_ent = Machine.get_unique(session, machine, compel=True)
dbinterface = Interface.get_unique(session, hardware_entity=dbhw_ent,
name=interface, compel=True)
oldinfo = DSDBRunner.snapshot_hw(dbhw_ent)
if arguments.get('hostname', None):
            # Hack to set an initial interface for an aurora host...
dbhost = dbhw_ent.host
if dbhost.archetype.name == 'aurora' and \
dbhw_ent.primary_ip and not dbinterface.addresses:
assign_address(dbinterface, dbhw_ent.primary_ip,
dbhw_ent.primary_name.network)
# We may need extra IP verification (or an autoip option)...
# This may also throw spurious errors if attempting to set the
# port_group to a value it already has.
if pg is not None and dbinterface.port_group != pg.lower().strip():
dbinterface.port_group = verify_port_group(
dbinterface.hardware_entity, pg)
elif autopg:
dbinterface.port_group = choose_port_group(
session, logger, dbinterface.hardware_entity)
audit_results.append(('pg', dbinterface.port_group))
if master:
if dbinterface.addresses:
# FIXME: as a special case, if the only address is the
# primary IP, then we could just move it to the master
# interface. However this can be worked around by bonding
# the interface before calling "add host", so don't bother
# for now.
raise ArgumentError("Can not enslave {0:l} because it has "
"addresses.".format(dbinterface))
dbmaster = Interface.get_unique(session, hardware_entity=dbhw_ent,
name=master, compel=True)
if dbmaster in dbinterface.all_slaves():
raise ArgumentError("Enslaving {0:l} would create a circle, "
"which is not allowed.".format(dbinterface))
dbinterface.master = dbmaster
if clear_master:
if not dbinterface.master:
raise ArgumentError("{0} is not a slave.".format(dbinterface))
dbinterface.master = None
if comments:
dbinterface.comments = comments
if boot:
# Should we also transfer the primary IP to the new boot interface?
# That could get tricky if the new interface already has an IP
# address...
for i in dbhw_ent.interfaces:
if i == dbinterface:
i.bootable = True
i.default_route = True
else:
i.bootable = False
i.default_route = False
if default_route is not None:
dbinterface.default_route = default_route
if not first_of(dbhw_ent.interfaces, lambda x: x.default_route):
logger.client_info("Warning: {0:l} has no default route, hope "
"that's ok.".format(dbhw_ent))
        # Set the MAC address last so that an interface can be updated to
        # bootable=True *before* a MAC address is added. This way the
        # validation in the Interface class does not have to worry about the
        # order in which bootable and the MAC address are updated.
if mac:
q = session.query(Interface).filter_by(mac=mac)
other = q.first()
if other and other != dbinterface:
raise ArgumentError("MAC address {0} is already in use by "
"{1:l}.".format(mac, other))
dbinterface.mac = mac
if model or vendor:
if not dbinterface.model_allowed:
raise ArgumentError("Model/vendor can not be set for a {0:lc}."
.format(dbinterface))
dbmodel = Model.get_unique(session, name=model, vendor=vendor,
machine_type='nic', compel=True)
dbinterface.model = dbmodel
if rename_to:
rename_interface(session, dbinterface, rename_to)
session.flush()
session.refresh(dbhw_ent)
plenary_info = PlenaryMachineInfo(dbhw_ent, logger=logger)
key = plenary_info.get_write_key()
try:
lock_queue.acquire(key)
plenary_info.write(locked=True)
if dbhw_ent.host and dbhw_ent.host.archetype.name != "aurora":
dsdb_runner = DSDBRunner(logger=logger)
dsdb_runner.update_host(dbhw_ent, oldinfo)
dsdb_runner.commit_or_rollback()
except AquilonError, err:
plenary_info.restore_stash()
raise ArgumentError(err)
except:
plenary_info.restore_stash()
raise
finally:
lock_queue.release(key)
for name, value in audit_results:
self.audit_result(session, name, value, **arguments)
return
| stdweird/aquilon | lib/python2.6/aquilon/worker/commands/update_interface_machine.py | Python | apache-2.0 | 7,493 |
#!/usr/bin/env python2.7
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
''' Sample usage of function 'inventory_not_connected' to show which devices are mounted, but not connected.
Print the function's documentation then invoke the function and print the output.
'''
from __future__ import print_function as _print_function
from basics.inventory import inventory_not_connected
from basics.render import print_table
from pydoc import render_doc as doc
from pydoc import plain
def main():
print(plain(doc(inventory_not_connected)))
print("inventory_not_connected()")
print_table(inventory_not_connected(), headers='device-name')
if __name__ == "__main__":
    main()
| tbarrongh/cosc-learning-labs | src/learning_lab/01_inventory_not_connected.py | Python | apache-2.0 | 1,219 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.hooks.oracle_hook import OracleHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class OracleOperator(BaseOperator):
"""
Executes sql code in a specific Oracle database
:param oracle_conn_id: reference to a specific Oracle database
:type oracle_conn_id: str
:param sql: the sql code to be executed. (templated)
:type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or a reference to a template file.
        Template references are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql, oracle_conn_id='oracle_default', parameters=None,
autocommit=False, *args, **kwargs):
super(OracleOperator, self).__init__(*args, **kwargs)
self.oracle_conn_id = oracle_conn_id
self.sql = sql
self.autocommit = autocommit
self.parameters = parameters
def execute(self, context):
self.log.info('Executing: %s', self.sql)
hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
hook.run(
self.sql,
autocommit=self.autocommit,
parameters=self.parameters)
| akosel/incubator-airflow | airflow/operators/oracle_operator.py | Python | apache-2.0 | 2,130 |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy
import paddle.fluid.core as core
import paddle.fluid as fluid
class TestExecutor(unittest.TestCase):
def net(self):
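        # Builds a minimal linear-regression graph: a single fc layer, squared
        # error against the label, averaged to a scalar cost, and minimized by
        # Adam whose learning rate comes from the 'lr' feed variable.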
lr = fluid.data(name="lr", shape=[1], dtype='float32')
x = fluid.data(name="x", shape=[None, 1], dtype='float32')
y = fluid.data(name="y", shape=[None, 1], dtype='float32')
y_predict = fluid.layers.fc(input=x, size=1, act=None)
cost = fluid.layers.square_error_cost(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
opt = fluid.optimizer.Adam(learning_rate=lr)
opt.minimize(avg_cost)
return lr, avg_cost
def test_program_check_feed(self):
main_program = fluid.Program()
startup_program = fluid.Program()
scope = fluid.Scope()
with fluid.program_guard(main_program, startup_program):
with fluid.scope_guard(scope):
cpu = fluid.CPUPlace()
exe = fluid.Executor(cpu)
lr, cost = self.net()
exe.run(startup_program)
train_data = [[1.0], [2.0], [3.0], [4.0]]
y_true = [[2.0], [4.0], [6.0], [8.0]]
a = 0
with self.assertRaises(ValueError):
exe.run(feed={'x': train_data,
'lr': a},
fetch_list=[lr, cost],
return_numpy=False,
use_prune=True)
def test_compiled_program_check_feed(self):
main_program = fluid.Program()
startup_program = fluid.Program()
scope = fluid.Scope()
with fluid.program_guard(main_program, startup_program):
with fluid.scope_guard(scope):
cpu = fluid.CPUPlace()
exe = fluid.Executor(cpu)
lr, cost = self.net()
exe.run(startup_program)
compiled_prog = fluid.CompiledProgram(
main_program).with_data_parallel(loss_name=cost.name)
train_data = [[1.0], [2.0], [3.0], [4.0]]
y_true = [[2.0], [4.0], [6.0], [8.0]]
a = 0
with self.assertRaises(ValueError):
exe.run(compiled_prog,
feed={'x': train_data,
'lr': a},
fetch_list=[lr, cost],
return_numpy=False,
use_prune=True)
if __name__ == '__main__':
unittest.main()
| PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/test_executor_check_feed.py | Python | apache-2.0 | 3,210 |
"""Test the IPython.kernel public API
Authors
-------
* MinRK
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import nose.tools as nt
from IPython.testing import decorators as dec
from IPython.kernel import launcher, connect
from IPython import kernel
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
@dec.parametric
def test_kms():
for base in ("", "Multi"):
KM = base + "KernelManager"
yield nt.assert_true(KM in dir(kernel), KM)
@dec.parametric
def test_kcs():
for base in ("", "Blocking"):
KM = base + "KernelClient"
yield nt.assert_true(KM in dir(kernel), KM)
@dec.parametric
def test_launcher():
for name in launcher.__all__:
yield nt.assert_true(name in dir(kernel), name)
@dec.parametric
def test_connect():
for name in connect.__all__:
yield nt.assert_true(name in dir(kernel), name)
| noslenfa/tdjangorest | uw/lib/python2.7/site-packages/IPython/kernel/tests/test_public_api.py | Python | apache-2.0 | 1,308 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import shutil
import tempfile
from pants.base.build_invalidator import CacheKey, CacheKeyGenerator
from pants.base.cache_manager import InvalidationCacheManager, InvalidationCheck, VersionedTarget
from pants_test.base_test import BaseTest
class AppendingCacheKeyGenerator(CacheKeyGenerator):
"""Generates cache keys for versions of target sets."""
@staticmethod
def combine_cache_keys(cache_keys):
if len(cache_keys) == 1:
return cache_keys[0]
else:
sorted_cache_keys = sorted(cache_keys) # For commutativity.
combined_id = ','.join([cache_key.id for cache_key in sorted_cache_keys])
combined_hash = ','.join([cache_key.hash for cache_key in sorted_cache_keys])
combined_num_sources = reduce(lambda x, y: x + y,
[cache_key.num_sources for cache_key in sorted_cache_keys], 0)
return CacheKey(combined_id, combined_hash, combined_num_sources)
def key_for_target(self, target, sources=None, transitive=False, fingerprint_strategy=None):
return CacheKey(target.id, target.id, target.num_chunking_units)
def key_for(self, tid, sources):
return CacheKey(tid, tid, len(sources))
def print_vt(vt):
print('%d (%s) %s: [ %s ]' % (len(vt.targets), vt.cache_key, vt.valid, ', '.join(['%s(%s)' % (v.id, v.cache_key) for v in vt.versioned_targets])))
class InvalidationCacheManagerTest(BaseTest):
class TestInvalidationCacheManager(InvalidationCacheManager):
def __init__(self, tmpdir):
InvalidationCacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True, None)
def setUp(self):
super(InvalidationCacheManagerTest, self).setUp()
self._dir = tempfile.mkdtemp()
self.cache_manager = InvalidationCacheManagerTest.TestInvalidationCacheManager(self._dir)
def tearDown(self):
shutil.rmtree(self._dir, ignore_errors=True)
super(InvalidationCacheManagerTest, self).tearDown()
def make_vts(self, target):
return VersionedTarget(self.cache_manager, target, target.id)
def test_partition(self):
# The default EmptyPayload chunking unit happens to be 1, so each of these Targets
# has a chunking unit contribution of 1
a = self.make_target(':a', dependencies=[])
b = self.make_target(':b', dependencies=[a])
c = self.make_target(':c', dependencies=[b])
d = self.make_target(':d', dependencies=[c, a])
e = self.make_target(':e', dependencies=[d])
targets = [a, b, c, d, e]
def print_partitions(partitions):
strs = []
for partition in partitions:
strs.append('(%s)' % ', '.join([t.id for t in partition.targets]))
print('[%s]' % ' '.join(strs))
# Verify basic data structure soundness.
all_vts = self.cache_manager._wrap_targets(targets)
invalid_vts = filter(lambda vt: not vt.valid, all_vts)
self.assertEquals(5, len(invalid_vts))
self.assertEquals(5, len(all_vts))
vts_targets = [vt.targets[0] for vt in all_vts]
self.assertEquals(set(targets), set(vts_targets))
# Test a simple partition.
ic = InvalidationCheck(all_vts, [], 3)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
# Several correct partitionings are possible, but in all cases 4 1-source targets will be
# added to the first partition before it exceeds the limit of 3, and the final target will
# be in a partition by itself.
self.assertEquals(2, len(partitioned))
self.assertEquals(4, len(partitioned[0].targets))
self.assertEquals(1, len(partitioned[1].targets))
# Test partition with colors.
red = 'red'
blue = 'blue'
colors = {
a: blue,
b: red,
c: red,
d: red,
e: blue
}
# As a reference, we partition without colors.
ic = InvalidationCheck(all_vts, [], 2)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
self.assertEquals(2, len(partitioned))
self.assertEquals(3, len(partitioned[0].targets))
self.assertEquals(2, len(partitioned[1].targets))
# Now apply color restrictions.
ic = InvalidationCheck(all_vts, [], 2, target_colors=colors)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
self.assertEquals(3, len(partitioned))
self.assertEquals(1, len(partitioned[0].targets))
self.assertEquals(3, len(partitioned[1].targets))
self.assertEquals(1, len(partitioned[2].targets))
| pgroudas/pants | tests/python/pants_test/tasks/test_cache_manager.py | Python | apache-2.0 | 4,697 |
#!/usr/bin/python
#
# Author: Rajesh Sinha, Karan Narain
# The base class for Twitter and GPlus Objects
#
import logging
import sys
from bs4 import BeautifulSoup
import urllib2
import re
## Some Important constants
_parser = "lxml" ## remember to pip install lxml or else use another parser
_loggingLevel = logging.DEBUG ## How much trace
class AltmetricBase:
def __init__(self, name, snLink, altmetricId, startPage, endPage):
self.name = name # Name of social network
self.snLink = snLink # The /twitter or /google link
self.amUrl = 'https://www.altmetric.com/details/' + altmetricId + snLink # full link to page1 of social network
self.startPagination = startPage
self.endPagination = endPage
self.baseLink = self.amUrl.replace(self.snLink,'') # The baselink which is shown when a non-existent page is used
self.logger = logging.getLogger(__name__)
logging.basicConfig(level=_loggingLevel)
self.logger.debug('Created Altmetric Object')
def findPosters(self, soup):
raise NotImplementedError("Subclass must implement abstract method")
def getMoreSoup(self):
""" Tries to check all possible links starting from 2 to 1000 and breaks out when
we get a redirect. There is no graceful way i.e. HTTP code on redirect when we
access a nonexistant link. So we check when almetric returns the base URL of the
research arcticle and stop then. This is a generator function and keeps returning
the beautifulsoup of the link
"""
# when the list runs out altmteric returns the base url of the research paper
for a in range(self.startPagination, self.endPagination):
link = self.amUrl + '/page:' + str(a)
self.logger.debug('Trying URL - %s', link)
try:
page = urllib2.urlopen(link)
if self.isRedirect(page):
self.logger.debug('finishing the generator...')
return
else:
self.logger.debug('Yielding Soup')
yield BeautifulSoup(page, _parser)
except urllib2.HTTPError, e:
self.logger.error('Could not open %s because of HTTP error', link)
self.logger.error("%r", e.code)
except urllib2.URLError, e:
self.logger.error('Could not open %s because of URL error', link)
self.logger.error("%r", e.args)
def isRedirect(self, page):
return page.geturl() == self.baseLink
@staticmethod
def isValidURL(url):
testUrl = 'https://www.altmetric.com/details/' + url
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
result = regex.match(testUrl)
return False if result is None else True
def openAndLoadURL(self, fname):
""" Opens the base URL for a network and returns the beautifulSoup through lxml parses """
self.logger.debug('Opening URL ' + fname)
try:
page = urllib2.urlopen(fname)
except urllib2.HTTPError, e:
self.logger.error('Could not open ' + fname+ ': HTTP Error ')
self.logger.error(e.code)
return False, None
except urllib2.URLError, e:
self.logger.error('Could not open ' + fname+ ': URL Error ')
self.logger.error(e.args)
return False, None
soup = BeautifulSoup(page, _parser)
return True, soup
def executeAltmetricAnalysis(self):
posters = []
status, soup = self.openAndLoadURL(self.amUrl)
if status:
posters = self.findPosters(soup)
self.logger.debug('Found %d posts so far', len(posters))
for soup in self.getMoreSoup():
posters.extend(self.findPosters(soup))
self.logger.debug('Found %d posts so far', len(posters))
self.logger.info('Found %d posts in total for the link', len(posters))
posters = list(set(posters))
self.logger.info('Found %d Unique Posters for the link', len(posters))
for poster in posters:
print (poster).encode('utf-8')
self.logger.info('written all the posters to stdout...')
else:
self.logger.error('found error in URL upfront so bailing out')
sys.stderr.flush()
sys.stdout.flush()
sys.exit(1)
| sinharrajesh/dbtools | google-plus-analysis/SocialNW.py | Python | apache-2.0 | 4,807 |
#!/usr/bin/env python
import logging
import sys
import os
import signal
import conf
import core
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocolAcceleratedFactory
from thrift.server import TServer
from rpc import RndNodeApi
logger = logging.getLogger(__name__)
class RndProcessHandler(object):
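    # Thrift service handler for the render node: delegates task execution,
    # kill requests, running-task queries, reboots and pings to core.ProcessMgr
    # and core.Profiler.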
def runTask(self, rtc):
logger.debug("starting core.ProcessMgr.runProcess(rtc): %s", rtc.taskId)
core.ProcessMgr.runProcess(rtc)
logger.debug("finished core.ProcessMgr.runProcess(rtc): %s", rtc.taskId)
def killRunningTask(self, procId, reason):
core.ProcessMgr.killRunningTask(procId, reason)
def getRunningTasks(self):
logger.debug("starting core.ProcessMgr.getRunningTasks()")
tasks = core.ProcessMgr.getRunningTasks()
logger.debug("finished core.ProcessMgr.getRunningTasks()")
return tasks
def reboot(self, now=False):
core.ProcessMgr.reboot(now)
def pingPong(self, withTasks=False):
ping = core.Profiler.getPing()
ping.isReboot = core.ProcessMgr.isReboot
if withTasks:
ping.tasks = self.getRunningTasks()
return ping
def get_server(api, handler, port, **kwargs):
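    # Wires the handler into a threaded Thrift server: framed transport and
    # accelerated binary protocol by default, both overridable through kwargs.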
processor = api.Processor(handler)
socket = TSocket.TServerSocket(port=port)
tfactory = kwargs.get('transport') or TTransport.TFramedTransportFactory()
pfactory = kwargs.get('protocol') or TBinaryProtocolAcceleratedFactory()
server = TServer.TThreadPoolServer(processor, socket, tfactory, pfactory)
server.setNumThreads(8)
return server
def exit_handler(*args):
logger.info("Caught SIGTERM. Shutting down Process Manager...")
core.ProcessMgr.shutdown()
logger.info("Process Manager finished shutting down")
os._exit(0)
signal.signal(signal.SIGTERM, exit_handler)
def start():
logger.info("Staring Render Node Daemon on TCP port %d" % conf.NETWORK_PORT)
server = get_server(RndNodeApi, RndProcessHandler(), conf.NETWORK_PORT)
try:
server.serve()
except KeyboardInterrupt:
exit_handler()
sys.exit(0)
| chadmv/plow | lib/python/plow/rndaemon/server.py | Python | apache-2.0 | 2,191 |
#!/usr/bin/env python
import lib_v2 as lib
import sys
import os
def main(argv=None):
"""
Usage is:
submit.py [--account <chargecode>] [--url <url>] -- <commandline>
Run from the working dir of the job which must contain (in addition
to the job files) a file named scheduler.conf with scheduler properties for the job.
<chargecode>, if present, gives the project to charge the job to.
Url is the url of the submitting website including the taskid parameter.
Returns 0 with "jobid=<jobid>" on stdout if job submitted ok
Returns 1 with multiline error message on stdout if error.
Returns 2 for the specific error of queue limit exceeded.
"""
#COMMAND LINE PARSING
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--account', metavar="ACCOUNT", type=str, default=lib.account,
help="The account string to use when submitting jobs. Default is read from config files.")
parser.add_argument('--url', metavar="URL", dest="URL", type=str,
help="Notification URL")
try:
cmdline_options, cmdline = parser.parse_known_args(argv)
cmdline = cmdline[1:] if not ('--' in cmdline) else cmdline[cmdline.index('--')+1:]
except Exception as e:
print "There was a problem submitting your job"
print e
sys.exit(1)
account = cmdline_options.account
url = cmdline_options.URL
#cmdline as an array (and already set)
tooltype = lib.getToolType(cmdline)
scheduler_properties = lib.getProperties("scheduler.conf")
# print scheduler_properties
scheduler_info = lib.schedulerInfo(scheduler_properties, tooltype)
# print scheduler_info
# If this is a "direct" run type job we don't need to create a qsub script, we'll just run batch_ommand.cmdline.
if scheduler_info["is_direct"]:
return lib.submitDirectJob(account, url, lib.email, lib.jobname, cmdline)
runtime = int(scheduler_info["runtime"])
useLocalDisk = False
"""
Workaround for problems with file io on oasis and longer mrbayes runs. Instead of running on
oasis, we'll copy the working dir to the compute nodes local storage and copy the results back
when the job completes. Since many mrbayes jobs timeout we need a special trick to copy results
of jobs that timeout: Right before we launch mrbayes we launch a shell script in the background
that sleeps a few min less than the job's runtime and then copies the results. If mrbayes terminates
normally the background sleep is killed automatically.
"""
if (tooltype == "mrbayes" and runtime > 60):
useLocalDisk = True
# I'm backing out the workaround by setting useLocalDisk to false.
useLocalDisk = False
# Write the command line to a file, batch_command.cmdline.
rfile = open(lib.cmdfile, "w")
rfile.write("#!/bin/sh\n")
rfile.writelines((" ".join(cmdline), "\n"))
rfile.close()
os.chmod(lib.cmdfile, 0744);
# Create the qsub script
rfile = open(lib.runfile, "w")
text = """#!/bin/sh
#PBS -q %s
#PBS -N %s
#PBS -l walltime=00:%d:00
#PBS -o scheduler_stdout.txt
#PBS -e scheduler_stderr.txt
#PBS -W umask=0007
##PBS -V
#PBS -v QOS=2
#PBS -M %s
#PBS -m ae
#PBS -A %s
""" % (scheduler_info["queue"], lib.jobname, scheduler_info["runtime"], lib.email, account)
rfile.write(text)
text = "#PBS -l nodes=%d:ppn=%d\n" % (scheduler_info["nodes"], scheduler_info["ppn"])
rfile.write(text)
rfile.write("cd %s\n" % (lib.jobdir, lib.local_jobdir)[useLocalDisk])
if useLocalDisk == True:
# Note that it's critical that newlines in the text string are all within the double
# quotes; otherwise the echo command line would be split across lines and make no sense.
text = """"Due to filesystem problems intermediate results for longer mrbayes runs
will not be available while the job is running. The result files will be
available when mrbayes finishes.
We're working to find a solution." """
rfile.write("echo %s > %s/INTERMEDIATE_RESULTS_README.TXT\n" % (text, lib.jobdir))
rfile.write("cp -r %s/* .\n" % lib.jobdir);
sleepTime = int(scheduler_info["runtime"]) - 10
rfile.write("sleep_cp.sh %s %s &\n" % (sleepTime, lib.jobdir))
text = """
source /etc/profile.d/modules.sh
echo Job starting at `date` > start.txt
curl %s\&status=START
export CIPRES_THREADSPP=%d
export CIPRES_NP=%d
%s 1>stdout.txt 2>stderr.txt
echo Job finished at `date` > done.txt
""" % (url,
int(scheduler_info["threads_per_process"]),
int(scheduler_info["mpi_processes"]),
lib.cmdfile)
rfile.write(text)
if (useLocalDisk):
text = """
echo "Job completed, starting to copy working directory."
echo "mkdir %s.complete"
mkdir %s.complete
echo "cp -r * %s.complete"
cp -r * %s.complete
echo "mv %s %s.sleep"
mv %s %s.sleep
echo "mv %s.complete %s"
mv %s.complete %s
echo "rm -rf %s.sleep"
rm -rf %s.sleep
echo "Finished copying working directory."
""" % (lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir)
rfile.write(text)
rfile.write("curl %s\&status=DONE\n" % url)
rfile.close()
return lib.submitJob()
return 0
if __name__ == "__main__":
sys.exit(main())
| SciGaP/DEPRECATED-Cipres-Airavata-POC | saminda/cipres-airavata/sdk/scripts/remote_resource/trestles/submit_v2.py | Python | apache-2.0 | 5,478 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for api module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import gc
import imp
import os
import re
import textwrap
import types
import numpy as np
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.util import function_utils
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
tf = utils.fake_tf()
global_n = 2
class TestResource(object):
def __init__(self):
self.x = 3
class ApiTest(test.TestCase):
@test_util.run_deprecated_v1
def test_decorator_recursive(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_decorator_not_recursive(self):
class TestClass(object):
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=False)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_convert_then_do_not_convert(self):
class TestClass(object):
@api.do_not_convert
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant((2, 4)), constant_op.constant(1),
constant_op.constant(-2))
self.assertAllEqual((0, 1), self.evaluate(x))
@test_util.run_deprecated_v1
def test_decorator_calls_decorated(self):
class TestClass(object):
@api.convert()
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_decorator_preserves_argspec(self):
class TestClass(object):
def test_method(self, a):
if a < 0:
a = -a
return a
test_method_converted = api.convert()(test_method)
tc = TestClass()
self.assertListEqual(
list(tf_inspect.getfullargspec(tc.test_method)),
list(tf_inspect.getfullargspec(tc.test_method_converted)))
def test_do_not_convert_argspec(self):
class TestClass(object):
def test_method(self, x, y):
z = x + y
return z
test_method_whitelisted = api.do_not_convert(test_method)
tc = TestClass()
self.assertTrue(tf_inspect.ismethod(tc.test_method_whitelisted))
# Because the wrapped function is not generated, we can't preserve its
# arg spec.
self.assertEqual((),
tuple(function_utils.fn_args(tc.test_method_whitelisted)))
def test_do_not_convert_callable_object(self):
class TestClass(object):
def __call__(self):
return 1
tc = TestClass()
self.assertEqual(1, api.do_not_convert(tc)())
@test_util.run_deprecated_v1
def test_convert_call_site_decorator(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= api.converted_call(self.called_member,
converter.ConversionOptions(recursive=True),
(a,), {})
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_converted_call_builtin(self):
x = api.converted_call(range, converter.ConversionOptions(recursive=True),
(3,), {})
self.assertEqual((0, 1, 2), tuple(x))
x = api.converted_call(re.compile,
converter.ConversionOptions(recursive=True),
('mnas_v4_a.*\\/.*(weights|kernel):0$',), {})
self.assertIsNotNone(x.match('mnas_v4_a/weights:0'))
def test_converted_call_function(self):
def test_fn(x):
if x < 0:
return -x
return x
x = api.converted_call(test_fn, converter.ConversionOptions(recursive=True),
(constant_op.constant(-1),), {})
self.assertEqual(1, self.evaluate(x))
@test_util.run_v1_only('b/120545219')
def test_converted_call_functools_partial(self):
def test_fn(x, y, z):
if x < 0:
return -x, -y, -z
return x, y, z
x = api.converted_call(
functools.partial(test_fn, constant_op.constant(-1), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
x = api.converted_call(
functools.partial(
functools.partial(test_fn, constant_op.constant(-1)), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
def test_converted_call_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_synthetic_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_function(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
test_method = types.MethodType(test_function, tc)
x = api.converted_call(test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_wrapper(self):
class TestClass(object):
def foo(self):
pass
tc = TestClass()
# `method.__get__()` returns a so-called method-wrapper.
wrapper = api.converted_call(tc.foo.__get__,
converter.ConversionOptions(recursive=True),
(tc,), {})
self.assertEqual(wrapper, tc.foo)
def test_converted_call_method_as_object_attribute(self):
class AnotherClass(object):
def __init__(self):
self.another_class_attr = constant_op.constant(1)
def method(self):
if self.another_class_attr > 0:
return self.another_class_attr + 1
return self.another_class_attr + 10
class TestClass(object):
def __init__(self, another_obj_method):
self.another_obj_method = another_obj_method
obj = AnotherClass()
tc = TestClass(obj.method)
x = api.converted_call(tc.another_obj_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(self.evaluate(x), 2)
def test_converted_call_method_converts_recursively(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def other_method(self):
if self.x < 0:
return -self.x
return self.x
def test_method(self):
return self.other_method()
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_method_by_class(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(TestClass.test_method,
converter.ConversionOptions(recursive=True), (tc,),
{})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_callable_object(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def __call__(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc, converter.ConversionOptions(recursive=True), (),
{})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_callable_metaclass(self):
class TestMetaclass(type):
x = constant_op.constant(-1)
def __call__(cls):
if cls.x < 0:
cls.x = -cls.x
return cls
tc = TestMetaclass('TestClass', (), {})
    # This functools.partial will hide the class from the constructor
    # check. Not ideal. See b/120224672.
tc = functools.partial(tc)
converted_tc = api.converted_call(
tc, converter.ConversionOptions(recursive=True), (), {})
self.assertIsInstance(converted_tc, TestMetaclass)
self.assertEqual(1, self.evaluate(converted_tc.x))
@test_util.run_deprecated_v1
def test_converted_call_constructor(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = api.converted_call(TestClass,
converter.ConversionOptions(recursive=True),
(constant_op.constant(-1),), {})
# tc is still a TestClass - constructors are whitelisted.
# TODO(b/124016764): Support this use case.
# The error below is specific to the `if` statement not being converted.
with self.assertRaises(TypeError):
tc.test_method()
def test_converted_call_mangled_properties(self):
class TestClass(object):
def __init__(self, x):
self.__private = x
def test_method(self):
if self.__private < 0:
return self.__private
return self.__private
tc = TestClass(constant_op.constant(-1))
# The error below is specific to the `if` statement not being converted.
with self.assertRaisesRegex(NotImplementedError, 'Mangled names'):
api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
tc.test_method()
def test_converted_call_already_converted(self):
def f(x):
return x == 0
x = api.converted_call(f, converter.ConversionOptions(recursive=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
converted_f = api.to_graph(
f, experimental_optional_features=converter.Feature.ALL)
x = api.converted_call(converted_f,
converter.ConversionOptions(recursive=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
def test_converted_call_then_already_converted_dynamic(self):
@api.convert()
def g(x):
if x > 0:
return x
else:
return -x
def f(g, x):
return g(x)
x = api.converted_call(f, converter.ConversionOptions(recursive=True),
(g, constant_op.constant(1)), {})
self.assertEqual(self.evaluate(x), 1)
def test_converted_call_forced_when_explicitly_whitelisted(self):
@api.do_not_convert()
def f(x):
return x + 1
x = api.converted_call(
f, converter.ConversionOptions(recursive=True, user_requested=True),
(constant_op.constant(0),), {})
self.assertTrue(self.evaluate(x))
converted_f = api.to_graph(
f, experimental_optional_features=converter.Feature.ALL)
x = api.converted_call(converted_f,
converter.ConversionOptions(recursive=True), (0,),
{})
self.assertEqual(x, 1)
@test_util.run_deprecated_v1
def test_converted_call_no_user_code(self):
def f(x):
return len(x)
opts = converter.ConversionOptions(internal_convert_user_code=False)
# f should not be converted, causing len to error out.
with self.assertRaisesRegexp(Exception, 'len is not well defined'):
api.converted_call(f, opts, (constant_op.constant([0]),), {})
# len on the other hand should work fine.
x = api.converted_call(len, opts, (constant_op.constant([0]),), {})
# The constant has static shape so the result is a primitive not a Tensor.
self.assertEqual(x, 1)
def test_converted_call_no_kwargs_allowed(self):
def f(*args):
# Note: np.broadcast rejects any **kwargs, even *{}
return np.broadcast(args[:1])
opts = converter.ConversionOptions(internal_convert_user_code=False)
self.assertIsNotNone(api.converted_call(f, opts, (1, 2, 3, 4), None))
def test_converted_call_whitelisted_method(self):
opts = converter.ConversionOptions(recursive=True)
model = sequential.Sequential([core.Dense(2)])
x = api.converted_call(model.call, opts, (constant_op.constant([[0.0]]),),
{'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_whitelisted_method_via_owner(self):
opts = converter.ConversionOptions(recursive=True)
model = sequential.Sequential([core.Dense(2)])
x = api.converted_call(model.call, opts, (constant_op.constant([[0.0]]),),
{'training': True})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual([[0.0, 0.0]], self.evaluate(x))
def test_converted_call_numpy(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(np.arange, opts, (5,), {})
self.assertAllEqual(x, list(range(5)))
def test_converted_call_tf_op_forced(self):
# TODO(mdan): Add the missing level of support to LOGICAL_EXPRESSIONS.
opts = converter.ConversionOptions(
user_requested=True, optional_features=None)
x = api.converted_call(gen_math_ops.add, opts, (1, 1), {})
self.assertAllEqual(self.evaluate(x), 2)
def test_converted_call_exec_generated_code(self):
temp_mod = imp.new_module('test_module')
dynamic_code = """
def foo(x):
return x + 1
"""
exec(textwrap.dedent(dynamic_code), temp_mod.__dict__) # pylint:disable=exec-used
opts = converter.ConversionOptions(optional_features=None)
x = api.converted_call(temp_mod.foo, opts, (1,), {})
self.assertAllEqual(x, 2)
def test_converted_call_namedtuple(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(collections.namedtuple, opts,
('TestNamedtuple', ('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_namedtuple_via_collections(self):
opts = converter.ConversionOptions(recursive=True)
x = api.converted_call(collections.namedtuple, opts,
('TestNamedtuple', ('a', 'b')), {})
self.assertTrue(inspect_utils.isnamedtuple(x))
def test_converted_call_namedtuple_subclass_bound_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
def test_method(self, x):
while tf.reduce_sum(x) > self.a:
x //= self.b
return x
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
x = api.converted_call(obj.test_method, opts,
(constant_op.constant([2, 4]),), {})
self.assertAllEqual(self.evaluate(x), [1, 2])
def test_converted_call_namedtuple_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
pass
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
# _asdict is a documented method of namedtuple.
x = api.converted_call(obj._asdict, opts, (), {})
self.assertDictEqual(x, {'a': 5, 'b': 2})
def test_converted_call_namedtuple_subclass_unbound_method(self):
class TestClass(collections.namedtuple('TestNamedtuple', ('a', 'b'))):
def test_method(self, x):
while tf.reduce_sum(x) > self.a:
x //= self.b
return x
opts = converter.ConversionOptions(recursive=True)
obj = TestClass(5, 2)
x = api.converted_call(TestClass.test_method, opts,
(obj, constant_op.constant([2, 4])), {})
self.assertAllEqual(self.evaluate(x), [1, 2])
def test_converted_call_lambda(self):
opts = converter.ConversionOptions(recursive=True)
l = lambda x: x == 0
x = api.converted_call(l, opts, (constant_op.constant(0),), {})
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(True, self.evaluate(x))
def test_converted_call_defun_object_method(self):
opts = converter.ConversionOptions(recursive=True)
# pylint:disable=method-hidden
class TestClass(object):
def method(self):
return 1
def prepare(self):
self.method = function.defun(self.method)
# pylint:enable=method-hidden
tc = TestClass()
tc.prepare()
x = api.converted_call(tc.method, opts, (), {})
self.assertAllEqual(1, self.evaluate(x))
def test_converted_call_through_tf_dataset(self):
def other_fn(x):
if x > 0:
return x
return -x
def f():
return dataset_ops.Dataset.range(-3, 3).map(other_fn)
# Dataset iteration only works inside tf.
@def_function.function
def graph_fn():
opts = converter.ConversionOptions(recursive=True)
ds = api.converted_call(f, opts, (), {})
itr = iter(ds)
return next(itr), next(itr), next(itr)
self.assertAllEqual(self.evaluate(graph_fn()), (3, 2, 1))
def assertNoMemoryLeaks(self, f):
object_ids_before = {id(o) for o in gc.get_objects()}
f()
gc.collect()
objects_after = tuple(
o for o in gc.get_objects() if id(o) not in object_ids_before)
self.assertEmpty(
tuple(o for o in objects_after if isinstance(o, TestResource)))
def test_converted_call_no_leaks_via_closure(self):
def test_fn():
res = TestResource()
def f(y):
return res.x + y
opts = converter.ConversionOptions(recursive=True)
api.converted_call(f, opts, (1,), {})
self.assertNoMemoryLeaks(test_fn)
def test_converted_call_no_leaks_via_inner_function_closure(self):
def test_fn():
res = TestResource()
def f(y):
def inner_f():
return res.x + y
return inner_f
opts = converter.ConversionOptions(recursive=True)
api.converted_call(f, opts, (1,), {})()
self.assertNoMemoryLeaks(test_fn)
def test_context_tracking_direct_calls(self):
@api.do_not_convert()
def unconverted_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.DISABLED)
@api.convert()
def converted_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
unconverted_fn()
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
converted_fn()
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
@api.call_with_unspecified_conversion_status
def unspecified_fn():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
unspecified_fn()
def test_to_graph_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with tf.Graph().as_default():
x = compiled_fn(constant_op.constant((4, 8)), 4)
self.assertAllEqual(self.evaluate(x), (1, 2))
@test_util.run_deprecated_v1
def test_to_graph_with_defaults(self):
foo = 4
def test_fn(x, s=foo):
while tf.reduce_sum(x) > s:
x //= 2
return x
compiled_fn = api.to_graph(test_fn)
with self.cached_session() as sess:
x = compiled_fn(constant_op.constant([4, 8]))
self.assertListEqual([1, 2], self.evaluate(x).tolist())
def test_to_graph_with_globals(self):
def test_fn(x):
global global_n
global_n = x + global_n
return global_n
converted_fn = api.to_graph(test_fn)
prev_val = global_n
converted_fn(10)
self.assertGreater(global_n, prev_val)
def test_to_graph_with_kwargs_clashing_converted_call(self):
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match converted_call's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_with_kwargs_clashing_unconverted_call(self):
@api.do_not_convert
def called_fn(**kwargs):
return kwargs['f'] + kwargs['owner']
def test_fn():
# These arg names intentionally match _call_unconverted's
return called_fn(f=1, owner=2)
compiled_fn = api.to_graph(test_fn)
self.assertEqual(compiled_fn(), 3)
def test_to_graph_caching(self):
def test_fn(x):
if x > 0:
return x
else:
return -x
converted_functions = tuple(api.to_graph(test_fn) for _ in (-1, 0, 1))
# All outputs are from the same module. We can't use __module__ because
# that's reset when we instantiate the function (see conversion.py).
# TODO(mdan): Can and should we overwrite __module__ instead?
module_names = frozenset(f.ag_module for f in converted_functions)
self.assertEqual(len(module_names), 1)
self.assertNotIn('__main__', module_names)
self.assertEqual(len(frozenset(id(f) for f in converted_functions)), 3)
def test_to_graph_caching_different_options(self):
def called_fn():
pass
def test_fn():
return called_fn()
converted_recursive = api.to_graph(test_fn, recursive=True)
converted_non_recursive = api.to_graph(test_fn, recursive=False)
self.assertNotEqual(converted_recursive.ag_module,
converted_non_recursive.ag_module)
self.assertRegex(tf_inspect.getsource(converted_recursive),
'FunctionScope(.*recursive=True.*)')
self.assertRegex(tf_inspect.getsource(converted_non_recursive),
'FunctionScope(.*recursive=False.*)')
def test_to_graph_preserves_bindings(self):
y = 3
def test_fn():
return y
converted = api.to_graph(test_fn)
self.assertEqual(converted(), 3)
y = 7
self.assertEqual(converted(), 7)
def test_to_graph_source_map(self):
def test_fn(y):
return y**2
self.assertTrue(hasattr(api.to_graph(test_fn), 'ag_source_map'))
def test_to_graph_sets_conversion_context(self):
def g():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.ENABLED)
return 0
    # Note: autograph=False sets the context to Status.DISABLED. The test
    # verifies that to_graph overrides that.
@def_function.function(autograph=False)
def f():
converted_g = api.to_graph(g)
converted_g()
f()
def test_to_code_basic(self):
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x /= 2
return x
# Just check that the output is parseable Python code.
self.assertIsNotNone(parser.parse_str(api.to_code(test_fn)))
def test_to_code_with_wrapped_function(self):
@def_function.function
def test_fn(x, s):
while tf.reduce_sum(x) > s:
x /= 2
return x
with self.assertRaisesRegex(Exception, 'try passing.*python_function'):
api.to_code(test_fn)
def test_tf_convert_direct(self):
def f():
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
# Note: the autograph setting of tf.function has nothing to do with the
# test case. We just disable it to avoid confusion.
@def_function.function(autograph=False)
def test_fn(ctx):
return api.tf_convert(f, ctx)()
self.assertEqual(
self.evaluate(
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))), -1)
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.DISABLED))
def test_tf_convert_unspecified_not_converted_by_default(self):
def f():
self.assertEqual(ag_ctx.control_status_ctx().status,
ag_ctx.Status.UNSPECIFIED)
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
@def_function.function
def test_fn(ctx):
return api.tf_convert(f, ctx, convert_by_default=False)()
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.UNSPECIFIED))
def test_tf_convert_whitelisted_method(self):
model = sequential.Sequential([core.Dense(2)])
converted_call = api.tf_convert(
model.call, ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))
_, converted_target = tf_decorator.unwrap(converted_call)
self.assertIs(converted_target.__func__, model.call.__func__)
def test_tf_convert_wrapped(self):
def f():
if tf.reduce_sum([1, 2]) > 0:
return -1
return 1
@functools.wraps(f)
def wrapper(*args, **kwargs):
return wrapper.__wrapped__(*args, **kwargs)
decorated_f = tf_decorator.make_decorator(f, wrapper)
# Note: the autograph setting of tf has nothing to do with the
# test case. We just disable it to avoid confusion.
@def_function.function(autograph=False)
def test_fn(ctx):
return api.tf_convert(decorated_f, ctx)()
self.assertEqual(
self.evaluate(
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.ENABLED))), -1)
# tf_convert mutates the decorator, so we need to create a new one for
# another test.
decorated_f = tf_decorator.make_decorator(f, wrapper)
with self.assertRaisesRegex(TypeError, 'tf.Tensor.*bool'):
# The code in `f` is only valid with AutoGraph.
test_fn(ag_ctx.ControlStatusCtx(status=ag_ctx.Status.DISABLED))
def test_super_with_one_arg(self):
test_case_self = self
class TestBase(object):
def plus_three(self, x):
return x + 3
class TestSubclass(TestBase):
def plus_three(self, x):
test_case_self.fail('This should never be called.')
def one_arg(self, x):
test_base_unbound = super(TestSubclass)
test_base = test_base_unbound.__get__(self, TestSubclass)
return test_base.plus_three(x)
tc = api.converted_call(TestSubclass,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(5, tc.one_arg(2))
def test_super_with_two_args(self):
test_case_self = self
class TestBase(object):
def plus_three(self, x):
return x + 3
class TestSubclass(TestBase):
def plus_three(self, x):
test_case_self.fail('This should never be called.')
def two_args(self, x):
return super(TestSubclass, self).plus_three(x)
tc = api.converted_call(TestSubclass,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(5, tc.two_args(2))
if __name__ == '__main__':
os.environ['AUTOGRAPH_STRICT_CONVERSION'] = '1'
test.main()
| chemelnucfin/tensorflow | tensorflow/python/autograph/impl/api_test.py | Python | apache-2.0 | 30,339 |
import time
from headset import Headset
from stream import Stream
from common import Version, BytesStatus
class WirelessHeadset(Headset):
"""This class represents the wireless version of the mindwave
Args:
dev: device link
headset: the id of mindwave wireless version
It has the basic functionality to connect, autoconnect and disconnect
"""
def __init__(self, dev=None, headset_id=None, rate=None):
Headset.__init__(self, headset_id)
self.device = dev
self.bauderate = rate
self.stream = Stream(device=self.device, bauderate=rate, version=Version.MINDWAVE)
time.sleep(2)
self.connect()
self.run(self.stream)
# def open(self):
# if not self.stream or not self.stream.IsOpen():
# #self.stream = stream.stream(self.device, baudrate=115200, parity=stream.PARITY_NONE, stopbits=stream.STOPBITS_ONE,
# # bytesize=stream.EIGHTBITS, writeTimeout=0, timeout=3, rtscts=True, xonxoff=False)
# self.stream = serial.Serial(self.device, self.baudrate, timeout=0.001, rtscts=True)
def autoconnect(self):
"""This method autoconnects to the mindwave every."""
self.stream.getStream().write(BytesStatus.AUTOCONNECT)
        # the dongle switches to autoconnect mode; it must wait 10 seconds to connect to a headset
time.sleep(10)
def connect(self):
"""This method connects to the mindwave with the id."""
if self.id is not None:
            # send the CONNECT byte followed by the headset id (hex string decoded to raw bytes)
self.stream.getStream().write(''.join([BytesStatus.CONNECT, self.id.decode('hex')]))
else:
self.autoconnect()
def disconnect(self):
"""This method disconnects the mindwave."""
self.stream.getStream().write(BytesStatus.DISCONNECT)
def echo_raw(self):
"""This method prints the raw data from mindwave."""
while 1:
#time.sleep()
data = self.stream.read(1)
for b in data:
print '0x%s, ' % b.encode('hex'),
print "" | jacquelinekay/gsoc-ros-neural | mindwave_driver/src/mindwave_driver/wireless_headset.py | Python | apache-2.0 | 2,166 |
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover DfpUtils."""
__author__ = '[email protected] (Joseph DiLallo)'
import os
import sys
import tempfile
import unittest
sys.path.insert(0, os.path.join('..', '..', '..'))
import mock
from adspygoogle import DfpClient
from adspygoogle.common.Errors import ValidationError
from adspygoogle.dfp import DfpUtils
class DfpUtilsTest(unittest.TestCase):
"""Unittest suite for DfpUtils."""
def testDataFileCurrencies(self):
"""Test whether csv data file with currencies is valid."""
cols = 2
for item in DfpUtils.GetCurrencies():
self.assertEqual(len(item), cols)
def testDataFileTimezones(self):
"""Test whether csv data file with timezones is valid."""
cols = 1
for item in DfpUtils.GetTimezones():
self.assertEqual(len(item), cols)
def testGetAllEntitiesByStatement(self):
client = mock.Mock()
line_item_service = mock.Mock()
rval = 'Line items for everyone!'
def VerifyExpectedCall(arg):
self.assertEqual({'values': None,
'query': 'ORDER BY name LIMIT 500 OFFSET 0'}, arg)
return [{'results': [rval]}]
client.GetLineItemService.return_value = line_item_service
line_item_service._service_name = 'LineItemService'
line_item_service.GetLineItemsByStatement.side_effect = VerifyExpectedCall
line_items = DfpUtils.GetAllEntitiesByStatement(
client, 'LineItem', 'ORDER BY name')
self.assertEqual([rval], line_items)
def testGetAllEntitiesByStatementWithLimit(self):
"""Test whether GetAllEntitiesByStatement() fails when LIMIT is provided."""
headers = {
'email': 'fake_email',
'password': 'fake_password',
'applicationName': 'fake_application_name',
'authToken': ' '
}
client = DfpClient(headers=headers)
self.failUnlessRaises(
ValidationError, DfpUtils.GetAllEntitiesByStatement,
client, 'User', 'ORDER BY name LIMIT 1')
def testGetAllEntitiesByStatementWithService(self):
line_item_service = mock.Mock()
rval = 'Line items for everyone!'
def VerifyExpectedCall(arg):
self.assertEqual({'values': None,
'query': 'ORDER BY name LIMIT 500 OFFSET 0'}, arg)
return [{'results': [rval]}]
line_item_service._service_name = 'LineItemService'
line_item_service.GetLineItemsByStatement.side_effect = VerifyExpectedCall
line_items = DfpUtils.GetAllEntitiesByStatementWithService(
line_item_service, 'ORDER BY name')
self.assertEqual([rval], line_items)
def testDownloadPqlResultSetToCsv(self):
pql_service = mock.Mock()
csv_file = tempfile.NamedTemporaryFile()
csv_file_name = csv_file.name
header = [{'labelName': 'Some random header...'},
{'labelName': 'Another header...'}]
rval = [{'values': [{'value': 'Some random PQL response...',
'Value_Type': 'TextValue'},
{'value': {'date': {
'year': '1999', 'month': '04', 'day': '03'}},
'Value_Type': 'DateValue'},
{'value': '123',
'Value_Type': 'NumberValue'},
{'value': {'date': {'year': '2012',
'month': '11',
'day': '05'},
'hour': '12',
'minute': '12',
'second': '12',
'timeZoneID': 'PST8PDT'},
'Value_Type': 'DateTimeValue'}]},
{'values': [{'value': 'A second row of PQL response!',
'Value_Type': 'TextValue'},
{'value': {'date': {
'year': '2009', 'month': '02', 'day': '05'}},
'Value_Type': 'DateValue'},
{'value': '345',
'Value_Type': 'NumberValue'},
{'value': {'date': {'year': '2013',
'month': '01',
'day': '03'},
'hour': '02',
'minute': '02',
'second': '02',
'timeZoneID': 'GMT'},
'Value_Type': 'DateTimeValue'}]}]
def VerifyExpectedCall(arg):
self.assertEqual({'values': None,
'query': ('SELECT Id, Name FROM Line_Item '
'LIMIT 500 OFFSET 0')}, arg)
return [{'rows': rval, 'columnTypes': header}]
pql_service._service_name = 'PublisherQueryLanguageService'
pql_service.select.side_effect = VerifyExpectedCall
file_returned = DfpUtils.DownloadPqlResultSetToCsv(
pql_service, 'SELECT Id, Name FROM Line_Item', csv_file)
self.assertEqual(file_returned.name, csv_file_name)
self.assertEqual(file_returned.readline(),
('"Some random header...",'
'"Another header..."\r\n'))
self.assertEqual(file_returned.readline(),
('"Some random PQL response...",'
'"1999-04-03",'
'123,'
'"2012-11-05T12:12:12-08:00"\r\n'))
self.assertEqual(file_returned.readline(),
('"A second row of PQL response!",'
'"2009-02-05",'
'345,'
'"2013-01-03T02:02:02Z"\r\n'))
csv_file.close()
def testDownloadPqlResultToList(self):
pql_service = mock.Mock()
header = [{'labelName': 'Some random header...'},
{'labelName': 'Another header...'}]
rval = [{'values': [{'value': 'Some random PQL response...',
'Value_Type': 'TextValue'},
{'value': {'date': {
'year': '1999', 'month': '04', 'day': '03'}},
'Value_Type': 'DateValue'},
{'value': '123',
'Value_Type': 'NumberValue'},
{'value': {'date': {'year': '2012',
'month': '11',
'day': '05'},
'hour': '12',
'minute': '12',
'second': '12',
'timeZoneID': 'PST8PDT'},
'Value_Type': 'DateTimeValue'}]},
{'values': [{'value': 'A second row of PQL response!',
'Value_Type': 'TextValue'},
{'value': {'date': {
'year': '2009', 'month': '02', 'day': '05'}},
'Value_Type': 'DateValue'},
{'value': '345',
'Value_Type': 'NumberValue'},
{'value': {'date': {'year': '2013',
'month': '01',
'day': '03'},
'hour': '02',
'minute': '02',
'second': '02',
'timeZoneID': 'GMT'},
'Value_Type': 'DateTimeValue'}]}]
def VerifyExpectedCall(arg):
self.assertEqual({'values': None,
'query': ('SELECT Id, Name FROM Line_Item '
'LIMIT 500 OFFSET 0')}, arg)
return [{'rows': rval, 'columnTypes': header}]
pql_service._service_name = 'PublisherQueryLanguageService'
pql_service.select.side_effect = VerifyExpectedCall
result_set = DfpUtils.DownloadPqlResultToList(
pql_service, 'SELECT Id, Name FROM Line_Item')
row1 = [DfpUtils._ConvertValueForCsv(field) for field in rval[0]['values']]
row2 = [DfpUtils._ConvertValueForCsv(field) for field in rval[1]['values']]
self.assertEqual([[header[0]['labelName'], header[1]['labelName']],
row1, row2], result_set)
def testFilterStatement(self):
values = [{
'key': 'test_key',
'value': {
'xsi_type': 'TextValue',
'value': 'test_value'
}
}]
test_statement = DfpUtils.FilterStatement()
self.assertEqual(test_statement.ToStatement(),
{'query': ' LIMIT 500 OFFSET 0',
'values': None})
test_statement.IncreaseOffsetBy(30)
self.assertEqual(test_statement.ToStatement(),
{'query': ' LIMIT 500 OFFSET 30',
'values': None})
test_statement.offset = 123
test_statement.limit = 5
self.assertEqual(test_statement.ToStatement(),
{'query': ' LIMIT 5 OFFSET 123',
'values': None})
test_statement = DfpUtils.FilterStatement(
'SELECT Id FROM Line_Item WHERE key = :test_key', limit=300, offset=20,
values=values)
self.assertEqual(test_statement.ToStatement(),
{'query': 'SELECT Id FROM Line_Item WHERE key = '
':test_key LIMIT 300 OFFSET 20',
'values': values})
if __name__ == '__main__':
unittest.main()
| caioserra/apiAdwords | tests/adspygoogle/dfp/dfp_utils_test.py | Python | apache-2.0 | 10,061 |
# -*- coding: utf-8 -*-
"""Parser for Windows EventLog (EVT) files."""
import pyevt
from plaso import dependencies
from plaso.events import time_events
from plaso.lib import errors
from plaso.lib import eventdata
from plaso.lib import specification
from plaso.parsers import interface
from plaso.parsers import manager
dependencies.CheckModuleVersion(u'pyevt')
class WinEvtRecordEvent(time_events.PosixTimeEvent):
"""Convenience class for a Windows EventLog (EVT) record event.
Attributes:
computer_name: the computer name stored in the event record.
event_category: the event category.
event_identifier: the event identifier.
event_type: the event type.
facility: the event facility.
message_identifier: the event message identifier.
    offset: the data offset of the event record within the file.
record_number: the event record number.
recovered: boolean value to indicate the record was recovered.
severity: the event severity.
source_name: the name of the event source.
strings: array of event strings.
user_sid: the user security identifier (SID) stored in the event record.
"""
DATA_TYPE = u'windows:evt:record'
def __init__(
self, timestamp, timestamp_description, evt_record, record_number,
event_identifier, recovered=False):
"""Initializes the event.
Args:
timestamp: the POSIX timestamp value.
timestamp_description: a description string for the timestamp value.
evt_record: the EVT record (instance of pyevt.record).
record_number: the event record number.
event_identifier: the event identifier.
recovered: optional boolean value to indicate the record was recovered.
"""
super(WinEvtRecordEvent, self).__init__(timestamp, timestamp_description)
self.offset = evt_record.offset
self.recovered = recovered
if record_number is not None:
      self.record_number = record_number
# We want the event identifier to match the behavior of that of the EVTX
# event records.
if event_identifier is not None:
self.event_identifier = event_identifier & 0xffff
self.facility = (event_identifier >> 16) & 0x0fff
self.severity = event_identifier >> 30
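      # Worked example (hypothetical identifier, not taken from a real log):
      # for event_identifier 0xC0002719, event_identifier & 0xffff == 0x2719,
      # (event_identifier >> 16) & 0x0fff == 0x000 (facility) and
      # event_identifier >> 30 == 3 (an error-level event).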
self.message_identifier = event_identifier
self.event_type = evt_record.event_type
self.event_category = evt_record.event_category
self.source_name = evt_record.source_name
# Computer name is the value stored in the event record and does not
    # necessarily correspond to the actual hostname.
self.computer_name = evt_record.computer_name
self.user_sid = evt_record.user_security_identifier
self.strings = list(evt_record.strings)
class WinEvtParser(interface.SingleFileBaseParser):
"""Parses Windows EventLog (EVT) files."""
_INITIAL_FILE_OFFSET = None
NAME = u'winevt'
DESCRIPTION = u'Parser for Windows EventLog (EVT) files.'
@classmethod
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
The format specification (instance of FormatSpecification).
"""
format_specification = specification.FormatSpecification(cls.NAME)
format_specification.AddNewSignature(b'LfLe', offset=4)
return format_specification
def _ParseRecord(
self, parser_mediator, record_index, evt_record, recovered=False):
"""Extract data from a Windows EventLog (EVT) record.
Args:
parser_mediator: a parser mediator object (instance of ParserMediator).
record_index: the event record index.
evt_record: an event record (instance of pyevt.record).
recovered: optional boolean value to indicate the record was recovered.
"""
try:
record_number = evt_record.identifier
except OverflowError as exception:
parser_mediator.ProduceParseError((
u'unable to read record identifier from event record: {0:d} '
u'with error: {1:s}').format(record_index, exception))
record_number = None
try:
event_identifier = evt_record.event_identifier
except OverflowError as exception:
parser_mediator.ProduceParseError((
u'unable to read event identifier from event record: {0:d} '
u'with error: {1:s}').format(record_index, exception))
event_identifier = None
try:
creation_time = evt_record.get_creation_time_as_integer()
except OverflowError as exception:
parser_mediator.ProduceParseError((
u'unable to read creation time from event record: {0:d} '
u'with error: {1:s}').format(record_index, exception))
creation_time = None
if creation_time is not None:
event_object = WinEvtRecordEvent(
creation_time, eventdata.EventTimestamp.CREATION_TIME,
evt_record, record_number, event_identifier, recovered=recovered)
parser_mediator.ProduceEvent(event_object)
try:
written_time = evt_record.get_written_time_as_integer()
except OverflowError as exception:
parser_mediator.ProduceParseError((
u'unable to read written time from event record: {0:d} '
u'with error: {1:s}').format(record_index, exception))
written_time = None
if written_time is not None:
event_object = WinEvtRecordEvent(
written_time, eventdata.EventTimestamp.WRITTEN_TIME,
evt_record, record_number, event_identifier, recovered=recovered)
parser_mediator.ProduceEvent(event_object)
# TODO: what if both creation_time and written_time are None.
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
"""Parses a Windows EventLog (EVT) file-like object.
Args:
parser_mediator: a parser mediator object (instance of ParserMediator).
file_object: a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
evt_file = pyevt.file()
evt_file.set_ascii_codepage(parser_mediator.codepage)
try:
evt_file.open_file_object(file_object)
except IOError as exception:
display_name = parser_mediator.GetDisplayName()
raise errors.UnableToParseFile(
u'[{0:s}] unable to parse file {1:s} with error: {2:s}'.format(
self.NAME, display_name, exception))
for record_index, evt_record in enumerate(evt_file.records):
try:
self._ParseRecord(parser_mediator, record_index, evt_record)
except IOError as exception:
parser_mediator.ProduceParseError(
u'unable to parse event record: {0:d} with error: {1:s}'.format(
record_index, exception))
for record_index, evt_record in enumerate(evt_file.recovered_records):
try:
self._ParseRecord(
parser_mediator, record_index, evt_record, recovered=True)
except IOError as exception:
parser_mediator.ProduceParseError((
u'unable to parse recovered event record: {0:d} with error: '
u'{1:s}').format(record_index, exception))
evt_file.close()
manager.ParsersManager.RegisterParser(WinEvtParser)
| ostree/plaso | plaso/parsers/winevt.py | Python | apache-2.0 | 7,068 |
#!/usr/bin/env python
"""A simple wrapper to send email alerts."""
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import logging
import re
import smtplib
import socket
from grr.lib import config_lib
from grr.lib import registry
from grr.lib.rdfvalues import standard as rdf_standard
class EmailAlerterBase(object):
"""The email alerter base class."""
__metaclass__ = registry.MetaclassRegistry
def RemoveHtmlTags(self, data):
p = re.compile(r"<.*?>")
return p.sub("", data)
def AddEmailDomain(self, address):
suffix = config_lib.CONFIG["Logging.domain"]
if isinstance(address, rdf_standard.DomainEmailAddress):
address = str(address)
if suffix and "@" not in address:
return address + "@%s" % suffix
return address
def SplitEmailsAndAppendEmailDomain(self, address_list):
"""Splits a string of comma-separated emails, appending default domain."""
result = []
# Process email addresses, and build up a list.
if isinstance(address_list, rdf_standard.DomainEmailAddress):
address_list = [str(address_list)]
elif isinstance(address_list, basestring):
address_list = [address for address in address_list.split(",") if address]
for address in address_list:
result.append(self.AddEmailDomain(address))
return result
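    # For example (hypothetical values): with Logging.domain set to
    # "example.com", SplitEmailsAndAppendEmailDomain("alice,bob@other.org")
    # returns ["alice@example.com", "bob@other.org"].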
def SendEmail(self,
to_addresses,
from_address,
subject,
message,
attachments=None,
is_html=True,
cc_addresses=None,
message_id=None,
headers=None):
raise NotImplementedError()
class SMTPEmailAlerter(EmailAlerterBase):
def SendEmail(self,
to_addresses,
from_address,
subject,
message,
attachments=None,
is_html=True,
cc_addresses=None,
message_id=None,
headers=None):
"""This method sends an email notification.
Args:
to_addresses: [email protected] string, list of addresses as csv string,
or rdf_standard.DomainEmailAddress
from_address: [email protected] string
subject: email subject string
message: message contents string, as HTML or plain text
attachments: iterable of filename string and file data tuples,
e.g. {"/file/name/string": filedata}
is_html: true if message is in HTML format
cc_addresses: [email protected] string, or list of addresses as
csv string
message_id: smtp message_id. Used to enable conversation threading
headers: dict of str-> str, headers to set
Raises:
RuntimeError: for problems connecting to smtp server.
"""
headers = headers or {}
msg = MIMEMultipart("alternative")
if is_html:
text = self.RemoveHtmlTags(message)
part1 = MIMEText(text, "plain")
msg.attach(part1)
part2 = MIMEText(message, "html")
msg.attach(part2)
else:
part1 = MIMEText(message, "plain")
msg.attach(part1)
if attachments:
for file_name, file_data in attachments.iteritems():
part = MIMEBase("application", "octet-stream")
part.set_payload(file_data)
encoders.encode_base64(part)
part.add_header("Content-Disposition",
"attachment; filename=\"%s\"" % file_name)
msg.attach(part)
msg["Subject"] = subject
from_address = self.AddEmailDomain(from_address)
to_addresses = self.SplitEmailsAndAppendEmailDomain(to_addresses)
cc_addresses = self.SplitEmailsAndAppendEmailDomain(cc_addresses or "")
msg["From"] = from_address
msg["To"] = ",".join(to_addresses)
if cc_addresses:
msg["CC"] = ",".join(cc_addresses)
if message_id:
msg.add_header("Message-ID", message_id)
for header, value in headers.iteritems():
msg.add_header(header, value)
try:
s = smtplib.SMTP(config_lib.CONFIG["Worker.smtp_server"],
int(config_lib.CONFIG["Worker.smtp_port"]))
s.ehlo()
if config_lib.CONFIG["Worker.smtp_starttls"]:
s.starttls()
s.ehlo()
if (config_lib.CONFIG["Worker.smtp_user"] and
config_lib.CONFIG["Worker.smtp_password"]):
s.login(config_lib.CONFIG["Worker.smtp_user"],
config_lib.CONFIG["Worker.smtp_password"])
s.sendmail(from_address, to_addresses + cc_addresses, msg.as_string())
s.quit()
except (socket.error, smtplib.SMTPException) as e:
raise RuntimeError("Could not connect to SMTP server to send email. "
"Please check config option Worker.smtp_server. "
"Currently set to %s. Error: %s" %
(config_lib.CONFIG["Worker.smtp_server"], e))
EMAIL_ALERTER = None
class EmailAlerterInit(registry.InitHook):
def RunOnce(self):
global EMAIL_ALERTER
email_alerter_cls_name = config_lib.CONFIG["Server.email_alerter_class"]
logging.debug("Using email alerter: %s", email_alerter_cls_name)
cls = EmailAlerterBase.GetPlugin(email_alerter_cls_name)
EMAIL_ALERTER = cls()
| pidydx/grr | grr/lib/email_alerts.py | Python | apache-2.0 | 5,328 |
"""
(c) 2020 Kirk Byers <[email protected]>
(c) 2016 Elisa Jasinska <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import unicode_literals, print_function
from ansible.module_utils.basic import AnsibleModule
# FIX for Ansible 2.8 moving this function and making it private
# greatly simplified for napalm-ansible's use
def return_values(obj):
"""Return native stringified values from datastructures.
For use with removing sensitive values pre-jsonification."""
yield str(obj)
DOCUMENTATION = """
---
module: napalm_get_facts
author: "Elisa Jasinska (@fooelisa)"
version_added: "2.1"
short_description: "Gathers facts from a network device via napalm"
description:
- "Gathers facts from a network device via the Python module napalm"
requirements:
- napalm
options:
hostname:
description:
- IP or FQDN of the device you want to connect to
required: False
username:
description:
- Username
required: False
password:
description:
- Password
required: False
dev_os:
description:
- OS of the device
required: False
provider:
description:
- Dictionary which acts as a collection of arguments used to define the characteristics
of how to connect to the device.
Note - hostname, username, password and dev_os must be defined in either provider
or local param
Note - local param takes precedence, e.g. hostname is preferred to provider['hostname']
required: False
timeout:
description:
- Time in seconds to wait for the device to respond
required: False
default: 60
optional_args:
description:
- Dictionary of additional arguments passed to underlying driver
required: False
default: None
ignore_notimplemented:
description:
- "Ignores NotImplementedError for filters which aren't supported by the driver. Returns
invalid filters in a list called: not_implemented"
required: False
default: False
choices: [True, False]
filter:
description:
- "A list of facts to retreive from a device and provided though C(ansible_facts)
The list of facts available are maintained at:
http://napalm.readthedocs.io/en/latest/support/
Note- not all getters are implemented on all supported device types"
required: False
default: ['facts']
args:
description:
      - dictionary of keyword arguments to pass to the filter. The outer key is the name of
the getter (same as the filter)
required: False
default: None
"""
EXAMPLES = """
- name: get facts from device
napalm_get_facts:
hostname: '{{ inventory_hostname }}'
username: '{{ user }}'
dev_os: '{{ os }}'
password: '{{ passwd }}'
filter: ['facts']
register: result
- name: print data
debug:
var: result
- name: Getters
napalm_get_facts:
provider: "{{ ios_provider }}"
filter:
- "lldp_neighbors_detail"
- "interfaces"
- name: get facts from device
napalm_get_facts:
hostname: "{{ host }}"
username: "{{ user }}"
dev_os: "{{ os }}"
password: "{{ password }}"
optional_args:
port: "{{ port }}"
filter: ['facts', 'route_to', 'interfaces']
args:
route_to:
protocol: static
destination: 8.8.8.8
"""
RETURN = """
changed:
description: "whether the command has been executed on the device"
returned: always
type: bool
sample: True
ansible_facts:
description: "Facts gathered on the device provided via C(ansible_facts)"
returned: certain keys are returned depending on filter
type: dict
"""
napalm_found = False
try:
from napalm import get_network_driver
from napalm.base import ModuleImportError
napalm_found = True
except ImportError:
pass
def main():
module = AnsibleModule(
argument_spec=dict(
hostname=dict(type="str", required=False, aliases=["host"]),
username=dict(type="str", required=False),
password=dict(type="str", required=False, no_log=True),
provider=dict(type="dict", required=False),
dev_os=dict(type="str", required=False),
timeout=dict(type="int", required=False, default=60),
ignore_notimplemented=dict(type="bool", required=False, default=False),
args=dict(type="dict", required=False, default=None),
optional_args=dict(type="dict", required=False, default=None),
filter=dict(type="list", required=False, default=["facts"]),
),
supports_check_mode=True,
)
if not napalm_found:
module.fail_json(msg="the python module napalm is required")
provider = module.params["provider"] or {}
no_log = ["password", "secret"]
for param in no_log:
if provider.get(param):
module.no_log_values.update(return_values(provider[param]))
if provider.get("optional_args") and provider["optional_args"].get(param):
module.no_log_values.update(
return_values(provider["optional_args"].get(param))
)
if module.params.get("optional_args") and module.params["optional_args"].get(
param
):
module.no_log_values.update(
return_values(module.params["optional_args"].get(param))
)
# allow host or hostname
provider["hostname"] = provider.get("hostname", None) or provider.get("host", None)
# allow local params to override provider
for param, pvalue in provider.items():
if module.params.get(param) is not False:
module.params[param] = module.params.get(param) or pvalue
hostname = module.params["hostname"]
username = module.params["username"]
dev_os = module.params["dev_os"]
password = module.params["password"]
timeout = module.params["timeout"]
filter_list = module.params["filter"]
args = module.params["args"] or {}
ignore_notimplemented = module.params["ignore_notimplemented"]
implementation_errors = []
argument_check = {"hostname": hostname, "username": username, "dev_os": dev_os}
for key, val in argument_check.items():
if val is None:
module.fail_json(msg=str(key) + " is required")
if module.params["optional_args"] is None:
optional_args = {}
else:
optional_args = module.params["optional_args"]
try:
network_driver = get_network_driver(dev_os)
except ModuleImportError as e:
module.fail_json(msg="Failed to import napalm driver: " + str(e))
try:
device = network_driver(
hostname=hostname,
username=username,
password=password,
timeout=timeout,
optional_args=optional_args,
)
device.open()
except Exception as e:
module.fail_json(msg="cannot connect to device: " + str(e))
    # retrieve data from device
facts = {}
NAPALM_GETTERS = [
getter for getter in dir(network_driver) if getter.startswith("get_")
]
# Allow NX-OS checkpoint file to be retrieved via Ansible for use with replace config
NAPALM_GETTERS.append("get_checkpoint_file")
for getter in filter_list:
getter_function = "get_{}".format(getter)
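        # e.g. the filter "interfaces" maps to getter_function "get_interfaces",
        # which resolves to device.get_interfaces() below (a standard napalm getter).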
if getter_function not in NAPALM_GETTERS:
module.fail_json(msg="filter not recognized: " + getter)
try:
if getter_function == "get_checkpoint_file":
getter_function = "_get_checkpoint_file"
get_func = getattr(device, getter_function)
result = get_func(**args.get(getter, {}))
facts[getter] = result
except NotImplementedError:
if ignore_notimplemented:
implementation_errors.append(getter)
else:
module.fail_json(
msg="The filter {} is not supported in napalm-{} [get_{}()]".format(
getter, dev_os, getter
)
)
except Exception as e:
module.fail_json(
msg="[{}] cannot retrieve device data: ".format(getter) + str(e)
)
# close device connection
try:
device.close()
except Exception as e:
module.fail_json(msg="cannot close device connection: " + str(e))
new_facts = {}
# Prepend all facts with napalm_ for unique namespace
for filter_name, filter_value in facts.items():
# Make napalm get_facts to be directly accessible as variables
if filter_name == "facts":
for fact_name, fact_value in filter_value.items():
napalm_fact_name = "napalm_" + fact_name
new_facts[napalm_fact_name] = fact_value
new_filter_name = "napalm_" + filter_name
new_facts[new_filter_name] = filter_value
results = {"ansible_facts": new_facts}
if ignore_notimplemented:
results["not_implemented"] = sorted(implementation_errors)
module.exit_json(**results)
if __name__ == "__main__":
main()
| napalm-automation/napalm-ansible | napalm_ansible/modules/napalm_get_facts.py | Python | apache-2.0 | 9,930 |
"""Tests for the nut integration."""
import json
from unittest.mock import MagicMock, patch
from homeassistant.components.nut.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_RESOURCES
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
def _get_mock_pynutclient(list_vars=None, list_ups=None):
pynutclient = MagicMock()
type(pynutclient).list_ups = MagicMock(return_value=list_ups)
type(pynutclient).list_vars = MagicMock(return_value=list_vars)
return pynutclient
async def async_init_integration(
hass: HomeAssistant, ups_fixture: str, resources: list, add_options: bool = False
) -> MockConfigEntry:
"""Set up the nexia integration in Home Assistant."""
ups_fixture = f"nut/{ups_fixture}.json"
list_vars = json.loads(load_fixture(ups_fixture))
mock_pynut = _get_mock_pynutclient(list_ups={"ups1": "UPS 1"}, list_vars=list_vars)
with patch(
"homeassistant.components.nut.PyNUTClient",
return_value=mock_pynut,
):
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: "mock", CONF_PORT: "mock", CONF_RESOURCES: resources},
options={CONF_RESOURCES: resources} if add_options else {},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
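# Usage sketch from a test (the fixture name and resource list are assumptions):
#
#   entry = await async_init_integration(hass, "PR3000RT2U", ["battery.charge"])
#
# The mocked PyNUTClient reports a single "ups1" UPS, so assertions can then be
# made against the states created for that device.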
| aronsky/home-assistant | tests/components/nut/util.py | Python | apache-2.0 | 1,446 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-06 05:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0002_hackathon_name'),
]
operations = [
migrations.AlterField(
model_name='hackathon',
name='name',
field=models.CharField(max_length=100, unique=True),
),
]
| andrewsosa/hackfsu_com | api/api/migrations/0003_auto_20161206_0538.py | Python | apache-2.0 | 458 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from . import ObjectCreationParameters
__author__ = 'Shamal Faily'
class ResponseParameters(ObjectCreationParameters.ObjectCreationParameters):
def __init__(self,respName,respRisk,tags,cProps,rType):
ObjectCreationParameters.ObjectCreationParameters.__init__(self)
self.theName = respName
self.theTags = tags
self.theRisk = respRisk
self.theEnvironmentProperties = cProps
self.theResponseType = rType
def name(self): return self.theName
def tags(self): return self.theTags
def risk(self): return self.theRisk
def environmentProperties(self): return self.theEnvironmentProperties
def responseType(self): return self.theResponseType
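# Construction sketch (argument values are illustrative only):
#   ResponseParameters('Harden server', 'Server compromise', ['mitigation'], [], 'Prevent')
# i.e. (name, risk, tags, environment properties, response type).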
| nathanbjenx/cairis | cairis/core/ResponseParameters.py | Python | apache-2.0 | 1,469 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import profile
class Profile(profile.Profile):
def __init__(self, region, plugins=None, **kwargs):
super(Profile, self).__init__(plugins=plugins or ['rackspace'])
self.set_region(self.ALL, region)
global_services = ('cloudMetrics', 'cloudMetricsIngest',
'cloudMonitoring', 'rackCDN')
for service in self.get_services():
if service.service_name in global_services:
service.region = None
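# Construction sketch (the region string is an assumption):
#   profile = Profile('DFW')
# The region is applied to all services, then cleared for the global services
# listed above (cloudMetrics, cloudMetricsIngest, cloudMonitoring, rackCDN).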
| briancurtin/rackspace-sdk-plugin | rackspace/profile.py | Python | apache-2.0 | 1,044 |
# Copyright (C) 2013-2015 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013-2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# there are two representations of value and mask this module deals with.
#
# "user"
# (value, mask) or value. the latter means no mask.
# value and mask are strings.
#
# "internal"
# value and mask are on-wire bytes.
# mask is None if no mask.
# There are two types of OXM/NXM headers.
#
# 32-bit OXM/NXM header
# +-------------------------------+-------------+-+---------------+
# | class | field |m| length |
# +-------------------------------+-------------+-+---------------+
#
# 64-bit experimenter OXM header
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | field |m| length |
# +-------------------------------+-------------+-+---------------+
# | experimenter ID |
# +---------------------------------------------------------------+
# NOTE: EXT-256 had a variation of experimenter OXM header.
# It has been rectified since then. Currently this implementation
# supports only the old version.
#
# ONF EXT-256 (old, exp_type = 2560)
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | ????? |m| length |
# +-------------------------------+-------------+-+---------------+
# | experimenter ID (ONF_EXPERIMENTER_ID) |
# +-------------------------------+---------------+---------------+
# | exp_type (PBB_UCA=2560) | pbb_uca |
# +-------------------------------+---------------+
#
# ONF EXT-256 (new, oxm_field = 41)
# +-------------------------------+-------------+-+---------------+
# | class (OFPXMC_EXPERIMENTER) | PBB_UCA=41 |m| length |
# +-------------------------------+-------------+-+---------------+
# | experimenter ID (ONF_EXPERIMENTER_ID) |
# +-------------------------------+---------------+---------------+
# | reserved, should be zero | pbb_uca |
# +-------------------------------+---------------+
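#
# For orientation, the standard OpenFlow 1.3 value OXM_OF_IN_PORT packs as
# class 0x8000, field 0, hasmask 0, length 4, so its 32-bit header is
# (0x8000 << 16) | (0 << 9) | (0 << 8) | 4 == 0x80000004, which is what the
# serialization code below produces via (oxm_type << 9) | (hasmask << 8) | length.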
import itertools
import struct
from ryu.ofproto import ofproto_common
from ryu.lib.pack_utils import msg_pack_into
from ryu.lib import type_desc
OFPXMC_NXM_0 = 0 # Nicira Extended Match (NXM_OF_)
OFPXMC_NXM_1 = 1 # Nicira Extended Match (NXM_NX_)
OFPXMC_OPENFLOW_BASIC = 0x8000
OFPXMC_PACKET_REGS = 0x8001
OFPXMC_EXPERIMENTER = 0xffff
class _OxmClass(object):
def __init__(self, name, num, type_):
self.name = name
self.oxm_type = num | (self._class << 7)
# TODO(yamamoto): Clean this up later.
# Probably when we drop EXT-256 style experimenter OXMs.
self.num = self.oxm_type
self.type = type_
class OpenFlowBasic(_OxmClass):
_class = OFPXMC_OPENFLOW_BASIC
class PacketRegs(_OxmClass):
_class = OFPXMC_PACKET_REGS
class _Experimenter(_OxmClass):
_class = OFPXMC_EXPERIMENTER
def __init__(self, name, num, type_):
super(_Experimenter, self).__init__(name, num, type_)
self.num = (self.experimenter_id, self.oxm_type)
class ONFExperimenter(_Experimenter):
experimenter_id = ofproto_common.ONF_EXPERIMENTER_ID
class OldONFExperimenter(_Experimenter):
# This class is for the old version of EXT-256
experimenter_id = ofproto_common.ONF_EXPERIMENTER_ID
def __init__(self, name, num, type_):
super(OldONFExperimenter, self).__init__(name, 0, type_)
self.num = (self.experimenter_id, num)
self.exp_type = num
class OpenStateExperimenter(_Experimenter):
experimenter_id = ofproto_common.OPENSTATE_EXPERIMENTER_ID
class NiciraExperimenter(_Experimenter):
experimenter_id = ofproto_common.NX_EXPERIMENTER_ID
class NiciraExtended0(_OxmClass):
"""Nicira Extended Match (NXM_0)
    The NXM header format is the same as that of 32-bit (non-experimenter) OXMs.
"""
_class = OFPXMC_NXM_0
class NiciraExtended1(_OxmClass):
"""Nicira Extended Match (NXM_1)
    The NXM header format is the same as that of 32-bit (non-experimenter) OXMs.
"""
_class = OFPXMC_NXM_1
def generate(modname):
import sys
import functools
mod = sys.modules[modname]
def add_attr(k, v):
setattr(mod, k, v)
for i in mod.oxm_types:
uk = i.name.upper()
if isinstance(i.num, tuple):
continue
oxm_class = i.num >> 7
if oxm_class != OFPXMC_OPENFLOW_BASIC:
continue
ofpxmt = i.num & 0x3f
td = i.type
add_attr('OFPXMT_OFB_' + uk, ofpxmt)
add_attr('OXM_OF_' + uk, mod.oxm_tlv_header(ofpxmt, td.size))
add_attr('OXM_OF_' + uk + '_W', mod.oxm_tlv_header_w(ofpxmt, td.size))
name_to_field = dict((f.name, f) for f in mod.oxm_types)
num_to_field = dict((f.num, f) for f in mod.oxm_types)
add_attr('oxm_from_user', functools.partial(_from_user, name_to_field))
add_attr('oxm_from_user_header',
functools.partial(_from_user_header, name_to_field))
add_attr('oxm_to_user', functools.partial(_to_user, num_to_field))
add_attr('oxm_to_user_header',
functools.partial(_to_user_header, num_to_field))
add_attr('_oxm_field_desc', functools.partial(_field_desc, num_to_field))
add_attr('oxm_normalize_user', functools.partial(_normalize_user, mod))
add_attr('oxm_parse', functools.partial(_parse, mod))
add_attr('oxm_parse_header', functools.partial(_parse_header, mod))
add_attr('oxm_serialize', functools.partial(_serialize, mod))
add_attr('oxm_serialize_header', functools.partial(_serialize_header, mod))
add_attr('oxm_to_jsondict', _to_jsondict)
add_attr('oxm_from_jsondict', _from_jsondict)
def _get_field_info_by_name(name_to_field, name):
try:
f = name_to_field[name]
t = f.type
num = f.num
except KeyError:
t = type_desc.UnknownType
if name.startswith('field_'):
num = int(name.split('_')[1])
else:
raise KeyError('unknown match field ' + name)
return num, t
def _from_user_header(name_to_field, name):
(num, t) = _get_field_info_by_name(name_to_field, name)
return num
def _from_user(name_to_field, name, user_value):
(num, t) = _get_field_info_by_name(name_to_field, name)
# the 'list' case below is a bit hack; json.dumps silently maps
# python tuples into json lists.
if isinstance(user_value, (tuple, list)):
(value, mask) = user_value
else:
value = user_value
mask = None
if value is not None:
value = t.from_user(value)
if mask is not None:
mask = t.from_user(mask)
return num, value, mask
def _get_field_info_by_number(num_to_field, n):
try:
f = num_to_field[n]
t = f.type
name = f.name
except KeyError:
t = type_desc.UnknownType
name = 'field_%d' % (n,)
return name, t
def _to_user_header(num_to_field, n):
(name, t) = _get_field_info_by_number(num_to_field, n)
return name
def _to_user(num_to_field, n, v, m):
(name, t) = _get_field_info_by_number(num_to_field, n)
if v is not None:
if hasattr(t, 'size') and t.size != len(v):
raise Exception(
'Unexpected OXM payload length %d for %s (expected %d)'
% (len(v), name, t.size))
value = t.to_user(v)
else:
value = None
if m is None:
user_value = value
else:
user_value = (value, t.to_user(m))
return name, user_value
def _field_desc(num_to_field, n):
return num_to_field[n]
def _normalize_user(mod, k, uv):
(n, v, m) = mod.oxm_from_user(k, uv)
# apply mask
if m is not None:
v = ''.join(chr(ord(x) & ord(y)) for (x, y) in itertools.izip(v, m))
(k2, uv2) = mod.oxm_to_user(n, v, m)
assert k2 == k
return (k2, uv2)
def _parse_header_impl(mod, buf, offset):
hdr_pack_str = '!I'
(header, ) = struct.unpack_from(hdr_pack_str, buf, offset)
hdr_len = struct.calcsize(hdr_pack_str)
oxm_type = header >> 9 # class|field
oxm_hasmask = mod.oxm_tlv_header_extract_hasmask(header)
oxm_class = oxm_type >> 7
oxm_length = header & 0xff
if oxm_class == OFPXMC_EXPERIMENTER:
# Experimenter OXMs have 64-bit header. (vs 32-bit for other OXMs)
exp_hdr_pack_str = '!I' # experimenter_id
(exp_id, ) = struct.unpack_from(exp_hdr_pack_str, buf,
offset + hdr_len)
exp_hdr_len = struct.calcsize(exp_hdr_pack_str)
assert exp_hdr_len == 4
oxm_field = oxm_type & 0x7f
if exp_id == ofproto_common.ONF_EXPERIMENTER_ID and oxm_field == 0:
# XXX
# This block implements EXT-256 style experimenter OXM.
onf_exp_type_pack_str = '!H'
(exp_type, ) = struct.unpack_from(onf_exp_type_pack_str, buf,
offset + hdr_len + exp_hdr_len)
exp_hdr_len += struct.calcsize(onf_exp_type_pack_str)
assert exp_hdr_len == 4 + 2
num = (exp_id, exp_type)
elif exp_id == ofproto_common.OPENSTATE_EXPERIMENTER_ID:
num = oxm_type
else:
num = (exp_id, oxm_type)
else:
num = oxm_type
exp_hdr_len = 0
value_len = oxm_length - exp_hdr_len
if oxm_hasmask:
value_len //= 2
assert value_len > 0
field_len = hdr_len + oxm_length
total_hdr_len = hdr_len + exp_hdr_len
return num, total_hdr_len, oxm_hasmask, value_len, field_len
def _parse_header(mod, buf, offset):
(oxm_type_num, total_hdr_len, hasmask, value_len,
field_len) = _parse_header_impl(mod, buf, offset)
return oxm_type_num, field_len - value_len
def _parse(mod, buf, offset):
(oxm_type_num, total_hdr_len, hasmask, value_len,
field_len) = _parse_header_impl(mod, buf, offset)
# Note: OXM payload length (oxm_len) includes Experimenter ID (exp_hdr_len)
# for experimenter OXMs.
value_offset = offset + total_hdr_len
value_pack_str = '!%ds' % value_len
assert struct.calcsize(value_pack_str) == value_len
(value, ) = struct.unpack_from(value_pack_str, buf, value_offset)
if hasmask:
(mask, ) = struct.unpack_from(value_pack_str, buf,
value_offset + value_len)
else:
mask = None
return oxm_type_num, value, mask, field_len
def _make_exp_hdr(mod, n):
exp_hdr = bytearray()
try:
desc = mod._oxm_field_desc(n)
except KeyError:
return n, exp_hdr
if isinstance(desc, _Experimenter): # XXX
(exp_id, exp_type) = n
assert desc.experimenter_id == exp_id
if isinstance(desc, OldONFExperimenter): # XXX
# XXX
# This block implements EXT-256 style experimenter OXM.
exp_hdr_pack_str = '!IH' # experimenter_id, exp_type
msg_pack_into(exp_hdr_pack_str, exp_hdr, 0,
desc.experimenter_id, desc.exp_type)
else:
assert desc.oxm_type == exp_type
exp_hdr_pack_str = '!I' # experimenter_id
msg_pack_into(exp_hdr_pack_str, exp_hdr, 0,
desc.experimenter_id)
assert len(exp_hdr) == struct.calcsize(exp_hdr_pack_str)
n = desc.oxm_type
assert (n >> 7) == OFPXMC_EXPERIMENTER
return n, exp_hdr
def _serialize_header(mod, n, buf, offset):
try:
desc = mod._oxm_field_desc(n)
value_len = desc.type.size
except KeyError:
value_len = 0
n, exp_hdr = _make_exp_hdr(mod, n)
exp_hdr_len = len(exp_hdr)
pack_str = "!I%ds" % (exp_hdr_len,)
msg_pack_into(pack_str, buf, offset,
(n << 9) | (0 << 8) | (exp_hdr_len + value_len),
bytes(exp_hdr))
return struct.calcsize(pack_str)
def _serialize(mod, n, value, mask, buf, offset):
n, exp_hdr = _make_exp_hdr(mod, n)
exp_hdr_len = len(exp_hdr)
value_len = len(value)
if mask:
assert value_len == len(mask)
pack_str = "!I%ds%ds%ds" % (exp_hdr_len, value_len, len(mask))
msg_pack_into(pack_str, buf, offset,
(n << 9) | (1 << 8) | (exp_hdr_len + value_len * 2),
bytes(exp_hdr), value, mask)
else:
pack_str = "!I%ds%ds" % (exp_hdr_len, value_len,)
msg_pack_into(pack_str, buf, offset,
(n << 9) | (0 << 8) | (exp_hdr_len + value_len),
bytes(exp_hdr), value)
return struct.calcsize(pack_str)
def _to_jsondict(k, uv):
if isinstance(uv, tuple):
(value, mask) = uv
else:
value = uv
mask = None
return {"OXMTlv": {"field": k, "value": value, "mask": mask}}
def _from_jsondict(j):
tlv = j['OXMTlv']
field = tlv['field']
value = tlv['value']
mask = tlv.get('mask')
if mask is None:
uv = value
else:
uv = (value, mask)
return (field, uv)
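# Round-trip sketch (the field name and addresses are illustrative):
#   _to_jsondict('ipv4_src', ('192.0.2.1', '255.255.255.0')) returns
#   {"OXMTlv": {"field": "ipv4_src", "value": "192.0.2.1", "mask": "255.255.255.0"}}
#   and _from_jsondict() on that dict returns ('ipv4_src', ('192.0.2.1', '255.255.255.0')).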
| Tesi-Luca-Davide/ryu | ryu/ofproto/oxm_fields.py | Python | apache-2.0 | 13,622 |
from plow.gui.manifest import QtCore, QtGui
from plow.gui.util import formatDateTime, formatDuration
__all__ = [
"Text",
"Number",
"Decimal",
"DateTime",
"PillWidget",
"Checkbox"
]
class FormWidget(QtGui.QWidget):
"""
The base class for all form widgets.
"""
__LOCKED_PIX = None
def __init__(self, value, parent=None):
QtGui.QWidget.__init__(self, parent)
layout = QtGui.QGridLayout(self)
layout.setSpacing(0)
layout.setContentsMargins(0, 0, 0, 0)
self._widget = None
self.__status = QtGui.QLabel(self)
self.__status.setContentsMargins(5, 0, 0, 0)
layout.addWidget(self.__status, 0, 2)
if not FormWidget.__LOCKED_PIX:
FormWidget.__LOCKED_PIX = QtGui.QPixmap(":/images/locked.png")
FormWidget.__LOCKED_PIX = FormWidget.__LOCKED_PIX.scaled(
QtCore.QSize(12, 12), QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
def setReadOnly(self, value):
self._setReadOnly(value)
if value:
self.__status.setPixmap(FormWidget.__LOCKED_PIX)
else:
self.__status.setText("")
def setSuffix(self, value):
self._setSuffix(value)
def _setSuffix(self, value):
self.layout().addWidget(QtGui.QLabel(value), 0, 1)
def _setReadOnly(self, value):
pass
def setWidget(self, widget):
self._widget = widget
self.layout().addWidget(widget, 0, 0)
class Text(FormWidget):
def __init__(self, text, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLineEdit(text, self))
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
self._widget.setCursorPosition(1)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
class Number(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
widget = QtGui.QSpinBox(self)
widget.setMinimum(0)
widget.setMaximum(1000000)
widget.setMinimumWidth(100)
widget.setValue(value)
self.setWidget(widget)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
self._widget.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
def _setSuffix(self, value):
self._widget.setSuffix(value)
class Decimal(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
widget = QtGui.QDoubleSpinBox(self)
widget.setValue(value)
self.setWidget(widget)
widget.setMinimumWidth(100)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
self._widget.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
def _setSuffix(self, value):
self._widget.setSuffix(value)
class DateTime(FormWidget):
def __init__(self, value, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLabel(formatDateTime(value), self))
class Duration(FormWidget):
def __init__(self, times, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QLabel(formatDuration(times[0], times[1]), self))
class PillWidget(FormWidget):
def __init__(self, value, parent):
FormWidget.__init__(self, parent)
data, color = value
self.label = QtGui.QLabel(data, self)
self.label.setStyleSheet("border: 1px solid #222222; background-color: %s; border-radius: 6px;" % color)
self.label.setMinimumWidth(100)
self.setWidget(self.label)
class Checkbox(FormWidget):
def __init__(self, bvalue, parent=None):
FormWidget.__init__(self, parent)
self.setWidget(QtGui.QCheckBox(self))
self._widget.setCheckState(QtCore.Qt.Checked if bvalue else QtCore.Qt.Unchecked)
self._widget.setFocusPolicy(QtCore.Qt.NoFocus)
def _setReadOnly(self, value):
self._widget.setReadOnly(value)
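# Usage sketch (values are illustrative): each widget wraps a Qt control and
# shares the FormWidget helpers, e.g.
#   size = Decimal(1.5)
#   size.setSuffix(" GB")
#   size.setReadOnly(True)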
| chadmv/plow | lib/python/plow/gui/form/fwidgets.py | Python | apache-2.0 | 4,081 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.apphosting.tools.devappserver2.module."""
import httplib
import logging
import os
import re
import time
import unittest
import google
import mox
from google.appengine.api import appinfo
from google.appengine.api import request_info
from google.appengine.tools.devappserver2 import api_server
from google.appengine.tools.devappserver2 import application_configuration
from google.appengine.tools.devappserver2 import constants
from google.appengine.tools.devappserver2 import dispatcher
from google.appengine.tools.devappserver2 import instance
from google.appengine.tools.devappserver2 import module
from google.appengine.tools.devappserver2 import start_response_utils
from google.appengine.tools.devappserver2 import wsgi_server
class ModuleConfigurationStub(object):
def __init__(self,
application_root='/root',
application='app',
module_name='default',
automatic_scaling=appinfo.AutomaticScaling(),
version='version',
runtime='python27',
threadsafe=False,
skip_files='',
inbound_services=['warmup'],
handlers=[appinfo.URLMap(url=r'/python-(.*)',
script=r'\1.py')],
normalized_libraries=None,
env_variables=None,
manual_scaling=None,
basic_scaling=None):
self.application_root = application_root
self.application = application
self.module_name = module_name
self.automatic_scaling = automatic_scaling
self.manual_scaling = manual_scaling
self.basic_scaling = basic_scaling
self.major_version = version
self.runtime = runtime
self.threadsafe = threadsafe
self.skip_files = skip_files
self.inbound_services = inbound_services
self.handlers = handlers
self.normalized_libraries = normalized_libraries or []
self.env_variables = env_variables or []
self.version_id = '%s:%s.%s' % (module_name, version, '12345')
self.is_backend = False
def check_for_updates(self):
return set()
class ModuleFacade(module.Module):
def __init__(self,
module_configuration=ModuleConfigurationStub(),
instance_factory=None,
ready=True,
allow_skipped_files=False):
super(ModuleFacade, self).__init__(
module_configuration,
host='fakehost',
balanced_port=0,
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_executable_path='/usr/bin/php-cgi',
enable_php_remote_debugging=False,
python_config=None,
cloud_sql_config=None,
default_version_port=8080,
port_registry=dispatcher.PortRegistry(),
request_data=None,
dispatcher=None,
max_instances=None,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=allow_skipped_files)
if instance_factory is not None:
self._instance_factory = instance_factory
self._ready = ready
@property
def ready(self):
return self._ready
@property
def balanced_port(self):
return self._balanced_port
class AutoScalingModuleFacade(module.AutoScalingModule):
def __init__(self,
module_configuration=ModuleConfigurationStub(),
balanced_port=0,
instance_factory=None,
max_instances=None,
ready=True):
super(AutoScalingModuleFacade, self).__init__(
module_configuration,
host='fakehost',
balanced_port=balanced_port,
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_executable_path='/usr/bin/php-cgi',
enable_php_remote_debugging=False,
python_config=None,
cloud_sql_config=None,
default_version_port=8080,
port_registry=dispatcher.PortRegistry(),
request_data=None,
dispatcher=None,
max_instances=max_instances,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=False)
if instance_factory is not None:
self._instance_factory = instance_factory
self._ready = ready
@property
def ready(self):
return self._ready
@property
def balanced_port(self):
return self._balanced_port
class ManualScalingModuleFacade(module.ManualScalingModule):
def __init__(self,
module_configuration=ModuleConfigurationStub(),
balanced_port=0,
instance_factory=None,
ready=True):
super(ManualScalingModuleFacade, self).__init__(
module_configuration,
host='fakehost',
balanced_port=balanced_port,
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_executable_path='/usr/bin/php-cgi',
enable_php_remote_debugging=False,
python_config=None,
cloud_sql_config=None,
default_version_port=8080,
port_registry=dispatcher.PortRegistry(),
request_data=None,
dispatcher=None,
max_instances=None,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=False)
if instance_factory is not None:
self._instance_factory = instance_factory
self._ready = ready
@property
def ready(self):
return self._ready
@property
def balanced_port(self):
return self._balanced_port
class BasicScalingModuleFacade(module.BasicScalingModule):
def __init__(self,
host='fakehost',
module_configuration=ModuleConfigurationStub(),
balanced_port=0,
instance_factory=None,
ready=True):
super(BasicScalingModuleFacade, self).__init__(
module_configuration,
host,
balanced_port=balanced_port,
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_executable_path='/usr/bin/php-cgi',
enable_php_remote_debugging=False,
python_config=None,
cloud_sql_config=None,
default_version_port=8080,
port_registry=dispatcher.PortRegistry(),
request_data=None,
dispatcher=None,
max_instances=None,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=False)
if instance_factory is not None:
self._instance_factory = instance_factory
self._ready = ready
@property
def ready(self):
return self._ready
@property
def balanced_port(self):
return self._balanced_port
class BuildRequestEnvironTest(unittest.TestCase):
def setUp(self):
api_server.test_setup_stubs()
self.module = ModuleFacade()
def test_build_request_environ(self):
expected_environ = {
constants.FAKE_IS_ADMIN_HEADER: '1',
'HTTP_HOST': 'fakehost:8080',
'HTTP_HEADER': 'Value',
'HTTP_OTHER': 'Values',
'CONTENT_LENGTH': '4',
'PATH_INFO': '/foo',
'QUERY_STRING': 'bar=baz',
'REQUEST_METHOD': 'PUT',
'REMOTE_ADDR': '1.2.3.4',
'SERVER_NAME': 'fakehost',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.multithread': True,
'wsgi.multiprocess': True}
environ = self.module.build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 8080)
self.assertEqual('', environ.pop('wsgi.errors').getvalue())
self.assertEqual('body', environ.pop('wsgi.input').getvalue())
self.assertEqual(expected_environ, environ)
def test_build_request_environ_fake_is_logged_in(self):
expected_environ = {
constants.FAKE_IS_ADMIN_HEADER: '1',
constants.FAKE_LOGGED_IN_HEADER: '1',
'HTTP_HOST': 'fakehost:8080',
'HTTP_HEADER': 'Value',
'HTTP_OTHER': 'Values',
'CONTENT_LENGTH': '4',
'PATH_INFO': '/foo',
'QUERY_STRING': 'bar=baz',
'REQUEST_METHOD': 'PUT',
'REMOTE_ADDR': '1.2.3.4',
'SERVER_NAME': 'fakehost',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.multithread': True,
'wsgi.multiprocess': True}
environ = self.module.build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 8080, fake_login=True)
self.assertEqual('', environ.pop('wsgi.errors').getvalue())
self.assertEqual('body', environ.pop('wsgi.input').getvalue())
self.assertEqual(expected_environ, environ)
def test_build_request_environ_unicode_body(self):
expected_environ = {
constants.FAKE_IS_ADMIN_HEADER: '1',
'HTTP_HOST': 'fakehost',
'HTTP_HEADER': 'Value',
'HTTP_OTHER': 'Values',
'CONTENT_LENGTH': '4',
'PATH_INFO': '/foo',
'QUERY_STRING': 'bar=baz',
'REQUEST_METHOD': 'PUT',
'REMOTE_ADDR': '1.2.3.4',
'SERVER_NAME': 'fakehost',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.multithread': True,
'wsgi.multiprocess': True}
environ = self.module.build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
u'body', '1.2.3.4', 80)
self.assertEqual('', environ.pop('wsgi.errors').getvalue())
self.assertEqual('body', environ.pop('wsgi.input').getvalue())
self.assertEqual(expected_environ, environ)
class TestModuleCreateUrlHandlers(unittest.TestCase):
"""Tests for module.Module._create_url_handlers."""
def setUp(self):
self.module_configuration = ModuleConfigurationStub()
self.instance_factory = instance.InstanceFactory(None, 1)
self.servr = ModuleFacade(instance_factory=self.instance_factory,
module_configuration=self.module_configuration)
self.instance_factory.START_URL_MAP = appinfo.URLMap(
url='/_ah/start',
script='start_handler',
login='admin')
self.instance_factory.WARMUP_URL_MAP = appinfo.URLMap(
url='/_ah/warmup',
script='warmup_handler',
login='admin')
def test_match_all(self):
self.module_configuration.handlers = [appinfo.URLMap(url=r'.*',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(6, len(handlers))
def test_match_start_only(self):
self.module_configuration.handlers = [appinfo.URLMap(url=r'/_ah/start',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(7, len(handlers))
self.assertEqual(self.instance_factory.WARMUP_URL_MAP, handlers[0].url_map)
def test_match_warmup_only(self):
self.module_configuration.handlers = [appinfo.URLMap(url=r'/_ah/warmup',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(7, len(handlers))
self.assertEqual(self.instance_factory.START_URL_MAP, handlers[0].url_map)
def test_match_neither_warmup_nor_start(self):
self.module_configuration.handlers = [appinfo.URLMap(url=r'/',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(8, len(handlers))
self.assertEqual(self.instance_factory.WARMUP_URL_MAP, handlers[0].url_map)
self.assertEqual(self.instance_factory.START_URL_MAP, handlers[1].url_map)
def test_match_static_only(self):
self.module_configuration.handlers = [
appinfo.URLMap(url=r'/_ah/start', static_dir='foo'),
appinfo.URLMap(url=r'/_ah/warmup', static_files='foo', upload='foo')]
handlers = self.servr._create_url_handlers()
self.assertEqual(9, len(handlers))
self.assertEqual(self.instance_factory.WARMUP_URL_MAP, handlers[0].url_map)
self.assertEqual(self.instance_factory.START_URL_MAP, handlers[1].url_map)
def test_match_start_only_no_inbound_warmup(self):
self.module_configuration.inbound_services = None
self.module_configuration.handlers = [appinfo.URLMap(url=r'/_ah/start',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(6, len(handlers))
def test_match_warmup_only_no_inbound_warmup(self):
self.module_configuration.inbound_services = None
self.module_configuration.handlers = [appinfo.URLMap(url=r'/_ah/warmup',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(7, len(handlers))
self.assertEqual(self.instance_factory.START_URL_MAP, handlers[0].url_map)
def test_match_neither_warmup_nor_start_no_inbound_warmup(self):
self.module_configuration.inbound_services = None
self.module_configuration.handlers = [appinfo.URLMap(url=r'/',
script=r'foo.py')]
handlers = self.servr._create_url_handlers()
self.assertEqual(7, len(handlers))
self.assertEqual(self.instance_factory.START_URL_MAP, handlers[0].url_map)
class TestModuleGetRuntimeConfig(unittest.TestCase):
"""Tests for module.Module._get_runtime_config."""
def setUp(self):
self.module_configuration = ModuleConfigurationStub(skip_files='foo')
self.module_configuration.handlers = [
appinfo.URLMap(url=r'/static', static_dir='static'),
appinfo.URLMap(url=r'/app_read_static', static_dir='app_read_static',
application_readable=True),
appinfo.URLMap(url=r'/static_images/*.png',
static_files=r'static_images/\\1',
upload=r'static_images/*.png'),
appinfo.URLMap(url=r'/app_readable_static_images/*.png',
static_files=r'app_readable_static_images/\\1',
upload=r'app_readable_static_images/*.png',
application_readable=True),
]
self.instance_factory = instance.InstanceFactory(None, 1)
def test_static_files_regex(self):
servr = ModuleFacade(instance_factory=self.instance_factory,
module_configuration=self.module_configuration)
config = servr._get_runtime_config()
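    # Only handlers that are not application_readable are folded into the
    # static_files regex; app-readable static content stays out of it.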
self.assertEqual(r'^(static%s.*)|(static_images/*.png)$' %
re.escape(os.path.sep),
config.static_files)
def test_allow_skipped_files(self):
servr = ModuleFacade(instance_factory=self.instance_factory,
module_configuration=self.module_configuration,
allow_skipped_files=True)
config = servr._get_runtime_config()
self.assertFalse(config.HasField('skip_files'))
self.assertFalse(config.HasField('static_files'))
class TestModuleShutdownInstance(unittest.TestCase):
"""Tests for module.Module._shutdown_instance."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.module_configuration = ModuleConfigurationStub()
self.instance_factory = instance.InstanceFactory(None, 1)
self.servr = ModuleFacade(instance_factory=self.instance_factory,
module_configuration=self.module_configuration)
self.mox.StubOutWithMock(logging, 'exception')
self.mox.StubOutWithMock(self.servr, '_handle_request')
self.mox.StubOutWithMock(self.servr._quit_event, 'wait')
self.mox.StubOutWithMock(module.Module, 'build_request_environ')
self.inst = self.mox.CreateMock(instance.Instance)
self.time = 0
self.mox.stubs.Set(time, 'time', lambda: self.time)
def tearDown(self):
self.mox.UnsetStubs()
def test_shutdown_instance(self):
def advance_time(*unused_args, **unused_kwargs):
self.time += 10
environ = object()
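    # _shutdown_instance sends a fake, logged-in /_ah/stop request; it eats
    # 10 simulated seconds, the module waits on the quit event for the
    # remaining 20 seconds, and the instance is finally force-quit.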
self.servr.build_request_environ(
'GET', '/_ah/stop', [], '', '0.1.0.3', 9000, fake_login=True).AndReturn(
environ)
self.servr._handle_request(
environ,
start_response_utils.null_start_response,
inst=self.inst,
request_type=instance.SHUTDOWN_REQUEST).WithSideEffects(advance_time)
self.servr._quit_event.wait(20)
self.inst.quit(force=True)
self.mox.ReplayAll()
self.servr._shutdown_instance(self.inst, 9000)
self.mox.VerifyAll()
class TestAutoScalingModuleWarmup(unittest.TestCase):
"""Tests for module.AutoScalingModule._warmup."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.mox.StubOutWithMock(module.Module, 'build_request_environ')
def tearDown(self):
self.mox.UnsetStubs()
def test_warmup(self):
s = AutoScalingModuleFacade(balanced_port=8080)
self.mox.StubOutWithMock(s, '_handle_request')
self.mox.StubOutWithMock(s._condition, 'notify')
inst = self.mox.CreateMock(instance.Instance)
environ = object()
s.build_request_environ('GET', '/_ah/warmup', [], '', '0.1.0.3', 8080,
fake_login=True).AndReturn(environ)
s._handle_request(environ,
mox.IgnoreArg(),
inst=inst,
request_type=instance.READY_REQUEST)
s._condition.notify(1)
self.mox.ReplayAll()
s._warmup(inst)
self.mox.VerifyAll()
class TestAutoScalingModuleAddInstance(unittest.TestCase):
"""Tests for module.AutoScalingModule._add_instance."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.factory = self.mox.CreateMock(instance.InstanceFactory)
self.factory.max_concurrent_requests = 10
def tearDown(self):
self.mox.UnsetStubs()
def test_permit_warmup(self):
s = AutoScalingModuleFacade(instance_factory=self.factory)
self.mox.StubOutWithMock(s, '_async_warmup')
self.mox.StubOutWithMock(s._condition, 'notify')
inst = self.mox.CreateMock(instance.Instance)
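    # With permit_warmup=True the instance is created expecting a ready
    # request and warmed up asynchronously instead of notifying waiters.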
self.factory.new_instance(mox.Regex('[a-f0-9]{36}'),
expect_ready_request=True).AndReturn(inst)
inst.start().AndReturn(True)
s._async_warmup(inst)
self.mox.ReplayAll()
self.assertEqual(inst, s._add_instance(permit_warmup=True))
self.mox.VerifyAll()
self.assertEqual(1, len(s._instances))
def test_no_permit_warmup(self):
s = AutoScalingModuleFacade(instance_factory=self.factory)
self.mox.StubOutWithMock(s._condition, 'notify')
inst = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(mox.Regex('[a-f0-9]{36}'),
expect_ready_request=False).AndReturn(inst)
inst.start().AndReturn(True)
s._condition.notify(10)
self.mox.ReplayAll()
self.assertEqual(inst, s._add_instance(permit_warmup=False))
self.mox.VerifyAll()
self.assertIn(inst, s._instances)
def test_failed_to_start(self):
s = AutoScalingModuleFacade(instance_factory=self.factory)
self.mox.StubOutWithMock(s, '_async_warmup')
self.mox.StubOutWithMock(s._condition, 'notify')
inst = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(mox.Regex('[a-f0-9]{36}'),
expect_ready_request=True).AndReturn(inst)
inst.start().AndReturn(False)
self.mox.ReplayAll()
self.assertIsNone(s._add_instance(permit_warmup=True))
self.mox.VerifyAll()
self.assertEqual(1, len(s._instances))
def test_max_instances(self):
s = AutoScalingModuleFacade(instance_factory=self.factory,
max_instances=1)
self.mox.StubOutWithMock(s._condition, 'notify')
inst = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(mox.Regex('[a-f0-9]{36}'),
expect_ready_request=False).AndReturn(inst)
inst.start().AndReturn(True)
s._condition.notify(10)
self.mox.ReplayAll()
self.assertEqual(inst, s._add_instance(permit_warmup=False))
self.assertEqual(None, s._add_instance(permit_warmup=False))
self.mox.VerifyAll()
self.assertEqual(1, len(s._instances))
class TestAutoScalingInstancePoolHandleScriptRequest(unittest.TestCase):
"""Tests for module.AutoScalingModule.handle."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.inst = self.mox.CreateMock(instance.Instance)
self.environ = {}
self.start_response = object()
self.response = [object()]
self.url_map = object()
self.match = object()
self.request_id = object()
self.auto_module = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.auto_module, '_choose_instance')
self.mox.StubOutWithMock(self.auto_module, '_add_instance')
self.mox.stubs.Set(time, 'time', lambda: 0.0)
def tearDown(self):
self.mox.UnsetStubs()
def test_handle_script_request(self):
self.auto_module._choose_instance(0.1).AndReturn(self.inst)
self.inst.handle(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.NORMAL_REQUEST).AndReturn(self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.auto_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
self.assertEqual([(mox.IgnoreArg(), 1)],
list(self.auto_module._outstanding_request_history))
def test_handle_cannot_accept_request(self):
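    # If the chosen instance raises CannotAcceptRequests, the module picks
    # another instance and retries the request.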
self.auto_module._choose_instance(0.1).AndReturn(self.inst)
self.auto_module._choose_instance(0.1).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.auto_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
self.assertEqual([(mox.IgnoreArg(), 1)],
list(self.auto_module._outstanding_request_history))
def test_handle_new_instance(self):
self.auto_module._choose_instance(0.1).AndReturn(None)
self.auto_module._add_instance(permit_warmup=False).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.auto_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_new_instance_none_returned(self):
self.auto_module._choose_instance(0.1).AndReturn(None)
self.auto_module._add_instance(permit_warmup=False).AndReturn(None)
self.auto_module._choose_instance(0.2).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.auto_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
class TestAutoScalingInstancePoolTrimRequestTimesAndOutstanding(
unittest.TestCase):
"""Tests for AutoScalingModule._trim_outstanding_request_history."""
def setUp(self):
api_server.test_setup_stubs()
def test_trim_outstanding_request_history(self):
servr = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
servr._outstanding_request_history.append((0, 100))
servr._outstanding_request_history.append((1.0, 101))
servr._outstanding_request_history.append((1.2, 102))
servr._outstanding_request_history.append((2.5, 103))
now = time.time()
servr._outstanding_request_history.append((now, 42))
servr._outstanding_request_history.append((now + 1, 43))
servr._outstanding_request_history.append((now + 3, 44))
servr._outstanding_request_history.append((now + 4, 45))
servr._trim_outstanding_request_history()
self.assertEqual([(now, 42), (now + 1, 43), (now + 3, 44), (now + 4, 45)],
list(servr._outstanding_request_history))
class TestAutoScalingInstancePoolGetNumRequiredInstances(unittest.TestCase):
"""Tests for AutoScalingModule._outstanding_request_history."""
def setUp(self):
api_server.test_setup_stubs()
self.servr = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 5))
def test_get_num_required_instances(self):
now = time.time()
self.servr._outstanding_request_history.append((now, 42))
self.servr._outstanding_request_history.append((now + 1, 43))
self.servr._outstanding_request_history.append((now + 3, 44))
self.servr._outstanding_request_history.append((now + 4, 45))
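    # A peak of 45 outstanding requests at 5 concurrent requests per
    # instance requires 9 instances.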
self.assertEqual(9, self.servr._get_num_required_instances())
def test_no_requests(self):
self.assertEqual(0, self.servr._get_num_required_instances())
class TestAutoScalingInstancePoolSplitInstances(unittest.TestCase):
"""Tests for module.AutoScalingModule._split_instances."""
class Instance(object):
def __init__(self, num_outstanding_requests, can_accept_requests=True):
self.num_outstanding_requests = num_outstanding_requests
self.can_accept_requests = can_accept_requests
def __repr__(self):
return str(self.num_outstanding_requests)
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.servr = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.servr, '_get_num_required_instances')
def tearDown(self):
self.mox.UnsetStubs()
def test_split_instances(self):
instance1 = self.Instance(1)
instance2 = self.Instance(2, can_accept_requests=False)
instance3 = self.Instance(3)
instance4 = self.Instance(4)
instance5 = self.Instance(5)
instance6 = self.Instance(6)
instance7 = self.Instance(7)
instance8 = self.Instance(8, can_accept_requests=False)
instance9 = self.Instance(9)
instance10 = self.Instance(10)
self.servr._get_num_required_instances().AndReturn(5)
self.servr._instances = set([instance1, instance2, instance3, instance4,
instance5, instance6, instance7, instance8,
instance9, instance10])
self.mox.ReplayAll()
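    # The five busiest instances that can still accept requests form the
    # required set; instances 2 and 8 cannot accept requests, so they land
    # in the optional set regardless of load.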
self.assertEqual(
(set([instance10, instance9, instance7,
instance6, instance5]),
set([instance1, instance2, instance3, instance4, instance8])),
self.servr._split_instances())
self.mox.VerifyAll()
def test_split_instances_no_instances(self):
self.servr._get_num_required_instances().AndReturn(5)
self.servr._instances = set([])
self.mox.ReplayAll()
self.assertEqual((set([]), set([])),
self.servr._split_instances())
self.mox.VerifyAll()
def test_split_instances_no_instances_not_enough_accepting_requests(self):
instance1 = self.Instance(1)
instance2 = self.Instance(1, can_accept_requests=False)
instance3 = self.Instance(2, can_accept_requests=False)
self.servr._get_num_required_instances().AndReturn(5)
self.servr._instances = set([instance1, instance2, instance3])
self.mox.ReplayAll()
self.assertEqual((set([instance1]), set([instance2, instance3])),
self.servr._split_instances())
self.mox.VerifyAll()
def test_split_instances_no_required_instances(self):
instance1 = self.Instance(1)
instance2 = self.Instance(2, can_accept_requests=False)
instance3 = self.Instance(3, can_accept_requests=False)
instance4 = self.Instance(4)
instance5 = self.Instance(5)
instance6 = self.Instance(6)
instance7 = self.Instance(7)
instance8 = self.Instance(8)
self.servr._get_num_required_instances().AndReturn(0)
self.servr._instances = set([instance1, instance2, instance3, instance4,
instance5, instance6, instance7, instance8])
self.mox.ReplayAll()
self.assertEqual(
(set(),
set([instance8, instance7, instance6, instance5, instance4,
instance3, instance2, instance1])),
self.servr._split_instances())
self.mox.VerifyAll()
class TestAutoScalingInstancePoolChooseInstances(unittest.TestCase):
"""Tests for module.AutoScalingModule._choose_instance."""
class Instance(object):
def __init__(self, num_outstanding_requests, can_accept_requests=True):
self.num_outstanding_requests = num_outstanding_requests
self.remaining_request_capacity = 10 - num_outstanding_requests
self.can_accept_requests = can_accept_requests
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.servr = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.servr, '_split_instances')
self.mox.StubOutWithMock(self.servr._condition, 'wait')
self.time = 10
self.mox.stubs.Set(time, 'time', lambda: self.time)
def advance_time(self, *unused_args):
self.time += 10
def tearDown(self):
self.mox.UnsetStubs()
def test_choose_instance_required_available(self):
instance1 = self.Instance(1)
instance2 = self.Instance(2)
instance3 = self.Instance(3)
instance4 = self.Instance(4)
self.servr._split_instances().AndReturn((set([instance3, instance4]),
set([instance1, instance2])))
self.mox.ReplayAll()
self.assertEqual(instance3, # Least busy required instance.
self.servr._choose_instance(15))
self.mox.VerifyAll()
def test_choose_instance_no_instances(self):
self.servr._split_instances().AndReturn((set([]), set([])))
self.servr._condition.wait(5).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(15))
self.mox.VerifyAll()
def test_choose_instance_no_instance_that_can_accept_requests(self):
instance1 = self.Instance(1, can_accept_requests=False)
self.servr._split_instances().AndReturn((set([]), set([instance1])))
self.servr._condition.wait(5).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(15))
self.mox.VerifyAll()
def test_choose_instance_required_full(self):
instance1 = self.Instance(1)
instance2 = self.Instance(2)
instance3 = self.Instance(10)
instance4 = self.Instance(10)
self.servr._split_instances().AndReturn((set([instance3, instance4]),
set([instance1, instance2])))
self.mox.ReplayAll()
    self.assertEqual(instance2,  # Busiest non-required instance.
self.servr._choose_instance(15))
self.mox.VerifyAll()
def test_choose_instance_must_wait(self):
instance1 = self.Instance(10)
instance2 = self.Instance(10)
self.servr._split_instances().AndReturn((set([instance1]),
set([instance2])))
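    # Every instance is at capacity, so _choose_instance waits on the
    # condition; the clock jumps past the deadline and it returns None.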
self.servr._condition.wait(5).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertIsNone(self.servr._choose_instance(15))
self.mox.VerifyAll()
class TestAutoScalingInstancePoolAdjustInstances(unittest.TestCase):
"""Tests for module.AutoScalingModule._adjust_instances."""
class Instance(object):
def __init__(self, num_outstanding_requests):
self.num_outstanding_requests = num_outstanding_requests
def quit(self):
pass
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.servr = AutoScalingModuleFacade(
module_configuration=ModuleConfigurationStub(
automatic_scaling=appinfo.AutomaticScaling(
min_pending_latency='0.1s',
max_pending_latency='1.0s',
min_idle_instances=1,
max_idle_instances=2)),
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.servr, '_split_instances')
self.mox.StubOutWithMock(self.servr, '_add_instance')
def tearDown(self):
self.mox.UnsetStubs()
def test_adjust_instances_create_new(self):
instance1 = self.Instance(0)
instance2 = self.Instance(2)
instance3 = self.Instance(3)
instance4 = self.Instance(4)
self.servr._instances = set([instance1, instance2, instance3, instance4])
self.servr._split_instances().AndReturn(
(set([instance1, instance2, instance3, instance4]),
set([])))
self.servr._add_instance(permit_warmup=True)
self.mox.ReplayAll()
self.servr._adjust_instances()
self.mox.VerifyAll()
def test_adjust_instances_quit_idle(self):
instance1 = self.Instance(0)
instance2 = self.Instance(2)
instance3 = self.Instance(3)
instance4 = self.Instance(4)
self.mox.StubOutWithMock(instance1, 'quit')
self.servr._instances = set([instance1, instance2, instance3, instance4])
self.servr._split_instances().AndReturn(
(set([]),
set([instance1, instance2, instance3, instance4])))
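    # All four instances are idle surplus above max_idle_instances, but only
    # instance1, which has no outstanding requests, is expected to be quit.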
instance1.quit()
self.mox.ReplayAll()
self.servr._adjust_instances()
self.mox.VerifyAll()
def test_adjust_instances_quit_idle_with_race(self):
instance1 = self.Instance(0)
instance2 = self.Instance(2)
instance3 = self.Instance(3)
instance4 = self.Instance(4)
self.mox.StubOutWithMock(instance1, 'quit')
self.servr._instances = set([instance1, instance2, instance3, instance4])
self.servr._split_instances().AndReturn(
(set([]),
set([instance1, instance2, instance3, instance4])))
instance1.quit().AndRaise(instance.CannotQuitServingInstance)
self.mox.ReplayAll()
self.servr._adjust_instances()
self.mox.VerifyAll()
class TestAutoScalingInstancePoolHandleChanges(unittest.TestCase):
"""Tests for module.AutoScalingModule._handle_changes."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.instance_factory = instance.InstanceFactory(object(), 10)
self.servr = AutoScalingModuleFacade(
instance_factory=self.instance_factory)
self.mox.StubOutWithMock(self.instance_factory, 'files_changed')
self.mox.StubOutWithMock(self.instance_factory, 'configuration_changed')
self.mox.StubOutWithMock(self.servr, '_maybe_restart_instances')
self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
self.mox.StubOutWithMock(self.servr._module_configuration,
'check_for_updates')
self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
def tearDown(self):
self.mox.UnsetStubs()
def test_no_changes(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.servr._maybe_restart_instances(config_changed=False,
file_changed=False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_irrelevant_config_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.servr._maybe_restart_instances(config_changed=False,
file_changed=False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_restart_config_change(self):
conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.instance_factory.configuration_changed(conf_change)
self.servr._maybe_restart_instances(config_changed=True, file_changed=False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_handler_change(self):
conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.servr._create_url_handlers()
self.instance_factory.configuration_changed(conf_change)
self.servr._maybe_restart_instances(config_changed=True, file_changed=False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_file_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(True)
self.instance_factory.files_changed()
self.servr._maybe_restart_instances(config_changed=False, file_changed=True)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
class TestAutoScalingInstancePoolMaybeRestartInstances(unittest.TestCase):
"""Tests for module.AutoScalingModule._maybe_restart_instances."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.instance_factory = instance.InstanceFactory(object(), 10)
self.instance_factory.FILE_CHANGE_INSTANCE_RESTART_POLICY = instance.ALWAYS
self.servr = AutoScalingModuleFacade(instance_factory=self.instance_factory)
self.inst1 = self.mox.CreateMock(instance.Instance)
self.inst2 = self.mox.CreateMock(instance.Instance)
self.inst3 = self.mox.CreateMock(instance.Instance)
self.inst1.total_requests = 2
self.inst2.total_requests = 0
self.inst3.total_requests = 4
self.servr._instances.add(self.inst1)
self.servr._instances.add(self.inst2)
self.servr._instances.add(self.inst3)
def tearDown(self):
self.mox.UnsetStubs()
def test_no_changes(self):
self.mox.ReplayAll()
self.servr._maybe_restart_instances(config_changed=False,
file_changed=False)
self.mox.VerifyAll()
def test_config_change(self):
self.inst1.quit(allow_async=True).InAnyOrder()
self.inst2.quit(allow_async=True).InAnyOrder()
self.inst3.quit(allow_async=True).InAnyOrder()
self.mox.ReplayAll()
self.servr._maybe_restart_instances(config_changed=True,
file_changed=False)
self.mox.VerifyAll()
def test_file_change_restart_always(self):
self.instance_factory.FILE_CHANGE_INSTANCE_RESTART_POLICY = instance.ALWAYS
self.inst1.quit(allow_async=True).InAnyOrder()
self.inst2.quit(allow_async=True).InAnyOrder()
self.inst3.quit(allow_async=True).InAnyOrder()
self.mox.ReplayAll()
self.servr._maybe_restart_instances(config_changed=False,
file_changed=True)
self.mox.VerifyAll()
self.assertSequenceEqual(set(), self.servr._instances)
def test_file_change_restart_after_first_request(self):
self.instance_factory.FILE_CHANGE_INSTANCE_RESTART_POLICY = (
instance.AFTER_FIRST_REQUEST)
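    # Under AFTER_FIRST_REQUEST only instances that have served at least one
    # request are restarted; inst2 has served none and keeps running.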
self.inst1.quit(allow_async=True).InAnyOrder()
self.inst3.quit(allow_async=True).InAnyOrder()
self.mox.ReplayAll()
self.servr._maybe_restart_instances(config_changed=False,
file_changed=True)
self.mox.VerifyAll()
self.assertSequenceEqual(set([self.inst2]), self.servr._instances)
def test_file_change_restart_never(self):
self.instance_factory.FILE_CHANGE_INSTANCE_RESTART_POLICY = instance.NEVER
self.mox.ReplayAll()
self.servr._maybe_restart_instances(config_changed=False,
file_changed=True)
self.mox.VerifyAll()
self.assertSequenceEqual(set([self.inst1, self.inst2, self.inst3]),
self.servr._instances)
class TestAutoScalingInstancePoolLoopAdjustingInstances(unittest.TestCase):
"""Tests for module.AutoScalingModule._adjust_instances."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.servr = AutoScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
def tearDown(self):
self.mox.UnsetStubs()
def test_loop_and_quit(self):
self.mox.StubOutWithMock(self.servr, '_adjust_instances')
self.mox.StubOutWithMock(self.servr, '_handle_changes')
inst1 = self.mox.CreateMock(instance.Instance)
inst2 = self.mox.CreateMock(instance.Instance)
inst3 = self.mox.CreateMock(instance.Instance)
self.servr._instances.add(inst1)
self.servr._instances.add(inst2)
self.servr._instances.add(inst3)
self.servr._handle_changes()
def do_quit(*unused_args):
self.servr._quit_event.set()
self.servr._adjust_instances().WithSideEffects(do_quit)
self.mox.ReplayAll()
self.servr._loop_adjusting_instances()
self.mox.VerifyAll()
class TestAutoScalingInstancePoolAutomaticScaling(unittest.TestCase):
def setUp(self):
api_server.test_setup_stubs()
def _create_module(self, automatic_scaling):
return AutoScalingModuleFacade(
module_configuration=ModuleConfigurationStub(
automatic_scaling=automatic_scaling),
instance_factory=instance.InstanceFactory(object(), 10))
def test_unset_automatic_settings(self):
settings = appinfo.AutomaticScaling()
pool = self._create_module(settings)
self.assertEqual(0.1, pool._min_pending_latency)
self.assertEqual(0.5, pool._max_pending_latency)
self.assertEqual(1, pool._min_idle_instances)
self.assertEqual(1000, pool._max_idle_instances)
def test_automatic_automatic_settings(self):
settings = appinfo.AutomaticScaling(
min_pending_latency='automatic',
max_pending_latency='automatic',
min_idle_instances='automatic',
max_idle_instances='automatic')
pool = self._create_module(settings)
self.assertEqual(0.1, pool._min_pending_latency)
self.assertEqual(0.5, pool._max_pending_latency)
self.assertEqual(1, pool._min_idle_instances)
self.assertEqual(1000, pool._max_idle_instances)
def test_explicit_automatic_settings(self):
settings = appinfo.AutomaticScaling(
min_pending_latency='1234ms',
max_pending_latency='5.67s',
min_idle_instances='3',
max_idle_instances='20')
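    # appinfo time strings are normalized to seconds ('1234ms' -> 1.234,
    # '5.67s' -> 5.67) and the idle-instance counts are parsed as integers.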
pool = self._create_module(settings)
self.assertEqual(1.234, pool._min_pending_latency)
self.assertEqual(5.67, pool._max_pending_latency)
self.assertEqual(3, pool._min_idle_instances)
self.assertEqual(20, pool._max_idle_instances)
class TestManualScalingModuleStart(unittest.TestCase):
"""Tests for module.ManualScalingModule._start_instance."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.mox.StubOutWithMock(module.Module, 'build_request_environ')
def tearDown(self):
self.mox.UnsetStubs()
def test_instance_start_success(self):
s = ManualScalingModuleFacade(balanced_port=8080)
self.mox.StubOutWithMock(s, '_handle_request')
self.mox.StubOutWithMock(s._condition, 'notify')
wsgi_servr = self.mox.CreateMock(wsgi_server.WsgiServer)
wsgi_servr.port = 12345
inst = self.mox.CreateMock(instance.Instance)
inst.instance_id = 0
inst.start().AndReturn(True)
environ = object()
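    # Starting the instance sends a fake, logged-in /_ah/start request to
    # the instance's own WSGI server port as a READY_REQUEST.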
s.build_request_environ('GET', '/_ah/start', [], '', '0.1.0.3', 12345,
fake_login=True).AndReturn(environ)
s._handle_request(environ,
mox.IgnoreArg(),
inst=inst,
request_type=instance.READY_REQUEST)
s._condition.notify(1)
self.mox.ReplayAll()
s._start_instance(wsgi_servr, inst)
self.mox.VerifyAll()
def test_instance_start_failure(self):
s = ManualScalingModuleFacade(balanced_port=8080)
self.mox.StubOutWithMock(s, '_handle_request')
self.mox.StubOutWithMock(s._condition, 'notify')
wsgi_servr = self.mox.CreateMock(wsgi_server.WsgiServer)
wsgi_servr.port = 12345
inst = self.mox.CreateMock(instance.Instance)
inst.instance_id = 0
inst.start().AndReturn(False)
self.mox.ReplayAll()
s._start_instance(wsgi_servr, inst)
self.mox.VerifyAll()
class TestManualScalingModuleAddInstance(unittest.TestCase):
"""Tests for module.ManualScalingModule._add_instance."""
class WsgiServer(object):
def __init__(self, port):
self.port = port
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.factory = self.mox.CreateMock(instance.InstanceFactory)
self.factory.max_concurrent_requests = 10
def tearDown(self):
self.mox.UnsetStubs()
def test_add_while_started(self):
servr = ManualScalingModuleFacade(instance_factory=self.factory)
inst = self.mox.CreateMock(instance.Instance)
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.mox.StubOutWithMock(wsgi_server.WsgiServer, 'start')
self.mox.StubOutWithMock(wsgi_server.WsgiServer, 'port')
wsgi_server.WsgiServer.port = 12345
self.factory.new_instance(0, expect_ready_request=True).AndReturn(inst)
wsgi_server.WsgiServer.start()
module._THREAD_POOL.submit(servr._start_instance,
mox.IsA(wsgi_server.WsgiServer), inst)
self.mox.ReplayAll()
servr._add_instance()
self.mox.VerifyAll()
self.assertIn(inst, servr._instances)
self.assertEqual((servr, inst), servr._port_registry.get(12345))
def test_add_while_stopped(self):
servr = ManualScalingModuleFacade(instance_factory=self.factory)
servr._suspended = True
inst = self.mox.CreateMock(instance.Instance)
self.mox.StubOutWithMock(wsgi_server.WsgiServer, 'start')
self.mox.StubOutWithMock(wsgi_server.WsgiServer, 'port')
wsgi_server.WsgiServer.port = 12345
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.factory.new_instance(0, expect_ready_request=True).AndReturn(inst)
wsgi_server.WsgiServer.start()
self.mox.ReplayAll()
servr._add_instance()
self.mox.VerifyAll()
self.assertIn(inst, servr._instances)
self.assertEqual((servr, inst), servr._port_registry.get(12345))
class TestManualScalingInstancePoolHandleScriptRequest(unittest.TestCase):
"""Tests for module.ManualScalingModule.handle."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.inst = self.mox.CreateMock(instance.Instance)
self.inst.instance_id = 0
self.environ = {}
self.start_response = object()
self.response = [object()]
self.url_map = object()
self.match = object()
self.request_id = object()
self.manual_module = ManualScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.manual_module, '_choose_instance')
self.mox.StubOutWithMock(self.manual_module, '_add_instance')
self.mox.StubOutWithMock(self.manual_module._condition, 'notify')
self.mox.stubs.Set(time, 'time', lambda: 0.0)
def tearDown(self):
self.mox.UnsetStubs()
def test_handle_script_request(self):
self.manual_module._choose_instance(10.0).AndReturn(self.inst)
self.inst.handle(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.NORMAL_REQUEST).AndReturn(self.response)
self.manual_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.manual_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_cannot_accept_request(self):
self.manual_module._choose_instance(10.0).AndReturn(self.inst)
self.manual_module._choose_instance(10.0).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.manual_module._condition.notify()
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.manual_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.manual_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_must_wait(self):
self.manual_module._choose_instance(10.0).AndReturn(None)
self.manual_module._choose_instance(10.0).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.manual_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.manual_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_timeout(self):
self.time = 0.0
def advance_time(*unused_args):
self.time += 11
self.mox.stubs.Set(time, 'time', lambda: self.time)
self.mox.StubOutWithMock(self.manual_module, '_error_response')
self.manual_module._choose_instance(10.0).WithSideEffects(advance_time)
self.manual_module._error_response(self.environ, self.start_response,
503).AndReturn(self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.manual_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
class TestManualScalingInstancePoolChooseInstances(unittest.TestCase):
"""Tests for module.ManualScalingModule._choose_instance."""
class Instance(object):
def __init__(self, can_accept_requests):
self.can_accept_requests = can_accept_requests
def setUp(self):
self.mox = mox.Mox()
api_server.test_setup_stubs()
self.servr = ManualScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.servr._condition, 'wait')
self.time = 0
self.mox.stubs.Set(time, 'time', lambda: self.time)
def advance_time(self, *unused_args):
self.time += 10
def tearDown(self):
self.mox.UnsetStubs()
def test_choose_instance_first_can_accept(self):
instance1 = self.Instance(True)
instance2 = self.Instance(True)
self.servr._instances = [instance1, instance2]
self.mox.ReplayAll()
self.assertEqual(instance1, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_first_cannot_accept(self):
instance1 = self.Instance(False)
instance2 = self.Instance(True)
self.servr._instances = [instance1, instance2]
self.mox.ReplayAll()
self.assertEqual(instance2, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_none_can_accept(self):
instance1 = self.Instance(False)
instance2 = self.Instance(False)
self.servr._instances = [instance1, instance2]
self.servr._condition.wait(5).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(5))
self.mox.VerifyAll()
def test_choose_instance_no_instances(self):
self.servr._condition.wait(5).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(5))
self.mox.VerifyAll()
class TestManualScalingInstancePoolSetNumInstances(unittest.TestCase):
"""Tests for module.ManualScalingModule.set_num_instances."""
def setUp(self):
self.mox = mox.Mox()
api_server.test_setup_stubs()
self.module = ManualScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self._instance = self.mox.CreateMock(instance.Instance)
self._wsgi_server = self.mox.CreateMock(wsgi_server.WsgiServer)
self._wsgi_server.port = 8080
self.module._instances = [self._instance]
self.module._wsgi_servers = [self._wsgi_server]
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.mox.StubOutWithMock(self.module, '_add_instance')
self.mox.StubOutWithMock(self.module, '_shutdown_instance')
def tearDown(self):
self.mox.UnsetStubs()
def test_no_op(self):
self.mox.ReplayAll()
self.assertEqual(1, self.module.get_num_instances())
self.module.set_num_instances(1)
self.mox.VerifyAll()
def test_add_an_instance(self):
self.module._add_instance()
self.mox.ReplayAll()
self.assertEqual(1, self.module.get_num_instances())
self.module.set_num_instances(2)
self.mox.VerifyAll()
def test_remove_an_instance(self):
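    # Scaling down hands _quit_instance to the thread pool: the instance is
    # quit with expect_shutdown, its WSGI server is stopped, and
    # _shutdown_instance runs against the old port.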
module._THREAD_POOL.submit(self.module._quit_instance,
self._instance,
self._wsgi_server)
self._instance.quit(expect_shutdown=True)
self._wsgi_server.quit()
self.module._shutdown_instance(self._instance, 8080)
self.mox.ReplayAll()
self.assertEqual(1, self.module.get_num_instances())
self.module.set_num_instances(0)
self.module._quit_instance(self._instance,
self._wsgi_server)
self.mox.VerifyAll()
class TestManualScalingInstancePoolSuspendAndResume(unittest.TestCase):
"""Tests for module.ManualScalingModule.suspend and resume."""
def setUp(self):
self.mox = mox.Mox()
api_server.test_setup_stubs()
self.factory = self.mox.CreateMock(instance.InstanceFactory)
self.module = ManualScalingModuleFacade(
instance_factory=self.factory)
self._instance = self.mox.CreateMock(instance.Instance)
self._wsgi_server = wsgi_server.WsgiServer(('localhost', 0), None)
self.module._instances = [self._instance]
self.module._wsgi_servers = [self._wsgi_server]
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.mox.StubOutWithMock(self.module, '_shutdown_instance')
self._wsgi_server.start()
def tearDown(self):
self._wsgi_server.quit()
self.mox.UnsetStubs()
def test_already_suspended(self):
self.module._suspended = True
self.assertRaises(request_info.ModuleAlreadyStoppedError,
self.module.suspend)
def test_already_resumed(self):
self.assertRaises(request_info.ModuleAlreadyStartedError,
self.module.resume)
def test_suspend_instance(self):
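    # Suspending hands _suspend_instance to the thread pool: the instance is
    # quit with expect_shutdown and its WSGI server is left serving 404s
    # with no application attached.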
module._THREAD_POOL.submit(self.module._suspend_instance, self._instance,
self._wsgi_server.port)
self._instance.quit(expect_shutdown=True)
port = object()
self.module._shutdown_instance(self._instance, port)
self.mox.ReplayAll()
self.module.suspend()
self.module._suspend_instance(self._instance, port)
self.mox.VerifyAll()
self.assertEqual(404, self._wsgi_server._error)
self.assertEqual(None, self._wsgi_server._app)
self.assertTrue(self.module._suspended)
def test_resume(self):
self.module._suspended = True
self.module._instances = [object()]
self.factory.new_instance(0, expect_ready_request=True).AndReturn(
self._instance)
module._THREAD_POOL.submit(self.module._start_instance, self._wsgi_server,
self._instance)
self.mox.ReplayAll()
self.module.resume()
self.mox.VerifyAll()
self.assertEqual(self.module._handle_request,
self._wsgi_server._app.func)
self.assertEqual({'inst': self._instance},
self._wsgi_server._app.keywords)
self.assertFalse(self.module._suspended)
def test_restart(self):
self._new_instance = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(0, expect_ready_request=True).AndReturn(
self._new_instance)
module._THREAD_POOL.submit(self.module._suspend_instance, self._instance,
self._wsgi_server.port)
module._THREAD_POOL.submit(self.module._start_instance, self._wsgi_server,
self._new_instance)
self._instance.quit(expect_shutdown=True)
port = object()
self.module._shutdown_instance(self._instance, port)
self.mox.ReplayAll()
self.module.restart()
self.module._suspend_instance(self._instance, port)
self.mox.VerifyAll()
self.assertEqual(self.module._handle_request,
self._wsgi_server._app.func)
self.assertEqual({'inst': self._new_instance},
self._wsgi_server._app.keywords)
self.assertFalse(self.module._suspended)
class TestManualScalingInstancePoolHandleChanges(unittest.TestCase):
"""Tests for module.ManualScalingModule._handle_changes."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.instance_factory = instance.InstanceFactory(object(), 10)
self.servr = ManualScalingModuleFacade(
instance_factory=self.instance_factory)
self.mox.StubOutWithMock(self.instance_factory, 'files_changed')
self.mox.StubOutWithMock(self.instance_factory, 'configuration_changed')
self.mox.StubOutWithMock(self.servr, 'restart')
self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
self.mox.StubOutWithMock(self.servr._module_configuration,
'check_for_updates')
self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
def tearDown(self):
self.mox.UnsetStubs()
def test_no_changes(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_irrelevant_config_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_restart_config_change(self):
conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.instance_factory.configuration_changed(conf_change)
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_handler_change(self):
conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.servr._create_url_handlers()
self.instance_factory.configuration_changed(conf_change)
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_file_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(True)
self.instance_factory.files_changed()
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_restart_config_change_suspended(self):
self.servr._suspended = True
conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.instance_factory.configuration_changed(conf_change)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_handler_change_suspended(self):
self.servr._suspended = True
conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.servr._create_url_handlers()
self.instance_factory.configuration_changed(conf_change)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_file_change_suspended(self):
self.servr._suspended = True
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(True)
self.instance_factory.files_changed()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
class TestBasicScalingModuleStart(unittest.TestCase):
"""Tests for module.BasicScalingModule._start_instance."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.mox.StubOutWithMock(module.Module, 'build_request_environ')
def tearDown(self):
self.mox.UnsetStubs()
def test_instance_start_success(self):
s = BasicScalingModuleFacade(balanced_port=8080)
self.mox.StubOutWithMock(s, '_handle_request')
self.mox.StubOutWithMock(s._condition, 'notify')
wsgi_servr = self.mox.CreateMock(wsgi_server.WsgiServer)
wsgi_servr.port = 12345
s._wsgi_servers[0] = wsgi_servr
inst = self.mox.CreateMock(instance.Instance)
inst.instance_id = 0
s._instances[0] = inst
inst.start().AndReturn(True)
environ = object()
s.build_request_environ('GET', '/_ah/start', [], '', '0.1.0.3', 12345,
fake_login=True).AndReturn(environ)
s._handle_request(environ,
mox.IgnoreArg(),
inst=inst,
request_type=instance.READY_REQUEST)
s._condition.notify(1)
self.mox.ReplayAll()
s._start_instance(0)
self.mox.VerifyAll()
def test_instance_start_failure(self):
s = BasicScalingModuleFacade(balanced_port=8080)
self.mox.StubOutWithMock(s, '_handle_request')
self.mox.StubOutWithMock(s._condition, 'notify')
wsgi_servr = self.mox.CreateMock(wsgi_server.WsgiServer)
wsgi_servr.port = 12345
s._wsgi_servers[0] = wsgi_servr
inst = self.mox.CreateMock(instance.Instance)
inst.instance_id = 0
s._instances[0] = inst
inst.start().AndReturn(False)
self.mox.ReplayAll()
s._start_instance(0)
self.mox.VerifyAll()
def test_start_any_instance_success(self):
s = BasicScalingModuleFacade(balanced_port=8080)
s._instance_running = [True, False, False, True]
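    # The first slot that is not yet running (index 1) is claimed, marked as
    # running, and its startup is handed off to the thread pool.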
inst = object()
s._instances = [None, inst, None, None]
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
module._THREAD_POOL.submit(s._start_instance, 1)
self.mox.ReplayAll()
self.assertEqual(inst, s._start_any_instance())
self.mox.VerifyAll()
self.assertEqual([True, True, False, True], s._instance_running)
def test_start_any_instance_all_already_running(self):
s = BasicScalingModuleFacade(balanced_port=8080)
s._instance_running = [True, True, True, True]
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.mox.ReplayAll()
self.assertIsNone(s._start_any_instance())
self.mox.VerifyAll()
self.assertEqual([True, True, True, True], s._instance_running)
class TestBasicScalingInstancePoolHandleScriptRequest(unittest.TestCase):
"""Tests for module.BasicScalingModule.handle."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.inst = self.mox.CreateMock(instance.Instance)
self.inst.instance_id = 0
self.environ = {}
self.start_response = object()
self.response = [object()]
self.url_map = object()
self.match = object()
self.request_id = object()
self.basic_module = BasicScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.StubOutWithMock(self.basic_module, '_choose_instance')
self.mox.StubOutWithMock(self.basic_module, '_start_any_instance')
self.mox.StubOutWithMock(self.basic_module, '_start_instance')
self.mox.StubOutWithMock(self.basic_module._condition, 'wait')
self.mox.StubOutWithMock(self.basic_module._condition, 'notify')
self.time = 10
self.mox.stubs.Set(time, 'time', lambda: self.time)
def advance_time(self, *unused_args):
self.time += 11
def tearDown(self):
self.mox.UnsetStubs()
def test_handle_script_request(self):
self.basic_module._choose_instance(20).AndReturn(self.inst)
self.inst.handle(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.NORMAL_REQUEST).AndReturn(self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_cannot_accept_request(self):
self.basic_module._choose_instance(20).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.basic_module._condition.notify()
self.basic_module._choose_instance(20).AndReturn(self.inst)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_timeout(self):
self.mox.StubOutWithMock(self.basic_module, '_error_response')
self.basic_module._choose_instance(20).WithSideEffects(self.advance_time)
self.basic_module._error_response(self.environ, self.start_response,
503).AndReturn(self.response)
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_instance(self):
self.inst.instance_id = 0
self.inst.has_quit = False
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
inst=self.inst))
self.mox.VerifyAll()
def test_handle_instance_start_the_instance(self):
self.inst.instance_id = 0
self.inst.has_quit = False
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.basic_module._start_instance(0).AndReturn(True)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
inst=self.inst))
self.mox.VerifyAll()
def test_handle_instance_already_running(self):
self.inst.instance_id = 0
self.inst.has_quit = False
self.basic_module._instance_running[0] = True
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.inst.wait(20)
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndReturn(
self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
inst=self.inst))
self.mox.VerifyAll()
def test_handle_instance_timeout(self):
self.mox.StubOutWithMock(self.basic_module, '_error_response')
self.inst.instance_id = 0
self.inst.has_quit = False
self.basic_module._instance_running[0] = True
self.inst.handle(
self.environ, self.start_response, self.url_map, self.match,
self.request_id, instance.NORMAL_REQUEST).AndRaise(
instance.CannotAcceptRequests)
self.inst.wait(20).WithSideEffects(self.advance_time)
self.basic_module._error_response(self.environ, self.start_response,
503).AndReturn(self.response)
self.basic_module._condition.notify()
self.mox.ReplayAll()
self.assertEqual(
self.response,
self.basic_module._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
inst=self.inst))
self.mox.VerifyAll()
class TestBasicScalingInstancePoolChooseInstances(unittest.TestCase):
"""Tests for module.BasicScalingModule._choose_instance."""
class Instance(object):
def __init__(self, can_accept_requests):
self.can_accept_requests = can_accept_requests
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.servr = BasicScalingModuleFacade(
instance_factory=instance.InstanceFactory(object(), 10))
self.mox.stubs.Set(time, 'time', lambda: self.time)
self.mox.StubOutWithMock(self.servr._condition, 'wait')
self.mox.StubOutWithMock(self.servr, '_start_any_instance')
self.time = 0
def tearDown(self):
self.mox.UnsetStubs()
def advance_time(self, *unused_args):
self.time += 10
def test_choose_instance_first_can_accept(self):
instance1 = self.Instance(True)
instance2 = self.Instance(True)
self.servr._instances = [instance1, instance2]
self.mox.ReplayAll()
self.assertEqual(instance1, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_first_cannot_accept(self):
instance1 = self.Instance(False)
instance2 = self.Instance(True)
self.servr._instances = [instance1, instance2]
self.mox.ReplayAll()
self.assertEqual(instance2, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_none_can_accept(self):
instance1 = self.Instance(False)
instance2 = self.Instance(False)
self.servr._instance_running = [True, True]
self.servr._instances = [instance1, instance2]
self.servr._start_any_instance().AndReturn(None)
self.servr._condition.wait(1).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_start_an_instance(self):
instance1 = self.Instance(False)
instance2 = self.Instance(False)
mock_instance = self.mox.CreateMock(instance.Instance)
self.servr._instances = [instance1, instance2]
self.servr._instance_running = [True, False]
self.servr._start_any_instance().AndReturn(mock_instance)
mock_instance.wait(1)
self.mox.ReplayAll()
self.assertEqual(mock_instance, self.servr._choose_instance(1))
self.mox.VerifyAll()
def test_choose_instance_no_instances(self):
self.servr._start_any_instance().AndReturn(None)
self.servr._condition.wait(1).WithSideEffects(self.advance_time)
self.mox.ReplayAll()
self.assertEqual(None, self.servr._choose_instance(1))
self.mox.VerifyAll()
class TestBasicScalingInstancePoolInstanceManagement(unittest.TestCase):
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.factory = self.mox.CreateMock(instance.InstanceFactory)
self.factory.max_concurrent_requests = 10
self.mox.StubOutWithMock(module._THREAD_POOL, 'submit')
self.module = BasicScalingModuleFacade(instance_factory=self.factory,
host='localhost')
self.wsgi_server = self.module._wsgi_servers[0]
self.wsgi_server.start()
def tearDown(self):
self.wsgi_server.quit()
self.mox.UnsetStubs()
def test_restart(self):
old_instances = [self.mox.CreateMock(instance.Instance),
self.mox.CreateMock(instance.Instance)]
self.module._instances = old_instances[:]
self.module._instance_running = [True, False]
new_instance = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(0, expect_ready_request=True).AndReturn(
new_instance)
module._THREAD_POOL.submit(self.module._start_instance, 0)
old_instances[0].quit(expect_shutdown=True)
module._THREAD_POOL.submit(self.module._shutdown_instance, old_instances[0],
self.wsgi_server.port)
self.mox.ReplayAll()
self.module.restart()
self.mox.VerifyAll()
self.assertEqual([True, False], self.module._instance_running)
self.assertEqual(new_instance, self.module._instances[0])
self.assertEqual(self.module._handle_request,
self.module._wsgi_servers[0]._app.func)
self.assertEqual({'inst': new_instance},
self.module._wsgi_servers[0]._app.keywords)
def test_shutdown_idle_instances(self):
s = BasicScalingModuleFacade(instance_factory=self.factory)
old_instances = [self.mox.CreateMock(instance.Instance),
self.mox.CreateMock(instance.Instance),
self.mox.CreateMock(instance.Instance)]
self.module._instances = old_instances[:]
old_instances[0].idle_seconds = (self.module._instance_idle_timeout + 1)
old_instances[1].idle_seconds = 0
old_instances[2].idle_seconds = (self.module._instance_idle_timeout + 1)
self.module._instance_running = [True, True, False]
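    # Only instance 0 is both running and idle past the timeout: it is quit,
    # shut down on the old port, and replaced with a fresh instance that
    # expects a ready request (instance 2 is idle but already stopped).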
new_instance = self.mox.CreateMock(instance.Instance)
self.factory.new_instance(0, expect_ready_request=True).AndReturn(
new_instance)
old_instances[0].quit(expect_shutdown=True)
module._THREAD_POOL.submit(self.module._shutdown_instance, old_instances[0],
self.wsgi_server.port)
self.mox.ReplayAll()
self.module._shutdown_idle_instances()
self.mox.VerifyAll()
self.assertEqual([False, True, False], self.module._instance_running)
self.assertEqual(new_instance, self.module._instances[0])
self.assertEqual(self.module._handle_request,
self.module._wsgi_servers[0]._app.func)
self.assertEqual({'inst': new_instance},
self.module._wsgi_servers[0]._app.keywords)
class TestBasicScalingInstancePoolHandleChanges(unittest.TestCase):
"""Tests for module.BasicScalingModule._handle_changes."""
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.instance_factory = instance.InstanceFactory(object(), 10)
self.servr = BasicScalingModuleFacade(
instance_factory=self.instance_factory)
self.mox.StubOutWithMock(self.instance_factory, 'files_changed')
self.mox.StubOutWithMock(self.instance_factory, 'configuration_changed')
self.mox.StubOutWithMock(self.servr, 'restart')
self.mox.StubOutWithMock(self.servr, '_create_url_handlers')
self.mox.StubOutWithMock(self.servr._module_configuration,
'check_for_updates')
self.mox.StubOutWithMock(self.servr._watcher, 'has_changes')
def tearDown(self):
self.mox.UnsetStubs()
def test_no_changes(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_irrelevant_config_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(False)
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_restart_config_change(self):
conf_change = frozenset([application_configuration.ENV_VARIABLES_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.instance_factory.configuration_changed(conf_change)
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_handler_change(self):
conf_change = frozenset([application_configuration.HANDLERS_CHANGED])
self.servr._module_configuration.check_for_updates().AndReturn(conf_change)
self.servr._watcher.has_changes().AndReturn(False)
self.servr._create_url_handlers()
self.instance_factory.configuration_changed(conf_change)
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
def test_file_change(self):
self.servr._module_configuration.check_for_updates().AndReturn(frozenset())
self.servr._watcher.has_changes().AndReturn(True)
self.instance_factory.files_changed().AndReturn(True)
self.servr.restart()
self.mox.ReplayAll()
self.servr._handle_changes()
self.mox.VerifyAll()
class TestInteractiveCommandModule(unittest.TestCase):
def setUp(self):
api_server.test_setup_stubs()
self.mox = mox.Mox()
self.inst = self.mox.CreateMock(instance.Instance)
self.inst.instance_id = 0
self.environ = object()
self.start_response = object()
self.response = [object()]
self.url_map = object()
self.match = object()
self.request_id = object()
self.servr = module.InteractiveCommandModule(
ModuleConfigurationStub(),
'fakehost',
balanced_port=8000,
api_port=9000,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_executable_path='/usr/bin/php-cgi',
enable_php_remote_debugging=False,
python_config=None,
cloud_sql_config=None,
default_version_port=8080,
port_registry=dispatcher.PortRegistry(),
request_data=None,
dispatcher=None,
use_mtime_file_watcher=False,
allow_skipped_files=False)
self.mox.StubOutWithMock(self.servr._instance_factory, 'new_instance')
self.mox.StubOutWithMock(self.servr, '_handle_request')
self.mox.StubOutWithMock(self.servr, 'build_request_environ')
def test_send_interactive_command(self):
def good_response(unused_environ, start_response, request_type):
start_response('200 OK', [])
return ['10\n']
environ = object()
self.servr.build_request_environ(
'POST', '/', [], 'print 5+5', '192.0.2.0', 8000).AndReturn(environ)
self.servr._handle_request(
environ,
mox.IgnoreArg(),
request_type=instance.INTERACTIVE_REQUEST).WithSideEffects(
good_response)
self.mox.ReplayAll()
self.assertEqual('10\n', self.servr.send_interactive_command('print 5+5'))
self.mox.VerifyAll()
def test_send_interactive_command_handle_request_exception(self):
environ = object()
self.servr.build_request_environ(
'POST', '/', [], 'print 5+5', '192.0.2.0', 8000).AndReturn(environ)
self.servr._handle_request(
environ,
mox.IgnoreArg(),
request_type=instance.INTERACTIVE_REQUEST).AndRaise(Exception('error'))
self.mox.ReplayAll()
self.assertRaisesRegexp(module.InteractiveCommandError,
'error',
self.servr.send_interactive_command,
'print 5+5')
self.mox.VerifyAll()
def test_send_interactive_command_handle_request_failure(self):
def good_response(unused_environ, start_response, request_type):
start_response('503 Service Unavailable', [])
return ['Instance was restarted while executing command']
environ = object()
self.servr.build_request_environ(
'POST', '/', [], 'print 5+5', '192.0.2.0', 8000).AndReturn(environ)
self.servr._handle_request(
environ,
mox.IgnoreArg(),
request_type=instance.INTERACTIVE_REQUEST).WithSideEffects(
good_response)
self.mox.ReplayAll()
self.assertRaisesRegexp(module.InteractiveCommandError,
'Instance was restarted while executing command',
self.servr.send_interactive_command,
'print 5+5')
self.mox.VerifyAll()
def test_handle_script_request(self):
self.servr._instance_factory.new_instance(
mox.IgnoreArg(),
expect_ready_request=False).AndReturn(self.inst)
self.inst.start()
self.inst.handle(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.INTERACTIVE_REQUEST).AndReturn(['10\n'])
self.mox.ReplayAll()
self.assertEqual(
['10\n'],
self.servr._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_script_request_busy(self):
self.servr._instance_factory.new_instance(
mox.IgnoreArg(),
expect_ready_request=False).AndReturn(self.inst)
self.inst.start()
self.inst.handle(
self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.INTERACTIVE_REQUEST).AndRaise(instance.CannotAcceptRequests())
self.inst.wait(mox.IgnoreArg())
self.inst.handle(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.INTERACTIVE_REQUEST).AndReturn(['10\n'])
self.mox.ReplayAll()
self.assertEqual(
['10\n'],
self.servr._handle_script_request(self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
def test_handle_script_request_timeout(self):
self.servr._MAX_REQUEST_WAIT_TIME = 0
start_response = start_response_utils.CapturingStartResponse()
self.mox.ReplayAll()
self.assertEqual(
['The command timed-out while waiting for another one to complete'],
self.servr._handle_script_request(self.environ,
start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
self.assertEqual('503 Service Unavailable',
start_response.status)
def test_handle_script_request_restart(self):
def restart_and_raise(*args):
self.servr._inst = None
raise httplib.BadStatusLine('line')
start_response = start_response_utils.CapturingStartResponse()
self.servr._instance_factory.new_instance(
mox.IgnoreArg(),
expect_ready_request=False).AndReturn(self.inst)
self.inst.start()
self.inst.handle(
self.environ,
start_response,
self.url_map,
self.match,
self.request_id,
instance.INTERACTIVE_REQUEST).WithSideEffects(restart_and_raise)
self.mox.ReplayAll()
self.assertEqual(
['Instance was restarted while executing command'],
self.servr._handle_script_request(self.environ,
start_response,
self.url_map,
self.match,
self.request_id))
self.mox.VerifyAll()
self.assertEqual('503 Service Unavailable',
start_response.status)
def test_handle_script_request_unexpected_instance_exception(self):
self.servr._instance_factory.new_instance(
mox.IgnoreArg(),
expect_ready_request=False).AndReturn(self.inst)
self.inst.start()
self.inst.handle(
self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id,
instance.INTERACTIVE_REQUEST).AndRaise(httplib.BadStatusLine('line'))
self.mox.ReplayAll()
self.assertRaises(
httplib.BadStatusLine,
self.servr._handle_script_request,
self.environ,
self.start_response,
self.url_map,
self.match,
self.request_id)
self.mox.VerifyAll()
if __name__ == '__main__':
unittest.main()
| dvliman/jaikuengine | .google_appengine/google/appengine/tools/devappserver2/module_test.py | Python | apache-2.0 | 90,007 |
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pyasn1/license.html
#
from pyasn1 import debug
from pyasn1 import error
from pyasn1.codec.ber import eoo
from pyasn1.compat.integer import from_bytes
from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, null
from pyasn1.type import base
from pyasn1.type import char
from pyasn1.type import tag
from pyasn1.type import tagmap
from pyasn1.type import univ
from pyasn1.type import useful
__all__ = ['decode']
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
noValue = base.noValue
class AbstractDecoder(object):
protoComponent = None
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
class AbstractSimpleDecoder(AbstractDecoder):
@staticmethod
def substrateCollector(asn1Object, substrate, length):
return substrate[:length], substrate[length:]
def _createComponent(self, asn1Spec, tagSet, value, **options):
if options.get('native'):
return value
elif asn1Spec is None:
return self.protoComponent.clone(value, tagSet=tagSet)
elif value is noValue:
return asn1Spec
else:
return asn1Spec.clone(value)
class ExplicitTagDecoder(AbstractSimpleDecoder):
protoComponent = univ.Any('')
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if substrateFun:
return substrateFun(
self._createComponent(asn1Spec, tagSet, '', **options),
substrate, length
)
head, tail = substrate[:length], substrate[length:]
value, _ = decodeFun(head, asn1Spec, tagSet, length, **options)
if LOG:
LOG('explicit tag container carries %d octets of trailing payload '
'(will be lost!): %s' % (len(_), debug.hexdump(_)))
return value, tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if substrateFun:
return substrateFun(
self._createComponent(asn1Spec, tagSet, '', **options),
substrate, length
)
value, substrate = decodeFun(substrate, asn1Spec, tagSet, length, **options)
eooMarker, substrate = decodeFun(substrate, allowEoo=True, **options)
if eooMarker is eoo.endOfOctets:
return value, substrate
else:
raise error.PyAsn1Error('Missing end-of-octets terminator')
explicitTagDecoder = ExplicitTagDecoder()
class IntegerDecoder(AbstractSimpleDecoder):
protoComponent = univ.Integer(0)
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatSimple:
raise error.PyAsn1Error('Simple tag format expected')
head, tail = substrate[:length], substrate[length:]
if not head:
return self._createComponent(asn1Spec, tagSet, 0, **options), tail
value = from_bytes(head, signed=True)
return self._createComponent(asn1Spec, tagSet, value, **options), tail
class BooleanDecoder(IntegerDecoder):
protoComponent = univ.Boolean(0)
def _createComponent(self, asn1Spec, tagSet, value, **options):
return IntegerDecoder._createComponent(
self, asn1Spec, tagSet, value and 1 or 0, **options)
class BitStringDecoder(AbstractSimpleDecoder):
protoComponent = univ.BitString(())
supportConstructedForm = True
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
head, tail = substrate[:length], substrate[length:]
if substrateFun:
return substrateFun(self._createComponent(
asn1Spec, tagSet, noValue, **options), substrate, length)
if not head:
raise error.PyAsn1Error('Empty BIT STRING substrate')
if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check?
trailingBits = oct2int(head[0])
if trailingBits > 7:
raise error.PyAsn1Error(
'Trailing bits overflow %s' % trailingBits
)
value = self.protoComponent.fromOctetString(
head[1:], internalFormat=True, padding=trailingBits)
return self._createComponent(asn1Spec, tagSet, value, **options), tail
if not self.supportConstructedForm:
raise error.PyAsn1Error('Constructed encoding form prohibited '
'at %s' % self.__class__.__name__)
if LOG:
LOG('assembling constructed serialization')
# All inner fragments are of the same type, treat them as octet string
substrateFun = self.substrateCollector
bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
while head:
component, head = decodeFun(head, self.protoComponent,
substrateFun=substrateFun, **options)
trailingBits = oct2int(component[0])
if trailingBits > 7:
raise error.PyAsn1Error(
'Trailing bits overflow %s' % trailingBits
)
bitString = self.protoComponent.fromOctetString(
component[1:], internalFormat=True,
prepend=bitString, padding=trailingBits
)
return self._createComponent(asn1Spec, tagSet, bitString, **options), tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if substrateFun:
return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), substrate, length)
# All inner fragments are of the same type, treat them as octet string
substrateFun = self.substrateCollector
bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
while substrate:
component, substrate = decodeFun(substrate, self.protoComponent,
substrateFun=substrateFun,
allowEoo=True, **options)
if component is eoo.endOfOctets:
break
trailingBits = oct2int(component[0])
if trailingBits > 7:
raise error.PyAsn1Error(
'Trailing bits overflow %s' % trailingBits
)
bitString = self.protoComponent.fromOctetString(
component[1:], internalFormat=True,
prepend=bitString, padding=trailingBits
)
else:
raise error.SubstrateUnderrunError('No EOO seen before substrate ends')
return self._createComponent(asn1Spec, tagSet, bitString, **options), substrate
class OctetStringDecoder(AbstractSimpleDecoder):
protoComponent = univ.OctetString('')
supportConstructedForm = True
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
head, tail = substrate[:length], substrate[length:]
if substrateFun:
return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
substrate, length)
if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check?
return self._createComponent(asn1Spec, tagSet, head, **options), tail
if not self.supportConstructedForm:
raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__)
if LOG:
LOG('assembling constructed serialization')
# All inner fragments are of the same type, treat them as octet string
substrateFun = self.substrateCollector
header = null
while head:
component, head = decodeFun(head, self.protoComponent,
substrateFun=substrateFun,
**options)
header += component
return self._createComponent(asn1Spec, tagSet, header, **options), tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if substrateFun and substrateFun is not self.substrateCollector:
asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
return substrateFun(asn1Object, substrate, length)
# All inner fragments are of the same type, treat them as octet string
substrateFun = self.substrateCollector
header = null
while substrate:
component, substrate = decodeFun(substrate,
self.protoComponent,
substrateFun=substrateFun,
allowEoo=True, **options)
if component is eoo.endOfOctets:
break
header += component
else:
raise error.SubstrateUnderrunError(
'No EOO seen before substrate ends'
)
return self._createComponent(asn1Spec, tagSet, header, **options), substrate
class NullDecoder(AbstractSimpleDecoder):
protoComponent = univ.Null('')
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatSimple:
raise error.PyAsn1Error('Simple tag format expected')
head, tail = substrate[:length], substrate[length:]
component = self._createComponent(asn1Spec, tagSet, '', **options)
if head:
raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
return component, tail
class ObjectIdentifierDecoder(AbstractSimpleDecoder):
protoComponent = univ.ObjectIdentifier(())
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatSimple:
raise error.PyAsn1Error('Simple tag format expected')
head, tail = substrate[:length], substrate[length:]
if not head:
raise error.PyAsn1Error('Empty substrate')
head = octs2ints(head)
oid = ()
index = 0
substrateLen = len(head)
while index < substrateLen:
subId = head[index]
index += 1
if subId < 128:
oid += (subId,)
elif subId > 128:
# Construct subid from a number of octets
nextSubId = subId
subId = 0
while nextSubId >= 128:
subId = (subId << 7) + (nextSubId & 0x7F)
if index >= substrateLen:
raise error.SubstrateUnderrunError(
'Short substrate for sub-OID past %s' % (oid,)
)
nextSubId = head[index]
index += 1
oid += ((subId << 7) + nextSubId,)
elif subId == 128:
# ASN.1 spec forbids leading zeros (0x80) in OID
# encoding, tolerating it opens a vulnerability. See
# https://www.esat.kuleuven.be/cosic/publications/article-1432.pdf
# page 7
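                # e.g. sub-identifier 1 must be encoded as the single octet
                # 0x01; the zero-padded form 0x80 0x01 is rejected here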
raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
# Decode two leading arcs
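        # e.g. a first decoded sub-identifier of 43 (40 * 1 + 3) yields the
        # leading arcs (1, 3), i.e. an OID starting with 1.3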
if 0 <= oid[0] <= 39:
oid = (0,) + oid
elif 40 <= oid[0] <= 79:
oid = (1, oid[0] - 40) + oid[1:]
elif oid[0] >= 80:
oid = (2, oid[0] - 80) + oid[1:]
else:
raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
return self._createComponent(asn1Spec, tagSet, oid, **options), tail
class RealDecoder(AbstractSimpleDecoder):
protoComponent = univ.Real()
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatSimple:
raise error.PyAsn1Error('Simple tag format expected')
head, tail = substrate[:length], substrate[length:]
if not head:
return self._createComponent(asn1Spec, tagSet, 0.0, **options), tail
fo = oct2int(head[0])
head = head[1:]
if fo & 0x80: # binary encoding
if not head:
raise error.PyAsn1Error("Incomplete floating-point value")
if LOG:
LOG('decoding binary encoded REAL')
n = (fo & 0x03) + 1
if n == 4:
n = oct2int(head[0])
head = head[1:]
eo, head = head[:n], head[n:]
if not eo or not head:
raise error.PyAsn1Error('Real exponent screwed')
e = oct2int(eo[0]) & 0x80 and -1 or 0
while eo: # exponent
e <<= 8
e |= oct2int(eo[0])
eo = eo[1:]
b = fo >> 4 & 0x03 # base bits
if b > 2:
raise error.PyAsn1Error('Illegal Real base')
if b == 1: # encbase = 8
e *= 3
elif b == 2: # encbase = 16
e *= 4
p = 0
while head: # value
p <<= 8
p |= oct2int(head[0])
head = head[1:]
if fo & 0x40: # sign bit
p = -p
sf = fo >> 2 & 0x03 # scale bits
p *= 2 ** sf
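            # univ.Real accepts a (mantissa, base, exponent) tuple,
            # i.e. the decoded value below is p * 2 ** e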
value = (p, 2, e)
elif fo & 0x40: # infinite value
if LOG:
LOG('decoding infinite REAL')
value = fo & 0x01 and '-inf' or 'inf'
elif fo & 0xc0 == 0: # character encoding
if not head:
raise error.PyAsn1Error("Incomplete floating-point value")
if LOG:
LOG('decoding character encoded REAL')
try:
if fo & 0x3 == 0x1: # NR1
value = (int(head), 10, 0)
elif fo & 0x3 == 0x2: # NR2
value = float(head)
elif fo & 0x3 == 0x3: # NR3
value = float(head)
else:
raise error.SubstrateUnderrunError(
'Unknown NR (tag %s)' % fo
)
except ValueError:
raise error.SubstrateUnderrunError(
'Bad character Real syntax'
)
else:
raise error.SubstrateUnderrunError(
'Unknown encoding (tag %s)' % fo
)
return self._createComponent(asn1Spec, tagSet, value, **options), tail
class AbstractConstructedDecoder(AbstractDecoder):
protoComponent = None
class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
protoRecordComponent = None
protoSequenceComponent = None
def _getComponentTagMap(self, asn1Object, idx):
raise NotImplementedError()
def _getComponentPositionByType(self, asn1Object, tagSet, idx):
raise NotImplementedError()
def _decodeComponents(self, substrate, tagSet=None, decodeFun=None, **options):
components = []
componentTypes = set()
while substrate:
component, substrate = decodeFun(substrate, **options)
if component is eoo.endOfOctets:
break
components.append(component)
componentTypes.add(component.tagSet)
        # Now we have to guess whether it is SEQUENCE/SET or SEQUENCE OF/SET OF.
        # The heuristic is:
# * 1+ components of different types -> likely SEQUENCE/SET
# * otherwise -> likely SEQUENCE OF/SET OF
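        # e.g. components decoded as (INTEGER, OCTET STRING) suggest a record
        # type (SEQUENCE/SET), while (INTEGER, INTEGER, INTEGER) suggests a
        # homogeneous container (SEQUENCE OF/SET OF)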
if len(componentTypes) > 1:
protoComponent = self.protoRecordComponent
else:
protoComponent = self.protoSequenceComponent
asn1Object = protoComponent.clone(
# construct tagSet from base tag from prototype ASN.1 object
# and additional tags recovered from the substrate
tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags)
)
if LOG:
LOG('guessed %r container type (pass `asn1Spec` to guide the '
'decoder)' % asn1Object)
for idx, component in enumerate(components):
asn1Object.setComponentByPosition(
idx, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False
)
return asn1Object, substrate
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatConstructed:
raise error.PyAsn1Error('Constructed tag format expected')
head, tail = substrate[:length], substrate[length:]
if substrateFun is not None:
if asn1Spec is not None:
asn1Object = asn1Spec.clone()
elif self.protoComponent is not None:
asn1Object = self.protoComponent.clone(tagSet=tagSet)
else:
asn1Object = self.protoRecordComponent, self.protoSequenceComponent
return substrateFun(asn1Object, substrate, length)
if asn1Spec is None:
asn1Object, trailing = self._decodeComponents(
head, tagSet=tagSet, decodeFun=decodeFun, **options
)
if trailing:
if LOG:
LOG('Unused trailing %d octets encountered: %s' % (
len(trailing), debug.hexdump(trailing)))
return asn1Object, tail
asn1Object = asn1Spec.clone()
asn1Object.clear()
if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
namedTypes = asn1Spec.componentType
isSetType = asn1Spec.typeId == univ.Set.typeId
isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
if LOG:
LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
asn1Spec))
seenIndices = set()
idx = 0
while head:
if not namedTypes:
componentType = None
elif isSetType:
componentType = namedTypes.tagMapUnique
else:
try:
if isDeterministic:
componentType = namedTypes[idx].asn1Object
elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
componentType = namedTypes.getTagMapNearPosition(idx)
else:
componentType = namedTypes[idx].asn1Object
except IndexError:
raise error.PyAsn1Error(
'Excessive components decoded at %r' % (asn1Spec,)
)
component, head = decodeFun(head, componentType, **options)
if not isDeterministic and namedTypes:
if isSetType:
idx = namedTypes.getPositionByType(component.effectiveTagSet)
elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
asn1Object.setComponentByPosition(
idx, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False
)
seenIndices.add(idx)
idx += 1
if LOG:
LOG('seen component indices %s' % seenIndices)
if namedTypes:
if not namedTypes.requiredComponents.issubset(seenIndices):
raise error.PyAsn1Error(
'ASN.1 object %s has uninitialized '
'components' % asn1Object.__class__.__name__)
if namedTypes.hasOpenTypes:
openTypes = options.get('openTypes', {})
if LOG:
LOG('using open types map: %r' % openTypes)
if openTypes or options.get('decodeOpenTypes', False):
for idx, namedType in enumerate(namedTypes.namedTypes):
if not namedType.openType:
continue
if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
continue
governingValue = asn1Object.getComponentByName(
namedType.openType.name
)
try:
openType = openTypes[governingValue]
except KeyError:
try:
openType = namedType.openType[governingValue]
except KeyError:
if LOG:
LOG('failed to resolve open type by governing '
'value %r' % (governingValue,))
continue
if LOG:
LOG('resolved open type %r by governing '
'value %r' % (openType, governingValue))
containerValue = asn1Object.getComponentByPosition(idx)
if containerValue.typeId in (
univ.SetOf.typeId, univ.SequenceOf.typeId):
for pos, containerElement in enumerate(
containerValue):
component, rest = decodeFun(
containerValue[pos].asOctets(),
asn1Spec=openType, **options
)
containerValue[pos] = component
else:
component, rest = decodeFun(
asn1Object.getComponentByPosition(idx).asOctets(),
asn1Spec=openType, **options
)
asn1Object.setComponentByPosition(idx, component)
else:
asn1Object.verifySizeSpec()
else:
asn1Object = asn1Spec.clone()
asn1Object.clear()
componentType = asn1Spec.componentType
if LOG:
LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
idx = 0
while head:
component, head = decodeFun(head, componentType, **options)
asn1Object.setComponentByPosition(
idx, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False
)
idx += 1
return asn1Object, tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if tagSet[0].tagFormat != tag.tagFormatConstructed:
raise error.PyAsn1Error('Constructed tag format expected')
if substrateFun is not None:
if asn1Spec is not None:
asn1Object = asn1Spec.clone()
elif self.protoComponent is not None:
asn1Object = self.protoComponent.clone(tagSet=tagSet)
else:
asn1Object = self.protoRecordComponent, self.protoSequenceComponent
return substrateFun(asn1Object, substrate, length)
if asn1Spec is None:
return self._decodeComponents(
substrate, tagSet=tagSet, decodeFun=decodeFun,
**dict(options, allowEoo=True)
)
asn1Object = asn1Spec.clone()
asn1Object.clear()
if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
namedTypes = asn1Object.componentType
isSetType = asn1Object.typeId == univ.Set.typeId
isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
if LOG:
LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
asn1Spec))
seenIndices = set()
idx = 0
while substrate:
if len(namedTypes) <= idx:
asn1Spec = None
elif isSetType:
asn1Spec = namedTypes.tagMapUnique
else:
try:
if isDeterministic:
asn1Spec = namedTypes[idx].asn1Object
elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
asn1Spec = namedTypes.getTagMapNearPosition(idx)
else:
asn1Spec = namedTypes[idx].asn1Object
except IndexError:
raise error.PyAsn1Error(
'Excessive components decoded at %r' % (asn1Object,)
)
component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True, **options)
if component is eoo.endOfOctets:
break
if not isDeterministic and namedTypes:
if isSetType:
idx = namedTypes.getPositionByType(component.effectiveTagSet)
elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
asn1Object.setComponentByPosition(
idx, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False
)
seenIndices.add(idx)
idx += 1
else:
raise error.SubstrateUnderrunError(
'No EOO seen before substrate ends'
)
if LOG:
LOG('seen component indices %s' % seenIndices)
if namedTypes:
if not namedTypes.requiredComponents.issubset(seenIndices):
raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__)
if namedTypes.hasOpenTypes:
openTypes = options.get('openTypes', {})
if LOG:
LOG('using open types map: %r' % openTypes)
if openTypes or options.get('decodeOpenTypes', False):
for idx, namedType in enumerate(namedTypes.namedTypes):
if not namedType.openType:
continue
if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
continue
governingValue = asn1Object.getComponentByName(
namedType.openType.name
)
try:
openType = openTypes[governingValue]
except KeyError:
try:
openType = namedType.openType[governingValue]
except KeyError:
if LOG:
LOG('failed to resolve open type by governing '
'value %r' % (governingValue,))
continue
if LOG:
LOG('resolved open type %r by governing '
'value %r' % (openType, governingValue))
containerValue = asn1Object.getComponentByPosition(idx)
if containerValue.typeId in (
univ.SetOf.typeId, univ.SequenceOf.typeId):
for pos, containerElement in enumerate(
containerValue):
component, rest = decodeFun(
containerValue[pos].asOctets(),
asn1Spec=openType, **dict(options, allowEoo=True)
)
containerValue[pos] = component
else:
component, rest = decodeFun(
asn1Object.getComponentByPosition(idx).asOctets(),
asn1Spec=openType, **dict(options, allowEoo=True)
)
if component is not eoo.endOfOctets:
asn1Object.setComponentByPosition(idx, component)
else:
asn1Object.verifySizeSpec()
else:
asn1Object = asn1Spec.clone()
asn1Object.clear()
componentType = asn1Spec.componentType
if LOG:
LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
idx = 0
while substrate:
component, substrate = decodeFun(substrate, componentType, allowEoo=True, **options)
if component is eoo.endOfOctets:
break
asn1Object.setComponentByPosition(
idx, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False
)
idx += 1
else:
raise error.SubstrateUnderrunError(
'No EOO seen before substrate ends'
)
return asn1Object, substrate
class SequenceOrSequenceOfDecoder(UniversalConstructedTypeDecoder):
protoRecordComponent = univ.Sequence()
protoSequenceComponent = univ.SequenceOf()
class SequenceDecoder(SequenceOrSequenceOfDecoder):
protoComponent = univ.Sequence()
class SequenceOfDecoder(SequenceOrSequenceOfDecoder):
protoComponent = univ.SequenceOf()
class SetOrSetOfDecoder(UniversalConstructedTypeDecoder):
protoRecordComponent = univ.Set()
protoSequenceComponent = univ.SetOf()
class SetDecoder(SetOrSetOfDecoder):
protoComponent = univ.Set()
class SetOfDecoder(SetOrSetOfDecoder):
protoComponent = univ.SetOf()
class ChoiceDecoder(AbstractConstructedDecoder):
protoComponent = univ.Choice()
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
head, tail = substrate[:length], substrate[length:]
if asn1Spec is None:
asn1Object = self.protoComponent.clone(tagSet=tagSet)
else:
asn1Object = asn1Spec.clone()
if substrateFun:
return substrateFun(asn1Object, substrate, length)
if asn1Object.tagSet == tagSet:
if LOG:
LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
component, head = decodeFun(
head, asn1Object.componentTagMap, **options
)
else:
if LOG:
LOG('decoding %s as untagged CHOICE' % (tagSet,))
component, head = decodeFun(
head, asn1Object.componentTagMap,
tagSet, length, state, **options
)
effectiveTagSet = component.effectiveTagSet
if LOG:
LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
asn1Object.setComponentByType(
effectiveTagSet, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False,
innerFlag=False
)
return asn1Object, tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if asn1Spec is None:
asn1Object = self.protoComponent.clone(tagSet=tagSet)
else:
asn1Object = asn1Spec.clone()
if substrateFun:
return substrateFun(asn1Object, substrate, length)
if asn1Object.tagSet == tagSet:
if LOG:
LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
component, substrate = decodeFun(
substrate, asn1Object.componentType.tagMapUnique, **options
)
# eat up EOO marker
eooMarker, substrate = decodeFun(
substrate, allowEoo=True, **options
)
if eooMarker is not eoo.endOfOctets:
raise error.PyAsn1Error('No EOO seen before substrate ends')
else:
if LOG:
LOG('decoding %s as untagged CHOICE' % (tagSet,))
component, substrate = decodeFun(
substrate, asn1Object.componentType.tagMapUnique,
tagSet, length, state, **options
)
effectiveTagSet = component.effectiveTagSet
if LOG:
LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
asn1Object.setComponentByType(
effectiveTagSet, component,
verifyConstraints=False,
matchTags=False, matchConstraints=False,
innerFlag=False
)
return asn1Object, substrate
class AnyDecoder(AbstractSimpleDecoder):
protoComponent = univ.Any()
def valueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if asn1Spec is None:
isUntagged = True
elif asn1Spec.__class__ is tagmap.TagMap:
isUntagged = tagSet not in asn1Spec.tagMap
else:
isUntagged = tagSet != asn1Spec.tagSet
if isUntagged:
fullSubstrate = options['fullSubstrate']
# untagged Any container, recover inner header substrate
length += len(fullSubstrate) - len(substrate)
substrate = fullSubstrate
if LOG:
LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate))
if substrateFun:
return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
substrate, length)
head, tail = substrate[:length], substrate[length:]
return self._createComponent(asn1Spec, tagSet, head, **options), tail
def indefLenValueDecoder(self, substrate, asn1Spec,
tagSet=None, length=None, state=None,
decodeFun=None, substrateFun=None,
**options):
if asn1Spec is None:
isTagged = False
elif asn1Spec.__class__ is tagmap.TagMap:
isTagged = tagSet in asn1Spec.tagMap
else:
isTagged = tagSet == asn1Spec.tagSet
if isTagged:
# tagged Any type -- consume header substrate
header = null
if LOG:
LOG('decoding as tagged ANY')
else:
fullSubstrate = options['fullSubstrate']
# untagged Any, recover header substrate
header = fullSubstrate[:-len(substrate)]
if LOG:
LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header))
# Any components do not inherit initial tag
asn1Spec = self.protoComponent
if substrateFun and substrateFun is not self.substrateCollector:
asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
return substrateFun(asn1Object, header + substrate, length + len(header))
if LOG:
LOG('assembling constructed serialization')
# All inner fragments are of the same type, treat them as octet string
substrateFun = self.substrateCollector
while substrate:
component, substrate = decodeFun(substrate, asn1Spec,
substrateFun=substrateFun,
allowEoo=True, **options)
if component is eoo.endOfOctets:
break
header += component
else:
raise error.SubstrateUnderrunError(
'No EOO seen before substrate ends'
)
if substrateFun:
return header, substrate
else:
return self._createComponent(asn1Spec, tagSet, header, **options), substrate
# character string types
class UTF8StringDecoder(OctetStringDecoder):
protoComponent = char.UTF8String()
class NumericStringDecoder(OctetStringDecoder):
protoComponent = char.NumericString()
class PrintableStringDecoder(OctetStringDecoder):
protoComponent = char.PrintableString()
class TeletexStringDecoder(OctetStringDecoder):
protoComponent = char.TeletexString()
class VideotexStringDecoder(OctetStringDecoder):
protoComponent = char.VideotexString()
class IA5StringDecoder(OctetStringDecoder):
protoComponent = char.IA5String()
class GraphicStringDecoder(OctetStringDecoder):
protoComponent = char.GraphicString()
class VisibleStringDecoder(OctetStringDecoder):
protoComponent = char.VisibleString()
class GeneralStringDecoder(OctetStringDecoder):
protoComponent = char.GeneralString()
class UniversalStringDecoder(OctetStringDecoder):
protoComponent = char.UniversalString()
class BMPStringDecoder(OctetStringDecoder):
protoComponent = char.BMPString()
# "useful" types
class ObjectDescriptorDecoder(OctetStringDecoder):
protoComponent = useful.ObjectDescriptor()
class GeneralizedTimeDecoder(OctetStringDecoder):
protoComponent = useful.GeneralizedTime()
class UTCTimeDecoder(OctetStringDecoder):
protoComponent = useful.UTCTime()
tagMap = {
univ.Integer.tagSet: IntegerDecoder(),
univ.Boolean.tagSet: BooleanDecoder(),
univ.BitString.tagSet: BitStringDecoder(),
univ.OctetString.tagSet: OctetStringDecoder(),
univ.Null.tagSet: NullDecoder(),
univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
univ.Enumerated.tagSet: IntegerDecoder(),
univ.Real.tagSet: RealDecoder(),
univ.Sequence.tagSet: SequenceOrSequenceOfDecoder(), # conflicts with SequenceOf
univ.Set.tagSet: SetOrSetOfDecoder(), # conflicts with SetOf
univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
# character string types
char.UTF8String.tagSet: UTF8StringDecoder(),
char.NumericString.tagSet: NumericStringDecoder(),
char.PrintableString.tagSet: PrintableStringDecoder(),
char.TeletexString.tagSet: TeletexStringDecoder(),
char.VideotexString.tagSet: VideotexStringDecoder(),
char.IA5String.tagSet: IA5StringDecoder(),
char.GraphicString.tagSet: GraphicStringDecoder(),
char.VisibleString.tagSet: VisibleStringDecoder(),
char.GeneralString.tagSet: GeneralStringDecoder(),
char.UniversalString.tagSet: UniversalStringDecoder(),
char.BMPString.tagSet: BMPStringDecoder(),
# useful types
useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
useful.UTCTime.tagSet: UTCTimeDecoder()
}
# Type-to-codec map for ambiguous ASN.1 types
typeMap = {
univ.Set.typeId: SetDecoder(),
univ.SetOf.typeId: SetOfDecoder(),
univ.Sequence.typeId: SequenceDecoder(),
univ.SequenceOf.typeId: SequenceOfDecoder(),
univ.Choice.typeId: ChoiceDecoder(),
univ.Any.typeId: AnyDecoder()
}
# Put in non-ambiguous types for faster codec lookup
for typeDecoder in tagMap.values():
if typeDecoder.protoComponent is not None:
typeId = typeDecoder.protoComponent.__class__.typeId
if typeId is not None and typeId not in typeMap:
typeMap[typeId] = typeDecoder
(stDecodeTag,
stDecodeLength,
stGetValueDecoder,
stGetValueDecoderByAsn1Spec,
stGetValueDecoderByTag,
stTryAsExplicitTag,
stDecodeValue,
stDumpRawValue,
stErrorCondition,
stStop) = [x for x in range(10)]
class Decoder(object):
defaultErrorState = stErrorCondition
#defaultErrorState = stDumpRawValue
defaultRawDecoder = AnyDecoder()
supportIndefLength = True
# noinspection PyDefaultArgument
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
# Tag & TagSet objects caches
self.__tagCache = {}
self.__tagSetCache = {}
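        # Indefinite-length encodings are terminated by two zero octets
        # (0x00 0x00), the end-of-octets sentinel checked in __call__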
self.__eooSentinel = ints2octs((0, 0))
def __call__(self, substrate, asn1Spec=None,
tagSet=None, length=None, state=stDecodeTag,
decodeFun=None, substrateFun=None,
**options):
if LOG:
LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
allowEoo = options.pop('allowEoo', False)
# Look for end-of-octets sentinel
if allowEoo and self.supportIndefLength:
if substrate[:2] == self.__eooSentinel:
if LOG:
LOG('end-of-octets sentinel found')
return eoo.endOfOctets, substrate[2:]
value = noValue
tagMap = self.__tagMap
typeMap = self.__typeMap
tagCache = self.__tagCache
tagSetCache = self.__tagSetCache
fullSubstrate = substrate
while state is not stStop:
if state is stDecodeTag:
if not substrate:
raise error.SubstrateUnderrunError(
'Short octet stream on tag decoding'
)
# Decode tag
isShortTag = True
firstOctet = substrate[0]
substrate = substrate[1:]
try:
lastTag = tagCache[firstOctet]
except KeyError:
integerTag = oct2int(firstOctet)
tagClass = integerTag & 0xC0
tagFormat = integerTag & 0x20
tagId = integerTag & 0x1F
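                    # e.g. identifier octet 0x30: universal class, constructed
                    # form, tag number 16 (SEQUENCE)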
if tagId == 0x1F:
isShortTag = False
lengthOctetIdx = 0
tagId = 0
try:
while True:
integerTag = oct2int(substrate[lengthOctetIdx])
lengthOctetIdx += 1
tagId <<= 7
tagId |= (integerTag & 0x7F)
if not integerTag & 0x80:
break
substrate = substrate[lengthOctetIdx:]
except IndexError:
raise error.SubstrateUnderrunError(
'Short octet stream on long tag decoding'
)
lastTag = tag.Tag(
tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
)
if isShortTag:
# cache short tags
tagCache[firstOctet] = lastTag
if tagSet is None:
if isShortTag:
try:
tagSet = tagSetCache[firstOctet]
except KeyError:
# base tag not recovered
tagSet = tag.TagSet((), lastTag)
tagSetCache[firstOctet] = tagSet
else:
tagSet = tag.TagSet((), lastTag)
else:
tagSet = lastTag + tagSet
state = stDecodeLength
if LOG:
LOG('tag decoded into %s, decoding length' % tagSet)
if state is stDecodeLength:
# Decode length
if not substrate:
raise error.SubstrateUnderrunError(
'Short octet stream on length decoding'
)
firstOctet = oct2int(substrate[0])
if firstOctet < 128:
size = 1
length = firstOctet
elif firstOctet > 128:
size = firstOctet & 0x7F
# encoded in size bytes
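                    # e.g. 0x82 0x01 0x00 is the long form: two length octets
                    # follow, giving length 256 (a first octet of 0x80 selects
                    # the indefinite form handled below)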
encodedLength = octs2ints(substrate[1:size + 1])
                    # no check on maximum size here; that shouldn't be a
                    # problem, since we can handle lengths far larger than
                    # could ever occur in practice
if len(encodedLength) != size:
raise error.SubstrateUnderrunError(
'%s<%s at %s' % (size, len(encodedLength), tagSet)
)
length = 0
for lengthOctet in encodedLength:
length <<= 8
length |= lengthOctet
size += 1
else:
size = 1
length = -1
substrate = substrate[size:]
if length == -1:
if not self.supportIndefLength:
raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
else:
if len(substrate) < length:
raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate)))
state = stGetValueDecoder
if LOG:
LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
if state is stGetValueDecoder:
if asn1Spec is None:
state = stGetValueDecoderByTag
else:
state = stGetValueDecoderByAsn1Spec
#
            # There are two ways of creating subtypes in ASN.1, and the choice
            # influences decoder operation. These methods are:
            # 1) Base types are used as-is, or no IMPLICIT tagging has been
            #    applied on subtyping.
            # 2) Subtype syntax drops base type information (by means of
            #    IMPLICIT tagging).
            # The first case allows for complete tag recovery from the
            # substrate, while the second one requires the original ASN.1 type
            # spec for decoding.
            #
            # In either case a set of tags (tagSet) comes from the substrate
            # in an incremental, tag-by-tag fashion (EXPLICIT tagging being the
            # most basic case). The outermost tag comes first from the wire.
#
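            # e.g. (illustrative): [2] EXPLICIT INTEGER keeps the inner INTEGER
            # tag in the substrate underneath the outer [2] tag, so the base
            # type can be recovered from the wire; [2] IMPLICIT INTEGER
            # replaces it, so an asn1Spec is needed to identify the value.
            #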
if state is stGetValueDecoderByTag:
try:
concreteDecoder = tagMap[tagSet]
except KeyError:
concreteDecoder = None
if concreteDecoder:
state = stDecodeValue
else:
try:
concreteDecoder = tagMap[tagSet[:1]]
except KeyError:
concreteDecoder = None
if concreteDecoder:
state = stDecodeValue
else:
state = stTryAsExplicitTag
if LOG:
LOG('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
if state is stGetValueDecoderByAsn1Spec:
if asn1Spec.__class__ is tagmap.TagMap:
try:
chosenSpec = asn1Spec[tagSet]
except KeyError:
chosenSpec = None
if LOG:
LOG('candidate ASN.1 spec is a map of:')
for firstOctet, v in asn1Spec.presentTypes.items():
LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
if asn1Spec.skipTypes:
LOG('but neither of: ')
for firstOctet, v in asn1Spec.skipTypes.items():
LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
LOG('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '<none>' or chosenSpec.prettyPrintType(), tagSet))
elif tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap:
chosenSpec = asn1Spec
if LOG:
LOG('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
else:
chosenSpec = None
if chosenSpec is not None:
try:
# ambiguous type or just faster codec lookup
concreteDecoder = typeMap[chosenSpec.typeId]
if LOG:
LOG('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,))
except KeyError:
# use base type for codec lookup to recover untagged types
baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag)
try:
# base type or tagged subtype
concreteDecoder = tagMap[baseTagSet]
if LOG:
LOG('value decoder chosen by base %s' % (baseTagSet,))
except KeyError:
concreteDecoder = None
if concreteDecoder:
asn1Spec = chosenSpec
state = stDecodeValue
else:
state = stTryAsExplicitTag
else:
concreteDecoder = None
state = stTryAsExplicitTag
if LOG:
LOG('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
debug.scope.push(chosenSpec is None and '?' or chosenSpec.__class__.__name__)
if state is stDecodeValue:
if not options.get('recursiveFlag', True) and not substrateFun: # deprecate this
substrateFun = lambda a, b, c: (a, b[:c])
options.update(fullSubstrate=fullSubstrate)
if length == -1: # indef length
value, substrate = concreteDecoder.indefLenValueDecoder(
substrate, asn1Spec,
tagSet, length, stGetValueDecoder,
self, substrateFun,
**options
)
else:
value, substrate = concreteDecoder.valueDecoder(
substrate, asn1Spec,
tagSet, length, stGetValueDecoder,
self, substrateFun,
**options
)
if LOG:
LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '<none>'))
state = stStop
break
if state is stTryAsExplicitTag:
if (tagSet and
tagSet[0].tagFormat == tag.tagFormatConstructed and
tagSet[0].tagClass != tag.tagClassUniversal):
# Assume explicit tagging
concreteDecoder = explicitTagDecoder
state = stDecodeValue
else:
concreteDecoder = None
state = self.defaultErrorState
if LOG:
LOG('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as failure'))
if state is stDumpRawValue:
concreteDecoder = self.defaultRawDecoder
if LOG:
LOG('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
state = stDecodeValue
if state is stErrorCondition:
raise error.PyAsn1Error(
'%s not in asn1Spec: %r' % (tagSet, asn1Spec)
)
if LOG:
debug.scope.pop()
LOG('decoder left scope %s, call completed' % debug.scope)
return value, substrate
#: Turns BER octet stream into an ASN.1 object.
#:
#: Takes a BER octet stream and decodes it into an ASN.1 object
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
#: may be a scalar or an arbitrary nested structure.
#:
#: Parameters
#: ----------
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: BER octet-stream
#:
#: Keyword Args
#: ------------
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
#:     being decoded, *asn1Spec* may or may not be required. The most common
#:     reason for it to be required is that the ASN.1 structure is encoded in
#:     *IMPLICIT* tagging mode.
#:
#: Returns
#: -------
#: : :py:class:`tuple`
#: A tuple of pyasn1 object recovered from BER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: and the unprocessed trailing portion of the *substrate* (may be empty)
#:
#: Raises
#: ------
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
#: On decoding errors
#:
#: Examples
#: --------
#: Decode BER serialisation without ASN.1 schema
#:
#: .. code-block:: pycon
#:
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
#: >>> str(s)
#: SequenceOf:
#: 1 2 3
#:
#: Decode BER serialisation with ASN.1 schema
#:
#: .. code-block:: pycon
#:
#: >>> seq = SequenceOf(componentType=Integer())
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
#: >>> str(s)
#: SequenceOf:
#: 1 2 3
#:
decode = Decoder(tagMap, typeMap)
# XXX
# non-recursive decoding; return position rather than substrate
| kawamon/hue | desktop/core/ext-py/pyasn1-0.4.6/pyasn1/codec/ber/decoder.py | Python | apache-2.0 | 58,050 |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
import os
def data_generator():
data = [0, 1, 2, 3]
for val in data:
yield val
class TestDistributedReader(unittest.TestCase):
def test_distributed_reader(self):
trainer_num = 4
os.environ['PADDLE_TRAINER_ID'] = str(1)
os.environ['PADDLE_TRAINERS_NUM'] = str(trainer_num)
reader = fluid.contrib.reader.distributed_batch_reader(data_generator)
data = next(reader())
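        # With PADDLE_TRAINERS_NUM=4 and PADDLE_TRAINER_ID=1, the distributed
        # reader is expected to yield element 1 of [0, 1, 2, 3].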
assert data == 1
os.unsetenv('PADDLE_TRAINER_ID')
os.unsetenv('PADDLE_TRAINERS_NUM')
if __name__ == '__main__':
unittest.main()
| tensor-tang/Paddle | python/paddle/fluid/contrib/tests/test_distributed_reader.py | Python | apache-2.0 | 1,305 |
"""
Plots a scatter plot of two provided metrics.
Data can be read either from Postgres or from a CSV file.
"""
from matplotlib.colors import LogNorm
from mpl_toolkits.mplot3d import Axes3D
import sys
import numpy as np
import argparse
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
from common import add_db_args
from common import add_plot_limit_args
from common import set_db_connection
from common import set_plot_limits
def parse_args(*argument_list):
parser = argparse.ArgumentParser()
source_group = parser.add_mutually_exclusive_group(required=True)
source_group.add_argument('--csv')
source_group.add_argument('--table')
source_group.add_argument('--query')
plot_type_group = parser.add_mutually_exclusive_group(required=True)
plot_type_group.add_argument('--scatter', nargs=2)
plot_type_group.add_argument('--histogram')
plot_type_group.add_argument('--hist2d', nargs=2)
plot_type_group.add_argument('--scatter3d', nargs=3)
parser.add_argument('--histogram-bins', type=int, default=100)
parser.add_argument('--filter-num-rtus', type=int)
parser.add_argument('--filter-controller', type=int)
parser.add_argument('--labels',
help='Labels for labeled data (different colors on the '
'plot)')
parser.add_argument('--miscellaneous-cutoff', type=float, default=0.001,
                      help='Fraction of the data a label must account for in '
                           'order to be shown in the plot')
parser.add_argument('--do-not-scale-down', action='store_false',
dest='scale_down')
parser.add_argument('--scale-down', action='store_true')
parser.add_argument('--savefig')
add_plot_limit_args(parser)
add_db_args(parser)
args = parser.parse_args(*argument_list)
if args.csv is None:
set_db_connection(args)
return args
def plot_scatter3d(data, args):
data = data[data[args.scatter3d[0]].notnull()][data[args.scatter3d[1]].notnull()][data[args.scatter3d[2]].notnull()]
data = data[:100000]
print len(data)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(data[args.scatter3d[0]],
data[args.scatter3d[1]],
data[args.scatter3d[2]])
def _plot_hist2d(data, args):
data = data[data[args.hist2d[0]].notnull()][data[args.hist2d[1]].notnull()]
if data.shape[0] < 1000:
sys.exit(1)
df = data.replace([np.inf, -np.inf], np.nan).dropna(subset=args.hist2d)
plt.hist2d(df[args.hist2d[0]].astype(float),
df[args.hist2d[1]].astype(float),
bins=args.histogram_bins,
norm=LogNorm())
plt.colorbar()
set_plot_limits(plt, args)
plt.xlabel(args.hist2d[0])
plt.ylabel(args.hist2d[1])
set_plot_limits(plt, args)
plt.title("N = {}".format(data.shape[0]))
def plot_distribution(args):
if args.csv is not None:
data = pd.read_csv(args.csv)
print ' '.join(list(data.columns.values))
if args.filter_num_rtus:
print 'before filtering size =', data.shape[0]
data = data[data['num_rtus'] == args.filter_num_rtus]
print 'after filtering size =', data.shape[0]
if args.filter_controller:
print 'before filtering size =', data.shape[0]
data = data[data['controller_id'] == args.filter_controller]
print 'after filtering size =', data.shape[0]
if 'controller_id' in data:
print 'total controller_ids included =', len(set(data['controller_id']))
if 'num_rtus' in data:
print 'distinct num_rtus =', len(set(data['num_rtus'])), set(data['num_rtus'])
else:
cursor = args.db_connection.cursor()
cursor.execute("select relname from pg_class where relkind='r' and relname !~ '^(pg_|sql_)';") # noqa
if args.query:
with open(args.query, 'r') as infile:
sql = ''.join(list(infile))
else:
sql = """
SELECT {select} FROM {table};
""".format(select='*', table=args.table)
print sql
cursor.execute(sql)
colnames = [desc[0] for desc in cursor.description]
data = pd.DataFrame(cursor.fetchall(), columns=colnames)
# Set args.data, so we can pass only args to functions
args.data = data
data_size = data.shape[0]
if args.scatter is not None:
if args.labels:
interesting_data = data[[args.scatter[0], args.scatter[1], args.labels]]
different_labels = set(data[args.labels])
for label, color in zip(different_labels,
matplotlib.colors.cnames.keys()):
df = interesting_data.query('{column} == "{label}"'.format(
column=args.labels, label=label))
plt.scatter(df[args.scatter[0]], df[args.scatter[1]],
c=color, label=label)
else:
      plt.scatter(data[args.scatter[0]], data[args.scatter[1]])
plt.xlabel(args.scatter[0])
plt.ylabel(args.scatter[1])
elif args.histogram is not None:
if args.labels:
interesting_data = data[[args.histogram, args.labels]]
different_labels = set(data[args.labels])
data_to_plot, colors_to_use, labels_to_show = [], [], []
miscellaneous_labels = set()
misc_frame, misc_color = pd.DataFrame(), None
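      # Labels accounting for less than args.miscellaneous_cutoff of all rows
      # are pooled into a single 'miscellaneous' bucket below.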
for label, color in zip(different_labels,
matplotlib.colors.cnames.keys()):
df = interesting_data.query('{column} == "{label}"'.format(
column=args.labels, label=label))
if df.shape[0] < args.miscellaneous_cutoff * data_size:
miscellaneous_labels.add(label)
misc_frame = pd.concat([misc_frame, df[args.histogram]])
misc_color = color
continue
labels_to_show.append('{label} ({count})'.format(label=label,
count=df.shape[0]))
data_to_plot.append(df[args.histogram])
colors_to_use.append(color)
if misc_color is not None:
labels_to_show.append('miscellaneous ({count})'.format(
count=misc_frame.shape[0]))
data_to_plot.append(misc_frame)
# colors_to_use.append(misc_color)
colors_to_use.append('cyan')
plt.hist(data_to_plot, args.histogram_bins, histtype='bar',
color=colors_to_use, label=labels_to_show)
else:
df = data.replace([np.inf, -np.inf], np.nan).dropna(subset=[args.histogram])
plt.hist(df[args.histogram].astype(float),
bins=args.histogram_bins,
label=args.histogram)
plt.yscale('log')
plt.xlabel(args.histogram)
if args.scale_down:
plt.ylim(ymax=int(data_size * args.miscellaneous_cutoff))
elif args.hist2d is not None:
_plot_hist2d(data, args)
elif args.scatter3d is not None:
plot_scatter3d(data, args)
plt.legend()
if not args.scatter3d and not args.histogram:
set_plot_limits(plt, args)
if args.savefig is not None:
plt.savefig(args.savefig, dpi=320)
plt.clf()
else:
plt.show()
if __name__ == '__main__':
args = parse_args()
plot_distribution(args)
| kexinrong/macrobase | tools/py_analysis/plot_distribution.py | Python | apache-2.0 | 7,071 |
"""Implementation of basic magic functions."""
import argparse
import textwrap
import io
import sys
from pprint import pformat
from IPython.core import magic_arguments, page
from IPython.core.error import UsageError
from IPython.core.magic import Magics, magics_class, line_magic, magic_escapes
from IPython.utils.text import format_screen, dedent, indent
from IPython.testing.skipdoctest import skip_doctest
from IPython.utils.ipstruct import Struct
from warnings import warn
from logging import error
class MagicsDisplay(object):
def __init__(self, magics_manager, ignore=None):
self.ignore = ignore if ignore else []
self.magics_manager = magics_manager
def _lsmagic(self):
"""The main implementation of the %lsmagic"""
mesc = magic_escapes['line']
cesc = magic_escapes['cell']
mman = self.magics_manager
magics = mman.lsmagic()
out = ['Available line magics:',
mesc + (' '+mesc).join(sorted([m for m,v in magics['line'].items() if (v not in self.ignore)])),
'',
'Available cell magics:',
cesc + (' '+cesc).join(sorted([m for m,v in magics['cell'].items() if (v not in self.ignore)])),
'',
mman.auto_status()]
return '\n'.join(out)
def _repr_pretty_(self, p, cycle):
p.text(self._lsmagic())
def __str__(self):
return self._lsmagic()
def _jsonable(self):
"""turn magics dict into jsonable dict of the same structure
replaces object instances with their class names as strings
"""
magic_dict = {}
mman = self.magics_manager
magics = mman.lsmagic()
for key, subdict in magics.items():
d = {}
magic_dict[key] = d
for name, obj in subdict.items():
try:
classname = obj.__self__.__class__.__name__
except AttributeError:
classname = 'Other'
d[name] = classname
return magic_dict
def _repr_json_(self):
return self._jsonable()
@magics_class
class BasicMagics(Magics):
"""Magics that provide central IPython functionality.
These are various magics that don't fit into specific categories but that
are all part of the base 'IPython experience'."""
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'-l', '--line', action='store_true',
help="""Create a line magic alias."""
)
@magic_arguments.argument(
'-c', '--cell', action='store_true',
help="""Create a cell magic alias."""
)
@magic_arguments.argument(
'name',
help="""Name of the magic to be created."""
)
@magic_arguments.argument(
'target',
help="""Name of the existing line or cell magic."""
)
@magic_arguments.argument(
'-p', '--params', default=None,
help="""Parameters passed to the magic function."""
)
@line_magic
def alias_magic(self, line=''):
"""Create an alias for an existing line or cell magic.
Examples
--------
::
In [1]: %alias_magic t timeit
Created `%t` as an alias for `%timeit`.
Created `%%t` as an alias for `%%timeit`.
In [2]: %t -n1 pass
1 loops, best of 3: 954 ns per loop
In [3]: %%t -n1
...: pass
...:
1 loops, best of 3: 954 ns per loop
In [4]: %alias_magic --cell whereami pwd
UsageError: Cell magic function `%%pwd` not found.
In [5]: %alias_magic --line whereami pwd
Created `%whereami` as an alias for `%pwd`.
In [6]: %whereami
Out[6]: u'/home/testuser'
In [7]: %alias_magic h history -p "-l 30" --line
Created `%h` as an alias for `%history -l 30`.
"""
args = magic_arguments.parse_argstring(self.alias_magic, line)
shell = self.shell
mman = self.shell.magics_manager
escs = ''.join(magic_escapes.values())
target = args.target.lstrip(escs)
name = args.name.lstrip(escs)
params = args.params
if (params and
((params.startswith('"') and params.endswith('"'))
or (params.startswith("'") and params.endswith("'")))):
params = params[1:-1]
# Find the requested magics.
m_line = shell.find_magic(target, 'line')
m_cell = shell.find_magic(target, 'cell')
if args.line and m_line is None:
raise UsageError('Line magic function `%s%s` not found.' %
(magic_escapes['line'], target))
if args.cell and m_cell is None:
raise UsageError('Cell magic function `%s%s` not found.' %
(magic_escapes['cell'], target))
# If --line and --cell are not specified, default to the ones
# that are available.
if not args.line and not args.cell:
if not m_line and not m_cell:
raise UsageError(
'No line or cell magic with name `%s` found.' % target
)
args.line = bool(m_line)
args.cell = bool(m_cell)
params_str = "" if params is None else " " + params
if args.line:
mman.register_alias(name, target, 'line', params)
print('Created `%s%s` as an alias for `%s%s%s`.' % (
magic_escapes['line'], name,
magic_escapes['line'], target, params_str))
if args.cell:
mman.register_alias(name, target, 'cell', params)
print('Created `%s%s` as an alias for `%s%s%s`.' % (
magic_escapes['cell'], name,
magic_escapes['cell'], target, params_str))
@line_magic
def lsmagic(self, parameter_s=''):
"""List currently available magic functions."""
return MagicsDisplay(self.shell.magics_manager, ignore=[self.pip])
def _magic_docs(self, brief=False, rest=False):
"""Return docstrings from magic functions."""
mman = self.shell.magics_manager
docs = mman.lsmagic_docs(brief, missing='No documentation')
if rest:
format_string = '**%s%s**::\n\n%s\n\n'
else:
format_string = '%s%s:\n%s\n'
return ''.join(
[format_string % (magic_escapes['line'], fname,
indent(dedent(fndoc)))
for fname, fndoc in sorted(docs['line'].items())]
+
[format_string % (magic_escapes['cell'], fname,
indent(dedent(fndoc)))
for fname, fndoc in sorted(docs['cell'].items())]
)
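    # Illustrative sample of the two layouts produced above, for a made-up
    # magic named %foo (the indent(dedent(...)) call indents the docstring by
    # four spaces):
    #   plain: "%foo:\n    <docstring>\n"
    #   rest:  "**%foo**::\n\n    <docstring>\n\n"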
@line_magic
def magic(self, parameter_s=''):
"""Print information about the magic function system.
Supported formats: -latex, -brief, -rest
"""
mode = ''
try:
mode = parameter_s.split()[0][1:]
except IndexError:
pass
brief = (mode == 'brief')
rest = (mode == 'rest')
magic_docs = self._magic_docs(brief, rest)
if mode == 'latex':
print(self.format_latex(magic_docs))
return
else:
magic_docs = format_screen(magic_docs)
out = ["""
IPython's 'magic' functions
===========================
The magic function system provides a series of functions which allow you to
control the behavior of IPython itself, plus a lot of system-type
features. There are two kinds of magics, line-oriented and cell-oriented.
Line magics are prefixed with the % character and work much like OS
command-line calls: they get as an argument the rest of the line, where
arguments are passed without parentheses or quotes. For example, this will
time the given statement::
%timeit range(1000)
Cell magics are prefixed with a double %%, and they are functions that get as
an argument not only the rest of the line, but also the lines below it in a
separate argument. These magics are called with two arguments: the rest of the
call line and the body of the cell, consisting of the lines below the first.
For example::
        %%timeit x = numpy.random.randn(100, 100)
numpy.linalg.svd(x)
will time the execution of the numpy svd routine, running the assignment of x
as part of the setup phase, which is not timed.
In a line-oriented client (the terminal or Qt console IPython), starting a new
input with %% will automatically enter cell mode, and IPython will continue
reading input until a blank line is given. In the notebook, simply type the
whole cell as one entity, but keep in mind that the %% escape can only be at
the very start of the cell.
NOTE: If you have 'automagic' enabled (via the command line option or with the
%automagic function), you don't need to type in the % explicitly for line
magics; cell magics always require an explicit '%%' escape. By default,
IPython ships with automagic on, so you should only rarely need the % escape.
Example: typing '%cd mydir' (without the quotes) changes your working directory
to 'mydir', if it exists.
For a list of the available magic functions, use %lsmagic. For a description
of any of them, type %magic_name?, e.g. '%cd?'.
Currently the magic system has the following functions:""",
magic_docs,
"Summary of magic functions (from %slsmagic):" % magic_escapes['line'],
str(self.lsmagic()),
]
page.page('\n'.join(out))
@line_magic
def page(self, parameter_s=''):
"""Pretty print the object and display it through a pager.
%page [options] OBJECT
If no object is given, use _ (last output).
Options:
-r: page str(object), don't pretty-print it."""
# After a function contributed by Olivier Aubert, slightly modified.
# Process options/args
opts, args = self.parse_options(parameter_s, 'r')
raw = 'r' in opts
oname = args and args or '_'
info = self.shell._ofind(oname)
if info['found']:
txt = (raw and str or pformat)( info['obj'] )
page.page(txt)
else:
print('Object `%s` not found' % oname)
@line_magic
def profile(self, parameter_s=''):
"""Print your currently active IPython profile.
See Also
--------
prun : run code using the Python profiler
(:meth:`~IPython.core.magics.execution.ExecutionMagics.prun`)
"""
raise UsageError("The `%profile` magic has been deprecated since IPython 2.0. "
"and removed in IPython 6.0. Please use the value of `get_ipython().profile` instead "
"to see current profile in use. Perhaps you meant to use `%prun` to profile code?")
@line_magic
def pprint(self, parameter_s=''):
"""Toggle pretty printing on/off."""
ptformatter = self.shell.display_formatter.formatters['text/plain']
ptformatter.pprint = bool(1 - ptformatter.pprint)
print('Pretty printing has been turned',
['OFF','ON'][ptformatter.pprint])
@line_magic
def colors(self, parameter_s=''):
"""Switch color scheme for prompts, info system and exception handlers.
Currently implemented schemes: NoColor, Linux, LightBG.
Color scheme names are not case-sensitive.
Examples
--------
To get a plain black and white terminal::
%colors nocolor
"""
def color_switch_err(name):
warn('Error changing %s color schemes.\n%s' %
(name, sys.exc_info()[1]), stacklevel=2)
new_scheme = parameter_s.strip()
if not new_scheme:
raise UsageError(
"%colors: you must specify a color scheme. See '%colors?'")
# local shortcut
shell = self.shell
# Set shell colour scheme
try:
shell.colors = new_scheme
shell.refresh_style()
except:
color_switch_err('shell')
# Set exception colors
try:
shell.InteractiveTB.set_colors(scheme = new_scheme)
shell.SyntaxTB.set_colors(scheme = new_scheme)
except:
color_switch_err('exception')
# Set info (for 'object?') colors
if shell.color_info:
try:
shell.inspector.set_active_scheme(new_scheme)
except:
color_switch_err('object inspector')
else:
shell.inspector.set_active_scheme('NoColor')
@line_magic
def xmode(self, parameter_s=''):
"""Switch modes for the exception handlers.
Valid modes: Plain, Context and Verbose.
If called without arguments, acts as a toggle."""
def xmode_switch_err(name):
warn('Error changing %s exception modes.\n%s' %
(name,sys.exc_info()[1]))
shell = self.shell
new_mode = parameter_s.strip().capitalize()
try:
shell.InteractiveTB.set_mode(mode=new_mode)
print('Exception reporting mode:',shell.InteractiveTB.mode)
except:
xmode_switch_err('user')
@line_magic
def pip(self, args=''):
"""
        Intercept usage of ``pip`` in IPython and direct the user to run the command outside of IPython.
"""
print(textwrap.dedent('''
The following command must be run outside of the IPython shell:
$ pip {args}
The Python package manager (pip) can only be used from outside of IPython.
Please reissue the `pip` command in a separate terminal or command prompt.
        See the Python documentation for more information on how to install packages:
https://docs.python.org/3/installing/'''.format(args=args)))
@line_magic
def quickref(self, arg):
""" Show a quick reference sheet """
from IPython.core.usage import quick_reference
qr = quick_reference + self._magic_docs(brief=True)
page.page(qr)
@line_magic
def doctest_mode(self, parameter_s=''):
"""Toggle doctest mode on and off.
This mode is intended to make IPython behave as much as possible like a
plain Python shell, from the perspective of how its prompts, exceptions
and output look. This makes it easy to copy and paste parts of a
session into doctests. It does so by:
- Changing the prompts to the classic ``>>>`` ones.
- Changing the exception reporting mode to 'Plain'.
- Disabling pretty-printing of output.
Note that IPython also supports the pasting of code snippets that have
leading '>>>' and '...' prompts in them. This means that you can paste
doctests from files or docstrings (even if they have leading
whitespace), and the code will execute correctly. You can then use
'%history -t' to see the translated history; this will give you the
input after removal of all the leading prompts and whitespace, which
can be pasted back into an editor.
With these features, you can switch into this mode easily whenever you
need to do testing and changes to doctests, without having to leave
your existing IPython session.
"""
# Shorthands
shell = self.shell
meta = shell.meta
disp_formatter = self.shell.display_formatter
ptformatter = disp_formatter.formatters['text/plain']
# dstore is a data store kept in the instance metadata bag to track any
# changes we make, so we can undo them later.
dstore = meta.setdefault('doctest_mode',Struct())
save_dstore = dstore.setdefault
# save a few values we'll need to recover later
mode = save_dstore('mode',False)
save_dstore('rc_pprint',ptformatter.pprint)
save_dstore('xmode',shell.InteractiveTB.mode)
save_dstore('rc_separate_out',shell.separate_out)
save_dstore('rc_separate_out2',shell.separate_out2)
save_dstore('rc_separate_in',shell.separate_in)
save_dstore('rc_active_types',disp_formatter.active_types)
if not mode:
# turn on
# Prompt separators like plain python
shell.separate_in = ''
shell.separate_out = ''
shell.separate_out2 = ''
ptformatter.pprint = False
disp_formatter.active_types = ['text/plain']
shell.magic('xmode Plain')
else:
# turn off
shell.separate_in = dstore.rc_separate_in
shell.separate_out = dstore.rc_separate_out
shell.separate_out2 = dstore.rc_separate_out2
ptformatter.pprint = dstore.rc_pprint
disp_formatter.active_types = dstore.rc_active_types
shell.magic('xmode ' + dstore.xmode)
# mode here is the state before we switch; switch_doctest_mode takes
# the mode we're switching to.
shell.switch_doctest_mode(not mode)
# Store new mode and inform
dstore.mode = bool(not mode)
mode_label = ['OFF','ON'][dstore.mode]
print('Doctest mode is:', mode_label)
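    # Hedged sketch of the toggle in an interactive session (output shape only;
    # the prompt change itself comes from shell.switch_doctest_mode above):
    #   In [1]: %doctest_mode
    #   Doctest mode is: ON
    #   >>> %doctest_mode
    #   Doctest mode is: OFF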
@line_magic
def gui(self, parameter_s=''):
"""Enable or disable IPython GUI event loop integration.
%gui [GUINAME]
This magic replaces IPython's threaded shells that were activated
using the (pylab/wthread/etc.) command line flags. GUI toolkits
can now be enabled at runtime and keyboard
interrupts should work without any problems. The following toolkits
are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX)::
%gui wx # enable wxPython event loop integration
%gui qt4|qt # enable PyQt4 event loop integration
%gui qt5 # enable PyQt5 event loop integration
%gui gtk # enable PyGTK event loop integration
%gui gtk3 # enable Gtk3 event loop integration
%gui tk # enable Tk event loop integration
%gui osx # enable Cocoa event loop integration
# (requires %matplotlib 1.1)
%gui # disable all event loop integration
WARNING: after any of these has been called you can simply create
an application object, but DO NOT start the event loop yourself, as
we have already handled that.
"""
opts, arg = self.parse_options(parameter_s, '')
if arg=='': arg = None
try:
return self.shell.enable_gui(arg)
except Exception as e:
# print simple error message, rather than traceback if we can't
# hook up the GUI
error(str(e))
@skip_doctest
@line_magic
def precision(self, s=''):
"""Set floating point precision for pretty printing.
Can set either integer precision or a format string.
If numpy has been imported and precision is an int,
numpy display precision will also be set, via ``numpy.set_printoptions``.
If no argument is given, defaults will be restored.
Examples
--------
::
In [1]: from math import pi
In [2]: %precision 3
Out[2]: u'%.3f'
In [3]: pi
Out[3]: 3.142
In [4]: %precision %i
Out[4]: u'%i'
In [5]: pi
Out[5]: 3
In [6]: %precision %e
Out[6]: u'%e'
In [7]: pi**10
Out[7]: 9.364805e+04
In [8]: %precision
Out[8]: u'%r'
In [9]: pi**10
Out[9]: 93648.047476082982
"""
ptformatter = self.shell.display_formatter.formatters['text/plain']
ptformatter.float_precision = s
return ptformatter.float_format
@magic_arguments.magic_arguments()
@magic_arguments.argument(
'-e', '--export', action='store_true', default=False,
help=argparse.SUPPRESS
)
@magic_arguments.argument(
'filename', type=str,
help='Notebook name or filename'
)
@line_magic
def notebook(self, s):
"""Export and convert IPython notebooks.
This function can export the current IPython history to a notebook file.
For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".
The -e or --export flag is deprecated in IPython 5.2, and will be
removed in the future.
"""
args = magic_arguments.parse_argstring(self.notebook, s)
from nbformat import write, v4
cells = []
hist = list(self.shell.history_manager.get_range())
if(len(hist)<=1):
raise ValueError('History is empty, cannot export')
for session, execution_count, source in hist[:-1]:
cells.append(v4.new_code_cell(
execution_count=execution_count,
source=source
))
nb = v4.new_notebook(cells=cells)
with io.open(args.filename, 'w', encoding='utf-8') as f:
write(nb, f, version=4)
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/IPython/core/magics/basic.py | Python | bsd-2-clause | 21,310 |
"""
Django ID mapper
Modified for Evennia by making sure that no model references
leave caching unexpectedly (no use of WeakRefs).
Also adds cache_size() for monitoring the size of the cache.
"""
import os, threading
#from twisted.internet import reactor
#from twisted.internet.threads import blockingCallFromThread
from twisted.internet.reactor import callFromThread
from django.core.exceptions import ObjectDoesNotExist, FieldError
from django.db.models.base import Model, ModelBase
from django.db.models.signals import post_save, pre_delete, post_syncdb
from src.utils.utils import dbref, get_evennia_pids, to_str
from manager import SharedMemoryManager
_FIELD_CACHE_GET = None
_FIELD_CACHE_SET = None
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
# determine if our current pid is different from the server PID (i.e.
# if we are in a subprocess or not)
from src import PROC_MODIFIED_OBJS
# get info about the current process and thread
_SELF_PID = os.getpid()
_SERVER_PID, _PORTAL_PID = get_evennia_pids()
_IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID)
_IS_MAIN_THREAD = threading.currentThread().getName() == "MainThread"
#_SERVER_PID = None
#_PORTAL_PID = None
# #global _SERVER_PID, _PORTAL_PID, _IS_SUBPROCESS, _SELF_PID
# if not _SERVER_PID and not _PORTAL_PID:
# _IS_SUBPROCESS = (_SERVER_PID and _PORTAL_PID) and not _SELF_PID in (_SERVER_PID, _PORTAL_PID)
class SharedMemoryModelBase(ModelBase):
# CL: upstream had a __new__ method that skipped ModelBase's __new__ if
# SharedMemoryModelBase was not in the model class's ancestors. It's not
# clear what was the intended purpose, but skipping ModelBase.__new__
# broke things; in particular, default manager inheritance.
def __call__(cls, *args, **kwargs):
"""
this method will either create an instance (by calling the default implementation)
or try to retrieve one from the class-wide cache by infering the pk value from
args and kwargs. If instance caching is enabled for this class, the cache is
populated whenever possible (ie when it is possible to infer the pk value).
"""
def new_instance():
return super(SharedMemoryModelBase, cls).__call__(*args, **kwargs)
instance_key = cls._get_cache_key(args, kwargs)
# depending on the arguments, we might not be able to infer the PK, so in that case we create a new instance
if instance_key is None:
return new_instance()
cached_instance = cls.get_cached_instance(instance_key)
if cached_instance is None:
cached_instance = new_instance()
cls.cache_instance(cached_instance)
return cached_instance
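    # Illustrative sketch of the caching contract above (MyModel is a
    # hypothetical model whose metaclass is SharedMemoryModelBase): two
    # constructions that resolve to the same primary key hand back the very
    # same object.
    #
    #   >>> a = MyModel(id=42)   # miss - built and stored in __instance_cache__
    #   >>> b = MyModel(id=42)   # hit  - returned straight from the cache
    #   >>> a is b
    #   True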
def _prepare(cls):
cls.__instance_cache__ = {} #WeakValueDictionary()
super(SharedMemoryModelBase, cls)._prepare()
def __new__(cls, classname, bases, classdict, *args, **kwargs):
"""
Field shortcut creation:
Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key
This wrapper happens on the class level, so there is no overhead when creating objects. If a class
already has a wrapper of the given name, the automatic creation is skipped. Note: Remember to
document this auto-wrapping in the class header, this could seem very much like magic to the user otherwise.
"""
def create_wrapper(cls, fieldname, wrappername, editable=True, foreignkey=False):
"Helper method to create property wrappers with unique names (must be in separate call)"
def _get(cls, fname):
"Wrapper for getting database field"
#print "_get:", fieldname, wrappername,_GA(cls,fieldname)
return _GA(cls, fieldname)
def _get_foreign(cls, fname):
"Wrapper for returing foreignkey fields"
value = _GA(cls, fieldname)
#print "_get_foreign:value:", value
try:
return _GA(value, "typeclass")
except:
return value
def _set_nonedit(cls, fname, value):
"Wrapper for blocking editing of field"
raise FieldError("Field %s cannot be edited." % fname)
def _set(cls, fname, value):
"Wrapper for setting database field"
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _set_foreign(cls, fname, value):
"Setter only used on foreign key relations, allows setting with #dbref"
try:
value = _GA(value, "dbobj")
except AttributeError:
pass
if isinstance(value, (basestring, int)):
value = to_str(value, force_string=True)
if (value.isdigit() or value.startswith("#")):
# we also allow setting using dbrefs, if so we try to load the matching object.
# (we assume the object is of the same type as the class holding the field, if
# not a custom handler must be used for that field)
dbid = dbref(value, reqhash=False)
if dbid:
model = _GA(cls, "_meta").get_field(fname).model
try:
value = model._default_manager.get(id=dbid)
except ObjectDoesNotExist:
# maybe it is just a name that happens to look like a dbid
pass
_SA(cls, fname, value)
# only use explicit update_fields in save if we actually have a
# primary key assigned already (won't be set when first creating object)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
def _del_nonedit(cls, fname):
"wrapper for not allowing deletion"
raise FieldError("Field %s cannot be edited." % fname)
def _del(cls, fname):
"Wrapper for clearing database field - sets it to None"
_SA(cls, fname, None)
update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
_GA(cls, "save")(update_fields=update_fields)
# wrapper factories
fget = lambda cls: _get(cls, fieldname)
if not editable:
fset = lambda cls, val: _set_nonedit(cls, fieldname, val)
elif foreignkey:
fget = lambda cls: _get_foreign(cls, fieldname)
fset = lambda cls, val: _set_foreign(cls, fieldname, val)
else:
fset = lambda cls, val: _set(cls, fieldname, val)
fdel = lambda cls: _del(cls, fieldname) if editable else _del_nonedit(cls,fieldname)
# assigning
classdict[wrappername] = property(fget, fset, fdel)
#type(cls).__setattr__(cls, wrappername, property(fget, fset, fdel))#, doc))
# exclude some models that should not auto-create wrapper fields
if cls.__name__ in ("ServerConfig", "TypeNick"):
return
        # dynamically create the wrapper properties for all fields not already handled (ManyToManyFields are handled separately)
for fieldname, field in ((fname, field) for fname, field in classdict.items()
if fname.startswith("db_") and type(field).__name__ != "ManyToManyField"):
foreignkey = type(field).__name__ == "ForeignKey"
#print fieldname, type(field).__name__, field
wrappername = "dbid" if fieldname == "id" else fieldname.replace("db_", "", 1)
if wrappername not in classdict:
# makes sure not to overload manually created wrappers on the model
#print "wrapping %s -> %s" % (fieldname, wrappername)
create_wrapper(cls, fieldname, wrappername, editable=field.editable, foreignkey=foreignkey)
return super(SharedMemoryModelBase, cls).__new__(cls, classname, bases, classdict, *args, **kwargs)
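    # Rough illustration of the auto-wrapping performed in __new__ (MyModel is
    # hypothetical): a field named db_key grows a `key` property that proxies
    # the database field and saves on assignment.
    #
    #   >>> obj = MyModel(db_key="box")
    #   >>> obj.key              # reads obj.db_key via the generated wrapper
    #   'box'
    #   >>> obj.key = "crate"    # sets db_key and calls save()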
#def __init__(cls, *args, **kwargs):
# """
# Field shortcut creation:
# Takes field names db_* and creates property wrappers named without the db_ prefix. So db_key -> key
# This wrapper happens on the class level, so there is no overhead when creating objects. If a class
# already has a wrapper of the given name, the automatic creation is skipped. Note: Remember to
# document this auto-wrapping in the class header, this could seem very much like magic to the user otherwise.
# """
# super(SharedMemoryModelBase, cls).__init__(*args, **kwargs)
# def create_wrapper(cls, fieldname, wrappername, editable=True):
# "Helper method to create property wrappers with unique names (must be in separate call)"
# def _get(cls, fname):
# "Wrapper for getting database field"
# value = _GA(cls, fieldname)
# if type(value) in (basestring, int, float, bool):
# return value
# elif hasattr(value, "typeclass"):
# return _GA(value, "typeclass")
# return value
# def _set_nonedit(cls, fname, value):
# "Wrapper for blocking editing of field"
# raise FieldError("Field %s cannot be edited." % fname)
# def _set(cls, fname, value):
# "Wrapper for setting database field"
# #print "_set:", fname
# if hasattr(value, "dbobj"):
# value = _GA(value, "dbobj")
# elif isinstance(value, basestring) and (value.isdigit() or value.startswith("#")):
# # we also allow setting using dbrefs, if so we try to load the matching object.
# # (we assume the object is of the same type as the class holding the field, if
# # not a custom handler must be used for that field)
# dbid = dbref(value, reqhash=False)
# if dbid:
# try:
# value = cls._default_manager.get(id=dbid)
# except ObjectDoesNotExist:
# # maybe it is just a name that happens to look like a dbid
# from src.utils.logger import log_trace
# log_trace()
# #print "_set wrapper:", fname, value, type(value), cls._get_pk_val(cls._meta)
# _SA(cls, fname, value)
# # only use explicit update_fields in save if we actually have a
# # primary key assigned already (won't be set when first creating object)
# update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
# _GA(cls, "save")(update_fields=update_fields)
# def _del_nonedit(cls, fname):
# "wrapper for not allowing deletion"
# raise FieldError("Field %s cannot be edited." % fname)
# def _del(cls, fname):
# "Wrapper for clearing database field - sets it to None"
# _SA(cls, fname, None)
# update_fields = [fname] if _GA(cls, "_get_pk_val")(_GA(cls, "_meta")) is not None else None
# _GA(cls, "save")(update_fields=update_fields)
# # create class field wrappers
# fget = lambda cls: _get(cls, fieldname)
# fset = lambda cls, val: _set(cls, fieldname, val) if editable else _set_nonedit(cls, fieldname, val)
# fdel = lambda cls: _del(cls, fieldname) if editable else _del_nonedit(cls,fieldname)
# type(cls).__setattr__(cls, wrappername, property(fget, fset, fdel))#, doc))
# # exclude some models that should not auto-create wrapper fields
# if cls.__name__ in ("ServerConfig", "TypeNick"):
# return
# # dynamically create the wrapper properties for all fields not already handled
# for field in cls._meta.fields:
# fieldname = field.name
# if fieldname.startswith("db_"):
# wrappername = "dbid" if fieldname == "id" else fieldname.replace("db_", "")
# if not hasattr(cls, wrappername):
# # makes sure not to overload manually created wrappers on the model
# #print "wrapping %s -> %s" % (fieldname, wrappername)
# create_wrapper(cls, fieldname, wrappername, editable=field.editable)
class SharedMemoryModel(Model):
# CL: setting abstract correctly to allow subclasses to inherit the default
# manager.
__metaclass__ = SharedMemoryModelBase
objects = SharedMemoryManager()
class Meta:
abstract = True
def _get_cache_key(cls, args, kwargs):
"""
This method is used by the caching subsystem to infer the PK value from the constructor arguments.
It is used to decide if an instance has to be built or is already in the cache.
"""
result = None
# Quick hack for my composites work for now.
if hasattr(cls._meta, 'pks'):
pk = cls._meta.pks[0]
else:
pk = cls._meta.pk
# get the index of the pk in the class fields. this should be calculated *once*, but isn't atm
pk_position = cls._meta.fields.index(pk)
if len(args) > pk_position:
# if it's in the args, we can get it easily by index
result = args[pk_position]
elif pk.attname in kwargs:
# retrieve the pk value. Note that we use attname instead of name, to handle the case where the pk is a
            # ForeignKey.
result = kwargs[pk.attname]
elif pk.name != pk.attname and pk.name in kwargs:
# ok we couldn't find the value, but maybe it's a FK and we can find the corresponding object instead
result = kwargs[pk.name]
if result is not None and isinstance(result, Model):
            # if the pk value happens to be a model instance (which can happen with a FK), we'd rather use its own pk as the key
result = result._get_pk_val()
return result
_get_cache_key = classmethod(_get_cache_key)
def _flush_cached_by_key(cls, key):
try:
del cls.__instance_cache__[key]
except KeyError:
pass
_flush_cached_by_key = classmethod(_flush_cached_by_key)
def get_cached_instance(cls, id):
"""
Method to retrieve a cached instance by pk value. Returns None when not found
(which will always be the case when caching is disabled for this class). Please
note that the lookup will be done even when instance caching is disabled.
"""
return cls.__instance_cache__.get(id)
get_cached_instance = classmethod(get_cached_instance)
def cache_instance(cls, instance):
"""
Method to store an instance in the cache.
"""
if instance._get_pk_val() is not None:
cls.__instance_cache__[instance._get_pk_val()] = instance
cache_instance = classmethod(cache_instance)
def get_all_cached_instances(cls):
"return the objects so far cached by idmapper for this class."
return cls.__instance_cache__.values()
get_all_cached_instances = classmethod(get_all_cached_instances)
def flush_cached_instance(cls, instance):
"""
Method to flush an instance from the cache. The instance will always be flushed from the cache,
since this is most likely called from delete(), and we want to make sure we don't cache dead objects.
"""
cls._flush_cached_by_key(instance._get_pk_val())
flush_cached_instance = classmethod(flush_cached_instance)
def flush_instance_cache(cls):
cls.__instance_cache__ = {} #WeakValueDictionary()
flush_instance_cache = classmethod(flush_instance_cache)
def save(cls, *args, **kwargs):
"save method tracking process/thread issues"
if _IS_SUBPROCESS:
# we keep a store of objects modified in subprocesses so
# we know to update their caches in the central process
PROC_MODIFIED_OBJS.append(cls)
if _IS_MAIN_THREAD:
# in main thread - normal operation
super(SharedMemoryModel, cls).save(*args, **kwargs)
else:
# in another thread; make sure to save in reactor thread
def _save_callback(cls, *args, **kwargs):
super(SharedMemoryModel, cls).save(*args, **kwargs)
#blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs)
callFromThread(_save_callback, cls, *args, **kwargs)
# Use a signal so we make sure to catch cascades.
def flush_cache(**kwargs):
def class_hierarchy(root):
"""Recursively yield a class hierarchy."""
yield root
for subcls in root.__subclasses__():
for cls in class_hierarchy(subcls):
yield cls
for model in class_hierarchy(SharedMemoryModel):
model.flush_instance_cache()
#request_finished.connect(flush_cache)
post_syncdb.connect(flush_cache)
def flush_cached_instance(sender, instance, **kwargs):
# XXX: Is this the best way to make sure we can flush?
if not hasattr(instance, 'flush_cached_instance'):
return
sender.flush_cached_instance(instance)
pre_delete.connect(flush_cached_instance)
def update_cached_instance(sender, instance, **kwargs):
if not hasattr(instance, 'cache_instance'):
return
sender.cache_instance(instance)
post_save.connect(update_cached_instance)
def cache_size(mb=True):
"""
Returns a dictionary with estimates of the
cache size of each subclass.
mb - return the result in MB.
"""
import sys
sizedict = {"_total": [0, 0]}
def getsize(model):
instances = model.get_all_cached_instances()
linst = len(instances)
size = sum([sys.getsizeof(o) for o in instances])
size = (mb and size/1024.0) or size
return (linst, size)
def get_recurse(submodels):
for submodel in submodels:
subclasses = submodel.__subclasses__()
if not subclasses:
tup = getsize(submodel)
sizedict["_total"][0] += tup[0]
sizedict["_total"][1] += tup[1]
sizedict[submodel.__name__] = tup
else:
get_recurse(subclasses)
get_recurse(SharedMemoryModel.__subclasses__())
sizedict["_total"] = tuple(sizedict["_total"])
return sizedict
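# Hedged sketch of the shape cache_size() returns (model names and numbers
# below are invented; only leaf subclasses get their own entry, and the size
# figures follow the `mb` flag):
#   >>> cache_size()
#   {'_total': (3, 11.2), 'ObjectDB': (2, 7.9), 'ScriptDB': (1, 3.3)}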
| tectronics/evennia | src/utils/idmapper/base.py | Python | bsd-3-clause | 19,327 |
from unittest import mock
from django.db import connection, migrations
try:
from django.contrib.postgres.operations import (
BloomExtension, BtreeGinExtension, BtreeGistExtension, CITextExtension,
CreateExtension, CryptoExtension, HStoreExtension, TrigramExtension,
UnaccentExtension,
)
except ImportError:
BloomExtension = mock.Mock()
BtreeGinExtension = mock.Mock()
BtreeGistExtension = mock.Mock()
CITextExtension = mock.Mock()
CreateExtension = mock.Mock()
CryptoExtension = mock.Mock()
HStoreExtension = mock.Mock()
TrigramExtension = mock.Mock()
UnaccentExtension = mock.Mock()
class Migration(migrations.Migration):
operations = [
(
BloomExtension()
if getattr(connection.features, 'has_bloom_index', False)
else mock.Mock()
),
BtreeGinExtension(),
BtreeGistExtension(),
CITextExtension(),
# Ensure CreateExtension quotes extension names by creating one with a
# dash in its name.
CreateExtension('uuid-ossp'),
CryptoExtension(),
HStoreExtension(),
TrigramExtension(),
UnaccentExtension(),
]
| kaedroho/django | tests/postgres_tests/migrations/0001_setup_extensions.py | Python | bsd-3-clause | 1,212 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<https://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from . import core, flrw, funcs, parameter, units, utils
from . import io # needed before 'realizations' # isort: split
from . import realizations
from .core import *
from .flrw import *
from .funcs import *
from .parameter import *
from .realizations import *
from .utils import *
__all__ = (core.__all__ + flrw.__all__ # cosmology classes
+ realizations.__all__ # instances thereof
+ funcs.__all__ + parameter.__all__ + utils.__all__) # utils
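# Hedged usage sketch (Planck18 is one of the bundled realizations re-exported
# above; the exact value and repr depend on the installed astropy version):
#   >>> from astropy.cosmology import Planck18
#   >>> Planck18.H0
#   <Quantity 67.66 km / (Mpc s)>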
| mhvk/astropy | astropy/cosmology/__init__.py | Python | bsd-3-clause | 830 |
# -*- coding:utf-8 -*-
from django.utils.translation import ugettext_lazy as _
# SearchForm's strings
SEARCH_FORM_KEYWORDS = _(u'Key Words / Profession')
SEARCH_FORM_LOCATION = _(u'City, State or Zip Code')
# SearchFiltersForm's strings
SEARCH_FILTERS_FORM_JOB_POSITION = _(u'Job Position')
SEARCH_FILTERS_FORM_EXPERIENCE_YEARS = _(u'Experience')
SEARCH_FILTERS_FORM_DISTANCE = _(u'Distance')
SEARCH_FILTERS_FORM_FULL_TIME = _(u'Full Time')
SEARCH_FILTERS_FORM_PART_TIME = _(u'Part Time')
SEARCH_FILTERS_FORM_VISA = _(u'Has a Visa / Visa required')
| hellhovnd/dentexchange | dentexchange/apps/search/strings.py | Python | bsd-3-clause | 551 |
# -*- coding: utf-8 -*-
""":mod:`itertools` is full of great examples of Python generator
usage. However, there are still some critical gaps. ``iterutils``
fills many of those gaps with featureful, tested, and Pythonic
solutions.
Many of the functions below have two versions, one which
returns an iterator (denoted by the ``*_iter`` naming pattern), and a
shorter-named convenience form that returns a list. Some of the
following are based on examples in itertools docs.
"""
import math
import random
import itertools
from collections import Mapping, Sequence, Set, ItemsView
try:
from typeutils import make_sentinel
_UNSET = make_sentinel('_UNSET')
_REMAP_EXIT = make_sentinel('_REMAP_EXIT')
except ImportError:
_REMAP_EXIT = object()
_UNSET = object()
try:
from itertools import izip
except ImportError:
# Python 3 compat
basestring = (str, bytes)
izip, xrange = zip, range
def is_iterable(obj):
"""Similar in nature to :func:`callable`, ``is_iterable`` returns
``True`` if an object is `iterable`_, ``False`` if not.
>>> is_iterable([])
True
>>> is_iterable(object())
False
.. _iterable: https://docs.python.org/2/glossary.html#term-iterable
"""
try:
iter(obj)
except TypeError:
return False
return True
def is_scalar(obj):
"""A near-mirror of :func:`is_iterable`. Returns ``False`` if an
object is an iterable container type. Strings are considered
scalar as well, because strings are more often treated as whole
values as opposed to iterables of 1-character substrings.
>>> is_scalar(object())
True
>>> is_scalar(range(10))
False
>>> is_scalar('hello')
True
"""
return not is_iterable(obj) or isinstance(obj, basestring)
def is_collection(obj):
"""The opposite of :func:`is_scalar`. Returns ``True`` if an object
is an iterable other than a string.
>>> is_collection(object())
False
>>> is_collection(range(10))
True
>>> is_collection('hello')
False
"""
return is_iterable(obj) and not isinstance(obj, basestring)
def split(src, sep=None, maxsplit=None):
"""Splits an iterable based on a separator. Like :meth:`str.split`,
but for all iterables. Returns a list of lists.
>>> split(['hi', 'hello', None, None, 'sup', None, 'soap', None])
[['hi', 'hello'], ['sup'], ['soap']]
See :func:`split_iter` docs for more info.
"""
return list(split_iter(src, sep, maxsplit))
def split_iter(src, sep=None, maxsplit=None):
"""Splits an iterable based on a separator, *sep*, a max of
*maxsplit* times (no max by default). *sep* can be:
* a single value
* an iterable of separators
* a single-argument callable that returns True when a separator is
encountered
``split_iter()`` yields lists of non-separator values. A separator will
never appear in the output.
>>> list(split_iter(['hi', 'hello', None, None, 'sup', None, 'soap', None]))
[['hi', 'hello'], ['sup'], ['soap']]
Note that ``split_iter`` is based on :func:`str.split`, so if
*sep* is ``None``, ``split()`` **groups** separators. If empty lists
are desired between two contiguous ``None`` values, simply use
``sep=[None]``:
>>> list(split_iter(['hi', 'hello', None, None, 'sup', None]))
[['hi', 'hello'], ['sup']]
>>> list(split_iter(['hi', 'hello', None, None, 'sup', None], sep=[None]))
[['hi', 'hello'], [], ['sup'], []]
Using a callable separator:
>>> falsy_sep = lambda x: not x
>>> list(split_iter(['hi', 'hello', None, '', 'sup', False], falsy_sep))
[['hi', 'hello'], [], ['sup'], []]
See :func:`split` for a list-returning version.
"""
if not is_iterable(src):
raise TypeError('expected an iterable')
if maxsplit is not None:
maxsplit = int(maxsplit)
if maxsplit == 0:
yield [src]
return
if callable(sep):
sep_func = sep
elif not is_scalar(sep):
sep = frozenset(sep)
sep_func = lambda x: x in sep
else:
sep_func = lambda x: x == sep
cur_group = []
split_count = 0
for s in src:
if maxsplit is not None and split_count >= maxsplit:
sep_func = lambda x: False
if sep_func(s):
if sep is None and not cur_group:
# If sep is none, str.split() "groups" separators
# check the str.split() docs for more info
continue
split_count += 1
yield cur_group
cur_group = []
else:
cur_group.append(s)
if cur_group or sep is not None:
yield cur_group
return
def chunked(src, size, count=None, **kw):
"""Returns a list of *count* chunks, each with *size* elements,
generated from iterable *src*. If *src* is not evenly divisible by
*size*, the final chunk will have fewer than *size* elements.
Provide the *fill* keyword argument to provide a pad value and
enable padding, otherwise no padding will take place.
>>> chunked(range(10), 3)
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
>>> chunked(range(10), 3, fill=None)
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
>>> chunked(range(10), 3, count=2)
[[0, 1, 2], [3, 4, 5]]
See :func:`chunked_iter` for more info.
"""
chunk_iter = chunked_iter(src, size, **kw)
if count is None:
return list(chunk_iter)
else:
return list(itertools.islice(chunk_iter, count))
def chunked_iter(src, size, **kw):
"""Generates *size*-sized chunks from *src* iterable. Unless the
optional *fill* keyword argument is provided, iterables not even
divisible by *size* will have a final chunk that is smaller than
*size*.
>>> list(chunked_iter(range(10), 3))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
>>> list(chunked_iter(range(10), 3, fill=None))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
Note that ``fill=None`` in fact uses ``None`` as the fill value.
"""
# TODO: add count kwarg?
if not is_iterable(src):
raise TypeError('expected an iterable')
size = int(size)
if size <= 0:
raise ValueError('expected a positive integer chunk size')
do_fill = True
try:
fill_val = kw.pop('fill')
except KeyError:
do_fill = False
fill_val = None
if kw:
raise ValueError('got unexpected keyword arguments: %r' % kw.keys())
if not src:
return
postprocess = lambda chk: chk
if isinstance(src, basestring):
postprocess = lambda chk, _sep=type(src)(): _sep.join(chk)
cur_chunk = []
i = 0
for item in src:
cur_chunk.append(item)
i += 1
if i % size == 0:
yield postprocess(cur_chunk)
cur_chunk = []
if cur_chunk:
if do_fill:
lc = len(cur_chunk)
cur_chunk[lc:] = [fill_val] * (size - lc)
yield postprocess(cur_chunk)
return
def pairwise(src):
"""Convenience function for calling :func:`windowed` on *src*, with
*size* set to 2.
>>> pairwise(range(5))
[(0, 1), (1, 2), (2, 3), (3, 4)]
>>> pairwise([])
[]
The number of pairs is always one less than the number of elements
in the iterable passed in, except on empty inputs, which returns
an empty list.
"""
return windowed(src, 2)
def pairwise_iter(src):
"""Convenience function for calling :func:`windowed_iter` on *src*,
with *size* set to 2.
>>> list(pairwise_iter(range(5)))
[(0, 1), (1, 2), (2, 3), (3, 4)]
>>> list(pairwise_iter([]))
[]
The number of pairs is always one less than the number of elements
in the iterable passed in, or zero, when *src* is empty.
"""
return windowed_iter(src, 2)
def windowed(src, size):
"""Returns tuples with exactly length *size*. If the iterable is
too short to make a window of length *size*, no tuples are
returned. See :func:`windowed_iter` for more.
"""
return list(windowed_iter(src, size))
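# Minimal sketch of windowed() (list form of windowed_iter below):
#   >>> windowed(range(4), 3)
#   [(0, 1, 2), (1, 2, 3)]
#   >>> windowed(range(2), 3)   # too short for a full window
#   []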
def windowed_iter(src, size):
"""Returns tuples with length *size* which represent a sliding
window over iterable *src*.
>>> list(windowed_iter(range(7), 3))
[(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6)]
If the iterable is too short to make a window of length *size*,
then no window tuples are returned.
>>> list(windowed_iter(range(3), 5))
[]
"""
# TODO: lists? (for consistency)
tees = itertools.tee(src, size)
try:
for i, t in enumerate(tees):
for _ in xrange(i):
next(t)
except StopIteration:
return izip([])
return izip(*tees)
def xfrange(stop, start=None, step=1.0):
"""Same as :func:`frange`, but generator-based instead of returning a
list.
>>> tuple(xfrange(1, 3, step=0.75))
(1.0, 1.75, 2.5)
See :func:`frange` for more details.
"""
if not step:
raise ValueError('step must be non-zero')
if start is None:
start, stop = 0.0, stop * 1.0
else:
# swap when all args are used
stop, start = start * 1.0, stop * 1.0
cur = start
while cur < stop:
yield cur
cur += step
def frange(stop, start=None, step=1.0):
"""A :func:`range` clone for float-based ranges.
>>> frange(5)
[0.0, 1.0, 2.0, 3.0, 4.0]
>>> frange(6, step=1.25)
[0.0, 1.25, 2.5, 3.75, 5.0]
>>> frange(100.5, 101.5, 0.25)
[100.5, 100.75, 101.0, 101.25]
>>> frange(5, 0)
[]
>>> frange(5, 0, step=-1.25)
[5.0, 3.75, 2.5, 1.25]
"""
if not step:
raise ValueError('step must be non-zero')
if start is None:
start, stop = 0.0, stop * 1.0
else:
# swap when all args are used
stop, start = start * 1.0, stop * 1.0
count = int(math.ceil((stop - start) / step))
ret = [None] * count
if not ret:
return ret
ret[0] = start
for i in xrange(1, count):
ret[i] = ret[i - 1] + step
return ret
def backoff(start, stop, count=None, factor=2.0, jitter=False):
"""Returns a list of geometrically-increasing floating-point numbers,
suitable for usage with `exponential backoff`_. Exactly like
:func:`backoff_iter`, but without the ``'repeat'`` option for
*count*. See :func:`backoff_iter` for more details.
.. _exponential backoff: https://en.wikipedia.org/wiki/Exponential_backoff
>>> backoff(1, 10)
[1.0, 2.0, 4.0, 8.0, 10.0]
"""
if count == 'repeat':
raise ValueError("'repeat' supported in backoff_iter, not backoff")
return list(backoff_iter(start, stop, count=count,
factor=factor, jitter=jitter))
def backoff_iter(start, stop, count=None, factor=2.0, jitter=False):
"""Generates a sequence of geometrically-increasing floats, suitable
for usage with `exponential backoff`_. Starts with *start*,
increasing by *factor* until *stop* is reached, optionally
stopping iteration once *count* numbers are yielded. *factor*
defaults to 2. In general retrying with properly-configured
backoff creates a better-behaved component for a larger service
ecosystem.
.. _exponential backoff: https://en.wikipedia.org/wiki/Exponential_backoff
>>> list(backoff_iter(1.0, 10.0, count=5))
[1.0, 2.0, 4.0, 8.0, 10.0]
>>> list(backoff_iter(1.0, 10.0, count=8))
[1.0, 2.0, 4.0, 8.0, 10.0, 10.0, 10.0, 10.0]
>>> list(backoff_iter(0.25, 100.0, factor=10))
[0.25, 2.5, 25.0, 100.0]
A simplified usage example:
.. code-block:: python
for timeout in backoff_iter(0.25, 5.0):
try:
res = network_call()
break
except Exception as e:
log(e)
time.sleep(timeout)
An enhancement for large-scale systems would be to add variation,
or *jitter*, to timeout values. This is done to avoid a thundering
herd on the receiving end of the network call.
Finally, for *count*, the special value ``'repeat'`` can be passed to
continue yielding indefinitely.
Args:
start (float): Positive number for baseline.
stop (float): Positive number for maximum.
count (int): Number of steps before stopping
iteration. Defaults to the number of steps between *start* and
*stop*. Pass the string, `'repeat'`, to continue iteration
indefinitely.
factor (float): Rate of exponential increase. Defaults to `2.0`,
e.g., `[1, 2, 4, 8, 16]`.
jitter (float): A factor between `-1.0` and `1.0`, used to
uniformly randomize and thus spread out timeouts in a distributed
system, avoiding rhythm effects. Positive values use the base
backoff curve as a maximum, negative values use the curve as a
minimum. Set to 1.0 or `True` for a jitter approximating
Ethernet's time-tested backoff solution. Defaults to `False`.
"""
start = float(start)
stop = float(stop)
factor = float(factor)
if start < 0.0:
raise ValueError('expected start >= 0, not %r' % start)
if factor < 1.0:
raise ValueError('expected factor >= 1.0, not %r' % factor)
if stop == 0.0:
raise ValueError('expected stop >= 0')
if stop < start:
raise ValueError('expected stop >= start, not %r' % stop)
if count is None:
denom = start if start else 1
count = 1 + math.ceil(math.log(stop/denom, factor))
count = count if start else count + 1
if count != 'repeat' and count < 0:
raise ValueError('count must be positive or "repeat", not %r' % count)
if jitter:
jitter = float(jitter)
if not (-1.0 <= jitter <= 1.0):
raise ValueError('expected jitter -1 <= j <= 1, not: %r' % jitter)
cur, i = start, 0
while count == 'repeat' or i < count:
if not jitter:
cur_ret = cur
elif jitter:
cur_ret = cur - (cur * jitter * random.random())
yield cur_ret
i += 1
if cur == 0:
cur = 1
elif cur < stop:
cur *= factor
if cur > stop:
cur = stop
return
def bucketize(src, key=None):
"""Group values in the *src* iterable by the value returned by *key*,
which defaults to :class:`bool`, grouping values by
truthiness.
>>> bucketize(range(5))
{False: [0], True: [1, 2, 3, 4]}
>>> is_odd = lambda x: x % 2 == 1
>>> bucketize(range(5), is_odd)
{False: [0, 2, 4], True: [1, 3]}
Value lists are not deduplicated:
>>> bucketize([None, None, None, 'hello'])
{False: [None, None, None], True: ['hello']}
Note in these examples there were at most two keys, ``True`` and
``False``, and each key present has a list with at least one
item. See :func:`partition` for a version specialized for binary
use cases.
"""
if not is_iterable(src):
raise TypeError('expected an iterable')
if key is None:
key = bool
if not callable(key):
raise TypeError('expected callable key function')
ret = {}
for val in src:
keyval = key(val)
ret.setdefault(keyval, []).append(val)
return ret
def partition(src, key=None):
"""No relation to :meth:`str.partition`, ``partition`` is like
:func:`bucketize`, but for added convenience returns a tuple of
``(truthy_values, falsy_values)``.
>>> nonempty, empty = partition(['', '', 'hi', '', 'bye'])
>>> nonempty
['hi', 'bye']
*key* defaults to :class:`bool`, but can be carefully overridden to
use any function that returns either ``True`` or ``False``.
>>> import string
>>> is_digit = lambda x: x in string.digits
>>> decimal_digits, hexletters = partition(string.hexdigits, is_digit)
>>> ''.join(decimal_digits), ''.join(hexletters)
('0123456789', 'abcdefABCDEF')
"""
bucketized = bucketize(src, key)
return bucketized.get(True, []), bucketized.get(False, [])
def unique(src, key=None):
"""``unique()`` returns a list of unique values, as determined by
*key*, in the order they first appeared in the input iterable,
*src*.
>>> ones_n_zeros = '11010110001010010101010'
>>> ''.join(unique(ones_n_zeros))
'10'
See :func:`unique_iter` docs for more details.
"""
return list(unique_iter(src, key))
def unique_iter(src, key=None):
"""Yield unique elements from the iterable, *src*, based on *key*,
in the order in which they first appeared in *src*.
>>> repetitious = [1, 2, 3] * 10
>>> list(unique_iter(repetitious))
[1, 2, 3]
By default, *key* is the object itself, but *key* can either be a
callable or, for convenience, a string name of the attribute on
which to uniqueify objects, falling back on identity when the
attribute is not present.
>>> pleasantries = ['hi', 'hello', 'ok', 'bye', 'yes']
>>> list(unique_iter(pleasantries, key=lambda x: len(x)))
['hi', 'hello', 'bye']
"""
if not is_iterable(src):
raise TypeError('expected an iterable, not %r' % type(src))
if key is None:
key_func = lambda x: x
elif callable(key):
key_func = key
elif isinstance(key, basestring):
key_func = lambda x: getattr(x, key, x)
else:
raise TypeError('"key" expected a string or callable, not %r' % key)
seen = set()
for i in src:
k = key_func(i)
if k not in seen:
seen.add(k)
yield i
return
def one(src, default=None, key=None):
"""Along the same lines as builtins, :func:`all` and :func:`any`, and
similar to :func:`first`, ``one()`` returns the single object in
the given iterable *src* that evaluates to ``True``, as determined
by callable *key*. If unset, *key* defaults to :class:`bool`. If
no such objects are found, *default* is returned. If *default* is
not passed, ``None`` is returned.
If *src* has more than one object that evaluates to ``True``, or
if there is no object that fulfills such condition, return
``False``. It's like an `XOR`_ over an iterable.
>>> one((True, False, False))
True
>>> one((True, False, True))
>>> one((0, 0, 'a'))
'a'
>>> one((0, False, None))
>>> one((True, True), default=False)
False
>>> bool(one(('', 1)))
True
>>> one((10, 20, 30, 42), key=lambda i: i > 40)
42
See `Martín Gaitán's original repo`_ for further use cases.
.. _Martín Gaitán's original repo: https://github.com/mgaitan/one
.. _XOR: https://en.wikipedia.org/wiki/Exclusive_or
"""
the_one = default
for i in src:
if key(i) if key else i:
if the_one:
return default
the_one = i
return the_one
def first(iterable, default=None, key=None):
"""Return first element of *iterable* that evaluates to ``True``, else
return ``None`` or optional *default*. Similar to :func:`one`.
>>> first([0, False, None, [], (), 42])
42
>>> first([0, False, None, [], ()]) is None
True
>>> first([0, False, None, [], ()], default='ohai')
'ohai'
>>> import re
>>> m = first(re.match(regex, 'abc') for regex in ['b.*', 'a(.*)'])
>>> m.group(1)
'bc'
The optional *key* argument specifies a one-argument predicate function
like that used for *filter()*. The *key* argument, if supplied, should be
in keyword form. For example, finding the first even number in an iterable:
>>> first([1, 1, 3, 4, 5], key=lambda x: x % 2 == 0)
4
Contributed by Hynek Schlawack, author of `the original standalone module`_.
.. _the original standalone module: https://github.com/hynek/first
"""
if key is None:
for el in iterable:
if el:
return el
else:
for el in iterable:
if key(el):
return el
return default
def same(iterable, ref=_UNSET):
"""``same()`` returns ``True`` when all values in *iterable* are
equal to one another, or optionally a reference value,
*ref*. Similar to :func:`all` and :func:`any` in that it evaluates
an iterable and returns a :class:`bool`. ``same()`` returns
``True`` for empty iterables.
>>> same([])
True
>>> same([1])
True
>>> same(['a', 'a', 'a'])
True
>>> same(range(20))
False
>>> same([[], []])
True
>>> same([[], []], ref='test')
False
"""
iterator = iter(iterable)
if ref is _UNSET:
try:
ref = next(iterator)
except StopIteration:
return True # those that were there were all equal
for val in iterator:
if val != ref:
return False # short circuit on first unequal value
return True
def default_visit(path, key, value):
# print('visit(%r, %r, %r)' % (path, key, value))
return key, value
# enable the extreme: monkeypatching iterutils with a different default_visit
_orig_default_visit = default_visit
def default_enter(path, key, value):
# print('enter(%r, %r)' % (key, value))
try:
iter(value)
except TypeError:
return value, False
if isinstance(value, basestring):
return value, False
elif isinstance(value, Mapping):
return value.__class__(), ItemsView(value)
elif isinstance(value, Sequence):
return value.__class__(), enumerate(value)
elif isinstance(value, Set):
return value.__class__(), enumerate(value)
return value, False
def default_exit(path, key, old_parent, new_parent, new_items):
# print('exit(%r, %r, %r, %r, %r)'
# % (path, key, old_parent, new_parent, new_items))
ret = new_parent
if isinstance(new_parent, Mapping):
new_parent.update(new_items)
elif isinstance(new_parent, Sequence):
vals = [v for i, v in new_items]
try:
new_parent.extend(vals)
except AttributeError:
ret = new_parent.__class__(vals) # tuples
elif isinstance(new_parent, Set):
vals = [v for i, v in new_items]
try:
new_parent.update(new_items)
except AttributeError:
ret = new_parent.__class__(vals) # frozensets
else:
raise RuntimeError('unexpected iterable type: %r' % type(new_parent))
return ret
def remap(root, visit=default_visit, enter=default_enter, exit=default_exit,
**kwargs):
"""The remap ("recursive map") function is used to traverse and
transform nested structures. Lists, tuples, sets, and dictionaries
are just a few of the data structures nested into heterogenous
tree-like structures that are so common in programming.
Unfortunately, Python's built-in ways to manipulate collections
are almost all flat. List comprehensions may be fast and succinct,
but they do not recurse, making it tedious to apply quick changes
or complex transforms to real-world data.
remap goes where list comprehensions cannot.
Here's an example of removing all Nones from some data:
>>> from pprint import pprint
>>> reviews = {'Star Trek': {'TNG': 10, 'DS9': 8.5, 'ENT': None},
... 'Babylon 5': 6, 'Dr. Who': None}
>>> pprint(remap(reviews, lambda p, k, v: v is not None))
{'Babylon 5': 6, 'Star Trek': {'DS9': 8.5, 'TNG': 10}}
Notice how both Nones have been removed despite the nesting in the
dictionary. Not bad for a one-liner, and that's just the beginning.
See `this remap cookbook`_ for more delicious recipes.
.. _this remap cookbook: http://sedimental.org/remap.html
remap takes four main arguments: the object to traverse and three
optional callables which determine how the remapped object will be
created.
Args:
root: The target object to traverse. By default, remap
supports iterables like :class:`list`, :class:`tuple`,
:class:`dict`, and :class:`set`, but any object traversable by
*enter* will work.
visit (callable): This function is called on every item in
*root*. It must accept three positional arguments, *path*,
*key*, and *value*. *path* is simply a tuple of parents'
keys. *visit* should return the new key-value pair. It may
also return ``True`` as shorthand to keep the old item
unmodified, or ``False`` to drop the item from the new
structure. *visit* is called after *enter*, on the new parent.
The *visit* function is called for every item in root,
including duplicate items. For traversable values, it is
called on the new parent object, after all its children
have been visited. The default visit behavior simply
returns the key-value pair unmodified.
enter (callable): This function controls which items in *root*
are traversed. It accepts the same arguments as *visit*: the
path, the key, and the value of the current item. It returns a
pair of the blank new parent, and an iterator over the items
which should be visited. If ``False`` is returned instead of
an iterator, the value will not be traversed.
The *enter* function is only called once per unique value. The
            default enter behavior supports mappings, sequences, and
sets. Strings and all other iterables will not be traversed.
exit (callable): This function determines how to handle items
once they have been visited. It gets the same three
arguments as the other functions -- *path*, *key*, *value*
-- plus two more: the blank new parent object returned
from *enter*, and a list of the new items, as remapped by
*visit*.
Like *enter*, the *exit* function is only called once per
unique value. The default exit behavior is to simply add
all new items to the new parent, e.g., using
:meth:`list.extend` and :meth:`dict.update` to add to the
new parent. Immutable objects, such as a :class:`tuple` or
:class:`namedtuple`, must be recreated from scratch, but
use the same type as the new parent passed back from the
*enter* function.
reraise_visit (bool): A pragmatic convenience for the *visit*
callable. When set to ``False``, remap ignores any errors
raised by the *visit* callback. Items causing exceptions
are kept. See examples for more details.
remap is designed to cover the majority of cases with just the
*visit* callable. While passing in multiple callables is very
empowering, remap is designed so very few cases should require
passing more than one function.
When passing *enter* and *exit*, it's common and easiest to build
on the default behavior. Simply add ``from boltons.iterutils import
default_enter`` (or ``default_exit``), and have your enter/exit
function call the default behavior before or after your custom
logic. See `this example`_.
Duplicate and self-referential objects (aka reference loops) are
automatically handled internally, `as shown here`_.
.. _this example: http://sedimental.org/remap.html#sort_all_lists
.. _as shown here: http://sedimental.org/remap.html#corner_cases
"""
# TODO: improve argument formatting in sphinx doc
# TODO: enter() return (False, items) to continue traverse but cancel copy?
if not callable(visit):
raise TypeError('visit expected callable, not: %r' % visit)
if not callable(enter):
raise TypeError('enter expected callable, not: %r' % enter)
if not callable(exit):
raise TypeError('exit expected callable, not: %r' % exit)
reraise_visit = kwargs.pop('reraise_visit', True)
if kwargs:
raise TypeError('unexpected keyword arguments: %r' % kwargs.keys())
path, registry, stack = (), {}, [(None, root)]
new_items_stack = []
while stack:
key, value = stack.pop()
id_value = id(value)
if key is _REMAP_EXIT:
key, new_parent, old_parent = value
id_value = id(old_parent)
path, new_items = new_items_stack.pop()
value = exit(path, key, old_parent, new_parent, new_items)
registry[id_value] = value
if not new_items_stack:
continue
elif id_value in registry:
value = registry[id_value]
else:
res = enter(path, key, value)
try:
new_parent, new_items = res
except TypeError:
# TODO: handle False?
raise TypeError('enter should return a tuple of (new_parent,'
' items_iterator), not: %r' % res)
if new_items is not False:
# traverse unless False is explicitly passed
registry[id_value] = new_parent
new_items_stack.append((path, []))
if value is not root:
path += (key,)
stack.append((_REMAP_EXIT, (key, new_parent, value)))
if new_items:
stack.extend(reversed(list(new_items)))
continue
if visit is _orig_default_visit:
# avoid function call overhead by inlining identity operation
visited_item = (key, value)
else:
try:
visited_item = visit(path, key, value)
except Exception:
if reraise_visit:
raise
visited_item = True
if visited_item is False:
continue # drop
elif visited_item is True:
visited_item = (key, value)
# TODO: typecheck?
# raise TypeError('expected (key, value) from visit(),'
# ' not: %r' % visited_item)
try:
new_items_stack[-1][1].append(visited_item)
except IndexError:
raise TypeError('expected remappable root, not: %r' % root)
return value
class PathAccessError(KeyError, IndexError, TypeError):
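    """An amalgamation of KeyError, IndexError, and TypeError, raised
    by get_path when a segment of a path cannot be resolved. Because
    it subclasses all three, pre-existing ``except`` clauses for those
    exceptions keep working.
    """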
# TODO: could maybe get fancy with an isinstance
# TODO: should accept an idx argument
def __init__(self, exc, seg, path):
self.exc = exc
self.seg = seg
self.path = path
def __repr__(self):
cn = self.__class__.__name__
return '%s(%r, %r, %r)' % (cn, self.exc, self.seg, self.path)
def __str__(self):
return ('could not access %r from path %r, got error: %r'
% (self.seg, self.path, self.exc))
def get_path(root, path, default=_UNSET):
"""EAFP is great, but the error message on this isn't:
var_key = 'last_key'
x['key'][-1]['other_key'][var_key]
KeyError: 'last_key'
    One of get_path's chief aims is to raise a more informative
    exception than a plain old ``KeyError: 'missing_key'``.
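    A short usage sketch (hypothetical data, not from the original
    docs); note that integer-like segments fall back to sequence
    indexing::
        >>> get_path({'a': [{'b': 'c'}]}, 'a.0.b')
        'c'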
"""
# TODO: integrate default
# TODO: listify kwarg? to allow indexing into sets
# TODO: raise better error on not iterable?
if isinstance(path, basestring):
path = path.split('.')
cur = root
for seg in path:
try:
cur = cur[seg]
except (KeyError, IndexError) as exc:
raise PathAccessError(exc, seg, path)
except TypeError as exc:
# either string index in a list, or a parent that
# doesn't support indexing
try:
seg = int(seg)
cur = cur[seg]
except (ValueError, KeyError, IndexError, TypeError):
raise PathAccessError(exc, seg, path)
return cur
# TODO: get_path/set_path
# TODO: recollect()
# TODO: reiter()
"""
May actually be faster to do an isinstance check for a str path
$ python -m timeit -s "x = [1]" "x[0]"
10000000 loops, best of 3: 0.0207 usec per loop
$ python -m timeit -s "x = [1]" "try: x[0] \nexcept: pass"
10000000 loops, best of 3: 0.029 usec per loop
$ python -m timeit -s "x = [1]" "try: x[1] \nexcept: pass"
1000000 loops, best of 3: 0.315 usec per loop
# setting up try/except is fast, only around 0.01us
# actually triggering the exception takes almost 10x as long
$ python -m timeit -s "x = [1]" "isinstance(x, basestring)"
10000000 loops, best of 3: 0.141 usec per loop
$ python -m timeit -s "x = [1]" "isinstance(x, str)"
10000000 loops, best of 3: 0.131 usec per loop
$ python -m timeit -s "x = [1]" "try: x.split('.')\n except: pass"
1000000 loops, best of 3: 0.443 usec per loop
$ python -m timeit -s "x = [1]" "try: x.split('.') \nexcept AttributeError: pass"
1000000 loops, best of 3: 0.544 usec per loop
"""
| doublereedkurt/boltons | boltons/iterutils.py | Python | bsd-3-clause | 32,952 |
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Prompt',
# list of one or more authors for the module
'Author': ['@FuzzyNop', '@harmj0y'],
# more verbose multi-line description of the module
            'Description': ('Launches a specified application with a prompt for credentials using osascript.'),
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : "",
# if the module needs administrative privileges
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : False,
# the module language
'Language' : 'python',
# the minimum language version needed
'MinLanguageVersion' : '2.6',
# list of any references/other comments
'Comments': [
"https://github.com/fuzzynop/FiveOnceInYourLife"
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to execute module on.',
'Required' : True,
'Value' : ''
},
'AppName' : {
                # Name of the application to launch and to reference in the prompt text
'Description' : 'The name of the application to launch.',
'Required' : True,
'Value' : 'App Store'
},
'ListApps' : {
                # Optional switch; when set, list candidate applications instead of prompting
'Description' : 'Switch. List applications suitable for launching.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
        # During instantiation, any settable option parameters are passed
        # as an object to the module and used to update the options
        # dictionary. This mostly covers options passed on the command line.
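        # For example (illustrative values only), params might look like:
        #   [['AppName', 'Safari'], ['ListApps', 'True']]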
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
listApps = self.options['ListApps']['Value']
appName = self.options['AppName']['Value']
if listApps != "":
script = """
import os
apps = [ app.split('.app')[0] for app in os.listdir('/Applications/') if not app.split('.app')[0].startswith('.')]
choices = []
for x in xrange(len(apps)):
choices.append("[%s] %s " %(x+1, apps[x]) )
print "\\nAvailable applications:\\n"
print '\\n'.join(choices)
"""
else:
# osascript prompt for the specific application
script = """
import os
print os.popen('osascript -e \\\'tell app "%s" to activate\\\' -e \\\'tell app "%s" to display dialog "%s requires your password to continue." & return default answer "" with icon 1 with hidden answer with title "%s Alert"\\\'').read()
""" % (appName, appName, appName, appName)
return script
| Hackplayers/Empire-mod-Hpys-tests | lib/modules/python/collection/osx/prompt.py | Python | bsd-3-clause | 3,830 |
#!/usr/bin/python
# ex:set fileencoding=utf-8:
from __future__ import unicode_literals
from django.conf.urls import patterns
from django.conf.urls import url
from django.contrib.admin.sites import AlreadyRegistered
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured
from django.db.models import signals
from django.http import Http404
from django.utils import six
from django.utils.text import slugify
from rest_framework.reverse import reverse
from djangobmf.core.relationship import DocumentRelationship
from djangobmf.core.serializers.document import DocumentSerializer
from djangobmf.core.workflow import Workflow
from djangobmf.models import Document
from djangobmf.permissions import ModulePermission
from djangobmf.views import ModuleCreateView
from djangobmf.views import ModuleDeleteView
from djangobmf.views import ModuleDetail
from djangobmf.views import ModuleFormAPI
from djangobmf.views import ModuleUpdateView
from djangobmf.views import ModuleWorkflowView
from collections import OrderedDict
import logging
logger = logging.getLogger(__name__)
class Module(object):
"""
    Under the ``Module`` class the framework stores all information
    needed to display and manage views and APIs. It also provides
    many functions used throughout the framework.
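    A typical subclass mainly needs to point at a model (a rough,
    hypothetical sketch; the names are illustrative only)::
        class ProjectModule(Module):
            model = Project
            workflow_class = ProjectWorkflow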
"""
open_relation = None
workflow_class = None
workflow_field_name = "state"
detail_view = ModuleDetail
def __init__(self, bmfconfig):
# validation
if not hasattr(self, 'model'):
raise ImproperlyConfigured(
'No model defined in %s.' % self.__class__
)
self.bmfconfig = bmfconfig
self._class_reports = {}
self._object_reports = {}
self._relations = []
self.signals_setup()
self.validate_workflow()
# auto add document relationship
if hasattr(self.model, '_bmfmeta') and self.model._bmfmeta.has_files:
class FileDownload(DocumentRelationship):
model_to = self.model
serializer = DocumentSerializer
self.add_relation(FileDownload, Document)
# TODO: OLD OLD OLD
self.create_view = self.create
self.delete_view = self.delete
self.update_view = self.update
# --- misc ----------------------------------------------------------------
def get_contenttype(self): # pragma: no cover
"""
returns the models contenttype
"""
return ContentType.objects.get_for_model(self.model)
# --- single views --------------------------------------------------------
# TODO
def get_update_view(self):
"""
"""
pass
# TODO
def get_delete_view(self):
"""
"""
pass
def get_detail_view(self, request, *args, **kwargs):
"""
generates a detail-view response
"""
if hasattr(self, '_detail_view'):
return self._detail_view(request, *args, **kwargs)
self._detail_view = self.detail_view.as_view(
module=self,
model=self.model
)
return self._detail_view(request, *args, **kwargs)
# --- serialization -------------------------------------------------------
# TODO
def serialize_class(self, request=None):
"""
"""
return OrderedDict([
('app', self.model._meta.app_label),
('creates', self.get_create_views()),
('ct', self.get_contenttype().pk),
('model', self.model._meta.model_name),
('name', self.model._meta.verbose_name_plural),
('open_relation', self.open_relation),
('relations', self.get_relations(request)),
])
# TODO
def serialize_object(self, obj):
"""
"""
return {}
# --- workflow ------------------------------------------------------------
# TODO
def validate_workflow(self):
"""
"""
if self.workflow_class:
if not issubclass(self.workflow_class, Workflow):
raise ImproperlyConfigured(
"%s is not a Workflow in %s" % (
self.workflow_class.__name__,
self.__name__
)
)
# self.workflow = self.workflow_class()
def has_workflow(self):
"""
"""
return bool(self.workflow_class)
# TODO
def get_workflow_states(self, obj):
"""
"""
pass
# TODO
def get_workflow_transitions(self, obj, state_name):
"""
"""
pass
# --- permissions ---------------------------------------------------------
# TODO
def get_permissions(self, obj):
"""
"""
pass
# --- Create views --------------------------------------------------------
def has_create_views(self):
"""
return True if the module has one or more create views
"""
return getattr(self, '_has_create_views', False)
# TODO
def get_create_views(self):
"""
"""
if self.bmfconfig:
namespace_api = '%s:moduleapi_%s_%s' % (
self.bmfconfig.label,
self.model._meta.app_label,
self.model._meta.model_name,
)
return [{
'name': i[1],
'url': reverse(namespace_api + ':create', kwargs={"key": i[0]}),
} for i in self.list_creates()]
return []
# TODO
def get_create_view(self, name):
"""
"""
pass
# TODO
def add_create_view(self, name, view):
"""
"""
pass
self._has_create_views = True
# --- Clone views ---------------------------------------------------------
def has_clone_views(self):
"""
return True if the module has one or more clone views
"""
return getattr(self, '_has_clone_views', False)
# TODO
def get_clone_views(self):
"""
"""
pass
# TODO
def get_clone_view(self, name):
"""
"""
pass
# TODO
def add_clone_view(self, name, view):
"""
"""
pass
self._has_clone_views = True
# --- Functions for both report types -------------------------------------
def add_report(self, report):
"""
"""
if not getattr(report, "renderer_class", None):
raise ImproperlyConfigured(
'%s needs a renderer_class attribute',
report,
)
if report.has_object:
return self.add_object_report(report)
else:
return self.add_class_report(report)
# --- Class specific reports ----------------------------------------------
# TODO
def get_class_reports(self):
"""
"""
pass
# TODO
def get_class_report(self, name):
"""
"""
pass
# TODO
def add_class_report(self, report):
"""
"""
self._class_reports[report.__name__] = {
'class': report,
}
# --- Object specific reports ---------------------------------------------
def get_object_reports(self):
"""
Returns all available reports
"""
qs = self.bmfconfig.get_model("Report").objects.filter(
contenttype=self.get_contenttype(),
enabled=True
).values('pk', 'name', 'slug', 'renderer_view')
items = []
for data in qs:
            if data['renderer_view'] in self._object_reports:
                cls = self._object_reports[data['renderer_view']]
items.append({
'name': data['name'],
'slug': data['slug'],
'verbose_name': cls['class'].verbose_name,
'has_form': bool(cls['class'].form_class),
})
else:
self.bmfconfig.get_model("Report").objects.filter(pk=data['pk']).update(enabled=False)
return items
def get_object_report(self, slug):
"""
"""
obj = self.bmfconfig.get_model("Report").objects.get(
contenttype=self.get_contenttype(),
enabled=True,
slug=slug,
)
if not obj.renderer:
logger.error('No renderer defined')
raise Http404
if obj.renderer_view in self._object_reports:
report = self._object_reports[obj.renderer_view]
if not report["view"]:
report["view"] = report["class"].as_view()
return report['view'], obj.renderer
else:
raise Http404
def add_object_report(self, report):
"""
"""
name = report.__module__ + '.' + report.__name__
self._object_reports[name] = {
'class': report,
'view': None, # the view is added by get_object_report
}
# --- Class specific custom apis ------------------------------------------
# TODO
def get_class_apis(self):
"""
"""
pass
# TODO
def get_class_api(self, name):
"""
"""
pass
# TODO
def add_class_api(self, name, view):
"""
"""
pass
# --- Object specific custom apis -----------------------------------------
# TODO
def get_object_apis(self):
"""
"""
pass
# TODO
def get_object_api(self, name):
"""
"""
pass
# TODO
def add_object_api(self, name, view):
"""
"""
pass
# --- Object specific custom apis -----------------------------------------
def has_relations(self):
"""
return True if the module has one or more relations
"""
return bool(self._relations)
# TODO
def get_relations(self, request):
"""
"""
relations = []
for relation in self._relations:
perm = '%s.view_%s'
info = (relation._model_to._meta.app_label, relation._model_to._meta.model_name)
if not request.user.has_perms([perm % info]):
continue
data = OrderedDict([
('app_label', relation._model_from._meta.app_label),
('model_name', relation._model_from._meta.model_name),
('name', relation.name),
('slug', relation.slug),
('template', relation.template),
])
relations.append(data)
return relations
# TODO
def get_relation(self, name):
"""
"""
pass
# TODO
def add_relation(self, cls, model_from):
"""
"""
relation = cls()
relation._model_from = model_from
for obj in self._relations:
if obj == relation:
raise AlreadyRegistered(
'Can not register the relationship %s' % cls.__name__
)
self._relations.append(relation)
# --- number ranges -------------------------------------------------------
def has_numberranges(self):
"""
"""
pass
# TODO
def get_numberranges(self):
"""
"""
pass
# TODO
def get_numberrange(self, name):
"""
"""
pass
# TODO
def add_numberrange(self, name, number_range):
"""
"""
pass
# --- Signals -------------------------------------------------------------
def signals_setup(self):
"""
Bind own signal methods to the djangos signals
"""
logger.debug("Setup signals for %s", self.__class__.__name__)
signals.pre_delete.connect(self.signal_pre_delete, sender=self.model)
signals.pre_init.connect(self.signal_pre_init, sender=self.model)
signals.pre_save.connect(self.signal_pre_save, sender=self.model)
signals.post_delete.connect(self.signal_post_delete, sender=self.model)
signals.post_init.connect(self.signal_post_init, sender=self.model)
signals.post_save.connect(self.signal_post_save, sender=self.model)
def signal_pre_delete(self, *args, **kwargs):
"""
        This function is called before a model instance is deleted
"""
pass
def signal_pre_init(self, *args, **kwargs):
"""
        This function is called before a model instance is initialized
"""
pass
def signal_pre_save(self, *args, **kwargs):
"""
        This function is called before a model instance is saved
"""
pass
def signal_post_delete(self, *args, **kwargs):
"""
This function is called after a model instance is deleted
"""
pass
def signal_post_init(self, *args, **kwargs):
"""
This function is called after a model instance is initialized
"""
pass
def signal_post_save(self, *args, **kwargs):
"""
This function is called after a model instance is saved
"""
pass
# TODO: OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD OLD
detail = ModuleDetail
create = ModuleCreateView
delete = ModuleDeleteView
update = ModuleUpdateView
permissions = ModulePermission
detail_urlpatterns = None
api_urlpatterns = None
def list_creates(self):
if hasattr(self, 'listed_creates'):
return self.listed_creates
self.listed_creates = []
if isinstance(self.create, dict):
for label, view in six.iteritems(self.create):
key = slugify(label)
if isinstance(view, (list, tuple)) and len(view) == 2:
                    # overwrite the label, and use the correct view function
label = view[0]
view = view[1]
self.listed_creates.append((key, label, view))
elif issubclass(self.create, ModuleCreateView):
self.listed_creates.append(('default', 'default', self.create))
return self.listed_creates
def get_detail_urls(self):
# add custom url patterns
if self.detail_urlpatterns:
return self.detail_urlpatterns
return patterns('')
def get_api_urls(self):
creates = self.list_creates()
urlpatterns = patterns(
'',
url(
r'^update/(?P<pk>[0-9]+)/$',
self.update.as_view(
module=self,
model=self.model
),
name='update',
),
url(
r'^update/(?P<pk>[0-9]+)/form/$',
ModuleFormAPI.as_view(
module=self,
model=self.model,
form_view=self.update,
),
name='update-form',
),
url(
r'^delete/(?P<pk>[0-9]+)/$',
self.delete.as_view(
module=self,
model=self.model
),
name='delete',
),
)
if self.model._bmfmeta.can_clone:
urlpatterns += patterns(
'',
url(
r'^clone/(?P<pk>[0-9]+)/$',
self.clone.as_view(
module=self,
model=self.model
),
name='clone',
),
url(
r'^clone/(?P<pk>[0-9]+)/form/$',
ModuleFormAPI.as_view(
module=self,
model=self.model,
form_view=self.clone,
),
name='clone-form',
),
)
for key, label, view in creates:
urlpatterns += patterns(
'',
url(
r'^create/(?P<key>%s)/$' % key,
view.as_view(
module=self,
model=self.model
),
name='create',
),
url(
r'^create/(?P<key>%s)/form/$' % key,
ModuleFormAPI.as_view(
module=self,
model=self.model,
form_view=view,
),
name='create-form',
),
)
# workflow interactions
if self.model._bmfmeta.has_workflow:
urlpatterns += patterns(
'',
url(
r'^workflow/(?P<pk>[0-9]+)/(?P<transition>\w+)/$',
ModuleWorkflowView.as_view(
module=self,
model=self.model
),
name='workflow',
),
)
# add custom url patterns
if self.api_urlpatterns:
urlpatterns += self.api_urlpatterns
return urlpatterns
| django-bmf/django-bmf | djangobmf/core/module.py | Python | bsd-3-clause | 17,355 |
import sys
from apps.cowry.exceptions import PaymentMethodNotFound
from django.utils.importlib import import_module
def _load_from_module(path):
package, attr = path.rsplit('.', 1)
module = import_module(package)
return getattr(module, attr)
# TODO read django settings to find out what adapters to load.
# TODO Ensure not duplicate payment method names.
# ADAPTERS = getattr(settings, 'COWRY_ADAPTERS')
ADAPTERS = ('apps.cowry_docdata.adapters.DocDataPaymentAdapter',)
_adapters = []
for adapter_str in ADAPTERS:
adapter_class = _load_from_module(adapter_str)
_adapters.append(adapter_class())
def _adapter_for_payment_method(payment_method_id):
for adapter in _adapters:
for pmi in adapter.get_payment_methods():
if payment_method_id == pmi:
return adapter
raise PaymentMethodNotFound('', payment_method_id)
def create_payment_object(order, payment_method_id, payment_submethod='', amount='', currency=''):
adapter = _adapter_for_payment_method(payment_method_id)
payment = adapter.create_payment_object(order, payment_method_id, payment_submethod, amount, currency)
payment.save()
return payment
def get_payment_methods(amount=None, currency='', country='', recurring=None, pm_ids=None):
payment_methods = []
for adapter in _adapters:
cur_payment_methods = adapter.get_payment_methods()
for pm_id in cur_payment_methods:
if pm_ids is None or pm_id in pm_ids:
# Extract values from the configuration.
pm_config = cur_payment_methods[pm_id]
max_amount = pm_config.get('max_amount', sys.maxint)
min_amount = pm_config.get('min_amount', 0)
restricted_currencies = pm_config.get('restricted_currencies', (currency,))
restricted_countries = pm_config.get('restricted_countries', (country,))
supports_recurring = pm_config.get('supports_recurring', True)
supports_single = pm_config.get('supports_single', True)
# See if we need to exclude the current payment_method (pm).
add_pm = True
if amount and (amount > max_amount or amount < min_amount):
add_pm = False
if country not in restricted_countries:
add_pm = False
if currency not in restricted_currencies:
add_pm = False
if recurring and not supports_recurring:
add_pm = False
if not recurring and not supports_single:
add_pm = False
# For now we only return a few params. Later on we might want to return the entire object.
if add_pm:
payment_methods.append({'id': pm_id, 'name': pm_config.get('name')})
return payment_methods
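# Example (illustrative values only; the real results depend on the adapters
# listed in ADAPTERS and their configuration):
#   get_payment_methods(amount=25, currency='EUR', country='NL')
#   might return something like [{'id': 'some-method-id', 'name': 'Some Method'}]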
def get_payment_method_ids(amount=None, currency='', country='', recurring=None, pm_ids=None):
payment_method_ids = []
for pm in get_payment_methods(amount=amount, currency=currency, country=country, recurring=recurring, pm_ids=pm_ids):
payment_method_ids.append(pm['id'])
return payment_method_ids
def get_payment_submethods(payment_method_id):
adapter = _adapter_for_payment_method(payment_method_id)
for payment_methods in adapter.get_payment_method_config():
for pmi in payment_methods.keys():
config = payment_methods[pmi]
return config.get('submethods')
| onepercentclub/onepercentclub-site | apps/cowry/factory.py | Python | bsd-3-clause | 3,526 |
"""
Example OAuthenticator to use with My Service
"""
import json
from jupyterhub.auth import LocalAuthenticator
from oauthenticator.oauth2 import OAuthLoginHandler, OAuthenticator
from tornado.auth import OAuth2Mixin
from tornado.httputil import url_concat
from tornado.httpclient import HTTPRequest, AsyncHTTPClient, HTTPError
class MyServiceMixin(OAuth2Mixin):
    # authorize is the URL users are redirected to in order to authorize your service
_OAUTH_AUTHORIZE_URL = "https://myservice.biz/login/oauth/authorize"
# token is the URL JupyterHub accesses to finish the OAuth process
_OAUTH_ACCESS_TOKEN_URL = "https://myservice.biz/login/oauth/access_token"
class MyServiceLoginHandler(OAuthLoginHandler, MyServiceMixin):
pass
class GitHubOAuthenticator(OAuthenticator):
# login_service is the text displayed on the "Login with..." button
login_service = "My Service"
login_handler = MyServiceLoginHandler
async def authenticate(self, handler, data=None):
"""We set up auth_state based on additional GitHub info if we
receive it.
"""
code = handler.get_argument("code")
# TODO: Configure the curl_httpclient for tornado
http_client = AsyncHTTPClient()
# Exchange the OAuth code for an Access Token
# this is the TOKEN URL in your provider
params = dict(
client_id=self.client_id, client_secret=self.client_secret, code=code
)
url = url_concat("https://myservice.biz/login/oauth/access_token", params)
req = HTTPRequest(
url, method="POST", headers={"Accept": "application/json"}, body=''
)
resp = await http_client.fetch(req)
resp_json = json.loads(resp.body.decode('utf8', 'replace'))
if 'access_token' in resp_json:
access_token = resp_json['access_token']
elif 'error_description' in resp_json:
raise HTTPError(
403,
"An access token was not returned: {}".format(
resp_json['error_description']
),
)
else:
raise HTTPError(500, "Bad response: %s".format(resp))
# Determine who the logged in user is
# by using the new access token to make a request
# check with your OAuth provider for this URL.
# it could also be in the response to the token request,
# making this request unnecessary.
req = HTTPRequest(
"https://myservice.biz/api/user",
method="GET",
headers={"Authorization": f"Bearer {access_token}"},
)
resp = await http_client.fetch(req)
resp_json = json.loads(resp.body.decode('utf8', 'replace'))
# check the documentation for what field contains a unique username
# it might not be the 'username'!
username = resp_json["username"]
if not username:
# return None means that no user is authenticated
# and login has failed
return None
# here we can add additional checks such as against team whitelists
# if the OAuth provider has such a concept
# 'name' is the JupyterHub username
user_info = {"name": username}
# We can also persist auth state,
# which is information encrypted in the Jupyter database
# and can be passed to the Spawner for e.g. authenticated data access
# these fields are up to you, and not interpreted by JupyterHub
# see Authenticator.pre_spawn_start for how to use this information
user_info["auth_state"] = auth_state = {}
auth_state['access_token'] = access_token
auth_state['auth_reply'] = resp_json
return user_info
class LocalGitHubOAuthenticator(LocalAuthenticator, GitHubOAuthenticator):
"""A version that mixes in local system user creation"""
pass
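# To wire this example into JupyterHub (a sketch; adjust the import path to
# wherever this module actually lives), jupyterhub_config.py would contain
# something like:
#
#   c.JupyterHub.authenticator_class = GitHubOAuthenticator
#   c.GitHubOAuthenticator.client_id = 'my-oauth-client-id'
#   c.GitHubOAuthenticator.client_secret = 'my-oauth-client-secret'
#   c.GitHubOAuthenticator.oauth_callback_url = 'https://hub.example.com/hub/oauth_callback'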
| minrk/oauthenticator | docs/source/example-oauthenticator.py | Python | bsd-3-clause | 3,918 |
class ServiceError(Exception):
"""Base class for exceptions in this module."""
pass
class UnsupportedFormatError(ServiceError):
"""Used to raise exceptions when a response doesn't match expected semantics or for failed version checks."""
pass
class MissingLayerError(ServiceError):
"""Used if expected layer could not be found in the service."""
def __init__(self, message):
self.message = message
| venicegeo/eventkit-cloud | eventkit_cloud/utils/services/errors.py | Python | bsd-3-clause | 454 |
# -*- coding: utf-8 -*-
from django.contrib import admin
from ionyweb.page.models import Page, Layout
admin.site.register(Page)
admin.site.register(Layout)
| makinacorpus/ionyweb | ionyweb/page/admin.py | Python | bsd-3-clause | 158 |
from distutils.core import setup
setup(
name='PyMonad',
version='1.3',
author='Jason DeLaat',
author_email='[email protected]',
packages=['pymonad', 'pymonad.test'],
url='https://bitbucket.org/jason_delaat/pymonad',
license=open('LICENSE.txt').read(),
description='Collection of classes for programming with functors, applicative functors and monads.',
long_description=open('README.txt').read() + open("CHANGES.txt").read(),
classifiers=[ "Intended Audience :: Developers"
, "License :: OSI Approved :: BSD License"
, "Operating System :: OS Independent"
, "Programming Language :: Python :: 2.7"
, "Programming Language :: Python :: 3"
, "Topic :: Software Development"
, "Topic :: Software Development :: Libraries"
, "Topic :: Utilities"
],
)
| fnl/pymonad | setup.py | Python | bsd-3-clause | 824 |
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module containing the various stages that a builder runs."""
import json
import logging
import os
from chromite.cbuildbot import commands
from chromite.cbuildbot import failures_lib
from chromite.cbuildbot import cbuildbot_run
from chromite.cbuildbot.stages import artifact_stages
from chromite.lib import cros_build_lib
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
from chromite.lib import timeout_util
class InvalidTestConditionException(Exception):
"""Raised when pre-conditions for a test aren't met."""
class SignerTestStage(artifact_stages.ArchivingStage):
"""Run signer related tests."""
option_name = 'tests'
config_name = 'signer_tests'
# If the signer tests take longer than 30 minutes, abort. They usually take
# five minutes to run.
SIGNER_TEST_TIMEOUT = 1800
def PerformStage(self):
if not self.archive_stage.WaitForRecoveryImage():
raise InvalidTestConditionException('Missing recovery image.')
with timeout_util.Timeout(self.SIGNER_TEST_TIMEOUT):
commands.RunSignerTests(self._build_root, self._current_board)
class SignerResultsTimeout(failures_lib.StepFailure):
"""The signer did not produce any results inside the expected time."""
class SignerFailure(failures_lib.StepFailure):
"""The signer returned an error result."""
class MissingInstructionException(failures_lib.StepFailure):
"""We didn't receive the list of signing instructions PushImage uploaded."""
class MalformedResultsException(failures_lib.StepFailure):
"""The Signer results aren't formatted as we expect."""
class PaygenSigningRequirementsError(failures_lib.StepFailure):
"""Paygen stage can't run if signing failed."""
class PaygenCrostoolsNotAvailableError(failures_lib.StepFailure):
"""Paygen stage can't run if signing failed."""
class PaygenNoPaygenConfigForBoard(failures_lib.StepFailure):
"""Paygen can't run with a release.conf config for the board."""
class PaygenStage(artifact_stages.ArchivingStage):
"""Stage that generates release payloads.
If this stage is created with a 'channels' argument, it can run
  independently. Otherwise, it's dependent on values queued up by
the ArchiveStage (push_image).
"""
option_name = 'paygen'
config_name = 'paygen'
# Poll for new results every 30 seconds.
SIGNING_PERIOD = 30
# Timeout for PushImage to finish uploading images. 2 hours in seconds.
PUSHIMAGE_TIMEOUT = 2 * 60 * 60
# Timeout for the signing process. 2 hours in seconds.
SIGNING_TIMEOUT = 2 * 60 * 60
FINISHED = 'finished'
def __init__(self, builder_run, board, archive_stage, channels=None,
**kwargs):
"""Init that accepts the channels argument, if present.
Args:
builder_run: See builder_run on ArchivingStage.
board: See board on ArchivingStage.
archive_stage: See archive_stage on ArchivingStage.
channels: Explicit list of channels to generate payloads for.
If empty, will instead wait on values from push_image.
Channels is normally None in release builds, and normally set
for trybot 'payloads' builds.
"""
super(PaygenStage, self).__init__(builder_run, board, archive_stage,
**kwargs)
self.signing_results = {}
self.channels = channels
def _HandleStageException(self, exc_info):
"""Override and don't set status to FAIL but FORGIVEN instead."""
exc_type, exc_value, _exc_tb = exc_info
# If Paygen fails to find anything needed in release.conf, treat it
# as a warning, not a failure. This is common during new board bring up.
if issubclass(exc_type, PaygenNoPaygenConfigForBoard):
return self._HandleExceptionAsWarning(exc_info)
# If the exception is a TestLabFailure that means we couldn't schedule the
# test. We don't fail the build for that. We do the CompoundFailure dance,
# because that's how we'll get failures from background processes returned
# to us.
if (issubclass(exc_type, failures_lib.TestLabFailure) or
(issubclass(exc_type, failures_lib.CompoundFailure) and
exc_value.MatchesFailureType(failures_lib.TestLabFailure))):
return self._HandleExceptionAsWarning(exc_info)
return super(PaygenStage, self)._HandleStageException(exc_info)
def _JsonFromUrl(self, gs_ctx, url):
"""Fetch a GS Url, and parse it as Json.
Args:
gs_ctx: GS Context.
url: Url to fetch and parse.
Returns:
None if the Url doesn't exist.
Parsed Json structure if it did.
Raises:
MalformedResultsException if it failed to parse.
"""
try:
signer_txt = gs_ctx.Cat(url).output
except gs.GSNoSuchKey:
return None
try:
return json.loads(signer_txt)
except ValueError:
# We should never see malformed Json, even for intermediate statuses.
raise MalformedResultsException(signer_txt)
def _SigningStatusFromJson(self, signer_json):
"""Extract a signing status from a signer result Json DOM.
Args:
signer_json: The parsed json status from a signer operation.
Returns:
string with a simple status: 'passed', 'failed', 'downloading', etc,
or '' if the json doesn't contain a status.
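    Example (shape inferred from this helper, not from signer docs):
      {'status': {'status': 'passed', ...}} -> 'passed'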
"""
return (signer_json or {}).get('status', {}).get('status', '')
def _CheckForResults(self, gs_ctx, instruction_urls_per_channel,
channel_notifier):
"""timeout_util.WaitForSuccess func to check a list of signer results.
Args:
gs_ctx: Google Storage Context.
instruction_urls_per_channel: Urls of the signer result files
we're expecting.
channel_notifier: BackgroundTaskRunner into which we push channels for
processing.
Returns:
Number of results not yet collected.
"""
COMPLETED_STATUS = ('passed', 'failed')
# Assume we are done, then try to prove otherwise.
results_completed = True
for channel in instruction_urls_per_channel.keys():
self.signing_results.setdefault(channel, {})
if (len(self.signing_results[channel]) ==
len(instruction_urls_per_channel[channel])):
continue
for url in instruction_urls_per_channel[channel]:
# Convert from instructions URL to instructions result URL.
url += '.json'
# We already have a result for this URL.
if url in self.signing_results[channel]:
continue
signer_json = self._JsonFromUrl(gs_ctx, url)
if self._SigningStatusFromJson(signer_json) in COMPLETED_STATUS:
# If we find a completed result, remember it.
self.signing_results[channel][url] = signer_json
# If we don't have full results for this channel, we aren't done
# waiting.
if (len(self.signing_results[channel]) !=
len(instruction_urls_per_channel[channel])):
results_completed = False
continue
# If we reach here, the channel has just been completed for the first
# time.
# If all results 'passed' the channel was successfully signed.
channel_success = True
for signer_result in self.signing_results[channel].values():
if self._SigningStatusFromJson(signer_result) != 'passed':
channel_success = False
# If we successfully completed the channel, inform paygen.
if channel_success:
channel_notifier(channel)
return results_completed
def _WaitForPushImage(self):
"""Block until push_image data is ready.
Returns:
Push_image results, expected to be of the form:
{ 'channel': ['gs://instruction_uri1', 'gs://signer_instruction_uri2'] }
Raises:
MissingInstructionException: If push_image sent us an error, or timed out.
"""
try:
instruction_urls_per_channel = self.board_runattrs.GetParallel(
'instruction_urls_per_channel', timeout=self.PUSHIMAGE_TIMEOUT)
except cbuildbot_run.AttrTimeoutError:
instruction_urls_per_channel = None
# A value of None signals an error, either in PushImage, or a timeout.
if instruction_urls_per_channel is None:
raise MissingInstructionException('PushImage results not available.')
return instruction_urls_per_channel
def _WaitForSigningResults(self,
instruction_urls_per_channel,
channel_notifier):
"""Do the work of waiting for signer results and logging them.
Args:
instruction_urls_per_channel: push_image data (see _WaitForPushImage).
channel_notifier: BackgroundTaskRunner into which we push channels for
processing.
Raises:
ValueError: If the signer result isn't valid json.
RunCommandError: If we are unable to download signer results.
"""
gs_ctx = gs.GSContext(dry_run=self._run.debug)
try:
cros_build_lib.Info('Waiting for signer results.')
timeout_util.WaitForReturnTrue(
self._CheckForResults,
func_args=(gs_ctx, instruction_urls_per_channel, channel_notifier),
timeout=self.SIGNING_TIMEOUT, period=self.SIGNING_PERIOD)
except timeout_util.TimeoutError:
msg = 'Image signing timed out.'
cros_build_lib.Error(msg)
cros_build_lib.PrintBuildbotStepText(msg)
raise SignerResultsTimeout(msg)
# Log all signer results, then handle any signing failures.
failures = []
for url_results in self.signing_results.values():
for url, signer_result in url_results.iteritems():
result_description = os.path.basename(url)
cros_build_lib.PrintBuildbotStepText(result_description)
cros_build_lib.Info('Received results for: %s', result_description)
cros_build_lib.Info(json.dumps(signer_result, indent=4))
status = self._SigningStatusFromJson(signer_result)
if status != 'passed':
failures.append(result_description)
cros_build_lib.Error('Signing failed for: %s', result_description)
if failures:
cros_build_lib.Error('Failure summary:')
for failure in failures:
cros_build_lib.Error(' %s', failure)
raise SignerFailure(failures)
def PerformStage(self):
"""Do the work of generating our release payloads."""
# Convert to release tools naming for boards.
board = self._current_board.replace('_', '-')
version = self._run.attrs.release_tag
assert version, "We can't generate payloads without a release_tag."
logging.info("Generating payloads for: %s, %s", board, version)
# Test to see if the current board has a Paygen configuration. We do
    # this here, not in the sub-process, so we don't have to pass back a
# failure reason.
try:
from crostools.lib import paygen_build_lib
paygen_build_lib.ValidateBoardConfig(board)
except paygen_build_lib.BoardNotConfigured:
raise PaygenNoPaygenConfigForBoard(
'No release.conf entry was found for board %s. Get a TPM to fix.' %
board)
except ImportError:
raise PaygenCrostoolsNotAvailableError()
with parallel.BackgroundTaskRunner(self._RunPaygenInProcess) as per_channel:
def channel_notifier(channel):
per_channel.put((channel, board, version, self._run.debug,
self._run.config.paygen_skip_testing,
self._run.config.paygen_skip_delta_payloads))
if self.channels:
logging.info("Using explicit channels: %s", self.channels)
# If we have an explicit list of channels, use it.
for channel in self.channels:
channel_notifier(channel)
else:
instruction_urls_per_channel = self._WaitForPushImage()
self._WaitForSigningResults(instruction_urls_per_channel,
channel_notifier)
def _RunPaygenInProcess(self, channel, board, version, debug,
skip_test_payloads, skip_delta_payloads):
"""Helper for PaygenStage that invokes payload generation.
This method is intended to be safe to invoke inside a process.
Args:
channel: Channel of payloads to generate ('stable', 'beta', etc)
board: Board of payloads to generate ('x86-mario', 'x86-alex-he', etc)
version: Version of payloads to generate.
debug: Flag telling if this is a real run, or a test run.
skip_test_payloads: Skip generating test payloads, and auto tests.
skip_delta_payloads: Skip generating delta payloads.
"""
# TODO(dgarrett): Remove when crbug.com/341152 is fixed.
# These modules are imported here because they aren't always available at
# cbuildbot startup.
# pylint: disable=F0401
try:
from crostools.lib import gspaths
from crostools.lib import paygen_build_lib
except ImportError:
# We can't generate payloads without crostools.
raise PaygenCrostoolsNotAvailableError()
# Convert to release tools naming for channels.
if not channel.endswith('-channel'):
channel += '-channel'
with osutils.TempDir(sudo_rm=True) as tempdir:
# Create the definition of the build to generate payloads for.
build = gspaths.Build(channel=channel,
board=board,
version=version)
try:
# Generate the payloads.
self._PrintLoudly('Starting %s, %s, %s' % (channel, version, board))
paygen_build_lib.CreatePayloads(build,
work_dir=tempdir,
dry_run=debug,
run_parallel=True,
run_on_builder=True,
skip_delta_payloads=skip_delta_payloads,
skip_test_payloads=skip_test_payloads,
skip_autotest=skip_test_payloads)
except (paygen_build_lib.BuildFinished,
paygen_build_lib.BuildLocked,
paygen_build_lib.BuildSkip) as e:
# These errors are normal if it's possible for another process to
# work on the same build. This process could be a Paygen server, or
# another builder (perhaps by a trybot generating payloads on request).
#
# This means the build was finished by the other process, is already
# being processed (so the build is locked), or that it's been marked
# to skip (probably done manually).
cros_build_lib.Info('Paygen skipped because: %s', e)
| bpsinc-native/src_third_party_chromite | cbuildbot/stages/release_stages.py | Python | bsd-3-clause | 14,805 |
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
# Standard imports
from future import standard_library
standard_library.install_aliases()
from builtins import *
from past.utils import old_div
import unittest
import json
import logging
import re
from datetime import datetime, timedelta
# Our imports
from emission.analysis.result import carbon
import emission.core.get_database as edb
from emission.core.get_database import get_mode_db, get_section_db
import emission.tests.common as etc
from emission.core import common
class TestCarbon(unittest.TestCase):
def setUp(self):
from copy import copy
self.testUsers = ["[email protected]", "[email protected]", "[email protected]",
"[email protected]", "[email protected]"]
self.serverName = 'localhost'
# Sometimes, we may have entries left behind in the database if one of the tests failed
# or threw an exception, so let us start by cleaning up all entries
etc.dropAllCollections(edb._get_current_db())
self.ModesColl = get_mode_db()
self.assertEquals(self.ModesColl.estimated_document_count(), 0)
etc.loadTable(self.serverName, "Stage_Modes", "emission/tests/data/modes.json")
etc.loadTable(self.serverName, "Stage_Sections", "emission/tests/data/testCarbonFile")
self.SectionsColl = get_section_db()
self.walkExpect = 1057.2524056424411
self.busExpect = 2162.668467546699
self.busCarbon = old_div(267.0,1609)
self.airCarbon = old_div(217.0,1609)
self.driveCarbon = old_div(278.0,1609)
self.busOptimalCarbon = old_div(92.0,1609)
self.now = datetime.now()
self.dayago = self.now - timedelta(days=1)
self.weekago = self.now - timedelta(weeks = 1)
for section in self.SectionsColl.find():
section['section_start_datetime'] = self.dayago
section['section_end_datetime'] = self.dayago + timedelta(hours = 1)
if section['confirmed_mode'] == 5:
airSection = copy(section)
airSection['confirmed_mode'] = 9
airSection['_id'] = section['_id'] + "_air"
self.SectionsColl.insert(airSection)
# print("Section start = %s, section end = %s" %
# (section['section_start_datetime'], section['section_end_datetime']))
self.SectionsColl.save(section)
def tearDown(self):
for testUser in self.testUsers:
etc.purgeSectionData(self.SectionsColl, testUser)
self.ModesColl.remove()
self.assertEquals(self.ModesColl.estimated_document_count(), 0)
def getMyQuerySpec(self, user, modeId):
return common.getQuerySpec(user, modeId, self.weekago, self.now)
def testGetModes(self):
modes = carbon.getAllModes()
for mode in modes:
print(mode['mode_id'], mode['mode_name'])
self.assertEquals(len(modes), 9)
def testGetDisplayModes(self):
modes = carbon.getDisplayModes()
for mode in modes:
print(mode['mode_id'], mode['mode_name'])
# skipping transport, underground and not a trip
self.assertEquals(len(modes), 8)
def testGetTripCountForMode(self):
modes = carbon.getDisplayModes()
# try different modes
self.assertEqual(carbon.getTripCountForMode("[email protected]", 1, self.weekago, self.now), 1) # walk
self.assertEqual(carbon.getTripCountForMode("[email protected]", 5, self.weekago, self.now), 1) # bus
self.assertEqual(carbon.getTripCountForMode("[email protected]", 9, self.weekago, self.now), 1) # bus
# try different users
self.assertEqual(carbon.getTripCountForMode("[email protected]", 1, self.weekago, self.now), 1) # walk
self.assertEqual(carbon.getTripCountForMode("[email protected]", 5, self.weekago, self.now), 1) # bus
# try to sum across users
# We have 5 users - best, fest, rest, nest and test
self.assertEqual(carbon.getTripCountForMode(None, 1, self.weekago, self.now), 5) # walk
self.assertEqual(carbon.getTripCountForMode(None, 5, self.weekago, self.now), 5) # bus
def testTotalModeShare(self):
modeshare = carbon.getModeShare(None, self.weekago, self.now)
self.assertEqual(modeshare['walking'], 5)
self.assertEqual(modeshare['bus'], 5)
self.assertEqual(modeshare['cycling'], 0)
self.assertEqual(modeshare['car'], 0)
self.assertEqual(modeshare['train'], 0)
# self.assertFalse(modeshare.keys() contains 'not a trip')
# self.assertFalse(modeshare.keys() contains 'transport')
def testMyModeShare(self):
modeshare = carbon.getModeShare('[email protected]', self.weekago, self.now)
print(modeshare)
self.assertEqual(modeshare['walking'], 1)
self.assertEqual(modeshare['bus'], 1)
self.assertEqual(modeshare['cycling'], 0)
self.assertEqual(modeshare['car'], 0)
self.assertEqual(modeshare['train'], 0)
# self.assertFalse(modeshare.keys() contains 'not a trip')
# self.assertFalse(modeshare.keys() contains 'transport')
def testDistanceForMode(self):
# try different modes
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 1)),
self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 5)),
self.busExpect) # bus
# try different users
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 1)), self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec("[email protected]", 5)), self.busExpect) # bus
# try to sum across users
# We have 5 users - best, fest, rest, nest and test
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec(None, 1)), len(self.testUsers) * self.walkExpect) # walk
self.assertEqual(carbon.getDistanceForMode(self.getMyQuerySpec(None, 5)), len(self.testUsers) * self.busExpect) # bus
def testMyModeDistance(self):
myModeDistance = carbon.getModeShareDistance('[email protected]', self.weekago, self.now)
self.assertEqual(myModeDistance['walking'], self.walkExpect)
self.assertEqual(myModeDistance['cycling'], 0)
self.assertEqual(myModeDistance['bus'], self.busExpect)
self.assertEqual(myModeDistance['train'], 0)
def testTotalModeDistance(self):
totalModeDistance = carbon.getModeShareDistance(None, self.weekago, self.now)
self.assertEqual(totalModeDistance['walking'], len(self.testUsers) * self.walkExpect)
self.assertEqual(totalModeDistance['cycling'], 0)
self.assertEqual(totalModeDistance['bus'], len(self.testUsers) * self.busExpect)
self.assertEqual(totalModeDistance['train'], 0)
def testMyCarbonFootprint(self):
myModeDistance = carbon.getModeCarbonFootprint('[email protected]', carbon.carbonFootprintForMode, self.weekago, self.now)
self.assertEqual(myModeDistance['walking'], 0)
self.assertEqual(myModeDistance['cycling'], 0)
self.assertEqual(myModeDistance['bus_short'], (self.busCarbon * self.busExpect/1000))
self.assertEqual(myModeDistance['train_short'], 0)
# We duplicate the bus trips to get air trips, so the distance should be the same
self.assertEqual(myModeDistance['air_short'], (self.airCarbon * self.busExpect/1000))
def testTotalCarbonFootprint(self):
totalModeDistance = carbon.getModeCarbonFootprint(None, carbon.carbonFootprintForMode, self.weekago, self.now)
self.assertEqual(totalModeDistance['walking'], 0)
self.assertEqual(totalModeDistance['cycling'], 0)
# We divide by 1000 to make it comprehensible in getModeCarbonFootprint
self.assertEqual(totalModeDistance['bus_short'], old_div((self.busCarbon * len(self.testUsers) * self.busExpect),1000))
self.assertEqual(totalModeDistance['air_short'], old_div((self.airCarbon * len(self.testUsers) * self.busExpect),1000))
self.assertEqual(totalModeDistance['train_short'], 0)
def testMySummary(self):
(myModeShareCount, avgModeShareCount,
myModeShareDistance, avgModeShareDistance,
myModeCarbonFootprint, avgModeCarbonFootprint,
myModeCarbonFootprintNoLongMotorized, avgModeCarbonFootprintNoLongMotorized,
myOptimalCarbonFootprint, avgOptimalCarbonFootprint,
myOptimalCarbonFootprintNoLongMotorized, avgOptimalCarbonFootprintNoLongMotorized) = carbon.getFootprintCompare('[email protected]')
# >>> m = {'air_long': 0, 'air_short': 0.2, 'bus_long': 0, 'bus_short': 0.3}
# >>> f = [(i, m[i]) for i in m if m[i] != 0]
# >>> f
# [('bus_short', 0.3), ('air_short', 0.2)]
# >>> dict(f)
# {'bus_short': 0.3, 'air_short': 0.2}
filterZero = lambda m: dict([(i, m[i]) for i in m if m[i] != 0])
self.assertEqual(len(myModeShareCount), len(carbon.getDisplayModes()))
self.assertEqual(len(myModeShareDistance), len(carbon.getDisplayModes()))
# We have duplicated the bus trip to get bus, air and unconfirmed trips.
# we ignore the unconfirmed trip, so only expect to get three values...
self.assertAlmostEqual(sum(myModeShareDistance.values()), 2 * self.busExpect + self.walkExpect, places = 4)
self.assertEqual(filterZero(myModeShareDistance),
{'bus': self.busExpect,
'walking': self.walkExpect,
'air': self.busExpect})
logging.debug(filterZero(myModeShareDistance))
self.assertEqual(filterZero(myModeCarbonFootprint),
{'bus_short': old_div((self.busExpect * self.busCarbon),1000),
'air_short': old_div((self.busExpect * self.airCarbon),1000)})
self.assertEqual(filterZero(myModeCarbonFootprintNoLongMotorized),
{'bus_short': old_div((self.busExpect * self.busCarbon),1000)})
self.assertEqual(filterZero(myOptimalCarbonFootprint),
{'air_short': old_div((self.busExpect * self.busOptimalCarbon),1000)})
self.assertEqual(filterZero(myOptimalCarbonFootprintNoLongMotorized),
{})
def testSummaryAllTrips(self):
summary = carbon.getSummaryAllTrips(self.weekago, self.now)
# *2 because the walking trips don't count, but we have doubled the bus
# trips to count as air trips
self.assertEqual(summary['current'], old_div((self.busCarbon * self.busExpect + self.airCarbon * self.busExpect),1000))
# No * 2 because the optimal value for short bus trips is to actually move to bikes :)
self.assertEqual(summary['optimal'], old_div((self.busOptimalCarbon * self.busExpect),1000))
# These are are without air, so will only count the bus trips
self.assertEqual(summary['current no air'], old_div((self.busCarbon * self.busExpect),1000))
self.assertEqual(summary['optimal no air'], 0)
self.assertAlmostEqual(summary['all drive'], old_div((self.driveCarbon * (self.busExpect * 2 + self.walkExpect)),1000), places = 4)
def testDistinctUserCount(self):
self.assertEqual(carbon.getDistinctUserCount({}), len(self.testUsers))
def testFilteredDistinctUserCount(self):
# Now, move all the sections before a week
# Now there should be no matches in the last week
for section in self.SectionsColl.find():
section['section_start_datetime'] = self.weekago + timedelta(days = -1)
section['section_end_datetime'] = self.weekago + timedelta(days = -1) + timedelta(hours = 1)
# print("Section start = %s, section end = %s" %
# (section['section_start_datetime'], section['section_end_datetime']))
self.SectionsColl.save(section)
print("About to check for distinct users from a week ago")
self.assertEqual(carbon.getDistinctUserCount(carbon.getQuerySpec(None, None,
self.weekago, self.now)), 0)
self.assertEqual(carbon.getDistinctUserCount(carbon.getQuerySpec(None, None,
self.weekago + timedelta(weeks = -1), self.now)), len(self.testUsers))
def testDelLongMotorizedModes(self):
testMap = {'bus': 1, 'air': 3}
carbon.delLongMotorizedModes(testMap)
self.assertEqual(len(testMap), 1)
self.assertEqual(testMap, {'bus': 1})
def testDelLongMotorizedModesShortLong(self):
testMap = {'bus_short': 1, 'bus_long': 2, 'air_short': 3, 'air_long': 4}
carbon.delLongMotorizedModes(testMap)
self.assertEqual(len(testMap), 2)
self.assertIn('bus_short', testMap)
self.assertIn('bus_long', testMap)
self.assertNotIn('air_short', testMap)
self.assertNotIn('air_long', testMap)
def testGetCarbonFootprintsForMap(self):
testDistanceMap = {'a': 1, 'b': 2, 'c': 3}
testModeFootprintMap = {'a': 1, 'b': 2, 'c': 3}
footprintMap = carbon.getCarbonFootprintsForMap(testDistanceMap, testModeFootprintMap)
self.assertEqual(footprintMap, {'a': 0.001, 'b': 0.004, 'c': 0.009})
def testAvgCalculation(self):
testMap = {'a': 5, 'b': 10, 'c': 15, 'd': 3, 'e': 7, 'f': 13}
avgTestMap = carbon.convertToAvg(testMap, 5)
self.assertEquals(avgTestMap['a'], 1)
self.assertEquals(avgTestMap['b'], 2)
self.assertEquals(avgTestMap['c'], 3)
self.assertEquals(avgTestMap['d'], 0.6)
self.assertEquals(avgTestMap['e'], 1.4)
self.assertEquals(avgTestMap['f'], 2.6)
if __name__ == '__main__':
etc.configLogging()
unittest.main()
| shankari/e-mission-server | emission/incomplete_tests/TestCarbon.py | Python | bsd-3-clause | 13,159 |
"""
Module to read MODPATH output files. The module contains two
important classes that can be accessed by the user.
* EndpointFile (ascii endpoint file)
* PathlineFile (ascii pathline file)
"""
import numpy as np
from ..utils.flopy_io import loadtxt
class PathlineFile():
"""
PathlineFile Class.
Parameters
----------
filename : string
Name of the pathline file
verbose : bool
Write information to the screen. Default is False.
Attributes
----------
Methods
-------
See Also
--------
Notes
-----
The PathlineFile class provides simple ways to retrieve MODPATH 6
pathline data from a MODPATH 6 ascii pathline file.
Examples
--------
>>> import flopy
>>> pthobj = flopy.utils.PathlineFile('model.mppth')
>>> p1 = pthobj.get_data(partid=1)
"""
kijnames = ['k', 'i', 'j', 'particleid', 'particlegroup', 'linesegmentindex']
def __init__(self, filename, verbose=False):
"""
Class constructor.
"""
self.fname = filename
self.dtype, self.outdtype = self._get_dtypes()
self._build_index()
self._data = loadtxt(self.file, dtype=self.dtype, skiprows=self.skiprows)
# set number of particle ids
self.nid = self._data['particleid'].max()
# convert layer, row, and column indices; particle id and group; and
# line segment indices to zero-based
for n in self.kijnames:
self._data[n] -= 1
# close the input file
self.file.close()
return
def _build_index(self):
"""
Set position of the start of the pathline data.
"""
self.skiprows = 0
self.file = open(self.fname, 'r')
while True:
line = self.file.readline()
if isinstance(line, bytes):
line = line.decode()
if self.skiprows < 1:
if 'MODPATH_PATHLINE_FILE 6' not in line.upper():
errmsg = '{} is not a valid pathline file'.format(self.fname)
raise Exception(errmsg)
self.skiprows += 1
if 'end header' in line.lower():
break
self.file.seek(0)
def _get_dtypes(self):
"""
Build numpy dtype for the MODPATH 6 pathline file.
"""
dtype = np.dtype([("particleid", np.int), ("particlegroup", np.int),
("timepointindex", np.int), ("cumulativetimestep", np.int),
("time", np.float32), ("x", np.float32),
("y", np.float32), ("z", np.float32),
("k", np.int), ("i", np.int), ("j", np.int),
("grid", np.int), ("xloc", np.float32),
("yloc", np.float32), ("zloc", np.float32),
("linesegmentindex", np.int)])
outdtype = np.dtype([("x", np.float32), ("y", np.float32), ("z", np.float32),
("time", np.float32), ("k", np.int), ("id", np.int)])
return dtype, outdtype
def get_maxid(self):
"""
        Get the maximum pathline number in the pathline file
Returns
----------
out : int
Maximum pathline number.
"""
return self.maxid
def get_maxtime(self):
"""
Get the maximum time in pathline file
Returns
----------
out : float
Maximum pathline time.
"""
        return self._data['time'].max()
def get_data(self, partid=0, totim=None, ge=True):
"""
get pathline data from the pathline file for a single pathline.
Parameters
----------
partid : int
The zero-based particle id. The first record is record 0.
totim : float
The simulation time. All pathline points for particle partid
that are greater than or equal to (ge=True) or less than or
equal to (ge=False) totim will be returned. Default is None
ge : bool
Boolean that determines if pathline times greater than or equal
to or less than or equal to totim is used to create a subset
of pathlines. Default is True.
Returns
----------
ra : numpy record array
A numpy recarray with the x, y, z, time, k, and particleid for
pathline partid.
See Also
--------
Notes
-----
Examples
--------
>>> import flopy.utils.modpathfile as mpf
>>> pthobj = flopy.utils.PathlineFile('model.mppth')
>>> p1 = pthobj.get_data(partid=1)
"""
idx = self._data['particleid'] == partid
if totim is not None:
if ge:
idx = (self._data['time'] >= totim) & (self._data['particleid'] == partid)
else:
idx = (self._data['time'] <= totim) & (self._data['particleid'] == partid)
else:
idx = self._data['particleid'] == partid
self._ta = self._data[idx]
ra = np.rec.fromarrays((self._ta['x'], self._ta['y'], self._ta['z'],
self._ta['time'], self._ta['k'], self._ta['particleid']), dtype=self.outdtype)
return ra
def get_alldata(self, totim=None, ge=True):
"""
get pathline data from the pathline file for all pathlines and all times.
Parameters
----------
totim : float
The simulation time. All pathline points for particle partid
that are greater than or equal to (ge=True) or less than or
equal to (ge=False) totim will be returned. Default is None
ge : bool
Boolean that determines if pathline times greater than or equal
to or less than or equal to totim is used to create a subset
of pathlines. Default is True.
Returns
----------
plist : a list of numpy record array
A list of numpy recarrays with the x, y, z, time, k, and particleid for
all pathlines.
See Also
--------
Notes
-----
Examples
--------
>>> import flopy.utils.modpathfile as mpf
>>> pthobj = flopy.utils.PathlineFile('model.mppth')
>>> p = pthobj.get_alldata()
"""
plist = []
for partid in range(self.nid):
plist.append(self.get_data(partid=partid, totim=totim, ge=ge))
return plist
def get_destination_pathline_data(self, dest_cells):
"""Get pathline data for set of destination cells.
Parameters
----------
dest_cells : list or array of tuples
(k, i, j) of each destination cell (zero-based)
Returns
-------
pthldest : np.recarray
Slice of pathline data array (e.g. PathlineFile._data)
containing only pathlines with final k,i,j in dest_cells.
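        Examples
        --------
        A hypothetical call for a single destination cell, given as a
        zero-based (layer, row, column) tuple::
            >>> import flopy
            >>> pthobj = flopy.utils.PathlineFile('model.mppth')
            >>> pl = pthobj.get_destination_pathline_data([(0, 10, 12)])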
"""
ra = self._data.view(np.recarray)
# find the intersection of endpoints and dest_cells
# convert dest_cells to same dtype for comparison
raslice = ra[['k', 'i', 'j']]
dest_cells = np.array(dest_cells, dtype=raslice.dtype)
inds = np.in1d(raslice, dest_cells)
epdest = ra[inds].copy().view(np.recarray)
# use particle ids to get the rest of the paths
inds = np.in1d(ra.particleid, epdest.particleid)
pthldes = ra[inds].copy()
pthldes.sort(order=['particleid', 'time'])
return pthldes
def write_shapefile(self, pathline_data=None,
one_per_particle=True,
direction='ending',
shpname='pathlines.shp',
sr=None, epsg=None,
**kwargs):
"""Write pathlines to shapefile.
pathline_data : np.recarray
Record array of the same form as PathlineFile._data (e.g. as returned by
get_destination_pathline_data). If None, PathlineFile._data is exported.
one_per_particle : boolean (default True)
True writes a single LineString with a single set of attribute data for each
particle. False writes a record/geometry for each pathline segment
(each row in the PathLine file). This option can be used to visualize
attribute information (time, model layer, etc.) across a pathline in a GIS.
direction : str
String defining if starting or ending particle locations should be
included in shapefile attribute information. Only used if one_per_particle=False.
(default is 'ending')
shpname : str
File path for shapefile
sr : flopy.utils.reference.SpatialReference instance
Used to scale and rotate Global x,y,z values in MODPATH Endpoint file
epsg : int
EPSG code for writing projection (.prj) file. If this is not supplied,
the proj4 string or epsg code associated with sr will be used.
kwargs : keyword arguments to flopy.export.shapefile_utils.recarray2shp
"""
from ..utils.reference import SpatialReference
from ..utils.geometry import LineString
from ..export.shapefile_utils import recarray2shp
pth = pathline_data
if pth is None:
pth = self._data.view(np.recarray)
pth = pth.copy()
pth.sort(order=['particleid', 'time'])
if sr is None:
sr = SpatialReference()
particles = np.unique(pth.particleid)
geoms = []
# 1 geometry for each path
if one_per_particle:
loc_inds = 0
if direction == 'ending':
loc_inds = -1
pthdata = []
for pid in particles:
ra = pth[pth.particleid == pid]
x, y = sr.transform(ra.x, ra.y)
z = ra.z
geoms.append(LineString(list(zip(x, y, z))))
pthdata.append((pid,
ra.particlegroup[0],
ra.time.max(),
ra.k[loc_inds],
ra.i[loc_inds],
ra.j[loc_inds]))
pthdata = np.array(pthdata, dtype=[('particleid', np.int),
('particlegroup', np.int),
('time', np.float),
('k', np.int),
('i', np.int),
('j', np.int)
]).view(np.recarray)
# geometry for each row in PathLine file
else:
dtype = pth.dtype
#pthdata = np.empty((0, len(dtype)), dtype=dtype).view(np.recarray)
pthdata = []
for pid in particles:
ra = pth[pth.particleid == pid]
x, y = sr.transform(ra.x, ra.y)
z = ra.z
geoms += [LineString([(x[i-1], y[i-1], z[i-1]),
(x[i], y[i], z[i])])
for i in np.arange(1, (len(ra)))]
#pthdata = np.append(pthdata, ra[1:]).view(np.recarray)
pthdata += ra[1:].tolist()
pthdata = np.array(pthdata, dtype=dtype).view(np.recarray)
# convert back to one-based
for n in set(self.kijnames).intersection(set(pthdata.dtype.names)):
pthdata[n] += 1
# prefer an explicitly supplied EPSG code; otherwise fall back to the one on sr
if epsg is None:
epsg = sr.epsg
recarray2shp(pthdata, geoms, shpname=shpname, epsg=epsg, **kwargs)
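# Hedged usage sketch (added for illustration; not part of the original module).
# It shows one way to export the pathlines that terminate in a given cell.
# The file name 'model.mppth', the destination cell (0, 9, 9) and the EPSG code
# are assumptions chosen for the example, not values taken from this module.
#
#   pthobj = PathlineFile('model.mppth')
#   dest = pthobj.get_destination_pathline_data(dest_cells=[(0, 9, 9)])
#   pthobj.write_shapefile(pathline_data=dest, one_per_particle=True,
#                          shpname='destination_pathlines.shp', epsg=26715)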
class EndpointFile():
"""
EndpointFile Class.
Parameters
----------
filename : string
Name of the endpoint file
verbose : bool
Write information to the screen. Default is False.
Attributes
----------
Methods
-------
See Also
--------
Notes
-----
The EndpointFile class provides simple ways to retrieve MODPATH 6
endpoint data from a MODPATH 6 ascii endpoint file.
Examples
--------
>>> import flopy
>>> endobj = flopy.utils.EndpointFile('model.mpend')
>>> e1 = endobj.get_data(partid=1)
"""
kijnames = ['k0', 'i0', 'j0', 'k', 'i', 'j', 'particleid', 'particlegroup']
def __init__(self, filename, verbose=False):
"""
Class constructor.
"""
self.fname = filename
self.dtype = self._get_dtypes()
self._build_index()
self._data = loadtxt(self.file, dtype=self.dtype, skiprows=self.skiprows)
# set number of particle ids
self.nid = self._data['particleid'].max()
# convert layer, row, and column indices; particle id and group; and
# line segment indices to zero-based
for n in self.kijnames:
self._data[n] -= 1
# close the input file
self.file.close()
return
def _build_index(self):
"""
Set position of the start of the pathline data.
"""
self.skiprows = 0
self.file = open(self.fname, 'r')
idx = 0
while True:
line = self.file.readline()
if isinstance(line, bytes):
line = line.decode()
if self.skiprows < 1:
if 'MODPATH_ENDPOINT_FILE 6' not in line.upper():
errmsg = '{} is not a valid endpoint file'.format(self.fname)
raise Exception(errmsg)
self.skiprows += 1
if idx == 1:
t = line.strip()
self.direction = 1
if int(t[0]) == 2:
self.direction = -1
if 'end header' in line.lower():
break
# advance the header line counter so the direction entry (line 2) is parsed
idx += 1
self.file.seek(0)
def _get_dtypes(self):
"""
Build numpy dtype for the MODPATH 6 endpoint file.
"""
dtype = np.dtype([("particleid", np.int), ("particlegroup", np.int),
('status', np.int), ('initialtime', np.float32),
('finaltime', np.float32), ('initialgrid', np.int),
('k0', np.int), ('i0', np.int),
('j0', np.int), ('initialcellface', np.int),
('initialzone', np.int), ('xloc0', np.float32),
('yloc0', np.float32), ('zloc0', np.float32),
('x0', np.float32), ('y0', np.float32), ('z0', np.float32),
('finalgrid', np.int), ('k', np.int), ('i', np.int),
('j', np.int), ('finalcellface', np.int),
('finalzone', np.int), ('xloc', np.float32),
('yloc', np.float32), ('zloc', np.float32),
('x', np.float32), ('y', np.float32), ('z', np.float32),
('label', '|S40')])
return dtype
def get_maxid(self):
"""
Get the maximum endpoint particle id in the endpoint file
Returns
----------
out : int
Maximum endpoint particle id.
"""
# particle ids are stored zero-based after __init__ (see kijnames conversion)
return self._data['particleid'].max()
def get_maxtime(self):
"""
Get the maximum time in the endpoint file
Returns
----------
out : float
Maximum endpoint time.
"""
return self._data['finaltime'].max()
def get_maxtraveltime(self):
"""
Get the maximum travel time in the endpoint file
Returns
----------
out : float
Maximum endpoint travel time.
"""
return (self._data['finaltime'] - self._data['initialtime']).max()
def get_data(self, partid=0):
"""
Get endpoint data from the endpoint file for a single particle.
Parameters
----------
partid : int
The zero-based particle id. The first record is record 0.
(default is 0)
Returns
----------
ra : numpy record array
A numpy recarray with the endpoint particle data for
endpoint partid.
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> endobj = flopy.utils.EndpointFile('model.mpend')
>>> e1 = endobj.get_data(partid=1)
"""
idx = self._data['particleid'] == partid
ra = self._data[idx]
return ra
def get_alldata(self):
"""
Get endpoint data from the endpoint file for all endpoints.
Parameters
----------
Returns
----------
ra : numpy record array
A numpy recarray with the endpoint particle data
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> endobj = flopy.utils.EndpointFile('model.mpend')
>>> e = endobj.get_alldata()
"""
ra = self._data.view(np.recarray).copy()
# if final:
# ra = np.rec.fromarrays((self._data['x'], self._data['y'], self._data['z'],
# self._data['finaltime'], self._data['k'],
# self._data['particleid']), dtype=self.outdtype)
# else:
# ra = np.rec.fromarrays((self._data['x0'], self._data['y0'], self._data['z0'],
# self._data['initialtime'], self._data['k0'],
# self._data['particleid']), dtype=self.outdtype)
return ra
def get_destination_endpoint_data(self, dest_cells):
"""Get endpoint data for set of destination cells.
Parameters
----------
dest_cells : list or array of tuples
(k, i, j) of each destination cell (zero-based)
Returns
-------
epdest : np.recarray
Slice of endpoint data array (e.g. EndpointFile.get_alldata)
containing only data with final k,i,j in dest_cells.
"""
ra = self.get_alldata()
# find the intersection of endpoints and dest_cells
# convert dest_cells to same dtype for comparison
raslice = ra[['k', 'i', 'j']]
dest_cells = np.array(dest_cells, dtype=raslice.dtype)
inds = np.in1d(raslice, dest_cells)
epdest = ra[inds].copy().view(np.recarray)
return epdest
def write_shapefile(self, endpoint_data=None,
shpname='endpoints.shp',
direction='ending', sr=None, epsg=None,
**kwargs):
"""Write particle starting / ending locations to shapefile.
endpoint_data : np.recarray
Record array of same form as that returned by EndpointFile.get_alldata.
(if None, EndpointFile.get_alldata() is exported).
shpname : str
File path for shapefile
direction : str
String defining if starting or ending particle locations should be
considered. (default is 'ending')
sr : flopy.utils.reference.SpatialReference instance
Used to scale and rotate Global x,y,z values in MODPATH Endpoint file
epsg : int
EPSG code for writing projection (.prj) file. If this is not supplied,
the proj4 string or epsg code associated with sr will be used.
kwargs : keyword arguments to flopy.export.shapefile_utils.recarray2shp
"""
from ..utils.reference import SpatialReference
from ..utils.geometry import Point
from ..export.shapefile_utils import recarray2shp
epd = endpoint_data
if epd is None:
epd = self.get_alldata()
else:
# work on a copy so the caller's array is not modified when converting indices
epd = epd.copy()
if direction.lower() == 'ending':
xcol, ycol, zcol = 'x', 'y', 'z'
elif direction.lower() == 'starting':
xcol, ycol, zcol = 'x0', 'y0', 'z0'
else:
errmsg = 'EndpointFile.write_shapefile direction must be "ending" ' + \
'or "starting".'
raise Exception(errmsg)
if sr is None:
sr = SpatialReference()
x, y = sr.transform(epd[xcol], epd[ycol])
z = epd[zcol]
geoms = [Point(x[i], y[i], z[i]) for i in range(len(epd))]
# convert back to one-based
for n in self.kijnames:
epd[n] += 1
if epsg is None:
epsg = sr.epsg
recarray2shp(epd, geoms, shpname=shpname, epsg=epsg, **kwargs)
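# Hedged usage sketch (added for illustration; not part of the original module).
# It shows exporting the starting locations of particles that terminate in a
# given cell. The file name 'model.mpend', the destination cell and the EPSG
# code are assumptions for the example only.
#
#   endobj = EndpointFile('model.mpend')
#   epd = endobj.get_destination_endpoint_data(dest_cells=[(0, 9, 9)])
#   endobj.write_shapefile(endpoint_data=epd, direction='starting',
#                          shpname='starting_locations.shp', epsg=26715)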
| brclark-usgs/flopy | flopy/utils/modpathfile.py | Python | bsd-3-clause | 20,645 |
# -*- encoding: utf-8 -*-
# Module iacolorhist
def iacolorhist(f, mask=None):
import numpy as np
from iahistogram import iahistogram
WFRAME=5
f = np.asarray(f)
if len(f.shape) == 1: f = f[np.newaxis,:]
if not f.dtype == 'uint8':
raise Exception('error, can only process uint8 images')
if not f.shape[0] == 3:
raise Exception('error, can only process 3-band images')
r,g,b = f[0].astype(np.int), f[1].astype(np.int), f[2].astype(np.int)
n_zeros = 0
if mask is not None:
mask = np.asarray(mask)
# number of masked-out (zero) pixels; used below to correct the (0, 0) bin
n_zeros = mask.size - np.count_nonzero(mask)
r,g,b = mask*r, mask*g, mask*b
hrg = np.zeros((256,256), np.int32); hbg=hrg+0; hrb=hrg+0
img = 256*r + g; m1 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hrg), range(m1+1), aux)
img = 256*b + g; m2 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hbg), range(m2+1), aux)
img = 256*r + b; m3 = img.max()
aux = iahistogram(img.astype(np.int32)); aux[0] = aux[0] - n_zeros
np.put(np.ravel(hrb), range(m3+1), aux)
m=max(hrg.max(),hbg.max(),hrb.max())
hc=m*np.ones((3*WFRAME+2*256,3*WFRAME+2*256))
hc[WFRAME:WFRAME+256,WFRAME:WFRAME+256] = np.transpose(hrg)
hc[WFRAME:WFRAME+256,2*WFRAME+256:2*WFRAME+512] = np.transpose(hbg)
hc[2*WFRAME+256:2*WFRAME+512,WFRAME:WFRAME+256] = np.transpose(hrb)
return hc
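# Hedged usage sketch (added for illustration; the synthetic array below is an
# assumption, not data from this module):
#
#   import numpy as np
#   rgb = np.random.randint(0, 256, size=(3, 128 * 128)).astype(np.uint8)
#   hc = iacolorhist(rgb)   # mosaic of the RG, BG and RB 2-D histograms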
| robertoalotufo/ia636 | ia636/iacolorhist.py | Python | bsd-3-clause | 1,449 |
from os.path import dirname
import sys
from django.test import TestCase
from django.conf import settings
from django.test.utils import override_settings
import oscar
from oscar.core.loading import (
get_model, AppNotFoundError, get_classes, get_class, ClassNotFoundError)
from oscar.test.factories import create_product, WishListFactory, UserFactory
from tests import temporary_python_path
class TestClassLoading(TestCase):
"""
Oscar's class loading utilities
"""
def test_load_oscar_classes_correctly(self):
Product, Category = get_classes('catalogue.models', ('Product', 'Category'))
self.assertEqual('oscar.apps.catalogue.models', Product.__module__)
self.assertEqual('oscar.apps.catalogue.models', Category.__module__)
def test_load_oscar_class_correctly(self):
Product = get_class('catalogue.models', 'Product')
self.assertEqual('oscar.apps.catalogue.models', Product.__module__)
def test_load_oscar_class_from_dashboard_subapp(self):
ReportForm = get_class('dashboard.reports.forms', 'ReportForm')
self.assertEqual('oscar.apps.dashboard.reports.forms', ReportForm.__module__)
def test_raise_exception_when_bad_appname_used(self):
with self.assertRaises(AppNotFoundError):
get_classes('fridge.models', ('Product', 'Category'))
def test_raise_exception_when_bad_classname_used(self):
with self.assertRaises(ClassNotFoundError):
get_class('catalogue.models', 'Monkey')
def test_raise_importerror_if_app_raises_importerror(self):
"""
This tests that Oscar doesn't fall back to using the Oscar catalogue
app if the overriding app throws an ImportError.
"""
apps = list(settings.INSTALLED_APPS)
apps[apps.index('oscar.apps.catalogue')] = 'tests._site.import_error_app.catalogue'
with override_settings(INSTALLED_APPS=apps):
with self.assertRaises(ImportError):
get_class('catalogue.app', 'CatalogueApplication')
class ClassLoadingWithLocalOverrideTests(TestCase):
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
self.installed_apps[self.installed_apps.index('oscar.apps.shipping')] = 'tests._site.shipping'
def test_loading_class_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('tests._site.shipping.methods', Free.__module__)
def test_loading_class_which_is_not_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(FixedPrice,) = get_classes('shipping.methods', ('FixedPrice',))
self.assertEqual('oscar.apps.shipping.methods', FixedPrice.__module__)
def test_loading_class_from_module_not_defined_in_local_app(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Repository,) = get_classes('shipping.repository', ('Repository',))
self.assertEqual('oscar.apps.shipping.repository', Repository.__module__)
def test_loading_classes_defined_in_both_local_and_oscar_modules(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free, FixedPrice) = get_classes('shipping.methods', ('Free', 'FixedPrice'))
self.assertEqual('tests._site.shipping.methods', Free.__module__)
self.assertEqual('oscar.apps.shipping.methods', FixedPrice.__module__)
def test_loading_classes_with_root_app(self):
import tests._site.shipping
path = dirname(dirname(tests._site.shipping.__file__))
with temporary_python_path([path]):
self.installed_apps[
self.installed_apps.index('tests._site.shipping')] = 'shipping'
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('shipping.methods', Free.__module__)
def test_overriding_view_is_possible_without_overriding_app(self):
from oscar.apps.customer.app import application, CustomerApplication
# If test fails, it's helpful to know if it's caused by order of
# execution
self.assertEqual(CustomerApplication().summary_view.__module__,
'tests._site.apps.customer.views')
self.assertEqual(application.summary_view.__module__,
'tests._site.apps.customer.views')
class ClassLoadingWithLocalOverrideWithMultipleSegmentsTests(TestCase):
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
self.installed_apps[self.installed_apps.index('oscar.apps.shipping')] = 'tests._site.apps.shipping'
def test_loading_class_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('tests._site.apps.shipping.methods', Free.__module__)
class TestGetCoreAppsFunction(TestCase):
"""
oscar.get_core_apps function
"""
def test_returns_core_apps_when_no_overrides_specified(self):
apps = oscar.get_core_apps()
self.assertEqual(oscar.OSCAR_CORE_APPS, apps)
def test_uses_non_dashboard_override_when_specified(self):
apps = oscar.get_core_apps(overrides=['apps.shipping'])
self.assertTrue('apps.shipping' in apps)
self.assertTrue('oscar.apps.shipping' not in apps)
def test_uses_dashboard_override_when_specified(self):
apps = oscar.get_core_apps(overrides=['apps.dashboard.catalogue'])
self.assertTrue('apps.dashboard.catalogue' in apps)
self.assertTrue('oscar.apps.dashboard.catalogue' not in apps)
self.assertTrue('oscar.apps.catalogue' in apps)
class TestOverridingCoreApps(TestCase):
def test_means_the_overriding_model_is_registered_first(self):
klass = get_model('partner', 'StockRecord')
self.assertEqual(
'tests._site.apps.partner.models', klass.__module__)
class TestAppLabelsForModels(TestCase):
def test_all_oscar_models_have_app_labels(self):
from django.apps import apps
models = apps.get_models()
missing = []
for model in models:
# Ignore non-Oscar models
if 'oscar' not in repr(model):
continue
# Don't know how to get the actual model's Meta class. But if
# the parent doesn't have a Meta class, it doesn't have a
# base in Oscar anyway and is not intended to be overridden
abstract_model = model.__base__
meta_class = getattr(abstract_model, 'Meta', None)
if meta_class is None:
continue
if not hasattr(meta_class, 'app_label'):
missing.append(model)
if missing:
self.fail("Those models don't have an app_label set: %s" % missing)
class TestDynamicLoadingOn3rdPartyApps(TestCase):
core_app_prefix = 'thirdparty_package.apps'
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
sys.path.append('./tests/_site/')
def tearDown(self):
sys.path.remove('./tests/_site/')
def test_load_core_3rd_party_class_correctly(self):
self.installed_apps.append('thirdparty_package.apps.myapp')
with override_settings(INSTALLED_APPS=self.installed_apps):
Cow, Goat = get_classes('myapp.models', ('Cow', 'Goat'), self.core_app_prefix)
self.assertEqual('thirdparty_package.apps.myapp.models', Cow.__module__)
self.assertEqual('thirdparty_package.apps.myapp.models', Goat.__module__)
def test_load_overriden_3rd_party_class_correctly(self):
self.installed_apps.append('apps.myapp')
with override_settings(INSTALLED_APPS=self.installed_apps):
Cow, Goat = get_classes('myapp.models', ('Cow', 'Goat'), self.core_app_prefix)
self.assertEqual('thirdparty_package.apps.myapp.models', Cow.__module__)
self.assertEqual('apps.myapp.models', Goat.__module__)
class TestMovedClasses(TestCase):
def setUp(self):
user = UserFactory()
product = create_product()
self.wishlist = WishListFactory(owner=user)
self.wishlist.add(product)
def test_load_formset_old_destination(self):
BaseBasketLineFormSet = get_class('basket.forms', 'BaseBasketLineFormSet')
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
StockRecordFormSet = get_class('dashboard.catalogue.forms', 'StockRecordFormSet')
self.assertEqual('oscar.apps.dashboard.catalogue.formsets', StockRecordFormSet.__module__)
OrderedProductFormSet = get_class('dashboard.promotions.forms', 'OrderedProductFormSet')
OrderedProductForm = get_class('dashboard.promotions.forms', 'OrderedProductForm')
# Since OrderedProductFormSet created with metaclass, it has __module__
# attribute pointing to the Django module. Thus, we test if formset was
# loaded correctly by initiating class instance and checking its forms.
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset = get_class('wishlists.forms', 'LineFormset')
WishListLineForm = get_class('wishlists.forms', 'WishListLineForm')
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))
def test_load_formset_new_destination(self):
BaseBasketLineFormSet = get_class('basket.formsets', 'BaseBasketLineFormSet')
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
StockRecordFormSet = get_class('dashboard.catalogue.formsets', 'StockRecordFormSet')
self.assertEqual('oscar.apps.dashboard.catalogue.formsets', StockRecordFormSet.__module__)
OrderedProductFormSet = get_class('dashboard.promotions.formsets', 'OrderedProductFormSet')
OrderedProductForm = get_class('dashboard.promotions.forms', 'OrderedProductForm')
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset = get_class('wishlists.formsets', 'LineFormset')
WishListLineForm = get_class('wishlists.forms', 'WishListLineForm')
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))
def test_load_formsets_mixed_destination(self):
BaseBasketLineFormSet, BasketLineForm = get_classes('basket.forms', ('BaseBasketLineFormSet', 'BasketLineForm'))
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
self.assertEqual('oscar.apps.basket.forms', BasketLineForm.__module__)
StockRecordForm, StockRecordFormSet = get_classes(
'dashboard.catalogue.forms', ('StockRecordForm', 'StockRecordFormSet')
)
self.assertEqual('oscar.apps.dashboard.catalogue.forms', StockRecordForm.__module__)
OrderedProductForm, OrderedProductFormSet = get_classes(
'dashboard.promotions.forms', ('OrderedProductForm', 'OrderedProductFormSet')
)
self.assertEqual('oscar.apps.dashboard.promotions.forms', OrderedProductForm.__module__)
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset, WishListLineForm = get_classes('wishlists.forms', ('LineFormset', 'WishListLineForm'))
self.assertEqual('oscar.apps.wishlists.forms', WishListLineForm.__module__)
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))
| sonofatailor/django-oscar | tests/integration/core/test_loading.py | Python | bsd-3-clause | 11,789 |
import sys
from time import sleep
from cachey import Cache, Scorer, nbytes
def test_cache():
c = Cache(available_bytes=nbytes(1) * 3)
c.put('x', 1, 10)
assert c.get('x') == 1
assert 'x' in c
c.put('a', 1, 10)
c.put('b', 1, 10)
c.put('c', 1, 10)
assert set(c.data) == set('xbc')
c.put('d', 1, 10)
assert set(c.data) == set('xcd')
c.clear()
assert 'x' not in c
assert not c.data
assert not c.heap
def test_cache_scores_update():
c = Cache(available_bytes=nbytes(1) * 2)
c.put('x', 1, 1)
c.put('y', 1, 1)
c.get('x')
c.get('x')
c.get('x')
c.put('z', 1, 1)
assert set(c.data) == set('xz')
def test_memoize():
c = Cache(available_bytes=nbytes(1) * 3)
flag = [0]
def slow_inc(x):
flag[0] += 1
sleep(0.01)
return x + 1
memo_inc = c.memoize(slow_inc)
assert memo_inc(1) == 2
assert memo_inc(1) == 2
assert list(c.data.values()) == [2]
def test_callbacks():
hit_flag = [False]
def hit(key, value):
hit_flag[0] = (key, value)
miss_flag = [False]
def miss(key):
miss_flag[0] = key
c = Cache(100, hit=hit, miss=miss)
c.get('x')
assert miss_flag[0] == 'x'
assert hit_flag[0] == False
c.put('y', 1, 1)
c.get('y')
assert hit_flag[0] == ('y', 1)
def test_just_one_reference():
c = Cache(available_bytes=1000)
o = object()
x = sys.getrefcount(o)
c.put('key', o, cost=10)
y = sys.getrefcount(o)
assert y == x + 1
c.retire('key')
z = sys.getrefcount(o)
assert z == x
| Winterflower/cachey | cachey/tests/test_cache.py | Python | bsd-3-clause | 1,608 |
from __future__ import absolute_import, division, print_function
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
class TreeError(Exception):
"""General tree error"""
pass
class NoLengthError(TreeError):
"""Missing length when expected"""
pass
class DuplicateNodeError(TreeError):
"""Duplicate nodes with identical names"""
pass
class MissingNodeError(TreeError):
"""Expecting a node"""
pass
class NoParentError(MissingNodeError):
"""Missing a parent"""
pass
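# Hedged usage sketch (added for illustration only): callers are expected to
# raise and catch these exceptions around tree operations, e.g.
#
#   try:
#       raise MissingNodeError("node 'a' is not in the tree")
#   except TreeError as e:
#       print(e)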
| Kleptobismol/scikit-bio | skbio/tree/_exception.py | Python | bsd-3-clause | 814 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test for distributed trial worker side.
"""
import os
from cStringIO import StringIO
from zope.interface.verify import verifyObject
from twisted.trial.reporter import TestResult
from twisted.trial.unittest import TestCase
from twisted.trial._dist.worker import (
LocalWorker, LocalWorkerAMP, LocalWorkerTransport, WorkerProtocol)
from twisted.trial._dist import managercommands, workercommands
from twisted.scripts import trial
from twisted.test.proto_helpers import StringTransport
from twisted.internet.interfaces import ITransport, IAddress
from twisted.internet.defer import fail, succeed
from twisted.internet.main import CONNECTION_DONE
from twisted.internet.error import ConnectionDone
from twisted.python.failure import Failure
from twisted.protocols.amp import AMP
class FakeAMP(AMP):
"""
A fake amp protocol.
"""
class WorkerProtocolTestCase(TestCase):
"""
Tests for L{WorkerProtocol}.
"""
def setUp(self):
"""
Set up a transport, a result stream and a protocol instance.
"""
self.serverTransport = StringTransport()
self.clientTransport = StringTransport()
self.server = WorkerProtocol()
self.server.makeConnection(self.serverTransport)
self.client = FakeAMP()
self.client.makeConnection(self.clientTransport)
def test_run(self):
"""
Calling the L{workercommands.Run} command on the client returns a
response with C{success} set to C{True}.
"""
d = self.client.callRemote(workercommands.Run, testCase="doesntexist")
def check(result):
self.assertTrue(result['success'])
d.addCallback(check)
self.server.dataReceived(self.clientTransport.value())
self.clientTransport.clear()
self.client.dataReceived(self.serverTransport.value())
self.serverTransport.clear()
return d
def test_start(self):
"""
The C{start} command changes the current path.
"""
curdir = os.path.realpath(os.path.curdir)
self.addCleanup(os.chdir, curdir)
self.server.start('..')
self.assertNotEqual(os.path.realpath(os.path.curdir), curdir)
class LocalWorkerAMPTestCase(TestCase):
"""
Test case for distributed trial's manager-side local worker AMP protocol
"""
def setUp(self):
self.managerTransport = StringTransport()
self.managerAMP = LocalWorkerAMP()
self.managerAMP.makeConnection(self.managerTransport)
self.result = TestResult()
self.workerTransport = StringTransport()
self.worker = AMP()
self.worker.makeConnection(self.workerTransport)
config = trial.Options()
self.testName = "twisted.doesnexist"
config['tests'].append(self.testName)
self.testCase = trial._getSuite(config)._tests.pop()
self.managerAMP.run(self.testCase, self.result)
self.managerTransport.clear()
def pumpTransports(self):
"""
Sends data from C{self.workerTransport} to C{self.managerAMP}, and then
data from C{self.managerTransport} back to C{self.worker}.
"""
self.managerAMP.dataReceived(self.workerTransport.value())
self.workerTransport.clear()
self.worker.dataReceived(self.managerTransport.value())
def test_runSuccess(self):
"""
Run a test, and succeed.
"""
results = []
d = self.worker.callRemote(managercommands.AddSuccess,
testName=self.testName)
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertTrue(results)
def test_runExpectedFailure(self):
"""
Run a test, and fail expectedly.
"""
results = []
d = self.worker.callRemote(managercommands.AddExpectedFailure,
testName=self.testName, error='error',
todo='todoReason')
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.expectedFailures[0][0])
self.assertTrue(results)
def test_runError(self):
"""
Run a test, and encounter an error.
"""
results = []
d = self.worker.callRemote(managercommands.AddError,
testName=self.testName, error='error',
errorClass='exceptions.ValueError',
frames=[])
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.errors[0][0])
self.assertTrue(results)
def test_runErrorWithFrames(self):
"""
L{LocalWorkerAMP._buildFailure} recreates the C{Failure.frames} from
the C{frames} argument passed to C{AddError}.
"""
results = []
d = self.worker.callRemote(managercommands.AddError,
testName=self.testName, error='error',
errorClass='exceptions.ValueError',
frames=["file.py", "invalid code", "3"])
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.errors[0][0])
self.assertEqual(
[('file.py', 'invalid code', 3, [], [])],
self.result.errors[0][1].frames)
self.assertTrue(results)
def test_runFailure(self):
"""
Run a test, and fail.
"""
results = []
d = self.worker.callRemote(managercommands.AddFailure,
testName=self.testName, fail='fail',
failClass='exceptions.RuntimeError',
frames=[])
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.failures[0][0])
self.assertTrue(results)
def test_runSkip(self):
"""
Run a test, but skip it.
"""
results = []
d = self.worker.callRemote(managercommands.AddSkip,
testName=self.testName, reason='reason')
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.skips[0][0])
self.assertTrue(results)
def test_runUnexpectedSuccesses(self):
"""
Run a test, and succeed unexpectedly.
"""
results = []
d = self.worker.callRemote(managercommands.AddUnexpectedSuccess,
testName=self.testName,
todo='todo')
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual(self.testCase, self.result.unexpectedSuccesses[0][0])
self.assertTrue(results)
def test_testWrite(self):
"""
L{LocalWorkerAMP.testWrite} writes the data received to its test
stream.
"""
results = []
stream = StringIO()
self.managerAMP.setTestStream(stream)
d = self.worker.callRemote(managercommands.TestWrite,
out="Some output")
d.addCallback(lambda result: results.append(result['success']))
self.pumpTransports()
self.assertEqual("Some output\n", stream.getvalue())
self.assertTrue(results)
def test_stopAfterRun(self):
"""
L{LocalWorkerAMP.run} calls C{stopTest} on its test result once the
C{Run} command has succeeded.
"""
result = object()
stopped = []
def fakeCallRemote(command, testCase):
return succeed(result)
self.managerAMP.callRemote = fakeCallRemote
class StopTestResult(TestResult):
def stopTest(self, test):
stopped.append(test)
d = self.managerAMP.run(self.testCase, StopTestResult())
self.assertEqual([self.testCase], stopped)
return d.addCallback(self.assertIdentical, result)
class FakeAMProtocol(AMP):
"""
A fake implementation of L{AMP} for testing.
"""
id = 0
dataString = ""
def dataReceived(self, data):
self.dataString += data
def setTestStream(self, stream):
self.testStream = stream
class FakeTransport(object):
"""
A fake process transport implementation for testing.
"""
dataString = ""
calls = 0
def writeToChild(self, fd, data):
self.dataString += data
def loseConnection(self):
self.calls += 1
class LocalWorkerTestCase(TestCase):
"""
Tests for L{LocalWorker} and L{LocalWorkerTransport}.
"""
def test_childDataReceived(self):
"""
L{LocalWorker.childDataReceived} forwards the received data to the linked
L{AMP} protocol if it arrives on the right file descriptor, otherwise it
forwards to C{ProcessProtocol.childDataReceived}.
"""
fakeTransport = FakeTransport()
localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
localWorker.makeConnection(fakeTransport)
localWorker._outLog = StringIO()
localWorker.childDataReceived(4, "foo")
localWorker.childDataReceived(1, "bar")
self.assertEqual("foo", localWorker._ampProtocol.dataString)
self.assertEqual("bar", localWorker._outLog.getvalue())
def test_outReceived(self):
"""
L{LocalWorker.outReceived} logs the output into its C{_outLog} log
file.
"""
fakeTransport = FakeTransport()
localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
localWorker.makeConnection(fakeTransport)
localWorker._outLog = StringIO()
data = "The quick brown fox jumps over the lazy dog"
localWorker.outReceived(data)
self.assertEqual(data, localWorker._outLog.getvalue())
def test_errReceived(self):
"""
L{LocalWorker.errReceived} logs the errors into its C{_errLog} log
file.
"""
fakeTransport = FakeTransport()
localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
localWorker.makeConnection(fakeTransport)
localWorker._errLog = StringIO()
data = "The quick brown fox jumps over the lazy dog"
localWorker.errReceived(data)
self.assertEqual(data, localWorker._errLog.getvalue())
def test_write(self):
"""
L{LocalWorkerTransport.write} forwards the written data to the given
transport.
"""
transport = FakeTransport()
localTransport = LocalWorkerTransport(transport)
data = "The quick brown fox jumps over the lazy dog"
localTransport.write(data)
self.assertEqual(data, transport.dataString)
def test_writeSequence(self):
"""
L{LocalWorkerTransport.writeSequence} forwards the written data to the
given transport.
"""
transport = FakeTransport()
localTransport = LocalWorkerTransport(transport)
data = ("The quick ", "brown fox jumps ", "over the lazy dog")
localTransport.writeSequence(data)
self.assertEqual("".join(data), transport.dataString)
def test_loseConnection(self):
"""
L{LocalWorkerTransport.loseConnection} forwards the call to the given
transport.
"""
transport = FakeTransport()
localTransport = LocalWorkerTransport(transport)
localTransport.loseConnection()
self.assertEqual(transport.calls, 1)
def test_connectionLost(self):
"""
L{LocalWorker.connectionLost} closes the log streams.
"""
class FakeStream(object):
callNumber = 0
def close(self):
self.callNumber += 1
transport = FakeTransport()
localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
localWorker.makeConnection(transport)
localWorker._outLog = FakeStream()
localWorker._errLog = FakeStream()
localWorker.connectionLost(None)
self.assertEqual(localWorker._outLog.callNumber, 1)
self.assertEqual(localWorker._errLog.callNumber, 1)
def test_processEnded(self):
"""
L{LocalWorker.processEnded} calls C{connectionLost} on itself and on
the L{AMP} protocol.
"""
class FakeStream(object):
callNumber = 0
def close(self):
self.callNumber += 1
transport = FakeTransport()
protocol = FakeAMProtocol()
localWorker = LocalWorker(protocol, '.', 'test.log')
localWorker.makeConnection(transport)
localWorker._outLog = FakeStream()
localWorker.processEnded(Failure(CONNECTION_DONE))
self.assertEqual(localWorker._outLog.callNumber, 1)
self.assertIdentical(None, protocol.transport)
return self.assertFailure(localWorker.endDeferred, ConnectionDone)
def test_addresses(self):
"""
L{LocalWorkerTransport.getPeer} and L{LocalWorkerTransport.getHost}
return L{IAddress} objects.
"""
localTransport = LocalWorkerTransport(None)
self.assertTrue(verifyObject(IAddress, localTransport.getPeer()))
self.assertTrue(verifyObject(IAddress, localTransport.getHost()))
def test_transport(self):
"""
L{LocalWorkerTransport} implements L{ITransport} to be able to be used
by L{AMP}.
"""
localTransport = LocalWorkerTransport(None)
self.assertTrue(verifyObject(ITransport, localTransport))
def test_startError(self):
"""
L{LocalWorker} swallows the exceptions returned by the L{AMP} protocol
start method, as it generates unnecessary errors.
"""
def failCallRemote(command, directory):
return fail(RuntimeError("oops"))
transport = FakeTransport()
protocol = FakeAMProtocol()
protocol.callRemote = failCallRemote
localWorker = LocalWorker(protocol, '.', 'test.log')
localWorker.makeConnection(transport)
self.assertEqual([], self.flushLoggedErrors(RuntimeError))
| hlzz/dotfiles | graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/trial/_dist/test/test_worker.py | Python | bsd-3-clause | 15,115 |
from threading import Thread
import Queue
from django.core.urlresolvers import reverse
from django.conf import settings
from django import forms
from django.http import HttpRequest
from django.test import TestCase
import haystack
from haystack.forms import model_choices, SearchForm, ModelSearchForm
from haystack.query import EmptySearchQuerySet
from haystack.sites import SearchSite
from haystack.views import SearchView, FacetedSearchView, search_view_factory
from core.models import MockModel, AnotherMockModel
class InitialedSearchForm(SearchForm):
q = forms.CharField(initial='Search for...', required=False, label='Search')
class SearchViewTestCase(TestCase):
def setUp(self):
super(SearchViewTestCase, self).setUp()
mock_index_site = SearchSite()
mock_index_site.register(MockModel)
mock_index_site.register(AnotherMockModel)
# Stow.
self.old_site = haystack.site
haystack.site = mock_index_site
self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
settings.HAYSTACK_SEARCH_ENGINE = 'dummy'
def tearDown(self):
haystack.site = self.old_site
settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
super(SearchViewTestCase, self).tearDown()
def test_search_no_query(self):
response = self.client.get(reverse('haystack_search'))
self.assertEqual(response.status_code, 200)
def test_search_query(self):
response = self.client.get(reverse('haystack_search'), {'q': 'hello world'})
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context[-1]['page'].object_list), 1)
self.assertEqual(response.context[-1]['page'].object_list[0].content_type(), 'haystack.dummymodel')
self.assertEqual(response.context[-1]['page'].object_list[0].pk, 1)
def test_invalid_page(self):
response = self.client.get(reverse('haystack_search'), {'q': 'hello world', 'page': '165233'})
self.assertEqual(response.status_code, 404)
def test_empty_results(self):
sv = SearchView()
self.assert_(isinstance(sv.get_results(), EmptySearchQuerySet))
def test_initial_data(self):
sv = SearchView(form_class=InitialedSearchForm)
sv.request = HttpRequest()
form = sv.build_form()
self.assert_(isinstance(form, InitialedSearchForm))
self.assertEqual(form.fields['q'].initial, 'Search for...')
self.assertEqual(form.as_p(), u'<p><label for="id_q">Search:</label> <input type="text" name="q" value="Search for..." id="id_q" /></p>')
def test_thread_safety(self):
exceptions = []
def threaded_view(queue, view, request):
import time; time.sleep(2)
try:
inst = view(request)
queue.put(request.GET['name'])
except Exception, e:
exceptions.append(e)
raise
class ThreadedSearchView(SearchView):
def __call__(self, request):
print "Name: %s" % request.GET['name']
return super(ThreadedSearchView, self).__call__(request)
view = search_view_factory(view_class=ThreadedSearchView)
queue = Queue.Queue()
request_1 = HttpRequest()
request_1.GET = {'name': 'foo'}
request_2 = HttpRequest()
request_2.GET = {'name': 'bar'}
th1 = Thread(target=threaded_view, args=(queue, view, request_1))
th2 = Thread(target=threaded_view, args=(queue, view, request_2))
th1.start()
th2.start()
th1.join()
th2.join()
foo = queue.get()
bar = queue.get()
self.assertNotEqual(foo, bar)
class ResultsPerPageTestCase(TestCase):
urls = 'core.tests.results_per_page_urls'
def test_custom_results_per_page(self):
response = self.client.get('/search/', {'q': 'hello world'})
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context[-1]['page'].object_list), 1)
self.assertEqual(response.context[-1]['paginator'].per_page, 1)
response = self.client.get('/search2/', {'q': 'hello world'})
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context[-1]['page'].object_list), 1)
self.assertEqual(response.context[-1]['paginator'].per_page, 2)
class FacetedSearchViewTestCase(TestCase):
def setUp(self):
super(FacetedSearchViewTestCase, self).setUp()
mock_index_site = SearchSite()
mock_index_site.register(MockModel)
mock_index_site.register(AnotherMockModel)
# Stow.
self.old_site = haystack.site
haystack.site = mock_index_site
self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
settings.HAYSTACK_SEARCH_ENGINE = 'dummy'
def tearDown(self):
haystack.site = self.old_site
settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
super(FacetedSearchViewTestCase, self).tearDown()
def test_search_no_query(self):
response = self.client.get(reverse('haystack_faceted_search'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['facets'], {})
def test_empty_results(self):
fsv = FacetedSearchView()
self.assert_(isinstance(fsv.get_results(), EmptySearchQuerySet))
class BasicSearchViewTestCase(TestCase):
def setUp(self):
super(BasicSearchViewTestCase, self).setUp()
mock_index_site = SearchSite()
mock_index_site.register(MockModel)
mock_index_site.register(AnotherMockModel)
# Stow.
self.old_site = haystack.site
haystack.site = mock_index_site
self.old_engine = getattr(settings, 'HAYSTACK_SEARCH_ENGINE')
settings.HAYSTACK_SEARCH_ENGINE = 'dummy'
def tearDown(self):
haystack.site = self.old_site
settings.HAYSTACK_SEARCH_ENGINE = self.old_engine
super(BasicSearchViewTestCase, self).tearDown()
def test_search_no_query(self):
response = self.client.get(reverse('haystack_basic_search'))
self.assertEqual(response.status_code, 200)
def test_search_query(self):
response = self.client.get(reverse('haystack_basic_search'), {'q': 'hello world'})
self.assertEqual(response.status_code, 200)
self.assertEqual(type(response.context[-1]['form']), ModelSearchForm)
self.assertEqual(len(response.context[-1]['page'].object_list), 1)
self.assertEqual(response.context[-1]['page'].object_list[0].content_type(), 'haystack.dummymodel')
self.assertEqual(response.context[-1]['page'].object_list[0].pk, 1)
self.assertEqual(response.context[-1]['query'], 'hello world')
def test_invalid_page(self):
response = self.client.get(reverse('haystack_basic_search'), {'q': 'hello world', 'page': '165233'})
self.assertEqual(response.status_code, 404)
| soad241/django-haystack | tests/core/tests/views.py | Python | bsd-3-clause | 7,147 |
from django.conf import settings
def bitgroup_cache_key(slug):
return "%s:%s" % (
getattr(settings, 'PAGEBIT_CACHE_PREFIX', 'pagebits'),
slug
)
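# Hedged usage sketch (added for illustration; the slug, the cached value and
# the timeout are assumptions, not values from this module):
#
#   from django.core.cache import cache
#   key = bitgroup_cache_key('homepage-header')   # -> 'pagebits:homepage-header'
#   cache.set(key, rendered_bits, timeout=300)    # rendered_bits is a placeholder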
| frankwiles/django-pagebits | pagebits/utils.py | Python | bsd-3-clause | 171 |
import logging
import pytest
def test_tracing_by_function_if_enable(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
track_logger.enable_tracking()
track_logger.debug(msg1)
track_logger.info(msg2)
track_logger.disable_tracking()
track_logger.debug(msg3)
record = handler.pop()
assert record.msg == msg2
assert record.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
def test_tracing_by_function_if_enable_with_exc(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
track_logger.enable_tracking()
try:
track_logger.debug(msg1)
track_logger.info(msg2)
raise Exception
except Exception:
track_logger.exit_with_exc()
track_logger.debug(msg3)
track_logger.disable_tracking()
record_2 = handler.pop()
record_1 = handler.pop()
assert record_1.msg == msg1
assert record_1.levelname == logging.getLevelName(logging.DEBUG)
assert record_2.msg == msg2
assert record_2.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
def test_tracing_by_context(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
with track_logger.trace:
track_logger.debug(msg1)
track_logger.info(msg2)
track_logger.debug(msg3)
record = handler.pop()
assert record.msg == msg2
assert record.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
def test_tracing_by_context_with_exc(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
try:
with track_logger.trace:
track_logger.debug(msg1)
track_logger.info(msg2)
raise Exception
except Exception:
pass
track_logger.debug(msg3)
record_2 = handler.pop()
record_1 = handler.pop()
assert record_1.msg == msg1
assert record_1.levelname == logging.getLevelName(logging.DEBUG)
assert record_2.msg == msg2
assert record_2.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
def test_tracing_by_decorator(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
@track_logger.trace
def trace_func():
track_logger.debug(msg1)
track_logger.info(msg2)
trace_func()
track_logger.debug(msg3)
record = handler.pop()
assert record.msg == msg2
assert record.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
def test_tracing_by_decorator_with_exc(track_logger, handler):
msg1 = 'TEST1'
msg2 = 'TEST2'
msg3 = 'TEST3'
track_logger.setLevel(logging.INFO)
@track_logger.trace
def trace_func():
track_logger.debug(msg1)
track_logger.info(msg2)
raise Exception
try:
trace_func()
except Exception:
pass
track_logger.debug(msg3)
record_2 = handler.pop()
record_1 = handler.pop()
assert record_1.msg == msg1
assert record_1.levelname == logging.getLevelName(logging.DEBUG)
assert record_2.msg == msg2
assert record_2.levelname == logging.getLevelName(logging.INFO)
with pytest.raises(IndexError):
handler.pop()
| kivio/python-structured-logging | tests/test_tracking_logger.py | Python | bsd-3-clause | 3,755 |
#!/usr/bin/env python
import sys
from roslaunch.xmlloader import XmlLoader, loader
from rosgraph.names import get_ros_namespace
from rqt_launchtree.launchtree_context import LaunchtreeContext
class LaunchtreeLoader(XmlLoader):
def _include_tag(self, tag, context, ros_config, default_machine, is_core, verbose):
inc_filename = self.resolve_args(tag.attributes['file'].value, context)
ros_config.push_level(inc_filename, unique=True)
result = super(LaunchtreeLoader, self)._include_tag(tag, context, ros_config, default_machine, is_core, verbose)
ros_config.pop_level()
return result
def _node_tag(self, tag, context, ros_config, default_machine, is_test=False, verbose=True):
try:
if is_test:
self._check_attrs(tag, context, ros_config, XmlLoader.TEST_ATTRS)
(name,) = self.opt_attrs(tag, context, ('name',))
test_name, time_limit, retry = self._test_attrs(tag, context)
if not name:
name = test_name
else:
self._check_attrs(tag, context, ros_config, XmlLoader.NODE_ATTRS)
(name,) = self.reqd_attrs(tag, context, ('name',))
except Exception as e:
pass # will be handled in super
ros_config.push_level(name)
result = super(LaunchtreeLoader, self)._node_tag(tag, context, ros_config, default_machine, is_test, verbose)
ros_config.pop_level()
return result
def _rosparam_tag(self, tag, context, ros_config, verbose):
param_file = tag.attributes['file'].value \
if tag.attributes.has_key('file') else ''
if param_file != '':
param_filename = self.resolve_args(param_file, context)
level_name = ros_config.push_level(param_filename, unique=True)
result = super(LaunchtreeLoader, self)._rosparam_tag(tag, context, ros_config, verbose)
if param_file != '':
ros_config.pop_level()
context.add_rosparam(tag.attributes.get('command', 'load'), param_filename, level_name)
return result
def _load_launch(self, launch, ros_config, is_core=False, filename=None, argv=None, verbose=True):
if argv is None:
argv = sys.argv
self._launch_tag(launch, ros_config, filename)
self.root_context = LaunchtreeContext(get_ros_namespace(), filename, config=ros_config)
loader.load_sysargs_into_context(self.root_context, argv)
if len(launch.getElementsByTagName('master')) > 0:
print "WARNING: ignoring defunct <master /> tag"
self._recurse_load(ros_config, launch.childNodes, self.root_context, None, is_core, verbose)
| pschillinger/rqt_launchtree | src/rqt_launchtree/launchtree_loader.py | Python | bsd-3-clause | 2,423 |
"""
Provides Matlab-like tic, tac and toc functions.
"""
import time
import numpy as np
class __Timer__:
"""Computes elapsed time, between tic, tac, and toc.
Methods
-------
tic :
Resets timer.
toc :
Returns and prints time elapsed since last tic().
tac :
Returns and prints time elapsed since last
tic(), tac() or toc(), whichever occurred last.
loop_timer :
Returns and prints the total and average time elapsed for n runs
of a given function.
"""
start = None
last = None
def tic(self):
"""
Save time for future use with `tac()` or `toc()`.
"""
t = time.time()
self.start = t
self.last = t
def tac(self, verbose=True, digits=2):
"""
Return and print elapsed time since last `tic()`, `tac()`, or
`toc()`.
Parameters
----------
verbose : bool, optional(default=True)
If True, then prints time.
digits : scalar(int), optional(default=2)
Number of digits printed for time elapsed.
Returns
-------
elapsed : scalar(float)
Time elapsed since last `tic()`, `tac()`, or `toc()`.
"""
if self.start is None:
raise Exception("tac() without tic()")
t = time.time()
elapsed = t-self.last
self.last = t
if verbose:
m, s = divmod(elapsed, 60)
h, m = divmod(m, 60)
print("TAC: Elapsed: %d:%02d:%0d.%0*d" %
(h, m, s, digits, (s % 1)*(10**digits)))
return elapsed
def toc(self, verbose=True, digits=2):
"""
Return and print time elapsed since last `tic()`.
Parameters
----------
verbose : bool, optional(default=True)
If True, then prints time.
digits : scalar(int), optional(default=2)
Number of digits printed for time elapsed.
Returns
-------
elapsed : scalar(float)
Time elapsed since last `tic()`.
"""
if self.start is None:
raise Exception("toc() without tic()")
t = time.time()
self.last = t
elapsed = t-self.start
if verbose:
m, s = divmod(elapsed, 60)
h, m = divmod(m, 60)
print("TOC: Elapsed: %d:%02d:%0d.%0*d" %
(h, m, s, digits, (s % 1)*(10**digits)))
return elapsed
def loop_timer(self, n, function, args=None, verbose=True, digits=2,
best_of=3):
"""
Return and print the total and average time elapsed for n runs
of function.
Parameters
----------
n : scalar(int)
Number of runs.
function : function
Function to be timed.
args : list, optional(default=None)
Arguments of the function.
verbose : bool, optional(default=True)
If True, then prints average time.
digits : scalar(int), optional(default=2)
Number of digits printed for time elapsed.
best_of : scalar(int), optional(default=3)
Average time over best_of runs.
Returns
-------
average_time : scalar(float)
Average time elapsed for n runs of function.
average_of_best : scalar(float)
Average of best_of times for n runs of function.
"""
self.tic()
all_times = np.empty(n)
for run in range(n):
if hasattr(args, '__iter__'):
function(*args)
elif args is None:
function()
else:
function(args)
all_times[run] = self.tac(verbose=False, digits=digits)
elapsed = self.toc(verbose=False, digits=digits)
m, s = divmod(elapsed, 60)
h, m = divmod(m, 60)
print("Total run time: %d:%02d:%0d.%0*d" %
(h, m, s, digits, (s % 1)*(10**digits)))
average_time = all_times.mean()
average_of_best = np.sort(all_times)[:best_of].mean()
if verbose:
m, s = divmod(average_time, 60)
h, m = divmod(m, 60)
print("Average time for %d runs: %d:%02d:%0d.%0*d" %
(n, h, m, s, digits, (s % 1)*(10**digits)))
m, s = divmod(average_of_best, 60)
h, m = divmod(m, 60)
print("Average of %d best times: %d:%02d:%0d.%0*d" %
(best_of, h, m, s, digits, (s % 1)*(10**digits)))
return average_time, average_of_best
__timer__ = __Timer__()
def tic():
return __timer__.tic()
def tac(verbose=True, digits=2):
return __timer__.tac(verbose, digits)
def toc(verbose=True, digits=2):
return __timer__.toc(verbose, digits)
def loop_timer(n, function, args=None, verbose=True, digits=2, best_of=3):
return __timer__.loop_timer(n, function, args, verbose, digits, best_of)
# Set docstring
_names = ['tic', 'tac', 'toc', 'loop_timer']
_funcs = [eval(name) for name in _names]
_methods = [getattr(__Timer__, name) for name in _names]
for _func, _method in zip(_funcs, _methods):
_func.__doc__ = _method.__doc__
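# Hedged usage sketch (added for illustration; the toy workload below is an
# arbitrary stand-in and not part of the original module). It only runs when
# the file is executed directly, so importing the module is unaffected.
if __name__ == '__main__':
    def _work():
        # arbitrary CPU-bound placeholder
        return sum(i * i for i in range(100000))

    tic()                  # reset the timer
    for _ in range(3):
        _work()
        tac()              # elapsed since the previous tic/tac/toc
    toc()                  # total elapsed since tic()
    loop_timer(5, _work)   # total and average time over 5 runs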
| oyamad/QuantEcon.py | quantecon/util/timing.py | Python | bsd-3-clause | 5,239 |
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests that a set of symbols are truly pruned from the translator.
Compares the pruned down "on-device" translator with the "fat" host build
which has not been pruned down.
"""
import glob
import re
import subprocess
import sys
import unittest
class SymbolInfo(object):
def __init__(self, lib_name, sym_name, t, size):
self.lib_name = lib_name
self.sym_name = sym_name
self.type = t
self.size = size
def is_weak(t):
t = t.upper()
return t == 'V' or t == 'W'
def is_local(t):
# According to the NM documentation:
# "If lowercase, the symbol is usually local... There are however a few
# lowercase symbols that are shown for special global symbols
# ("u", "v" and "w")."
return t != 'u' and not is_weak(t) and t.islower()
def merge_symbols(sdict1, sdict2):
for sym_name, v2 in sdict2.iteritems():
# Check for duplicate symbols.
if sym_name in sdict1:
v1 = sdict1[sym_name]
# Only print warning if they are not weak / differently sized.
if (not (is_weak(v2.type) or is_weak(v1.type)) and
v1.size != v2.size):
print 'Warning symbol %s defined in both %s(%d, %s) and %s(%d, %s)' % (
sym_name,
v1.lib_name, v1.size, v1.type,
v2.lib_name, v2.size, v2.type)
# Arbitrarily take the max. The sizes are approximate anyway,
# since the host binaries are built from a different compiler.
v1.size = max(v1.size, v2.size)
continue
# Otherwise just copy info over to sdict2.
sdict1[sym_name] = sdict2[sym_name]
return sdict1
class TestTranslatorPruned(unittest.TestCase):
pruned_symbols = {}
unpruned_symbols = {}
@classmethod
def get_symbol_info(cls, nm_tool, bin_name):
results = {}
nm_cmd = [nm_tool, '--size-sort', '--demangle', bin_name]
print 'Getting symbols and sizes by running:\n' + ' '.join(nm_cmd)
for line in iter(subprocess.check_output(nm_cmd).splitlines()):
(hex_size, t, sym_name) = line.split(' ', 2)
# Only track defined and non-BSS symbols.
if t != 'U' and t.upper() != 'B':
info = SymbolInfo(bin_name, sym_name, t, int(hex_size, 16))
# For local symbols, tack the library name on as a prefix.
# That should still match the regexes later.
if is_local(t):
key = bin_name + '$' + sym_name
else:
key = sym_name
# The same library can have the same local symbol. Just sum up sizes.
if key in results:
old = results[key]
old.size = old.size + info.size
else:
results[key] = info
return results
@classmethod
def setUpClass(cls):
nm_tool = sys.argv[1]
host_binaries = glob.glob(sys.argv[2])
target_binary = sys.argv[3]
print 'Getting symbol info from %s (host) and %s (target)' % (
sys.argv[2], sys.argv[3])
assert host_binaries, ('Did not glob any binaries from: %s' % sys.argv[2])
for b in host_binaries:
cls.unpruned_symbols = merge_symbols(cls.unpruned_symbols,
cls.get_symbol_info(nm_tool, b))
cls.pruned_symbols = cls.get_symbol_info(nm_tool, target_binary)
# Do an early check that these aren't stripped binaries.
assert cls.unpruned_symbols, 'No symbols from host?'
assert cls.pruned_symbols, 'No symbols from target?'
def size_of_matching_syms(self, sym_regex, sym_infos):
# Check if a given sym_infos has symbols matching sym_regex, and
# return the total size of all matching symbols.
total = 0
for sym_name, sym_info in sym_infos.iteritems():
if re.search(sym_regex, sym_info.sym_name):
total += sym_info.size
return total
def test_prunedNotFullyStripped(self):
"""Make sure that the test isn't accidentally passing.
The test can accidentally pass if the translator is stripped of symbols.
Then it would look like everything is pruned out. Look for a symbol
that's guaranteed not to be pruned out.
"""
pruned = self.size_of_matching_syms('stream_init.*NaClSrpc',
TestTranslatorPruned.pruned_symbols)
self.assertNotEqual(pruned, 0)
def test_didPrune(self):
"""Check for classes/namespaces/symbols that we have intentionally pruned.
Check that the symbols are not present anymore in the translator,
and check that the symbols actually do exist in the developer tools.
That prevents the test from accidentally passing if the symbols
have been renamed to something else.
"""
total = 0
pruned_list = [
'LLParser', 'LLLexer',
'MCAsmParser', '::AsmParser',
'ARMAsmParser', 'X86AsmParser',
'ELFAsmParser', 'COFFAsmParser', 'DarwinAsmParser',
'MCAsmLexer', '::AsmLexer',
# Gigantic Asm MatchTable (globbed for all targets),
'MatchTable',
'PBQP',
# Can only check *InstPrinter::print*, not *::getRegisterName():
# https://code.google.com/p/nativeclient/issues/detail?id=3326
'ARMInstPrinter::print', 'X86.*InstPrinter::print',
# Currently pruned by hacking Triple.h. That covers most things,
# but not all. E.g., container-specific relocation handling.
'.*MachObjectWriter', 'TargetLoweringObjectFileMachO',
'MCMachOStreamer', '.*MCAsmInfoDarwin',
'.*COFFObjectWriter', 'TargetLoweringObjectFileCOFF',
'.*COFFStreamer', '.*AsmInfoGNUCOFF',
# This is not pruned out: 'MCSectionMachO', 'MCSectionCOFF',
# 'MachineModuleInfoMachO', ...
]
for sym_regex in pruned_list:
unpruned = self.size_of_matching_syms(
sym_regex, TestTranslatorPruned.unpruned_symbols)
pruned = self.size_of_matching_syms(
sym_regex, TestTranslatorPruned.pruned_symbols)
self.assertNotEqual(unpruned, 0, 'Unpruned never had ' + sym_regex)
self.assertEqual(pruned, 0, 'Pruned still has ' + sym_regex)
# Bytes pruned is approximate since the host build is different
# from the target build (different inlining / optimizations).
print 'Pruned out approx %d bytes worth of %s symbols' % (unpruned,
sym_regex)
total += unpruned
print 'Total %d bytes' % total
if __name__ == '__main__':
if len(sys.argv) != 4:
print 'Usage: %s <nm_tool> <unpruned_host_binary> <pruned_target_binary>' % sys.argv[0]
sys.exit(1)
suite = unittest.TestLoader().loadTestsFromTestCase(TestTranslatorPruned)
result = unittest.TextTestRunner(verbosity=2).run(suite)
if result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
| CTSRD-SOAAP/chromium-42.0.2311.135 | native_client/pnacl/prune_test.py | Python | bsd-3-clause | 6,842 |
import os.path
from django.conf import settings
from django.test.utils import override_settings
import mock
from celery.result import AsyncResult
from olympia import amo
from olympia.amo.tests import TestCase, addon_factory, version_factory
from olympia.devhub import tasks, utils
from olympia.files.models import FileUpload
class TestValidatorBase(TestCase):
def setUp(self):
# Create File objects for version 1.0 and 1.1.
self.addon = addon_factory(
guid='test-desktop@nowhere', slug='test-amo-addon',
version_kw={'version': '1.0'})
self.version = self.addon.current_version
self.file = self.version.files.get()
self.version_1_1 = version_factory(addon=self.addon, version='1.1')
self.file_1_1 = self.version_1_1.files.get()
# Creating the files and versions above resets this.
self.addon.update(status=amo.STATUS_PUBLIC)
# Create a FileUpload object for an XPI containing version 1.1.
path = os.path.join(settings.ROOT,
'src/olympia/devhub/tests/addons/desktop.xpi')
self.file_upload = FileUpload.objects.create(path=path)
self.xpi_version = '1.1'
# Patch validation tasks that we expect the validator to call.
self.patchers = []
self.save_file = self.patch(
'olympia.devhub.tasks.handle_file_validation_result').subtask
self.save_upload = self.patch(
'olympia.devhub.tasks.handle_upload_validation_result').subtask
self.validate_file = self.patch(
'olympia.devhub.tasks.validate_file').subtask
self.validate_upload = self.patch(
'olympia.devhub.tasks.validate_file_path').subtask
def patch(self, thing):
"""Patch the given "thing", and revert the patch on test teardown."""
patcher = mock.patch(thing)
self.addCleanup(patcher.stop)
return patcher.start()
def check_upload(self, file_upload, listed=True):
"""Check that the given new file upload is validated properly."""
# Run validator.
utils.Validator(file_upload, listed=listed)
# We shouldn't be attempting to validate an existing file.
assert not self.validate_file.called
# Make sure we run the correct validation task for the upload.
self.validate_upload.assert_called_once_with(
[file_upload.path],
{'hash_': file_upload.hash, 'listed': listed,
'is_webextension': False})
# Make sure we run the correct save validation task, with a
# fallback error handler.
channel = (amo.RELEASE_CHANNEL_LISTED if listed
else amo.RELEASE_CHANNEL_UNLISTED)
self.save_upload.assert_has_calls([
mock.call([mock.ANY, file_upload.pk, channel, False],
immutable=True),
mock.call([file_upload.pk, channel, False], link_error=mock.ANY)])
def check_file(self, file_):
"""Check that the given file is validated properly."""
# Run validator.
utils.Validator(file_)
# We shouldn't be attempting to validate a bare upload.
assert not self.validate_upload.called
# Make sure we run the correct validation task.
self.validate_file.assert_called_once_with(
[file_.pk],
{'hash_': file_.original_hash, 'is_webextension': False})
# Make sure we run the correct save validation task, with a
# fallback error handler.
self.save_file.assert_has_calls([
mock.call([mock.ANY, file_.pk, file_.version.channel, False],
immutable=True),
mock.call([file_.pk, file_.version.channel, False],
link_error=mock.ANY)])
class TestValidatorListed(TestValidatorBase):
@mock.patch('olympia.devhub.utils.chain')
def test_run_once_per_file(self, chain):
"""Tests that only a single validation task is run for a given file."""
task = mock.Mock()
chain.return_value = task
task.delay.return_value = mock.Mock(task_id='42')
assert isinstance(tasks.validate(self.file), mock.Mock)
assert task.delay.call_count == 1
assert isinstance(tasks.validate(self.file), AsyncResult)
assert task.delay.call_count == 1
assert isinstance(tasks.validate(self.file_1_1), mock.Mock)
assert task.delay.call_count == 2
@mock.patch('olympia.devhub.utils.chain')
def test_run_once_file_upload(self, chain):
"""Tests that only a single validation task is run for a given file
upload."""
task = mock.Mock()
chain.return_value = task
task.delay.return_value = mock.Mock(task_id='42')
assert isinstance(
tasks.validate(self.file_upload, listed=True), mock.Mock)
assert task.delay.call_count == 1
assert isinstance(
tasks.validate(self.file_upload, listed=True), AsyncResult)
assert task.delay.call_count == 1
def test_cache_key(self):
"""Tests that the correct cache key is generated for a given object."""
assert (utils.Validator(self.file).cache_key ==
'validation-task:files.File:{0}:None'.format(self.file.pk))
assert (utils.Validator(self.file_upload, listed=False).cache_key ==
'validation-task:files.FileUpload:{0}:False'.format(
self.file_upload.pk))
@mock.patch('olympia.devhub.utils.parse_addon')
def test_search_plugin(self, parse_addon):
"""Test that search plugins are handled correctly."""
parse_addon.return_value = {
'guid': None,
'version': '20140103',
'is_webextension': False,
}
addon = addon_factory(type=amo.ADDON_SEARCH,
version_kw={'version': '20140101'})
assert addon.guid is None
self.check_upload(self.file_upload)
self.validate_upload.reset_mock()
self.save_file.reset_mock()
version = version_factory(addon=addon, version='20140102')
self.check_file(version.files.get())
class TestLimitValidationResults(TestCase):
"""Test that higher priority messages are truncated last."""
def make_validation(self, types):
"""Take a list of error types and make a
validation results dict."""
validation = {
'messages': [],
'errors': 0,
'warnings': 0,
'notices': 0,
}
severities = ['low', 'medium', 'high']
for type_ in types:
if type_ in severities:
type_ = 'warning'
validation[type_ + 's'] += 1
validation['messages'].append({'type': type_})
return validation
@override_settings(VALIDATOR_MESSAGE_LIMIT=2)
def test_errors_are_first(self):
validation = self.make_validation(
['error', 'warning', 'notice', 'error'])
utils.limit_validation_results(validation)
limited = validation['messages']
assert len(limited) == 3
assert '2 messages were truncated' in limited[0]['message']
assert limited[1]['type'] == 'error'
assert limited[2]['type'] == 'error'
class TestFixAddonsLinterOutput(TestCase):
def test_fix_output(self):
original_output = {
'count': 4,
'summary': {
'errors': 0,
'notices': 0,
'warnings': 4
},
'metadata': {
'manifestVersion': 2,
'name': 'My Dogs New Tab',
'type': 1,
'version': '2.13.15',
'architecture': 'extension',
'emptyFiles': [],
'jsLibs': {
'lib/vendor/jquery.js': 'jquery.2.1.4.jquery.js'
}
},
'errors': [],
'notices': [],
'warnings': [
{
'_type': 'warning',
'code': 'MANIFEST_PERMISSIONS',
'message': '/permissions: Unknown permissions ...',
'description': 'See https://mzl.la/1R1n1t0 ...',
'file': 'manifest.json'
},
{
'_type': 'warning',
'code': 'MANIFEST_PERMISSIONS',
'message': '/permissions: Unknown permissions ...',
'description': 'See https://mzl.la/1R1n1t0 ....',
'file': 'manifest.json'
},
{
'_type': 'warning',
'code': 'MANIFEST_CSP',
'message': '\'content_security_policy\' is ...',
'description': 'A custom content_security_policy ...'
},
{
'_type': 'warning',
'code': 'NO_DOCUMENT_WRITE',
'message': 'Use of document.write strongly discouraged.',
'description': 'document.write will fail in...',
'column': 13,
'file': 'lib/vendor/knockout.js',
'line': 5449
}
]
}
fixed = utils.fix_addons_linter_output(original_output)
assert fixed['success']
assert fixed['warnings'] == 4
assert 'uid' in fixed['messages'][0]
assert 'id' in fixed['messages'][0]
assert 'type' in fixed['messages'][0]
assert fixed['messages'][0]['tier'] == 1
assert fixed['compatibility_summary'] == {
'warnings': 0,
'errors': 0,
'notices': 0,
}
assert fixed['ending_tier'] == 5
assert fixed['metadata']['is_webextension'] is True
assert fixed['metadata']['processed_by_addons_linter'] is True
assert fixed['metadata']['listed'] is True
assert fixed['metadata']['identified_files'] == {
'lib/vendor/jquery.js': {'path': 'jquery.2.1.4.jquery.js'}
}
# Make sure original metadata was preserved.
for key, value in original_output['metadata'].items():
assert fixed['metadata'][key] == value
| lavish205/olympia | src/olympia/devhub/tests/test_utils.py | Python | bsd-3-clause | 10,323 |
"""
Test data sources
"""
from nose.tools import ok_, eq_
from carousel.tests import logging
from carousel.core import UREG
from carousel.core.data_sources import DataSource, DataParameter
from carousel.core.data_readers import XLRDReader
from carousel.tests import PROJ_PATH, TESTS_DIR
import os
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
TUSCON = os.path.join(PROJ_PATH, 'data', 'Tuscon.json')
XLRDREADER_TESTDATA = os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx')
def test_datasource_metaclass():
"""
Test data source meta class.
"""
class DataSourceTest1(DataSource):
"""
Test data source with parameters in file.
"""
class Meta:
data_file = 'pvpower.json'
data_path = os.path.join(PROJ_PATH, 'data')
def __prepare_data__(self):
pass
data_test1 = DataSourceTest1(TUSCON)
ok_(isinstance(data_test1, DataSource))
eq_(data_test1.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json'))
class DataSourceTest2(DataSource):
"""
Test data source with parameters in code.
"""
latitude = DataParameter(**{
"description": "latitude",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
longitude = DataParameter(**{
"description": "longitude",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
elevation = DataParameter(**{
"description": "altitude of site above sea level",
"units": "meters",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
timestamp_start = DataParameter(**{
"description": "initial timestamp",
"isconstant": True,
"dtype": "datetime"
})
timestamp_count = DataParameter(**{
"description": "number of timesteps",
"isconstant": True,
"dtype": "int"
})
module = DataParameter(**{
"description": "PV module",
"isconstant": True,
"dtype": "str"
})
inverter = DataParameter(**{
"description": "PV inverter",
"isconstant": True,
"dtype": "str"
})
module_database = DataParameter(**{
"description": "module databases",
"isconstant": True,
"dtype": "str"
})
inverter_database = DataParameter(**{
"description": "inverter database",
"isconstant": True,
"dtype": "str"
})
Tamb = DataParameter(**{
"description": "average yearly ambient air temperature",
"units": "degC",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
Uwind = DataParameter(**{
"description": "average yearly wind speed",
"units": "m/s",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
surface_azimuth = DataParameter(**{
"description": "site rotation",
"units": "degrees",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
timezone = DataParameter(**{
"description": "timezone",
"isconstant": True,
"dtype": "str"
})
def __prepare_data__(self):
pass
data_test2 = DataSourceTest2(TUSCON)
ok_(isinstance(data_test2, DataSource))
for k, val in data_test1.parameters.iteritems():
eq_(data_test2.parameters[k], val)
class DataSourceTest4(DataSource):
"""
Test data source with parameters in file.
"""
latitude = DataParameter(**{
"description": "latitude",
"units": "radians",
"isconstant": True,
"dtype": "float",
"uncertainty": 1.0
})
class Meta:
data_file = 'pvpower.json'
data_path = os.path.join(PROJ_PATH, 'data')
def __prepare_data__(self):
pass
data_test4 = DataSourceTest4(TUSCON)
ok_(isinstance(data_test4, DataSource))
eq_(data_test4['latitude'].u, UREG.radians)
eq_(data_test4.param_file, os.path.join(PROJ_PATH, 'data', 'pvpower.json'))
def test_xlrdreader_datasource():
"""
Test data source with xlrd reader.
"""
class DataSourceTest3(DataSource):
"""
Test data source with xlrd reader and params in file.
"""
class Meta:
data_reader = XLRDReader
data_file = 'xlrdreader_param.json'
data_path = TESTS_DIR
def __prepare_data__(self):
pass
data_test3 = DataSourceTest3(XLRDREADER_TESTDATA)
ok_(isinstance(data_test3, DataSource))
eq_(data_test3._meta.data_reader, XLRDReader)
os.remove(os.path.join(TESTS_DIR, 'xlrdreader_testdata.xlsx.json'))
LOGGER.debug('xlrdreader_testdata.xlsx.json has been cleaned')
if __name__ == '__main__':
test_datasource_metaclass()
test_xlrdreader_datasource()
| mikofski/Carousel | carousel/tests/test_data.py | Python | bsd-3-clause | 5,322 |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _stable3d
else:
import _stable3d
try:
import builtins as __builtin__
except ImportError:
import __builtin__
_swig_new_instance_method = _stable3d.SWIG_PyInstanceMethod_New
_swig_new_static_method = _stable3d.SWIG_PyStaticMethod_New
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import weakref
import mfem._ser.element
import mfem._ser.globals
import mfem._ser.array
import mfem._ser.mem_manager
import mfem._ser.densemat
import mfem._ser.vector
import mfem._ser.operators
import mfem._ser.matrix
import mfem._ser.geom
import mfem._ser.intrules
import mfem._ser.table
import mfem._ser.hash
class STable3DNode(object):
r"""Proxy of C++ mfem::STable3DNode class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
Prev = property(_stable3d.STable3DNode_Prev_get, _stable3d.STable3DNode_Prev_set, doc=r"""Prev : p.mfem::STable3DNode""")
Column = property(_stable3d.STable3DNode_Column_get, _stable3d.STable3DNode_Column_set, doc=r"""Column : int""")
Floor = property(_stable3d.STable3DNode_Floor_get, _stable3d.STable3DNode_Floor_set, doc=r"""Floor : int""")
Number = property(_stable3d.STable3DNode_Number_get, _stable3d.STable3DNode_Number_set, doc=r"""Number : int""")
def __init__(self):
r"""__init__(STable3DNode self) -> STable3DNode"""
_stable3d.STable3DNode_swiginit(self, _stable3d.new_STable3DNode())
__swig_destroy__ = _stable3d.delete_STable3DNode
# Register STable3DNode in _stable3d:
_stable3d.STable3DNode_swigregister(STable3DNode)
class STable3D(object):
r"""Proxy of C++ mfem::STable3D class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, nr):
r"""__init__(STable3D self, int nr) -> STable3D"""
_stable3d.STable3D_swiginit(self, _stable3d.new_STable3D(nr))
def Push(self, r, c, f):
r"""Push(STable3D self, int r, int c, int f) -> int"""
return _stable3d.STable3D_Push(self, r, c, f)
Push = _swig_new_instance_method(_stable3d.STable3D_Push)
def Index(self, r, c, f):
r"""Index(STable3D self, int r, int c, int f) -> int"""
return _stable3d.STable3D_Index(self, r, c, f)
Index = _swig_new_instance_method(_stable3d.STable3D_Index)
def Push4(self, r, c, f, t):
r"""Push4(STable3D self, int r, int c, int f, int t) -> int"""
return _stable3d.STable3D_Push4(self, r, c, f, t)
Push4 = _swig_new_instance_method(_stable3d.STable3D_Push4)
def __call__(self, *args):
r"""
__call__(STable3D self, int r, int c, int f) -> int
__call__(STable3D self, int r, int c, int f, int t) -> int
"""
return _stable3d.STable3D___call__(self, *args)
__call__ = _swig_new_instance_method(_stable3d.STable3D___call__)
def NumberOfElements(self):
r"""NumberOfElements(STable3D self) -> int"""
return _stable3d.STable3D_NumberOfElements(self)
NumberOfElements = _swig_new_instance_method(_stable3d.STable3D_NumberOfElements)
__swig_destroy__ = _stable3d.delete_STable3D
def Print(self, *args):
r"""
Print(STable3D self, std::ostream & out=out)
Print(STable3D self, char const * file, int precision=16)
"""
return _stable3d.STable3D_Print(self, *args)
Print = _swig_new_instance_method(_stable3d.STable3D_Print)
def PrintGZ(self, file, precision=16):
r"""PrintGZ(STable3D self, char const * file, int precision=16)"""
return _stable3d.STable3D_PrintGZ(self, file, precision)
PrintGZ = _swig_new_instance_method(_stable3d.STable3D_PrintGZ)
# Register STable3D in _stable3d:
_stable3d.STable3D_swigregister(STable3D)
| mfem/PyMFEM | mfem/_ser/stable3d.py | Python | bsd-3-clause | 5,714 |
# coding: utf-8
# pylint: disable = invalid-name, W0105, C0301
from __future__ import absolute_import
import collections
from operator import gt, lt
from .compat import range_
class EarlyStopException(Exception):
"""Exception of early stopping.
Parameters
----------
best_iteration : int
The best iteration stopped.
"""
def __init__(self, best_iteration, best_score):
super(EarlyStopException, self).__init__()
self.best_iteration = best_iteration
self.best_score = best_score
# Callback environment used by callbacks
CallbackEnv = collections.namedtuple(
"LightGBMCallbackEnv",
["model",
"params",
"iteration",
"begin_iteration",
"end_iteration",
"evaluation_result_list"])
def _format_eval_result(value, show_stdv=True):
"""format metric string"""
if len(value) == 4:
return '%s\'s %s: %g' % (value[0], value[1], value[2])
elif len(value) == 5:
if show_stdv:
return '%s\'s %s: %g + %g' % (value[0], value[1], value[2], value[4])
else:
return '%s\'s %s: %g' % (value[0], value[1], value[2])
else:
raise ValueError("Wrong metric value")
def print_evaluation(period=1, show_stdv=True):
"""Create a callback that prints the evaluation results.
Parameters
----------
period : int, optional (default=1)
The period to print the evaluation results.
show_stdv : bool, optional (default=True)
Whether to show stdv (if provided).
Returns
-------
callback : function
The callback that prints the evaluation results every ``period`` iteration(s).
"""
def callback(env):
"""internal function"""
if period > 0 and env.evaluation_result_list and (env.iteration + 1) % period == 0:
result = '\t'.join([_format_eval_result(x, show_stdv) for x in env.evaluation_result_list])
print('[%d]\t%s' % (env.iteration + 1, result))
callback.order = 10
return callback
def record_evaluation(eval_result):
"""Create a callback that records the evaluation history into ``eval_result``.
Parameters
----------
eval_result : dict
A dictionary to store the evaluation results.
Returns
-------
callback : function
The callback that records the evaluation history into the passed dictionary.
"""
if not isinstance(eval_result, dict):
raise TypeError('Eval_result should be a dictionary')
eval_result.clear()
def init(env):
"""internal function"""
for data_name, _, _, _ in env.evaluation_result_list:
eval_result.setdefault(data_name, collections.defaultdict(list))
def callback(env):
"""internal function"""
if not eval_result:
init(env)
for data_name, eval_name, result, _ in env.evaluation_result_list:
eval_result[data_name][eval_name].append(result)
callback.order = 20
return callback
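# Minimal usage sketch for record_evaluation(), driven by hand-built
# CallbackEnv objects instead of a real training loop; the dataset name,
# metric name and values below are made up for illustration.  The evaluation
# tuples use the 4-element form (dataset name, metric name, value,
# is_higher_better).
def _example_record_evaluation():
    history = {}
    callback = record_evaluation(history)
    for iteration in range_(3):
        env = CallbackEnv(model=None, params={}, iteration=iteration,
                          begin_iteration=0, end_iteration=3,
                          evaluation_result_list=[
                              ('valid_0', 'l2', 1.0 / (iteration + 1), False)])
        callback(env)
    # history['valid_0']['l2'] is now [1.0, 0.5, 0.333...]
    return history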
def reset_parameter(**kwargs):
"""Create a callback that resets the parameter after the first iteration.
Note
----
The initial parameter will still take in-effect on first iteration.
Parameters
----------
**kwargs: value should be list or function
List of parameters for each boosting round
or a customized function that calculates the parameter in terms of
current number of round (e.g. yields learning rate decay).
If list lst, parameter = lst[current_round].
If function func, parameter = func(current_round).
Returns
-------
callback : function
The callback that resets the parameter after the first iteration.
"""
def callback(env):
"""internal function"""
new_parameters = {}
for key, value in kwargs.items():
if key in ['num_class', 'boosting_type', 'metric']:
raise RuntimeError("cannot reset {} during training".format(repr(key)))
if isinstance(value, list):
if len(value) != env.end_iteration - env.begin_iteration:
raise ValueError("Length of list {} has to equal to 'num_boost_round'.".format(repr(key)))
new_param = value[env.iteration - env.begin_iteration]
else:
new_param = value(env.iteration - env.begin_iteration)
if new_param != env.params.get(key, None):
new_parameters[key] = new_param
if new_parameters:
env.model.reset_parameter(new_parameters)
env.params.update(new_parameters)
callback.before_iteration = True
callback.order = 10
return callback
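# Minimal usage sketch for reset_parameter(): a learning-rate decay schedule
# evaluated against hand-built CallbackEnv objects.  _StubBooster only stands
# in for the real booster to show when reset_parameter() gets called; it is
# not part of the LightGBM API.
def _example_reset_parameter():
    class _StubBooster(object):
        def __init__(self):
            self.resets = []

        def reset_parameter(self, params):
            self.resets.append(dict(params))

    booster = _StubBooster()
    params = {'learning_rate': 0.1}
    callback = reset_parameter(learning_rate=lambda i: 0.1 * (0.99 ** i))
    for iteration in range_(3):
        env = CallbackEnv(model=booster, params=params, iteration=iteration,
                          begin_iteration=0, end_iteration=3,
                          evaluation_result_list=[])
        callback(env)
    # The scheduled value first differs from the current one at iteration 1,
    # so booster.resets records the rates applied at iterations 1 and 2.
    return booster.resets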
def early_stopping(stopping_rounds, verbose=True):
"""Create a callback that activates early stopping.
Note
----
Activates early stopping.
Requires at least one validation data and one metric.
If there's more than one, will check all of them.
Parameters
----------
stopping_rounds : int
The possible number of rounds without the trend occurrence.
verbose : bool, optional (default=True)
Whether to print message with early stopping information.
Returns
-------
callback : function
The callback that activates early stopping.
"""
best_score = []
best_iter = []
best_score_list = []
cmp_op = []
def init(env):
"""internal function"""
if not env.evaluation_result_list:
raise ValueError('For early stopping, at least one dataset and eval metric is required for evaluation')
if verbose:
msg = "Training until validation scores don't improve for {} rounds."
print(msg.format(stopping_rounds))
for eval_ret in env.evaluation_result_list:
best_iter.append(0)
best_score_list.append(None)
if eval_ret[3]:
best_score.append(float('-inf'))
cmp_op.append(gt)
else:
best_score.append(float('inf'))
cmp_op.append(lt)
def callback(env):
"""internal function"""
if not cmp_op:
init(env)
for i in range_(len(env.evaluation_result_list)):
score = env.evaluation_result_list[i][2]
if cmp_op[i](score, best_score[i]):
best_score[i] = score
best_iter[i] = env.iteration
best_score_list[i] = env.evaluation_result_list
elif env.iteration - best_iter[i] >= stopping_rounds:
if verbose:
print('Early stopping, best iteration is:\n[%d]\t%s' % (
best_iter[i] + 1, '\t'.join([_format_eval_result(x) for x in best_score_list[i]])))
raise EarlyStopException(best_iter[i], best_score_list[i])
callback.order = 30
return callback
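# Minimal usage sketch for early_stopping(): a fabricated score sequence that
# stops improving after the second iteration, so EarlyStopException is raised
# once `stopping_rounds` iterations pass without improvement.
def _example_early_stopping():
    callback = early_stopping(stopping_rounds=2, verbose=False)
    scores = [0.60, 0.70, 0.69, 0.68, 0.67]
    try:
        for iteration, score in enumerate(scores):
            env = CallbackEnv(model=None, params={}, iteration=iteration,
                              begin_iteration=0, end_iteration=len(scores),
                              evaluation_result_list=[
                                  ('valid_0', 'auc', score, True)])
            callback(env)
    except EarlyStopException as err:
        return err.best_iteration  # 1, the iteration that scored 0.70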
| Allardvm/LightGBM | python-package/lightgbm/callback.py | Python | mit | 6,890 |
'''Todo:
* Add multiple thread support for async_process functions
* Potentially thread each handler function? idk
'''
import sys
import socket
import re
import threading
import logging
import time
if sys.hexversion < 0x03000000:
#Python 2
import Queue as queue
BlockingIOError = socket.error
else:
import queue
from .ircclient import IRCClient
logger = logging.getLogger(__name__)
#Somewhat complex regex that accurately matches nick!username@host, with named groups for easy parsing and usage
user_re = re.compile(r'(?P<nick>[\w\d<-\[\]\^\{\}\~]+)!(?P<user>[\w\d<-\[\]\^\{\}\~]+)@(?P<host>.+)')
class IRCBot(IRCClient):
'''See `IRCClient` for basic client usage, here is usage for the bot system
Handler notation:
on_join(self, nick, host, channel)
on_topic(self, nick, host, channel, topic)
on_part(self, nick, host, channel, message)
on_msg(self, nick, host, channel, message)
on_privmsg(self, nick, host, message)
on_chanmsg(self, nick, host, channel, message)
on_notice(self, nick, host, channel, message)
on_nick(self, nick, new_nick, host)
    Example handler registration is sketched at the end of this module.
    '''
_handlers = {
'join': [],
'part': [],
'kick': [],
'topic': [],
'msg': [],
'privmsg': [],
'chanmsg': [],
'notice': [],
'nick': []
}
_process_thread = None
def _async_process(self):
while not self._stop_event.is_set():
time.sleep(0.01)
try:
args = self._in_queue.get_nowait()
#These "msg"s will be raw irc received lines, which have several forms
# basically, we should be looking for
# :User!Name@host COMMAND <ARGS>
userhost = user_re.search(args[0][1:])
if userhost:
                    nick, user, host = userhost.groups()
command = args[1]
if command == 'JOIN':
channel = args[2][1:] #JOIN Channels are : prefixed
for handler in self._handlers['join']:
handler(self, nick, host, channel)
elif command == 'TOPIC':
channel = args[2]
topic = ' '.join(args[3:])
for handler in self._handlers['topic']:
handler(self, nick, host, channel, topic)
elif command == 'PART':
channel = args[2]
message = ' '.join(args[3:])
for handler in self._handlers['part']:
handler(self, nick, host, channel, message)
elif command == 'PRIVMSG':
channel = args[2]
message = ' '.join(args[3:])[1:]
for handler in self._handlers['msg']:
handler(self, nick, host, channel, message)
if channel[0] == '#':
#this is a channel
for handler in self._handlers['chanmsg']:
handler(self, nick, host, channel, message)
else:
#private message
for handler in self._handlers['privmsg']:
handler(self, nick, host, message)
elif command == 'KICK':
channel = args[2]
kicked_nick = args[3]
reason = ' '.join(args[4:])[1:]
for handler in self._handlers['kick']:
handler(self, nick, host, channel, kicked_nick, reason)
elif command == 'NICK':
new_nick = args[2][1:]
for handler in self._handlers['nick']:
handler(self, nick, new_nick, host)
elif command == 'NOTICE':
#:nick!user@host NOTICE <userchan> :message
channel = args[2]
message = ' '.join(args[3:])
for handler in self._handlers['notice']:
handler(self, nick, host, channel, message)
else:
logger.warning("Unhandled command %s" % command)
self._in_queue.task_done()
except queue.Empty as e: pass
except Exception as e:
logger.exception("Error while handling message " + str(args))
def start(self):
IRCClient.start(self)
self._process_thread = threading.Thread(target=self._async_process)
self._process_thread.start()
def on(self, type):
'''Decorator function'''
def decorator(self, func):
'''decorated functions should be written as class methods
@on('join')
def on_join(self, channel):
print("Joined channel %s" % channel)
'''
self._handlers[type].append(func)
return func
return decorator
def on_join(self, func):
self._handlers['join'].append(func)
return func
def on_part(self, func):
self._handlers['part'].append(func)
return func
def on_kick(self, func):
self._handlers['kick'].append(func)
return func
def on_msg(self, func):
self._handlers['msg'].append(func)
return func
def on_privmsg(self, func):
self._handlers['privmsg'].append(func)
return func
def on_chanmsg(self, func):
self._handlers['chanmsg'].append(func)
return func
def on_notice(self, func):
self._handlers['notice'].append(func)
return func
def on_nick(self, func):
self._handlers['nick'].append(func)
return func
__all__ = ['IRCBot']
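# Minimal usage sketch: attaching handlers to an existing IRCBot instance.
# Construction of the bot itself (server, port, nickname, ...) is handled by
# IRCClient and is assumed to have happened elsewhere, so only the
# decorator-based registration documented in the class docstring is shown.
def _example_attach_handlers(bot):
    @bot.on_chanmsg
    def echo(bot, nick, host, channel, message):
        print("%s said %r in %s" % (nick, message, channel))

    @bot.on_join
    def greet(bot, nick, host, channel):
        print("%s joined %s" % (nick, channel))

    return bot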
| codetalkio/TelegramIRCImageProxy | asyncirc/ircbot.py | Python | mit | 5,983 |
import os
def get_html_theme_path():
theme_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
return theme_dir
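# Usage sketch (assumed from the usual Sphinx theme convention, not from this
# file): a project's conf.py would typically contain
#
#   import sphinx_theme_pd
#   html_theme = 'sphinx_theme_pd'
#   html_theme_path = [sphinx_theme_pd.get_html_theme_path()]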
| iktakahiro/sphinx_theme_pd | sphinx_theme_pd/__init__.py | Python | mit | 136 |
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.deeplab import SeparableASPP
class TestSeparableASPP(unittest.TestCase):
def setUp(self):
self.in_channels = 128
self.out_channels = 32
self.link = SeparableASPP(
self.in_channels, self.out_channels)
def check_call(self):
xp = self.link.xp
x = chainer.Variable(xp.random.uniform(
low=-1, high=1, size=(2, self.in_channels, 64, 64)
).astype(xp.float32))
y = self.link(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.data, xp.ndarray)
self.assertEqual(y.shape, (2, self.out_channels, 64, 64))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
| chainer/chainercv | tests/links_tests/model_tests/deeplab_tests/test_aspp.py | Python | mit | 975 |
"""tests/test_output_format.py.
Tests the output format handlers included with Hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from collections import namedtuple
from datetime import datetime
def test_text():
'''Ensure that it's possible to output a Hug API method as text'''
    assert hug.output_format.text("Hello World!") == "Hello World!"
    assert hug.output_format.text(str(1)) == "1"
def test_json():
'''Ensure that it's possible to output a Hug API method as JSON'''
now = datetime.now()
test_data = {'text': 'text', 'datetime': now, 'bytes': b'bytes'}
output = hug.output_format.json(test_data).decode('utf8')
assert 'text' in output
assert 'bytes' in output
assert now.isoformat() in output
class NewObject(object):
pass
test_data['non_serializable'] = NewObject()
with pytest.raises(TypeError):
hug.output_format.json(test_data).decode('utf8')
class NamedTupleObject(namedtuple('BaseTuple', ('name', 'value'))):
pass
data = NamedTupleObject('name', 'value')
converted = hug.input_format.json(hug.output_format.json(data).decode('utf8'))
assert converted == {'name': 'name', 'value': 'value'}
def test_pretty_json():
'''Ensure that it's possible to output a Hug API method as prettified and indented JSON'''
test_data = {'text': 'text'}
assert hug.output_format.pretty_json(test_data).decode('utf8') == ('{\n'
' "text": "text"\n'
'}')
def test_json_camelcase():
'''Ensure that it's possible to output a Hug API method as camelCased JSON'''
test_data = {'under_score': {'values_can': 'Be Converted'}}
output = hug.output_format.json_camelcase(test_data).decode('utf8')
assert 'underScore' in output
assert 'valuesCan' in output
assert 'Be Converted' in output
| janusnic/hug | tests/test_output_format.py | Python | mit | 2,987 |
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import gradient_check
from chainer import links
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.with_requires('theano')
class TheanoFunctionTestBase(object):
forward_test_options = {}
backward_test_options = {'atol': 1e-4}
def setUp(self):
self.input_data = [
numpy.random.uniform(
-1, 1, d['shape']).astype(getattr(numpy, d['type']))
for d in self.inputs]
self.grad_data = [
numpy.random.uniform(
-1, 1, d['shape']).astype(getattr(numpy, d['type']))
for d in self.outputs]
def make_func(self):
raise NotImplementedError
def expect_forward(self):
raise NotImplementedError
def check_forward(self, input_data):
func = self.make_func()
inputs = [chainer.Variable(data) for data in input_data]
outputs = func(*inputs)
if isinstance(outputs, chainer.Variable):
outputs = (outputs,)
expect = self.expect_forward()
self.assertEqual(len(outputs), len(expect))
for o, e in zip(outputs, expect):
testing.assert_allclose(
o.data, e, **self.forward_test_options)
def test_forward_cpu(self):
self.check_forward(self.input_data)
@attr.gpu
def test_forward_gpu(self):
inputs = [cuda.to_gpu(x) for x in self.input_data]
self.check_forward(inputs)
def check_backward(self, input_data, grad_data):
func = self.make_func()
gradient_check.check_backward(
func, input_data, grad_data, **self.backward_test_options)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.input_data, self.grad_data)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
inputs = [cuda.to_gpu(x) for x in self.input_data]
grads = [cuda.to_gpu(x) for x in self.grad_data]
self.check_backward(inputs, grads)
@testing.parameterize(
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (2,), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float64'}],
'outputs': [{'shape': (3, 2), 'type': 'float64'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float16'},
{'shape': (3, 2), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'}],
'forward_test_options': {'atol': 1e-3, 'rtol': 1e-3},
'backward_test_options': {'eps': 1, 'atol': 1e-3, 'rtol': 1e-3}},
)
class TestTheanoFunction(TheanoFunctionTestBase, unittest.TestCase):
def make_func(self):
import theano.tensor as T
x = T.TensorType(self.inputs[0]['type'],
(False,) * len(self.inputs[0]['shape']))('x')
y = T.TensorType(self.inputs[1]['type'],
(False,) * len(self.inputs[1]['shape']))('y')
z = x + y
return links.TheanoFunction([x, y], [z])
def expect_forward(self):
x, y = self.input_data
return x + y,
@testing.parameterize(
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (2,), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (), 'type': 'float32'}],
'outputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (3, 2), 'type': 'float32'}]},
)
class TestTheanoFunctionTwoOutputs(TheanoFunctionTestBase, unittest.TestCase):
def make_func(self):
import theano.tensor as T
x = T.TensorType(self.inputs[0]['type'],
(False,) * len(self.inputs[0]['shape']))('x')
y = T.TensorType(self.inputs[1]['type'],
(False,) * len(self.inputs[1]['shape']))('y')
z = x + y
w = x - y
return links.TheanoFunction([x, y], [z, w])
def expect_forward(self):
x, y = self.input_data
return x + y, x - y
@testing.parameterize(
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (2,), 'type': 'int32'}],
'outputs': [{'shape': (2, 2), 'type': 'float32'}]},
{'inputs': [{'shape': (3, 2), 'type': 'float32'},
{'shape': (), 'type': 'int32'}],
'outputs': [{'shape': (2,), 'type': 'float32'}]},
)
class TestTheanoFunctionNonDifferential(
TheanoFunctionTestBase, unittest.TestCase):
def make_func(self):
import theano.tensor as T
x = T.TensorType(self.inputs[0]['type'],
(False,) * len(self.inputs[0]['shape']))('x')
i = T.TensorType(self.inputs[1]['type'],
(False,) * len(self.inputs[1]['shape']))('y')
z = x[i]
return links.TheanoFunction([x, i], z)
def expect_forward(self):
x, i = self.input_data
return x[i],
testing.run_module(__name__, __file__)
| aonotas/chainer | tests/chainer_tests/links_tests/theano_tests/test_theano_function.py | Python | mit | 5,778 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 23 13:23:20 2018
@author: BallBlueMeercat
"""
from distutils.core import setup
from Cython.Build import cythonize
setup(ext_modules = cythonize('firstderivs_cython.pyx'))
 | lefthandedroo/Cosmo-models | zprev versions/Models_py_backup/setup.py | Python | mit | 243 |
import Base
import VS
import GUI
import XGUITypes
import XGUIDebug
XGUIRootSingleton = None
XGUIPythonScriptAPISingleton = None
"""----------------------------------------------------------------"""
""" """
""" XGUIRoot - root management interface for the XML-GUI framework."""
""" """
"""----------------------------------------------------------------"""
class XGUIRoot:
def __init__(self):
self.templates = {}
def getTemplate(self,type,name):
if type in self.templates and name in self.templates[type]:
return self.templates[type][name]
else:
return None
def addTemplate(self,tpl):
type = tpl.getType()
name = tpl.getName()
if not type in self.templates:
XGUIDebug.trace(1,"XGUI: Initializing template category \"" + str(type) + "\"\n")
self.templates[type] = {}
XGUIDebug.trace(2,"XGUI: Loading template \"" + str(name) + "\" into category \"" + str(type) + "\"\n")
self.templates[type][name] = tpl
class XGUIPythonScript:
def __init__(self,code,filename):
code = code.replace("\r\n","\n")
code += "\n"
self.code = compile(code,filename,'exec')
def execute(self,context):
exec(self.code, context)
return context
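# Minimal usage sketch for XGUIPythonScript (the source string and filename
# are made up): names assigned by the executed code become keys of the
# context dictionary passed to execute().
def XGUIPythonScriptExample():
    script = XGUIPythonScript("result = 6 * 7\n", "<xgui-example>")
    context = script.execute({})
    return context["result"]  # 42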
"""----------------------------------------------------------------"""
""" """
""" XGUIPythonScriptAPI - through this class, all PythonScript """
""" API calls are routed. """
""" """
"""----------------------------------------------------------------"""
class XGUIPythonScriptAPI:
def __init__(self,layout,room):
self.layout = layout
self.room = room
"""----------------------------------------------------------------"""
""" """
""" XGUI global initialization """
""" """
"""----------------------------------------------------------------"""
def XGUIInit():
    global XGUIRootSingleton
    XGUIRootSingleton = XGUIRoot()
| vegastrike/Assets-Production | modules/XGUI.py | Python | gpl-2.0 | 2,379 |
import sys
def inputText():
input = sys.stdin.readline()
return input.strip()
def inputChoices(list, backcmd = "b", backtext = "back"):
repeat = True
while repeat:
repeat = False
count = 0
for item in list:
print count, "-", item
count += 1
print backcmd, "-", backtext
input = inputText()
if input == backcmd:
return None
action = int(input)
if action >= len(list):
repeat = True
    return action
 | popazerty/beyonwiz-4.1 | tools/host_tools/FormatConverter/input.py | Python | gpl-2.0 | 432 |
#
# Chris Lumens <[email protected]>
#
# Copyright 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from tests.baseclass import CommandTest, CommandSequenceTest
class F23_TestCase(CommandTest):
command = "reqpart"
def runTest(self):
# pass
self.assert_parse("reqpart", "reqpart\n")
# pass
self.assert_parse("reqpart --add-boot", "reqpart --add-boot\n")
class F23_AutopartReqpart_TestCase(CommandSequenceTest):
def runTest(self):
# fail - can't use both autopart and reqpart
self.assert_parse_error("""
autopart
reqpart""")
RHEL7_TestCase = F23_TestCase
if __name__ == "__main__":
unittest.main()
| cgwalters/pykickstart | tests/commands/reqpart.py | Python | gpl-2.0 | 1,533 |
#!/usr/bin/env python
#
# Plugin.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
class Plugin(dict):
"""A dictionary with attribute-style access. It maps attribute access to
the real dictionary. """
def __init__(self, init = None):
if init is None:
init = dict()
dict.__init__(self, init)
def __getstate__(self):
return list(self.__dict__.items())
def __setstate__(self, items):
for key, val in items:
self.__dict__[key] = val
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, dict.__repr__(self))
def __setitem__(self, key, value):
return super(Plugin, self).__setitem__(key, value)
def __getitem__(self, name):
return super(Plugin, self).__getitem__(name)
def __delitem__(self, name):
return super(Plugin, self).__delitem__(name)
__getattr__ = __getitem__
__setattr__ = __setitem__
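# Minimal usage sketch of the attribute-style access described in the class
# docstring; runs only when this module is executed directly.
if __name__ == "__main__":
    plugin = Plugin({'name': 'example'})
    plugin.version = '1.0'  # equivalent to plugin['version'] = '1.0'
    assert plugin.name == 'example'
    assert plugin['version'] == '1.0'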
| tweemeterjop/thug | thug/DOM/Plugin.py | Python | gpl-2.0 | 1,536 |
##
# Copyright 2012-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for iompi compiler toolchain (includes Intel compilers (icc, ifort) and OpenMPI.
:author: Stijn De Weirdt (Ghent University)
:author: Kenneth Hoste (Ghent University)
"""
from distutils.version import LooseVersion
import re
from easybuild.toolchains.iccifort import IccIfort
from easybuild.toolchains.intel_compilers import IntelCompilersToolchain
from easybuild.toolchains.mpi.openmpi import OpenMPI
class Iompi(IccIfort, IntelCompilersToolchain, OpenMPI):
"""
Compiler toolchain with Intel compilers (icc/ifort) and OpenMPI.
"""
NAME = 'iompi'
# compiler-only subtoolchain can't be determine statically
# since depends on toolchain version (see below),
# so register both here as possible alternatives (which is taken into account elsewhere)
SUBTOOLCHAIN = [(IntelCompilersToolchain.NAME, IccIfort.NAME)]
def __init__(self, *args, **kwargs):
"""Constructor for Iompi toolchain class."""
super(Iompi, self).__init__(*args, **kwargs)
# make sure a non-symbolic version (e.g., 'system') is used before making comparisons using LooseVersion
if re.match('^[0-9]', self.version):
# need to transform a version like '2016a' with something that is safe to compare with '8.0', '2016.01'
# comparing subversions that include letters causes TypeErrors in Python 3
# 'a' is assumed to be equivalent with '.01' (January), and 'b' with '.07' (June)
# (good enough for this purpose)
self.iompi_ver = self.version.replace('a', '.01').replace('b', '.07')
if LooseVersion(self.iompi_ver) >= LooseVersion('2020.12'):
self.oneapi_gen = True
self.SUBTOOLCHAIN = IntelCompilersToolchain.NAME
self.COMPILER_MODULE_NAME = IntelCompilersToolchain.COMPILER_MODULE_NAME
else:
self.oneapi_gen = False
self.SUBTOOLCHAIN = IccIfort.NAME
self.COMPILER_MODULE_NAME = IccIfort.COMPILER_MODULE_NAME
else:
self.iompi_ver = self.version
self.oneapi_gen = False
def is_dep_in_toolchain_module(self, *args, **kwargs):
"""Check whether a specific software name is listed as a dependency in the module for this toolchain."""
if self.oneapi_gen:
res = IntelCompilersToolchain.is_dep_in_toolchain_module(self, *args, **kwargs)
else:
res = IccIfort.is_dep_in_toolchain_module(self, *args, **kwargs)
return res
def _set_compiler_vars(self):
"""Intel compilers-specific adjustments after setting compiler variables."""
if self.oneapi_gen:
IntelCompilersToolchain._set_compiler_vars(self)
else:
IccIfort._set_compiler_vars(self)
def set_variables(self):
"""Intel compilers-specific adjustments after setting compiler variables."""
if self.oneapi_gen:
IntelCompilersToolchain.set_variables(self)
else:
IccIfort.set_variables(self)
def is_deprecated(self):
"""Return whether or not this toolchain is deprecated."""
# need to transform a version like '2018b' with something that is safe to compare with '2019'
# comparing subversions that include letters causes TypeErrors in Python 3
# 'a' is assumed to be equivalent with '.01' (January), and 'b' with '.07' (June) (good enough for this purpose)
version = self.version.replace('a', '.01').replace('b', '.07')
# iompi toolchains older than iompi/2019a are deprecated since EasyBuild v4.5.0
# make sure a non-symbolic version (e.g., 'system') is used before making comparisons using LooseVersion
if re.match('^[0-9]', version) and LooseVersion(version) < LooseVersion('2019'):
deprecated = True
else:
deprecated = False
return deprecated
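# Illustrative sketch of the version normalisation used above (the literal
# version strings are examples only): rewriting the 'a'/'b' suffixes makes
# toolchain versions safe to compare with LooseVersion.
def _example_version_compare():
    normalised = '2016a'.replace('a', '.01').replace('b', '.07')  # '2016.01'
    return LooseVersion(normalised) < LooseVersion('2019')  # True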
| hpcugent/easybuild-framework | easybuild/toolchains/iompi.py | Python | gpl-2.0 | 4,978 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <[email protected]>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.shortcuts import render, redirect
from django.utils.translation import ugettext as _
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.db.models import Sum, Count, Q
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
import django.views.defaults
from weblate.trans.models import (
Project, SubProject, Translation, Check,
Dictionary, Change, Unit, WhiteboardMessage
)
from weblate.requirements import get_versions, get_optional_versions
from weblate.lang.models import Language
from weblate.trans.forms import (
get_upload_form, SearchForm,
AutoForm, ReviewForm, NewLanguageForm,
UserManageForm,
)
from weblate.accounts.models import Profile, notify_new_language
from weblate.trans.views.helper import (
get_project, get_subproject, get_translation,
try_set_language,
)
import weblate
import datetime
from urllib import urlencode
def home(request):
"""
Home page of Weblate showing list of projects, stats
and user links if logged in.
"""
if 'show_set_password' in request.session:
messages.warning(
request,
_(
'You have activated your account, now you should set '
'the password to be able to login next time.'
)
)
return redirect('password')
wb_messages = WhiteboardMessage.objects.all()
projects = Project.objects.all_acl(request.user)
if projects.count() == 1:
projects = SubProject.objects.filter(
project=projects[0]
).select_related()
# Warn about not filled in username (usually caused by migration of
# users from older system
if not request.user.is_anonymous() and request.user.first_name == '':
messages.warning(
request,
_('Please set your full name in your profile.')
)
# Some stats
top_translations = Profile.objects.order_by('-translated')[:10]
top_suggestions = Profile.objects.order_by('-suggested')[:10]
last_changes = Change.objects.last_changes(request.user)[:10]
return render(
request,
'index.html',
{
'projects': projects,
'top_translations': top_translations.select_related('user'),
'top_suggestions': top_suggestions.select_related('user'),
'last_changes': last_changes,
'last_changes_rss': reverse('rss'),
'last_changes_url': '',
'search_form': SearchForm(),
'whiteboard_messages': wb_messages,
}
)
def search(request):
"""
Performs site-wide search on units.
"""
search_form = SearchForm(request.GET)
context = {
'search_form': search_form,
}
if search_form.is_valid():
units = Unit.objects.search(
None,
search_form.cleaned_data,
).select_related(
'translation',
)
# Filter results by ACL
acl_projects, filtered = Project.objects.get_acl_status(request.user)
if filtered:
units = units.filter(
translation__subproject__project__in=acl_projects
)
limit = request.GET.get('limit', 50)
page = request.GET.get('page', 1)
paginator = Paginator(units, limit)
try:
units = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
units = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of
# results.
units = paginator.page(paginator.num_pages)
context['page_obj'] = units
context['title'] = _('Search for %s') % (
search_form.cleaned_data['q']
)
context['query_string'] = search_form.urlencode()
context['search_query'] = search_form.cleaned_data['q']
else:
messages.error(request, _('Invalid search query!'))
return render(
request,
'search.html',
context
)
def show_engage(request, project, lang=None):
# Get project object, skipping ACL
obj = get_project(request, project, skip_acl=True)
# Handle language parameter
language = None
if lang is not None:
language = try_set_language(lang)
context = {
'object': obj,
'project': obj,
'languages': obj.get_language_count(),
'total': obj.get_total(),
'percent': obj.get_translated_percent(language),
'url': obj.get_absolute_url(),
'language': language,
}
# Render text
if language is None:
status_text = _(
'<a href="%(url)s">Translation project for %(project)s</a> '
'currently contains %(total)s strings for translation and is '
'<a href="%(url)s">being translated into %(languages)s languages'
'</a>. Overall, these translations are %(percent)s%% complete.'
)
else:
# Translators: line of text in engagement widget, please use your
# language name instead of English
status_text = _(
'<a href="%(url)s">Translation project for %(project)s</a> into '
'English currently contains %(total)s strings for translation and '
'is %(percent)s%% complete.'
)
if 'English' in status_text:
status_text = status_text.replace('English', language.name)
context['status_text'] = mark_safe(status_text % context)
return render(
request,
'engage.html',
context
)
def show_project(request, project):
obj = get_project(request, project)
dict_langs = Dictionary.objects.filter(
project=obj
).values_list(
'language', flat=True
).distinct()
dicts = []
for language in Language.objects.filter(id__in=dict_langs):
dicts.append(
{
'language': language,
'count': Dictionary.objects.filter(
language=language,
project=obj
).count(),
}
)
last_changes = Change.objects.prefetch().filter(
Q(translation__subproject__project=obj) |
Q(dictionary__project=obj)
)[:10]
return render(
request,
'project.html',
{
'object': obj,
'project': obj,
'dicts': dicts,
'last_changes': last_changes,
'last_changes_rss': reverse(
'rss-project',
kwargs={'project': obj.slug}
),
'last_changes_url': urlencode(
{'project': obj.slug}
),
'add_user_form': UserManageForm(),
}
)
def show_subproject(request, project, subproject):
obj = get_subproject(request, project, subproject)
last_changes = Change.objects.prefetch().filter(
translation__subproject=obj
)[:10]
new_lang_form = NewLanguageForm()
return render(
request,
'subproject.html',
{
'object': obj,
'project': obj.project,
'translations': obj.translation_set.enabled(),
'show_language': 1,
'last_changes': last_changes,
'last_changes_rss': reverse(
'rss-subproject',
kwargs={'subproject': obj.slug, 'project': obj.project.slug}
),
'last_changes_url': urlencode(
{'subproject': obj.slug, 'project': obj.project.slug}
),
'new_lang_form': new_lang_form,
}
)
def show_translation(request, project, subproject, lang):
obj = get_translation(request, project, subproject, lang)
last_changes = Change.objects.prefetch().filter(
translation=obj
)[:10]
# Check locks
obj.is_locked(request.user)
# Get form
form = get_upload_form(request)()
# Is user allowed to do automatic translation?
if request.user.has_perm('trans.automatic_translation'):
autoform = AutoForm(obj)
else:
autoform = None
# Search form for everybody
search_form = SearchForm()
# Review form for logged in users
if request.user.is_anonymous():
review_form = None
else:
review_form = ReviewForm(
initial={
'date': datetime.date.today() - datetime.timedelta(days=31)
}
)
return render(
request,
'translation.html',
{
'object': obj,
'project': obj.subproject.project,
'form': form,
'autoform': autoform,
'search_form': search_form,
'review_form': review_form,
'last_changes': last_changes,
'last_changes_url': urlencode(obj.get_kwargs()),
'last_changes_rss': reverse(
'rss-translation',
kwargs=obj.get_kwargs(),
),
'show_only_component': True,
'other_translations': Translation.objects.filter(
subproject__project=obj.subproject.project,
language=obj.language,
).exclude(
pk=obj.pk
),
}
)
def not_found(request):
"""
Error handler showing list of available projects.
"""
return render(
request,
'404.html',
{
'request_path': request.path,
'title': _('Page Not Found'),
},
status=404
)
def denied(request):
"""
Error handler showing list of available projects.
"""
return render(
request,
'403.html',
{
'request_path': request.path,
'title': _('Permission Denied'),
},
status=403
)
def server_error(request):
"""
Error handler for server errors.
"""
try:
return render(
request,
'500.html',
{
'request_path': request.path,
'title': _('Internal Server Error'),
},
status=500,
)
except Exception:
return django.views.defaults.server_error(request)
def about(request):
"""
Shows about page with version information.
"""
context = {}
totals = Profile.objects.aggregate(
Sum('translated'), Sum('suggested'), Count('id')
)
total_strings = 0
total_words = 0
for project in SubProject.objects.iterator():
try:
translation = project.translation_set.all()[0]
total_strings += translation.total
total_words += translation.total_words
except (IndexError, Translation.DoesNotExist):
pass
context['title'] = _('About Weblate')
context['total_translations'] = totals['translated__sum']
context['total_suggestions'] = totals['suggested__sum']
context['total_users'] = totals['id__count']
context['total_strings'] = total_strings
context['total_words'] = total_words
context['total_languages'] = Language.objects.filter(
translation__total__gt=0
).distinct().count()
context['total_checks'] = Check.objects.count()
context['ignored_checks'] = Check.objects.filter(ignore=True).count()
context['versions'] = get_versions() + get_optional_versions()
return render(
request,
'about.html',
context
)
def data_root(request):
return render(
request,
'data-root.html',
{
'hooks_docs': weblate.get_doc_url('api', 'hooks'),
'api_docs': weblate.get_doc_url('api', 'exports'),
'rss_docs': weblate.get_doc_url('api', 'rss'),
}
)
def data_project(request, project):
obj = get_project(request, project)
return render(
request,
'data.html',
{
'object': obj,
'project': obj,
'hooks_docs': weblate.get_doc_url('api', 'hooks'),
'api_docs': weblate.get_doc_url('api', 'exports'),
'rss_docs': weblate.get_doc_url('api', 'rss'),
}
)
@login_required
def new_language(request, project, subproject):
obj = get_subproject(request, project, subproject)
form = NewLanguageForm(request.POST)
if form.is_valid():
language = Language.objects.get(code=form.cleaned_data['lang'])
same_lang = obj.translation_set.filter(language=language)
if same_lang.exists():
messages.error(
request,
_('Chosen translation already exists in this project!')
)
elif obj.new_lang == 'contact':
notify_new_language(obj, language, request.user)
messages.success(
request,
_(
"A request for a new translation has been "
"sent to the project's maintainers."
)
)
elif obj.new_lang == 'add':
obj.add_new_language(language, request)
else:
messages.error(
request,
_(
'Please choose the language into which '
'you would like to translate.'
)
)
return redirect(
'subproject',
subproject=obj.slug,
project=obj.project.slug
)
| leohmoraes/weblate | weblate/trans/views/basic.py | Python | gpl-3.0 | 14,336 |
# Copyright (c) 2015-2016 Anish Athalye. Released under GPLv3.
import tensorflow as tf
import numpy as np
import scipy.io
import pdb
MEAN_PIXEL = np.array([ 123.68 , 116.779, 103.939])
def net(data_path, input_image):
    """Build the VGG-19 feature graph from the MatConvNet weights stored at
    data_path, applied to input_image; returns a dict of layer name -> tensor."""
layers = (
'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
'relu5_3', 'conv5_4', 'relu5_4'
)
data = scipy.io.loadmat(data_path)
mean = data['normalization'][0][0][0]
mean_pixel = np.mean(mean, axis=(0, 1))
weights = data['layers'][0]
net = {}
current = input_image
for i, name in enumerate(layers):
kind = name[:4]
if kind == 'conv':
kernels, bias = weights[i][0][0][0][0]
# matconvnet: weights are [width, height, in_channels, out_channels]
# tensorflow: weights are [height, width, in_channels, out_channels]
kernels = np.transpose(kernels, (1, 0, 2, 3))
bias = bias.reshape(-1)
current = _conv_layer(current, kernels, bias)
elif kind == 'relu':
current = tf.nn.relu(current)
elif kind == 'pool':
current = _pool_layer(current)
net[name] = current
assert len(net) == len(layers)
return net
def _conv_layer(input, weights, bias):
conv = tf.nn.conv2d(input, tf.constant(weights), strides=(1, 1, 1, 1),
padding='SAME')
return tf.nn.bias_add(conv, bias)
def _pool_layer(input):
return tf.nn.max_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1),
padding='SAME')
def preprocess(image):
return image - MEAN_PIXEL
def unprocess(image):
return image + MEAN_PIXEL
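# Illustrative usage (a sketch only; the weights filename is hypothetical and the
# layer name is one of those listed in `layers` above):
#   image = tf.placeholder(tf.float32, shape=(1, 224, 224, 3))
#   vgg_layers = net('imagenet-vgg-verydeep-19.mat', preprocess(image))
#   features = vgg_layers['relu4_2']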
| mgoubran/MeuralPaint | vgg.py | Python | gpl-3.0 | 1,993 |
# Copyright (C) 2009-2010 Sergey Koposov
# This file is part of astrolibpy
#
# astrolibpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# astrolibpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with astrolibpy. If not, see <http://www.gnu.org/licenses/>.
import numpy, re
def from_hex(arr, delim=':'):
	r=re.compile(r'\s*(\-?)(.+)%s(.+)%s(.+)'%(delim,delim))
ret=[]
for a in arr:
m = r.search(a)
sign = m.group(1)=='-'
if sign:
sign=-1
else:
sign=1
i1 = int(m.group(2))
i2 = int(m.group(3))
i3 = float(m.group(4))
val = sign*(int(i1)+int(i2)/60.+(float(i3))/3600.)
ret.append(val)
return numpy.array(ret)
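# Illustrative example (hypothetical values): each field after the optional sign is
# divided by 1, 60 and 3600 respectively, so
#   from_hex(['10:30:00', '-05:15:30'])  ->  array([10.5, -5.2583...])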
| segasai/astrolibpy | my_utils/from_hex.py | Python | gpl-3.0 | 1,118 |
from django.contrib import admin
from article.models import Article
# Register your models here.
admin.site.register(Article)
| daimon99/pyer | my_blog/article/admin.py | Python | gpl-3.0 | 127 |
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import contextlib
import datetime
import os
import pwd
import re
import time
from functools import wraps
from io import StringIO
from numbers import Number
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.loaders import FileSystemLoader
from jinja2.runtime import Context, StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.module_utils.common._collections_compat import Sequence, Mapping
from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import UnsafeProxy, wrap_var
display = Display()
__all__ = ['Templar', 'generate_ansible_template_vars']
# A regex for checking to see if a variable we're trying to
# expand is just a single variable name.
# Primitive Types which we don't want Jinja to convert to strings.
NON_TEMPLATED_TYPES = (bool, Number)
JINJA2_OVERRIDE = '#jinja2:'
USE_JINJA2_NATIVE = False
if C.DEFAULT_JINJA2_NATIVE:
try:
from jinja2.nativetypes import NativeEnvironment as Environment
from ansible.template.native_helpers import ansible_native_concat as j2_concat
USE_JINJA2_NATIVE = True
except ImportError:
from jinja2 import Environment
from jinja2.utils import concat as j2_concat
from jinja2 import __version__ as j2_version
display.warning(
'jinja2_native requires Jinja 2.10 and above. '
'Version detected: %s. Falling back to default.' % j2_version
)
else:
from jinja2 import Environment
from jinja2.utils import concat as j2_concat
def generate_ansible_template_vars(path):
b_path = to_bytes(path)
try:
template_uid = pwd.getpwuid(os.stat(b_path).st_uid).pw_name
except (KeyError, TypeError):
template_uid = os.stat(b_path).st_uid
temp_vars = {}
temp_vars['template_host'] = to_text(os.uname()[1])
temp_vars['template_path'] = path
temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(b_path))
temp_vars['template_uid'] = to_text(template_uid)
temp_vars['template_fullpath'] = os.path.abspath(path)
temp_vars['template_run_date'] = datetime.datetime.now()
managed_default = C.DEFAULT_MANAGED_STR
managed_str = managed_default.format(
host=temp_vars['template_host'],
uid=temp_vars['template_uid'],
file=temp_vars['template_path'],
)
temp_vars['ansible_managed'] = to_text(time.strftime(to_native(managed_str), time.localtime(os.path.getmtime(b_path))))
return temp_vars
def _escape_backslashes(data, jinja_env):
"""Double backslashes within jinja2 expressions
A user may enter something like this in a playbook::
debug:
msg: "Test Case 1\\3; {{ test1_name | regex_replace('^(.*)_name$', '\\1')}}"
The string inside of the {{ gets interpreted multiple times First by yaml.
Then by python. And finally by jinja2 as part of it's variable. Because
it is processed by both python and jinja2, the backslash escaped
characters get unescaped twice. This means that we'd normally have to use
four backslashes to escape that. This is painful for playbook authors as
they have to remember different rules for inside vs outside of a jinja2
expression (The backslashes outside of the "{{ }}" only get processed by
yaml and python. So they only need to be escaped once). The following
code fixes this by automatically performing the extra quoting of
backslashes inside of a jinja2 expression.
"""
if '\\' in data and '{{' in data:
new_data = []
d2 = jinja_env.preprocess(data)
in_var = False
for token in jinja_env.lex(d2):
if token[1] == 'variable_begin':
in_var = True
new_data.append(token[2])
elif token[1] == 'variable_end':
in_var = False
new_data.append(token[2])
elif in_var and token[1] == 'string':
# Double backslashes only if we're inside of a jinja2 variable
new_data.append(token[2].replace('\\', '\\\\'))
else:
new_data.append(token[2])
data = ''.join(new_data)
return data
def _count_newlines_from_end(in_str):
'''
Counts the number of newlines at the end of a string. This is used during
the jinja2 templating to ensure the count matches the input, since some newlines
may be thrown away during the templating.
'''
try:
i = len(in_str)
j = i - 1
while in_str[j] == '\n':
j -= 1
return i - 1 - j
except IndexError:
# Uncommon cases: zero length string and string containing only newlines
return i
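# For example, _count_newlines_from_end(u'foo\n\n') == 2 and _count_newlines_from_end(u'') == 0.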
def tests_as_filters_warning(name, func):
'''
Closure to enable displaying a deprecation warning when tests are used as a filter
This closure is only used when registering ansible provided tests as filters
This function should be removed in 2.9 along with registering ansible provided tests as filters
in Templar._get_filters
'''
@wraps(func)
def wrapper(*args, **kwargs):
display.deprecated(
'Using tests as filters is deprecated. Instead of using `result|%(name)s` use '
'`result is %(name)s`' % dict(name=name),
version='2.9'
)
return func(*args, **kwargs)
return wrapper
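# Illustrative effect (using the built-in "changed" test as an example): a template such as
# "{{ result | changed }}" still renders, but emits the deprecation warning above, while
# "{{ result is changed }}" is the preferred spelling.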
class AnsibleContext(Context):
'''
A custom context, which intercepts resolve() calls and sets a flag
internally if any variable lookup returns an AnsibleUnsafe value. This
flag is checked post-templating, and (when set) will result in the
final templated result being wrapped via UnsafeProxy.
'''
def __init__(self, *args, **kwargs):
super(AnsibleContext, self).__init__(*args, **kwargs)
self.unsafe = False
def _is_unsafe(self, val):
'''
Our helper function, which will also recursively check dict and
list entries due to the fact that they may be repr'd and contain
a key or value which contains jinja2 syntax and would otherwise
lose the AnsibleUnsafe value.
'''
if isinstance(val, dict):
for key in val.keys():
if self._is_unsafe(val[key]):
return True
elif isinstance(val, list):
for item in val:
if self._is_unsafe(item):
return True
elif isinstance(val, string_types) and hasattr(val, '__UNSAFE__'):
return True
return False
def _update_unsafe(self, val):
if val is not None and not self.unsafe and self._is_unsafe(val):
self.unsafe = True
def resolve(self, key):
'''
The intercepted resolve(), which uses the helper above to set the
internal flag whenever an unsafe variable value is returned.
'''
val = super(AnsibleContext, self).resolve(key)
self._update_unsafe(val)
return val
def resolve_or_missing(self, key):
val = super(AnsibleContext, self).resolve_or_missing(key)
self._update_unsafe(val)
return val
class AnsibleEnvironment(Environment):
'''
Our custom environment, which simply allows us to override the class-level
values for the Template and Context classes used by jinja2 internally.
'''
context_class = AnsibleContext
template_class = AnsibleJ2Template
class Templar:
'''
The main class for templating, with the main entry-point of template().
'''
def __init__(self, loader, shared_loader_obj=None, variables=None):
variables = {} if variables is None else variables
self._loader = loader
self._filters = None
self._tests = None
self._available_variables = variables
self._cached_result = {}
if loader:
self._basedir = loader.get_basedir()
else:
self._basedir = './'
if shared_loader_obj:
self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
self._test_loader = getattr(shared_loader_obj, 'test_loader')
self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
else:
self._filter_loader = filter_loader
self._test_loader = test_loader
self._lookup_loader = lookup_loader
# flags to determine whether certain failures during templating
# should result in fatal errors being raised
self._fail_on_lookup_errors = True
self._fail_on_filter_errors = True
self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
self.environment = AnsibleEnvironment(
trim_blocks=True,
undefined=StrictUndefined,
extensions=self._get_extensions(),
finalize=self._finalize,
loader=FileSystemLoader(self._basedir),
)
# the current rendering context under which the templar class is working
self.cur_context = None
self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))
self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (
self.environment.variable_start_string,
self.environment.block_start_string,
self.environment.block_end_string,
self.environment.variable_end_string
))
self._no_type_regex = re.compile(r'.*?\|\s*(?:%s)(?:\([^\|]*\))?\s*\)?\s*(?:%s)' %
('|'.join(C.STRING_TYPE_FILTERS), self.environment.variable_end_string))
def _get_filters(self, builtin_filters):
'''
Returns filter plugins, after loading and caching them if need be
'''
if self._filters is not None:
return self._filters.copy()
self._filters = dict()
# TODO: Remove registering tests as filters in 2.9
for name, func in self._get_tests().items():
if name in builtin_filters:
# If we have a custom test named the same as a builtin filter, don't register as a filter
continue
self._filters[name] = tests_as_filters_warning(name, func)
for fp in self._filter_loader.all():
self._filters.update(fp.filters())
return self._filters.copy()
def _get_tests(self):
'''
Returns tests plugins, after loading and caching them if need be
'''
if self._tests is not None:
return self._tests.copy()
self._tests = dict()
for fp in self._test_loader.all():
self._tests.update(fp.tests())
return self._tests.copy()
def _get_extensions(self):
'''
Return jinja2 extensions to load.
If some extensions are set via jinja_extensions in ansible.cfg, we try
to load them with the jinja environment.
'''
jinja_exts = []
if C.DEFAULT_JINJA2_EXTENSIONS:
# make sure the configuration directive doesn't contain spaces
# and split extensions in an array
jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
return jinja_exts
def set_available_variables(self, variables):
'''
Sets the list of template variables this Templar instance will use
to template things, so we don't have to pass them around between
internal methods. We also clear the template cache here, as the variables
are being changed.
'''
if not isinstance(variables, dict):
raise AnsibleAssertionError("the type of 'variables' should be a dict but was a %s" % (type(variables)))
self._available_variables = variables
self._cached_result = {}
def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
convert_data=True, static_vars=None, cache=True, disable_lookups=False):
'''
Templates (possibly recursively) any given data as input. If convert_bare is
set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
before being sent through the template engine.
'''
static_vars = [''] if static_vars is None else static_vars
# Don't template unsafe variables, just return them.
if hasattr(variable, '__UNSAFE__'):
return variable
if fail_on_undefined is None:
fail_on_undefined = self._fail_on_undefined_errors
try:
if convert_bare:
variable = self._convert_bare_variable(variable)
if isinstance(variable, string_types):
result = variable
if self._contains_vars(variable):
# Check to see if the string we are trying to render is just referencing a single
# var. In this case we don't want to accidentally change the type of the variable
# to a string by using the jinja template renderer. We just want to pass it.
only_one = self.SINGLE_VAR.match(variable)
if only_one:
var_name = only_one.group(1)
if var_name in self._available_variables:
resolved_val = self._available_variables[var_name]
if isinstance(resolved_val, NON_TEMPLATED_TYPES):
return resolved_val
elif resolved_val is None:
return C.DEFAULT_NULL_REPRESENTATION
# Using a cache in order to prevent template calls with already templated variables
sha1_hash = None
if cache:
variable_hash = sha1(text_type(variable).encode('utf-8'))
options_hash = sha1(
(
text_type(preserve_trailing_newlines) +
text_type(escape_backslashes) +
text_type(fail_on_undefined) +
text_type(overrides)
).encode('utf-8')
)
sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
if cache and sha1_hash in self._cached_result:
result = self._cached_result[sha1_hash]
else:
result = self.do_template(
variable,
preserve_trailing_newlines=preserve_trailing_newlines,
escape_backslashes=escape_backslashes,
fail_on_undefined=fail_on_undefined,
overrides=overrides,
disable_lookups=disable_lookups,
)
if not USE_JINJA2_NATIVE:
unsafe = hasattr(result, '__UNSAFE__')
if convert_data and not self._no_type_regex.match(variable):
# if this looks like a dictionary or list, convert it to such using the safe_eval method
if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
result.startswith("[") or result in ("True", "False"):
eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
if eval_results[1] is None:
result = eval_results[0]
if unsafe:
result = wrap_var(result)
else:
# FIXME: if the safe_eval raised an error, should we do something with it?
pass
# we only cache in the case where we have a single variable
# name, to make sure we're not putting things which may otherwise
# be dynamic in the cache (filters, lookups, etc.)
if cache:
self._cached_result[sha1_hash] = result
return result
elif isinstance(variable, (list, tuple)):
return [self.template(
v,
preserve_trailing_newlines=preserve_trailing_newlines,
fail_on_undefined=fail_on_undefined,
overrides=overrides,
disable_lookups=disable_lookups,
) for v in variable]
elif isinstance(variable, (dict, Mapping)):
d = {}
# we don't use iteritems() here to avoid problems if the underlying dict
# changes sizes due to the templating, which can happen with hostvars
for k in variable.keys():
if k not in static_vars:
d[k] = self.template(
variable[k],
preserve_trailing_newlines=preserve_trailing_newlines,
fail_on_undefined=fail_on_undefined,
overrides=overrides,
disable_lookups=disable_lookups,
)
else:
d[k] = variable[k]
return d
else:
return variable
except AnsibleFilterError:
if self._fail_on_filter_errors:
raise
else:
return variable
def is_template(self, data):
''' lets us know if data has a template'''
if isinstance(data, string_types):
try:
new = self.do_template(data, fail_on_undefined=True)
except (AnsibleUndefinedVariable, UndefinedError):
return True
            except Exception:
return False
return (new != data)
elif isinstance(data, (list, tuple)):
for v in data:
if self.is_template(v):
return True
elif isinstance(data, dict):
for k in data:
if self.is_template(k) or self.is_template(data[k]):
return True
return False
def templatable(self, data):
'''
returns True if the data can be templated w/o errors
'''
templatable = True
try:
self.template(data)
        except Exception:
templatable = False
return templatable
def _contains_vars(self, data):
'''
returns True if the data contains a variable pattern
'''
if isinstance(data, string_types):
for marker in (self.environment.block_start_string, self.environment.variable_start_string, self.environment.comment_start_string):
if marker in data:
return True
return False
def _convert_bare_variable(self, variable):
'''
Wraps a bare string, which may have an attribute portion (ie. foo.bar)
in jinja2 variable braces so that it is evaluated properly.
'''
if isinstance(variable, string_types):
contains_filters = "|" in variable
first_part = variable.split("|")[0].split(".")[0].split("[")[0]
if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)
# the variable didn't meet the conditions to be converted,
# so just return it as-is
return variable
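    # Illustrative behaviour (assuming 'foo' is a defined variable and 'bar' is not):
    #   _convert_bare_variable('foo.baz | upper') -> '{{ foo.baz | upper }}'  (has a filter)
    #   _convert_bare_variable('foo')             -> '{{ foo }}'              (known variable)
    #   _convert_bare_variable('bar')             -> 'bar'                    (left unchanged)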
def _finalize(self, thing):
'''
A custom finalize method for jinja2, which prevents None from being returned. This
avoids a string of ``"None"`` as ``None`` has no importance in YAML.
If using ANSIBLE_JINJA2_NATIVE we bypass this and return the actual value always
'''
if USE_JINJA2_NATIVE:
return thing
return thing if thing is not None else ''
def _fail_lookup(self, name, *args, **kwargs):
raise AnsibleError("The lookup `%s` was found, however lookups were disabled from templating" % name)
def _now_datetime(self, utc=False, fmt=None):
'''jinja2 global function to return current datetime, potentially formatted via strftime'''
if utc:
now = datetime.datetime.utcnow()
else:
now = datetime.datetime.now()
if fmt:
return now.strftime(fmt)
return now
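    # Illustrative template usage: "{{ now(utc=True, fmt='%Y-%m-%d %H:%M:%S') }}" renders the
    # current UTC time, since 'now' is registered as a Jinja2 global in do_template() below.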
def _query_lookup(self, name, *args, **kwargs):
''' wrapper for lookup, force wantlist true'''
kwargs['wantlist'] = True
return self._lookup(name, *args, **kwargs)
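    # Illustrative template usage: "{{ query('lines', 'cat /etc/hosts') }}" always returns a
    # list, whereas "{{ lookup('lines', 'cat /etc/hosts') }}" returns a comma-joined string
    # (see the wantlist handling in _lookup() below).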
def _lookup(self, name, *args, **kwargs):
instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)
if instance is not None:
wantlist = kwargs.pop('wantlist', False)
allow_unsafe = kwargs.pop('allow_unsafe', C.DEFAULT_ALLOW_UNSAFE_LOOKUPS)
errors = kwargs.pop('errors', 'strict')
from ansible.utils.listify import listify_lookup_plugin_terms
loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)
# safely catch run failures per #5059
try:
ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
except (AnsibleUndefinedVariable, UndefinedError) as e:
raise AnsibleUndefinedVariable(e)
except Exception as e:
if self._fail_on_lookup_errors:
msg = u"An unhandled exception occurred while running the lookup plugin '%s'. Error was a %s, original message: %s" % \
(name, type(e), to_text(e))
if errors == 'warn':
display.warning(msg)
elif errors == 'ignore':
display.display(msg, log_only=True)
else:
raise AnsibleError(to_native(msg))
ran = None
if ran and not allow_unsafe:
if wantlist:
ran = wrap_var(ran)
else:
try:
ran = UnsafeProxy(",".join(ran))
except TypeError:
# Lookup Plugins should always return lists. Throw an error if that's not
# the case:
if not isinstance(ran, Sequence):
raise AnsibleError("The lookup plugin '%s' did not return a list."
% name)
# The TypeError we can recover from is when the value *inside* of the list
# is not a string
if len(ran) == 1:
ran = wrap_var(ran[0])
else:
ran = wrap_var(ran)
if self.cur_context:
self.cur_context.unsafe = True
return ran
else:
raise AnsibleError("lookup plugin (%s) not found" % name)
def do_template(self, data, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None, disable_lookups=False):
if USE_JINJA2_NATIVE and not isinstance(data, string_types):
return data
# For preserving the number of input newlines in the output (used
# later in this method)
data_newlines = _count_newlines_from_end(data)
if fail_on_undefined is None:
fail_on_undefined = self._fail_on_undefined_errors
try:
# allows template header overrides to change jinja2 options.
if overrides is None:
myenv = self.environment.overlay()
else:
myenv = self.environment.overlay(overrides)
# Get jinja env overrides from template
if hasattr(data, 'startswith') and data.startswith(JINJA2_OVERRIDE):
eol = data.find('\n')
line = data[len(JINJA2_OVERRIDE):eol]
data = data[eol + 1:]
for pair in line.split(','):
(key, val) = pair.split(':')
key = key.strip()
setattr(myenv, key, ast.literal_eval(val.strip()))
# Adds Ansible custom filters and tests
myenv.filters.update(self._get_filters(myenv.filters))
myenv.tests.update(self._get_tests())
if escape_backslashes:
# Allow users to specify backslashes in playbooks as "\\" instead of as "\\\\".
data = _escape_backslashes(data, myenv)
try:
t = myenv.from_string(data)
except TemplateSyntaxError as e:
raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)))
except Exception as e:
if 'recursion' in to_native(e):
raise AnsibleError("recursive loop detected in template string: %s" % to_native(data))
else:
return data
if disable_lookups:
t.globals['query'] = t.globals['q'] = t.globals['lookup'] = self._fail_lookup
else:
t.globals['lookup'] = self._lookup
t.globals['query'] = t.globals['q'] = self._query_lookup
t.globals['now'] = self._now_datetime
t.globals['finalize'] = self._finalize
jvars = AnsibleJ2Vars(self, t.globals)
self.cur_context = new_context = t.new_context(jvars, shared=True)
rf = t.root_render_func(new_context)
try:
res = j2_concat(rf)
if getattr(new_context, 'unsafe', False):
res = wrap_var(res)
except TypeError as te:
if 'StrictUndefined' in to_native(te):
errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
raise AnsibleUndefinedVariable(errmsg)
else:
display.debug("failing because of a type error, template data is: %s" % to_native(data))
raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data), to_native(te)))
if USE_JINJA2_NATIVE and not isinstance(res, string_types):
return res
if preserve_trailing_newlines:
# The low level calls above do not preserve the newline
# characters at the end of the input data, so we use the
# calculate the difference in newlines and append them
# to the resulting output for parity
#
# jinja2 added a keep_trailing_newline option in 2.7 when
# creating an Environment. That would let us make this code
# better (remove a single newline if
# preserve_trailing_newlines is False). Once we can depend on
# that version being present, modify our code to set that when
# initializing self.environment and remove a single trailing
# newline here if preserve_newlines is False.
res_newlines = _count_newlines_from_end(res)
if data_newlines > res_newlines:
res += self.environment.newline_sequence * (data_newlines - res_newlines)
return res
except (UndefinedError, AnsibleUndefinedVariable) as e:
if fail_on_undefined:
raise AnsibleUndefinedVariable(e)
else:
display.debug("Ignoring undefined failure: %s" % to_text(e))
return data
# for backwards compatibility in case anyone is using old private method directly
_do_template = do_template
| veger/ansible | lib/ansible/template/__init__.py | Python | gpl-3.0 | 30,186 |
# -*- coding: utf-8 -*-
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
}
complete_apps = ['admin']
| Karaage-Cluster/karaage-debian | karaage/legacy/admin/south_migrations/0004_auto__del_logentry.py | Python | gpl-3.0 | 237 |
#!/usr/bin/env python2
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to CouchPotato, SickBeard, NzbDrone, Mylar, Gamez, HeadPhones.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS ###
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## Gamez
# Gamez script category.
#
# category that gets called for post-processing with Gamez.
#gzCategory=games
# Gamez api key.
#gzapikey=
# Gamez host.
#
# The ipaddress for your Gamez server. e.g For the Same system use localhost or 127.0.0.1
#gzhost=localhost
# Gamez port.
#gzport=8085
# Gamez uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#gzssl=0
# Gamez library
#
# move downloaded games here.
#gzlibrary
# Gamez web_root
#
# set this if using a reverse proxy.
#gzweb_root=
# Gamez watch directory.
#
# set this to where your Gamez completed downloads are.
#gzwatch_dir=
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10
# ionice scheduling class (0, 1, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WOL MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import sys
import nzbToMedia
section = "Gamez"
result = nzbToMedia.main(sys.argv, section)
sys.exit(result) | DxCx/nzbToMedia | nzbToGamez.py | Python | gpl-3.0 | 2,702 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_pubsub_subscription_info
description:
- Gather info for GCP Subscription
short_description: Gather info for GCP Subscription
version_added: '2.8'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- for authentication, you can set service_account_file using the C(gcp_service_account_file)
env variable.
- for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: get info on a subscription
gcp_pubsub_subscription_info:
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
resources:
description: List of resources
returned: always
type: complex
contains:
name:
description:
- Name of the subscription.
returned: success
type: str
topic:
description:
- A reference to a Topic resource.
returned: success
type: dict
labels:
description:
- A set of key/value label pairs to assign to this Subscription.
returned: success
type: dict
pushConfig:
description:
- If push delivery is used with this subscription, this field is used to configure
it. An empty pushConfig signifies that the subscriber will pull and ack messages
using API methods.
returned: success
type: complex
contains:
oidcToken:
description:
- If specified, Pub/Sub will generate and attach an OIDC JWT token as an
Authorization header in the HTTP request for every pushed message.
returned: success
type: complex
contains:
serviceAccountEmail:
description:
- Service account email to be used for generating the OIDC token.
- The caller (for subscriptions.create, subscriptions.patch, and subscriptions.modifyPushConfig
RPCs) must have the iam.serviceAccounts.actAs permission for the service
account.
returned: success
type: str
audience:
description:
- 'Audience to be used when generating OIDC token. The audience claim
identifies the recipients that the JWT is intended for. The audience
value is a single case-sensitive string. Having multiple values (array)
for the audience field is not supported. More info about the OIDC
JWT token audience here: U(https://tools.ietf.org/html/rfc7519#section-4.1.3)
Note: if not specified, the Push endpoint URL will be used.'
returned: success
type: str
pushEndpoint:
description:
- A URL locating the endpoint to which messages should be pushed.
- For example, a Webhook endpoint might use "U(https://example.com/push").
returned: success
type: str
attributes:
description:
- Endpoint configuration attributes.
- Every endpoint has a set of API supported attributes that can be used
to control different aspects of the message delivery.
- The currently supported attribute is x-goog-version, which you can use
to change the format of the pushed message. This attribute indicates the
version of the data expected by the endpoint. This controls the shape
of the pushed message (i.e., its fields and metadata). The endpoint version
is based on the version of the Pub/Sub API.
- If not present during the subscriptions.create call, it will default to
the version of the API used to make such call. If not present during a
subscriptions.modifyPushConfig call, its value will not be changed. subscriptions.get
calls will always return a valid version, even if the subscription was
created without this attribute.
- 'The possible values for this attribute are: - v1beta1: uses the push
format defined in the v1beta1 Pub/Sub API.'
- "- v1 or v1beta2: uses the push format defined in the v1 Pub/Sub API."
returned: success
type: dict
ackDeadlineSeconds:
description:
- This value is the maximum time after a subscriber receives a message before
the subscriber should acknowledge the message. After message delivery but
before the ack deadline expires and before the message is acknowledged, it
is an outstanding message and will not be delivered again during that time
(on a best-effort basis).
- For pull subscriptions, this value is used as the initial value for the ack
deadline. To override this value for a given message, call subscriptions.modifyAckDeadline
with the corresponding ackId if using pull. The minimum custom deadline you
can specify is 10 seconds. The maximum custom deadline you can specify is
600 seconds (10 minutes).
- If this parameter is 0, a default value of 10 seconds is used.
- For push delivery, this value is also used to set the request timeout for
the call to the push endpoint.
- If the subscriber never acknowledges the message, the Pub/Sub system will
eventually redeliver the message.
returned: success
type: int
messageRetentionDuration:
description:
- How long to retain unacknowledged messages in the subscription's backlog,
from the moment a message is published. If retainAckedMessages is true, then
this also configures the retention of acknowledged messages, and thus configures
how far back in time a subscriptions.seek can be done. Defaults to 7 days.
Cannot be more than 7 days (`"604800s"`) or less than 10 minutes (`"600s"`).
- 'A duration in seconds with up to nine fractional digits, terminated by ''s''.
Example: `"600.5s"`.'
returned: success
type: str
retainAckedMessages:
description:
- Indicates whether to retain acknowledged messages. If `true`, then messages
are not expunged from the subscription's backlog, even if they are acknowledged,
until they fall out of the messageRetentionDuration window.
returned: success
type: bool
expirationPolicy:
description:
- A policy that specifies the conditions for this subscription's expiration.
- A subscription is considered active as long as any connected subscriber is
successfully consuming messages from the subscription or is issuing operations
on the subscription. If expirationPolicy is not set, a default policy with
ttl of 31 days will be used. If it is set but left empty, the resource never
expires. The minimum allowed value for expirationPolicy.ttl is 1 day.
returned: success
type: complex
contains:
ttl:
description:
- Specifies the "time-to-live" duration for an associated resource. The
resource expires if it is not active for a period of ttl.
- If ttl is not set, the associated resource never expires.
- A duration in seconds with up to nine fractional digits, terminated by
's'.
- Example - "3.5s".
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
module = GcpModule(argument_spec=dict())
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/pubsub']
return_value = {'resources': fetch_list(module, collection(module))}
module.exit_json(**return_value)
def collection(module):
return "https://pubsub.googleapis.com/v1/projects/{project}/subscriptions".format(**module.params)
def fetch_list(module, link):
auth = GcpSession(module, 'pubsub')
return auth.list(link, return_if_object, array_name='subscriptions')
def return_if_object(module, response):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
if __name__ == "__main__":
main()
| sestrella/ansible | lib/ansible/modules/cloud/google/gcp_pubsub_subscription_info.py | Python | gpl-3.0 | 11,806 |
import random
from cloudbot.util import http, formatting
def api_get(kind, query):
"""Use the RESTful Google Search API"""
url = 'http://ajax.googleapis.com/ajax/services/search/%s?' \
'v=1.0&safe=moderate'
return http.get_json(url % kind, q=query)
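# Illustrative request (hypothetical query): api_get('web', 'cloudbot') is roughly equivalent to
# fetching http://ajax.googleapis.com/ajax/services/search/web?v=1.0&safe=moderate&q=cloudbot
# (the exact query-string encoding is handled by cloudbot.util.http).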
# @hook.command("googleimage", "gis", "image")
def googleimage(text):
"""<query> - returns the first google image result for <query>"""
parsed = api_get('images', text)
if not 200 <= parsed['responseStatus'] < 300:
        raise IOError('error searching for images: {}'.format(parsed['responseStatus']))
if not parsed['responseData']['results']:
return 'no images found'
return random.choice(parsed['responseData']['results'][:10])['unescapedUrl']
# @hook.command("google", "g", "search")
def google(text):
"""<query> - returns the first google search result for <query>"""
parsed = api_get('web', text)
if not 200 <= parsed['responseStatus'] < 300:
        raise IOError('error searching for pages: {}'.format(parsed['responseStatus']))
if not parsed['responseData']['results']:
return 'No results found.'
result = parsed['responseData']['results'][0]
title = http.unescape(result['titleNoFormatting'])
title = formatting.truncate_str(title, 60)
content = http.unescape(result['content'])
if not content:
content = "No description available."
else:
content = http.html.fromstring(content).text_content()
content = formatting.truncate_str(content, 150).replace('\n', '')
return '{} -- \x02{}\x02: "{}"'.format(result['unescapedUrl'], title, content)
| weylin/CloudBot | plugins/google.py | Python | gpl-3.0 | 1,653 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from bpy.props import IntProperty, FloatProperty
import mathutils
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode
# documentation/blender_python_api_2_70_release/mathutils.kdtree.html
class SvKDTreeEdgesNodeMK2(bpy.types.Node, SverchCustomTreeNode):
bl_idname = 'SvKDTreeEdgesNodeMK2'
bl_label = 'KDT Closest Edges MK2'
bl_icon = 'OUTLINER_OB_EMPTY'
mindist = FloatProperty(
name='mindist', description='Minimum dist', min=0.0,
default=0.1, update=updateNode)
maxdist = FloatProperty(
name='maxdist', description='Maximum dist', min=0.0,
default=2.0, update=updateNode)
maxNum = IntProperty(
name='maxNum', description='max edge count',
default=4, min=1, update=updateNode)
skip = IntProperty(
name='skip', description='skip first n',
default=0, min=0, update=updateNode)
def sv_init(self, context):
self.inputs.new('VerticesSocket', 'Verts')
self.inputs.new('StringsSocket', 'mindist').prop_name = 'mindist'
self.inputs.new('StringsSocket', 'maxdist').prop_name = 'maxdist'
self.inputs.new('StringsSocket', 'maxNum').prop_name = 'maxNum'
self.inputs.new('StringsSocket', 'skip').prop_name = 'skip'
self.outputs.new('StringsSocket', 'Edges')
def process(self):
inputs = self.inputs
outputs = self.outputs
try:
verts = inputs['Verts'].sv_get()[0]
linked = outputs['Edges'].is_linked
except (IndexError, KeyError) as e:
return
optional_sockets = [
['mindist', self.mindist, float],
['maxdist', self.maxdist, float],
['maxNum', self.maxNum, int],
['skip', self.skip, int]]
socket_inputs = []
for s, s_default_value, dtype in optional_sockets:
if s in inputs and inputs[s].is_linked:
sock_input = dtype(inputs[s].sv_get()[0][0])
else:
sock_input = s_default_value
socket_inputs.append(sock_input)
self.run_kdtree(verts, socket_inputs)
def run_kdtree(self, verts, socket_inputs):
mindist, maxdist, maxNum, skip = socket_inputs
# make kdtree
# documentation/blender_python_api_2_78_release/mathutils.kdtree.html
size = len(verts)
kd = mathutils.kdtree.KDTree(size)
for i, xyz in enumerate(verts):
kd.insert(xyz, i)
kd.balance()
# set minimum values
maxNum = max(maxNum, 1)
skip = max(skip, 0)
# makes edges
e = set()
for i, vtx in enumerate(verts):
num_edges = 0
# this always returns closest first followed by next closest, etc.
# co index dist
for edge_idx, (_, index, dist) in enumerate(kd.find_range(vtx, abs(maxdist))):
if skip > 0:
if edge_idx < skip:
continue
if (dist <= abs(mindist)) or (i == index):
continue
edge = tuple(sorted([i, index]))
                if edge not in e:
e.add(edge)
num_edges += 1
if num_edges == maxNum:
break
self.outputs['Edges'].sv_set([list(e)])
def register():
bpy.utils.register_class(SvKDTreeEdgesNodeMK2)
def unregister():
bpy.utils.unregister_class(SvKDTreeEdgesNodeMK2)
| elfnor/sverchok | nodes/analyzer/kd_tree_edges_mk2.py | Python | gpl-3.0 | 4,347 |
""" UserProfileDB class is a front-end to the User Profile Database
"""
__RCSID__ = "$Id$"
import types
import os
import sys
import hashlib
from DIRAC import S_OK, S_ERROR, gLogger, gConfig
from DIRAC.Core.Utilities import Time
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
from DIRAC.Core.Base.DB import DB
class UserProfileDB( DB ):
""" UserProfileDB class is a front-end to the User Profile Database
"""
tableDict = { 'up_Users' : { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'UserName' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'U' : [ 'UserName' ] },
'Engine': 'InnoDB',
},
'up_Groups': { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'UserGroup' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'G' : [ 'UserGroup' ] },
'Engine': 'InnoDB',
},
'up_VOs': { 'Fields' : { 'Id' : 'INTEGER AUTO_INCREMENT NOT NULL',
'VO' : 'VARCHAR(32) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : 'Id',
'UniqueIndexes' : { 'VO' : [ 'VO' ] },
'Engine': 'InnoDB',
},
'up_ProfilesData': { 'Fields' : { 'UserId' : 'INTEGER',
'GroupId' : 'INTEGER',
'VOId' : 'INTEGER',
'Profile' : 'VARCHAR(255) NOT NULL',
'VarName' : 'VARCHAR(255) NOT NULL',
'Data' : 'BLOB',
'ReadAccess' : 'VARCHAR(10) DEFAULT "USER"',
'PublishAccess' : 'VARCHAR(10) DEFAULT "USER"',
},
'PrimaryKey' : [ 'UserId', 'GroupId', 'Profile', 'VarName' ],
'Indexes' : { 'ProfileKey' : [ 'UserId', 'GroupId', 'Profile' ],
'UserKey' : [ 'UserId' ] ,
},
'Engine': 'InnoDB',
},
'up_HashTags': { 'Fields' : { 'UserId' : 'INTEGER',
'GroupId' : 'INTEGER',
'VOId' : 'INTEGER',
'HashTag' : 'VARCHAR(32) NOT NULL',
'TagName' : 'VARCHAR(255) NOT NULL',
'LastAccess' : 'DATETIME',
},
'PrimaryKey' : [ 'UserId', 'GroupId', 'TagName' ],
'Indexes' : { 'HashKey' : [ 'UserId', 'HashTag' ] },
'Engine': 'InnoDB',
},
}
def __init__( self ):
""" Constructor
"""
self.__permValues = [ 'USER', 'GROUP', 'VO', 'ALL' ]
self.__permAttrs = [ 'ReadAccess', 'PublishAccess' ]
DB.__init__( self, 'UserProfileDB', 'Framework/UserProfileDB', 10 )
retVal = self.__initializeDB()
if not retVal[ 'OK' ]:
raise Exception( "Can't create tables: %s" % retVal[ 'Message' ] )
def _checkTable( self ):
""" Make sure the tables are created
"""
return self.__initializeDB()
def __initializeDB( self ):
"""
Create the tables
"""
retVal = self._query( "show tables" )
if not retVal[ 'OK' ]:
return retVal
tablesInDB = [ t[0] for t in retVal[ 'Value' ] ]
tablesD = {}
if 'up_Users' not in tablesInDB:
tablesD[ 'up_Users' ] = self.tableDict['up_Users']
if 'up_Groups' not in tablesInDB:
tablesD[ 'up_Groups' ] = self.tableDict[ 'up_Groups']
if 'up_VOs' not in tablesInDB:
tablesD[ 'up_VOs' ] = self.tableDict['up_VOs']
if 'up_ProfilesData' not in tablesInDB:
tablesD[ 'up_ProfilesData' ] = self.tableDict['up_ProfilesData']
if 'up_HashTags' not in tablesInDB:
tablesD[ 'up_HashTags' ] = self.tableDict['up_HashTags']
return self._createTables( tablesD )
def __getUserId( self, userName, insertIfMissing = True ):
return self.__getObjId( userName, 'UserName', 'up_Users', insertIfMissing )
def __getGroupId( self, groupName, insertIfMissing = True ):
return self.__getObjId( groupName, 'UserGroup', 'up_Groups', insertIfMissing )
def __getVOId( self, voName, insertIfMissing = True ):
return self.__getObjId( voName, 'VO', 'up_VOs', insertIfMissing )
def __getObjId( self, objValue, varName, tableName, insertIfMissing = True ):
result = self.getFields( tableName, ['Id'], { varName: objValue } )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
if len( data ) > 0:
objId = data[0][0]
self.updateFields( tableName, ['LastAccess'], ['UTC_TIMESTAMP()'], { 'Id': objId } )
return S_OK( objId )
if not insertIfMissing:
return S_ERROR( "No entry %s for %s defined in the DB" % ( objValue, varName ) )
result = self.insertFields( tableName, [ varName, 'LastAccess' ], [ objValue, 'UTC_TIMESTAMP()' ] )
if not result[ 'OK' ]:
return result
return S_OK( result[ 'lastRowId' ] )
def getUserGroupIds( self, userName, userGroup, insertIfMissing = True ):
result = self.__getUserId( userName, insertIfMissing )
if not result[ 'OK' ]:
return result
userId = result[ 'Value' ]
result = self.__getGroupId( userGroup, insertIfMissing )
if not result[ 'OK' ]:
return result
groupId = result[ 'Value' ]
userVO = Registry.getVOForGroup( userGroup )
if not userVO:
userVO = "undefined"
result = self.__getVOId( userVO, insertIfMissing )
if not result[ 'OK' ]:
return result
voId = result[ 'Value' ]
return S_OK( ( userId, groupId, voId ) )
def deleteUserProfile( self, userName, userGroup = False ):
"""
Delete the profiles for a user
"""
result = self.__getUserId( userName )
if not result[ 'OK' ]:
return result
userId = result[ 'Value' ]
condDict = { 'UserId': userId }
if userGroup:
result = self.__getGroupId( userGroup )
if not result[ 'OK' ]:
return result
groupId = result[ 'Value' ]
condDict['GroupId'] = groupId
result = self.deleteEntries( 'up_ProfilesData', condDict )
if not result[ 'OK' ] or not userGroup:
return result
return self.deleteEntries( 'up_Users', { 'Id': userId } )
def __webProfileUserDataCond( self, userIds, sqlProfileName = False, sqlVarName = False ):
condSQL = [ '`up_ProfilesData`.UserId=%s' % userIds[0],
'`up_ProfilesData`.GroupId=%s' % userIds[1],
'`up_ProfilesData`.VOId=%s' % userIds[2] ]
if sqlProfileName:
condSQL.append( '`up_ProfilesData`.Profile=%s' % sqlProfileName )
if sqlVarName:
condSQL.append( '`up_ProfilesData`.VarName=%s' % sqlVarName )
return " AND ".join( condSQL )
def __webProfileReadAccessDataCond( self, userIds, ownerIds, sqlProfileName, sqlVarName = False, match = False ):
permCondSQL = []
sqlCond = []
if match:
sqlCond.append( '`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % ( ownerIds[0], ownerIds[1] ) )
else:
permCondSQL.append( '`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % ( ownerIds[0], ownerIds[1] ) )
permCondSQL.append( '`up_ProfilesData`.GroupId=%s AND `up_ProfilesData`.ReadAccess="GROUP"' % userIds[1] )
permCondSQL.append( '`up_ProfilesData`.VOId=%s AND `up_ProfilesData`.ReadAccess="VO"' % userIds[2] )
permCondSQL.append( '`up_ProfilesData`.ReadAccess="ALL"' )
sqlCond.append( '`up_ProfilesData`.Profile = %s' % sqlProfileName )
if sqlVarName:
sqlCond.append( "`up_ProfilesData`.VarName = %s" % ( sqlVarName ) )
#Perms
sqlCond.append( "( ( %s ) )" % " ) OR ( ".join( permCondSQL ) )
return " AND ".join( sqlCond )
def __parsePerms( self, perms, addMissing = True ):
normPerms = {}
for pName in self.__permAttrs:
if not perms or pName not in perms:
if addMissing:
normPerms[ pName ] = self.__permValues[0]
continue
else:
permVal = perms[ pName ].upper()
for nV in self.__permValues:
if nV == permVal:
normPerms[ pName ] = nV
break
if pName not in normPerms and addMissing:
normPerms[ pName ] = self.__permValues[0]
return normPerms
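  # Illustrative: __parsePerms( { 'ReadAccess' : 'all' } ) returns
  # { 'ReadAccess' : 'ALL', 'PublishAccess' : 'USER' } when addMissing is True.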
def retrieveVarById( self, userIds, ownerIds, profileName, varName ):
"""
Get a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
sqlCond = self.__webProfileReadAccessDataCond( userIds, ownerIds, sqlProfileName, sqlVarName, True )
#when we retrieve the user profile we have to take into account the user.
selectSQL = "SELECT data FROM `up_ProfilesData` WHERE %s" % sqlCond
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
if len( data ) > 0:
return S_OK( data[0][0] )
return S_ERROR( "No data for userIds %s profileName %s varName %s" % ( userIds, profileName, varName ) )
def retrieveAllUserVarsById( self, userIds, profileName ):
"""
Get a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
sqlCond = self.__webProfileUserDataCond( userIds, sqlProfileName )
selectSQL = "SELECT varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
return S_OK( dict( data ) )
def retrieveUserProfilesById( self, userIds ):
"""
Get all profiles and data for a user
"""
sqlCond = self.__webProfileUserDataCond( userIds )
selectSQL = "SELECT Profile, varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
dataDict = {}
for row in data:
if row[0] not in dataDict:
dataDict[ row[0] ] = {}
dataDict[ row[0] ][ row[1] ] = row[2 ]
return S_OK( dataDict )
def retrieveVarPermsById( self, userIds, ownerIds, profileName, varName ):
"""
Get a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
sqlCond = self.__webProfileReadAccessDataCond( userIds, ownerIds, sqlProfileName, sqlVarName )
selectSQL = "SELECT %s FROM `up_ProfilesData` WHERE %s" % ( ", ".join( self.__permAttrs ), sqlCond )
result = self._query( selectSQL )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
if len( data ) > 0:
permDict = {}
for i in range( len( self.__permAttrs ) ):
permDict[ self.__permAttrs[ i ] ] = data[0][i]
return S_OK( permDict )
return S_ERROR( "No data for userIds %s profileName %s varName %s" % ( userIds, profileName, varName ) )
def deleteVarByUserId( self, userIds, profileName, varName ):
"""
Remove a data entry for a profile
"""
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
sqlCond = self.__webProfileUserDataCond( userIds, sqlProfileName, sqlVarName )
selectSQL = "DELETE FROM `up_ProfilesData` WHERE %s" % sqlCond
return self._update( selectSQL )
def storeVarByUserId( self, userIds, profileName, varName, data, perms ):
"""
Set a data entry for a profile
"""
sqlInsertValues = []
sqlInsertKeys = []
sqlInsertKeys.append( ( 'UserId', userIds[0] ) )
sqlInsertKeys.append( ( 'GroupId', userIds[1] ) )
sqlInsertKeys.append( ( 'VOId', userIds[2] ) )
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
sqlInsertKeys.append( ( 'Profile', sqlProfileName ) )
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
sqlInsertKeys.append( ( 'VarName', sqlVarName ) )
result = self._escapeString( data )
if not result[ 'OK' ]:
return result
sqlInsertValues.append( ( 'Data', result[ 'Value' ] ) )
normPerms = self.__parsePerms( perms )
for k in normPerms:
sqlInsertValues.append( ( k, '"%s"' % normPerms[ k ] ) )
sqlInsert = sqlInsertKeys + sqlInsertValues
insertSQL = "INSERT INTO `up_ProfilesData` ( %s ) VALUES ( %s )" % ( ", ".join( [ f[0] for f in sqlInsert ] ),
", ".join( [ str( f[1] ) for f in sqlInsert ] ) )
result = self._update( insertSQL )
if result[ 'OK' ]:
return result
#If error and not duplicate -> real error
if result[ 'Message' ].find( "Duplicate entry" ) == -1:
return result
updateSQL = "UPDATE `up_ProfilesData` SET %s WHERE %s" % ( ", ".join( [ "%s=%s" % f for f in sqlInsertValues ] ),
self.__webProfileUserDataCond( userIds,
sqlProfileName,
sqlVarName ) )
return self._update( updateSQL )
def setUserVarPermsById( self, userIds, profileName, varName, perms ):
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
result = self._escapeString( varName )
if not result[ 'OK' ]:
return result
sqlVarName = result[ 'Value' ]
nPerms = self.__parsePerms( perms, False )
if not nPerms:
return S_OK()
sqlPerms = ",".join( [ "%s='%s'" % ( k, nPerms[k] ) for k in nPerms ] )
updateSql = "UPDATE `up_ProfilesData` SET %s WHERE %s" % ( sqlPerms,
self.__webProfileUserDataCond( userIds,
sqlProfileName,
sqlVarName ) )
return self._update( updateSql )
def retrieveVar( self, userName, userGroup, ownerName, ownerGroup, profileName, varName ):
"""
Get a data entry for a profile
"""
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
result = self.getUserGroupIds( ownerName, ownerGroup )
if not result[ 'OK' ]:
return result
ownerIds = result[ 'Value' ]
return self.retrieveVarById( userIds, ownerIds, profileName, varName )
def retrieveUserProfiles( self, userName, userGroup ):
"""
Helper for getting data
"""
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveUserProfilesById( userIds )
def retrieveAllUserVars( self, userName, userGroup, profileName ):
"""
Helper for getting data
"""
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveAllUserVarsById( userIds, profileName )
def retrieveVarPerms( self, userName, userGroup, ownerName, ownerGroup, profileName, varName ):
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
result = self.getUserGroupIds( ownerName, ownerGroup, False )
if not result[ 'OK' ]:
return result
ownerIds = result[ 'Value' ]
return self.retrieveVarPermsById( userIds, ownerIds, profileName, varName )
def setUserVarPerms( self, userName, userGroup, profileName, varName, perms ):
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.setUserVarPermsById( userIds, profileName, varName, perms )
def storeVar( self, userName, userGroup, profileName, varName, data, perms = None ):
"""
Helper for setting data
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.storeVarByUserId( userIds, profileName, varName, data, perms = perms )
finally:
pass
def deleteVar( self, userName, userGroup, profileName, varName ):
"""
Helper for deleting data
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.deleteVarByUserId( userIds, profileName, varName )
finally:
pass
def __profilesCondGenerator( self, value, varType, initialValue = False ):
if type( value ) in types.StringTypes:
value = [ value ]
ids = []
if initialValue:
ids.append( initialValue )
for val in value:
if varType == 'user':
result = self.__getUserId( val, insertIfMissing = False )
elif varType == 'group':
result = self.__getGroupId( val, insertIfMissing = False )
else:
result = self.__getVOId( val, insertIfMissing = False )
if not result[ 'OK' ]:
continue
ids.append( result[ 'Value' ] )
if varType == 'user':
fieldName = 'UserId'
elif varType == 'group':
fieldName = 'GroupId'
else:
fieldName = 'VOId'
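    # Restrict the query to the resolved ids with an SQL IN clause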
return "`up_ProfilesData`.%s in ( %s )" % ( fieldName, ", ".join( [ str( iD ) for iD in ids ] ) )
def listVarsById( self, userIds, profileName, filterDict = None ):
result = self._escapeString( profileName )
if not result[ 'OK' ]:
return result
sqlProfileName = result[ 'Value' ]
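    # Join up_Users/up_Groups/up_VOs so the result rows carry names instead of numeric ids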
sqlCond = [ "`up_Users`.Id = `up_ProfilesData`.UserId",
"`up_Groups`.Id = `up_ProfilesData`.GroupId",
"`up_VOs`.Id = `up_ProfilesData`.VOId",
self.__webProfileReadAccessDataCond( userIds, userIds, sqlProfileName ) ]
if filterDict:
fD = {}
for k in filterDict:
fD[ k.lower() ] = filterDict[ k ]
filterDict = fD
for k in ( 'user', 'group', 'vo' ):
if k in filterDict:
sqlCond.append( self.__profilesCondGenerator( filterDict[ k ], k ) )
sqlVars2Get = [ "`up_Users`.UserName", "`up_Groups`.UserGroup", "`up_VOs`.VO", "`up_ProfilesData`.VarName" ]
sqlQuery = "SELECT %s FROM `up_Users`, `up_Groups`, `up_VOs`, `up_ProfilesData` WHERE %s" % ( ", ".join( sqlVars2Get ),
" AND ".join( sqlCond ) )
return self._query( sqlQuery )
def listVars( self, userName, userGroup, profileName, filterDict = None ):
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.listVarsById( userIds, profileName, filterDict )
def storeHashTagById( self, userIds, tagName, hashTag = False ):
"""
    Store a hash tag for a user (a new tag is generated when none is supplied)
"""
if not hashTag:
hashTag = hashlib.md5()
hashTag.update( "%s;%s;%s" % ( Time.dateTime(), userIds, tagName ) )
hashTag = hashTag.hexdigest()
result = self.insertFields( 'up_HashTags', [ 'UserId', 'GroupId', 'VOId', 'TagName', 'HashTag' ],
[ userIds[0], userIds[1], userIds[2], tagName, hashTag ] )
if result[ 'OK' ]:
return S_OK( hashTag )
#If error and not duplicate -> real error
if result[ 'Message' ].find( "Duplicate entry" ) == -1:
return result
result = self.updateFields( 'up_HashTags', ['HashTag'], [hashTag], { 'UserId': userIds[0],
'GroupId': userIds[1],
'VOId': userIds[2],
'TagName': tagName } )
if not result[ 'OK' ]:
return result
return S_OK( hashTag )
def retrieveHashTagById( self, userIds, hashTag ):
"""
    Resolve a hash tag to its tag name for the given user ids
"""
result = self.getFields( 'up_HashTags', ['TagName'], { 'UserId': userIds[0],
'GroupId': userIds[1],
'VOId': userIds[2],
'HashTag': hashTag } )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
if len( data ) > 0:
return S_OK( data[0][0] )
return S_ERROR( "No data for combo userId %s hashTag %s" % ( userIds, hashTag ) )
def retrieveAllHashTagsById( self, userIds ):
"""
    Get all hash tags for a user as a { hashTag : tagName } dict
"""
result = self.getFields( 'up_HashTags', ['HashTag', 'TagName'], { 'UserId': userIds[0],
'GroupId': userIds[1],
'VOId': userIds[2] } )
if not result[ 'OK' ]:
return result
data = result[ 'Value' ]
return S_OK( dict( data ) )
def storeHashTag( self, userName, userGroup, tagName, hashTag = False ):
"""
Helper for storing HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.storeHashTagById( userIds, tagName, hashTag )
finally:
pass
def retrieveHashTag( self, userName, userGroup, hashTag ):
"""
Helper for retrieving HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveHashTagById( userIds, hashTag )
finally:
pass
def retrieveAllHashTags( self, userName, userGroup ):
"""
Helper for retrieving HASH
"""
try:
result = self.getUserGroupIds( userName, userGroup )
if not result[ 'OK' ]:
return result
userIds = result[ 'Value' ]
return self.retrieveAllHashTagsById( userIds )
finally:
pass
def getUserProfileNames( self, permission ):
"""
    Return the distinct profile names whose entries match the given permission flags (e.g. ReadAccess, PublishAccess)
"""
result = None
permissions = self.__parsePerms( permission, False )
if not permissions:
return S_OK()
condition = ",".join( [ "%s='%s'" % ( k, permissions[k] ) for k in permissions ] )
query = "SELECT distinct Profile from `up_ProfilesData` where %s" % condition
retVal = self._query( query )
if retVal['OK']:
result = S_OK( [i[0] for i in retVal['Value']] )
else:
result = retVal
return result
def testUserProfileDB():
""" Some test cases
"""
# building up some fake CS values
gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
gConfig.setOptionValue( '/DIRAC/Setups/Test/Framework', 'Test' )
host = '127.0.0.1'
user = 'Dirac'
pwd = 'Dirac'
db = 'AccountingDB'
gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Host', host )
gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/DBName', db )
gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/User', user )
gConfig.setOptionValue( '/Systems/Framework/Test/Databases/UserProfileDB/Password', pwd )
db = UserProfileDB()
assert db._connect()['OK']
userName = 'testUser'
userGroup = 'testGroup'
profileName = 'testProfile'
varName = 'testVar'
tagName = 'testTag'
hashTag = '237cadc4af90277e9524e6386e264630'
data = 'testData'
perms = 'USER'
try:
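    # The table-drop block below is disabled (guarded by 'if False'); enable it to start from a clean schema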
if False:
for tableName in db.tableDict.keys():
result = db._update( 'DROP TABLE `%s`' % tableName )
assert result['OK']
gLogger.info( '\n Creating Table\n' )
# Make sure it is there and it has been created for this test
result = db._checkTable()
assert result == {'OK': True, 'Value': None }
result = db._checkTable()
assert result == {'OK': True, 'Value': 0}
gLogger.info( '\n Adding some data\n' )
result = db.storeVar( userName, userGroup, profileName, varName, data, perms )
assert result['OK']
assert result['Value'] == 1
gLogger.info( '\n Some queries\n' )
result = db.getUserGroupIds( userName, userGroup )
assert result['OK']
assert result['Value'] == ( 1, 1, 1 )
result = db.listVars( userName, userGroup, profileName )
assert result['OK']
assert result['Value'][0][3] == varName
result = db.retrieveUserProfiles( userName, userGroup )
assert result['OK']
assert result['Value'] == { profileName: { varName: data } }
result = db.storeHashTag( userName, userGroup, tagName, hashTag )
assert result['OK']
assert result['Value'] == hashTag
result = db.retrieveAllHashTags( userName, userGroup )
assert result['OK']
assert result['Value'] == { hashTag: tagName }
result = db.retrieveHashTag( userName, userGroup, hashTag )
assert result['OK']
assert result['Value'] == tagName
gLogger.info( '\n OK\n' )
except AssertionError:
print 'ERROR ',
if not result['OK']:
print result['Message']
else:
print result
sys.exit( 1 )
if __name__ == '__main__':
from DIRAC.Core.Base import Script
Script.parseCommandLine()
gLogger.setLevel( 'VERBOSE' )
if 'PYTHONOPTIMIZE' in os.environ and os.environ['PYTHONOPTIMIZE']:
    gLogger.info( 'Unset python optimization "PYTHONOPTIMIZE"' )
sys.exit( 0 )
testUserProfileDB()
| arrabito/DIRAC | FrameworkSystem/DB/UserProfileDB.py | Python | gpl-3.0 | 27,552 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Martijn Voncken <[email protected]>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import copy
import logging
import deluge.component as component
from deluge.common import TORRENT_STATE
log = logging.getLogger(__name__)
STATE_SORT = ["All", "Active"] + TORRENT_STATE
# Special purpose filters:
def filter_keywords(torrent_ids, values):
# Cleanup
keywords = ",".join([v.lower() for v in values])
keywords = keywords.split(",")
for keyword in keywords:
torrent_ids = filter_one_keyword(torrent_ids, keyword)
return torrent_ids
def filter_one_keyword(torrent_ids, keyword):
"""
    Search torrents for a keyword.
    Matches the name, state, tracker status, first tracker URL, torrent id and file paths.
"""
all_torrents = component.get("TorrentManager").torrents
for torrent_id in torrent_ids:
torrent = all_torrents[torrent_id]
if keyword in torrent.filename.lower():
yield torrent_id
elif keyword in torrent.state.lower():
yield torrent_id
elif torrent.trackers and keyword in torrent.trackers[0]["url"]:
yield torrent_id
elif keyword in torrent_id:
yield torrent_id
# Want to find broken torrents (search on "error", or "unregistered")
elif keyword in torrent.tracker_status.lower():
yield torrent_id
else:
for t_file in torrent.get_files():
if keyword in t_file["path"].lower():
yield torrent_id
break
def filter_by_name(torrent_ids, search_string):
all_torrents = component.get("TorrentManager").torrents
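    # An optional '::match' suffix on the search string requests case-sensitive matching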
try:
search_string, match_case = search_string[0].split('::match')
except ValueError:
search_string = search_string[0]
match_case = False
if match_case is False:
search_string = search_string.lower()
for torrent_id in torrent_ids:
        torrent_name = all_torrents[torrent_id].get_name()
        if match_case is False:
            torrent_name = torrent_name.lower()
if search_string in torrent_name:
yield torrent_id
def tracker_error_filter(torrent_ids, values):
filtered_torrent_ids = []
tm = component.get("TorrentManager")
# If this is a tracker_host, then we need to filter on it
if values[0] != "Error":
for torrent_id in torrent_ids:
if values[0] == tm[torrent_id].get_status(["tracker_host"])["tracker_host"]:
filtered_torrent_ids.append(torrent_id)
return filtered_torrent_ids
# Check torrent's tracker_status for 'Error:' and return those torrent_ids
for torrent_id in torrent_ids:
if "Error:" in tm[torrent_id].get_status(["tracker_status"])["tracker_status"]:
filtered_torrent_ids.append(torrent_id)
return filtered_torrent_ids
class FilterManager(component.Component):
"""FilterManager
"""
def __init__(self, core):
component.Component.__init__(self, "FilterManager")
log.debug("FilterManager init..")
self.core = core
self.torrents = core.torrentmanager
self.registered_filters = {}
self.register_filter("keyword", filter_keywords)
self.register_filter("name", filter_by_name)
self.tree_fields = {}
self.prev_filter_tree_keys = None
self.filter_tree_items = None
self.register_tree_field("state", self._init_state_tree)
def _init_tracker_tree():
return {"Error": 0}
self.register_tree_field("tracker_host", _init_tracker_tree)
self.register_filter("tracker_host", tracker_error_filter)
def _init_users_tree():
return {"": 0}
self.register_tree_field("owner", _init_users_tree)
def filter_torrent_ids(self, filter_dict):
"""
        Return a list of torrent_ids matching filter_dict.
        Core filter method.
"""
if not filter_dict:
return self.torrents.get_torrent_list()
# Sanitize input: filter-value must be a list of strings
for key, value in filter_dict.items():
if isinstance(value, basestring):
filter_dict[key] = [value]
# Optimized filter for id
if "id" in filter_dict:
torrent_ids = list(filter_dict["id"])
del filter_dict["id"]
else:
torrent_ids = self.torrents.get_torrent_list()
# Return if there's nothing more to filter
if not filter_dict:
return torrent_ids
# Special purpose, state=Active.
if "state" in filter_dict:
# We need to make sure this is a list for the logic below
filter_dict["state"] = list(filter_dict["state"])
if "state" in filter_dict and "Active" in filter_dict["state"]:
filter_dict["state"].remove("Active")
if not filter_dict["state"]:
del filter_dict["state"]
torrent_ids = self.filter_state_active(torrent_ids)
if not filter_dict:
return torrent_ids
# Registered filters
for field, values in filter_dict.items():
if field in self.registered_filters:
# Filters out doubles
torrent_ids = list(set(self.registered_filters[field](torrent_ids, values)))
del filter_dict[field]
if not filter_dict:
return torrent_ids
torrent_keys, plugin_keys = self.torrents.separate_keys(filter_dict.keys(), torrent_ids)
# Leftover filter arguments, default filter on status fields.
for torrent_id in list(torrent_ids):
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys)
for field, values in filter_dict.iteritems():
if (not status[field] in values) and torrent_id in torrent_ids:
torrent_ids.remove(torrent_id)
return torrent_ids
def get_filter_tree(self, show_zero_hits=True, hide_cat=None):
"""
returns {field: [(value,count)] }
for use in sidebar.
"""
torrent_ids = self.torrents.get_torrent_list()
tree_keys = list(self.tree_fields.keys())
if hide_cat:
for cat in hide_cat:
tree_keys.remove(cat)
torrent_keys, plugin_keys = self.torrents.separate_keys(tree_keys, torrent_ids)
        # Rebuild the initial tree items only when the set of tree keys changes; otherwise reuse the cached ones
if self.prev_filter_tree_keys != tree_keys:
self.filter_tree_items = dict((field, self.tree_fields[field]()) for field in tree_keys)
self.prev_filter_tree_keys = tree_keys
items = copy.deepcopy(self.filter_tree_items)
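        # Count torrents per field value; each torrent's status is fetched once for all tree fields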
for torrent_id in list(torrent_ids):
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys) # status={key:value}
for field in tree_keys:
value = status[field]
items[field][value] = items[field].get(value, 0) + 1
if "tracker_host" in items:
items["tracker_host"]["All"] = len(torrent_ids)
items["tracker_host"]["Error"] = len(tracker_error_filter(torrent_ids, ("Error",)))
if not show_zero_hits:
for cat in ["state", "owner", "tracker_host"]:
if cat in tree_keys:
self._hide_state_items(items[cat])
# Return a dict of tuples:
sorted_items = {}
for field in tree_keys:
sorted_items[field] = sorted(items[field].iteritems())
if "state" in tree_keys:
sorted_items["state"].sort(self._sort_state_items)
return sorted_items
def _init_state_tree(self):
init_state = {}
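        # Seed the counters: "All" is the total, every known state starts at zero, "Active" is computed separately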
init_state["All"] = len(self.torrents.get_torrent_list())
for state in TORRENT_STATE:
init_state[state] = 0
init_state["Active"] = len(self.filter_state_active(self.torrents.get_torrent_list()))
return init_state
def register_filter(self, id, filter_func, filter_value=None):
self.registered_filters[id] = filter_func
def deregister_filter(self, id):
del self.registered_filters[id]
def register_tree_field(self, field, init_func=lambda: {}):
self.tree_fields[field] = init_func
def deregister_tree_field(self, field):
if field in self.tree_fields:
del self.tree_fields[field]
def filter_state_active(self, torrent_ids):
for torrent_id in list(torrent_ids):
status = self.torrents[torrent_id].get_status(["download_payload_rate", "upload_payload_rate"])
if status["download_payload_rate"] or status["upload_payload_rate"]:
pass
else:
torrent_ids.remove(torrent_id)
return torrent_ids
def _hide_state_items(self, state_items):
"for hide(show)-zero hits"
for (value, count) in state_items.items():
if value != "All" and count == 0:
del state_items[value]
def _sort_state_items(self, x, y):
""
if x[0] in STATE_SORT:
ix = STATE_SORT.index(x[0])
else:
ix = 99
if y[0] in STATE_SORT:
iy = STATE_SORT.index(y[0])
else:
iy = 99
return ix - iy
| rkokkelk/Gulliver | deluge/core/filtermanager.py | Python | gpl-3.0 | 9,642 |
#!/usr/bin/env python
# encoding: utf-8
"""A ${VISUAL} placeholder that will use the text that was last visually
selected and insert it here. If there was no text visually selected, this will
be the empty string. """
import re
import textwrap
from UltiSnips import _vim
from UltiSnips.indent_util import IndentUtil
from UltiSnips.text_objects._transformation import TextObjectTransformation
from UltiSnips.text_objects._base import NoneditableTextObject
_REPLACE_NON_WS = re.compile(r"[^ \t]")
class Visual(NoneditableTextObject, TextObjectTransformation):
"""See module docstring."""
def __init__(self, parent, token):
# Find our containing snippet for visual_content
snippet = parent
while snippet:
try:
self._text = snippet.visual_content.text
self._mode = snippet.visual_content.mode
break
except AttributeError:
snippet = snippet._parent # pylint:disable=protected-access
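        # No visual selection was captured: fall back to the token's alternative text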
if not self._text:
self._text = token.alternative_text
self._mode = "v"
NoneditableTextObject.__init__(self, parent, token)
TextObjectTransformation.__init__(self, token)
def _update(self, done):
if self._mode == "v": # Normal selection.
text = self._text
else: # Block selection or line selection.
text_before = _vim.buf[self.start.line][:self.start.col]
indent = _REPLACE_NON_WS.sub(" ", text_before)
iu = IndentUtil()
indent = iu.indent_to_spaces(indent)
indent = iu.spaces_to_indent(indent)
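            # Re-indent every line after the first so the pasted block lines up with the text before the placeholder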
text = ""
for idx, line in enumerate(textwrap.dedent(
self._text).splitlines(True)):
if idx != 0:
text += indent
text += line
text = text[:-1] # Strip final '\n'
text = self._transform(text)
self.overwrite(text)
self._parent._del_child(self) # pylint:disable=protected-access
return True
| eduardomallmann/vim-and-bash | pythonx/UltiSnips/text_objects/_visual.py | Python | gpl-3.0 | 2,074 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase
| elego/tkobr-addons | unported/tko_purchase_show_only_supplier_products/__init__.py | Python | agpl-3.0 | 1,094 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Order.name'
db.add_column('valueaccounting_order', 'name',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'EconomicResource.independent_demand'
db.add_column('valueaccounting_economicresource', 'independent_demand',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='dependent_resources', null=True, to=orm['valueaccounting.Order']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Order.name'
db.delete_column('valueaccounting_order', 'name')
# Deleting field 'EconomicResource.independent_demand'
db.delete_column('valueaccounting_economicresource', 'independent_demand_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'valueaccounting.agentassociation': {
'Meta': {'object_name': 'AgentAssociation'},
'association_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'associations'", 'to': "orm['valueaccounting.AssociationType']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'from_agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'associations_from'", 'to': "orm['valueaccounting.EconomicAgent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'to_agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'associations_to'", 'to': "orm['valueaccounting.EconomicAgent']"})
},
'valueaccounting.agentresourcetype': {
'Meta': {'object_name': 'AgentResourceType'},
'agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.EconomicAgent']"}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'arts_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'arts_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agent_resource_types'", 'to': "orm['valueaccounting.EventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lead_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agents'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'score': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}),
'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agent_resource_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'})
},
'valueaccounting.agenttype': {
'Meta': {'ordering': "('name',)", 'object_name': 'AgentType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member_type': ('django.db.models.fields.CharField', [], {'default': "'active'", 'max_length': '12'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub-agents'", 'null': 'True', 'to': "orm['valueaccounting.AgentType']"}),
'party_type': ('django.db.models.fields.CharField', [], {'default': "'individual'", 'max_length': '12'})
},
'valueaccounting.agentuser': {
'Meta': {'object_name': 'AgentUser'},
'agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'users'", 'to': "orm['valueaccounting.EconomicAgent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'agent'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'valueaccounting.associationtype': {
'Meta': {'object_name': 'AssociationType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'valueaccounting.cachedeventsummary': {
'Meta': {'ordering': "('agent', 'project', 'resource_type')", 'object_name': 'CachedEventSummary'},
'agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cached_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.DecimalField', [], {'default': "'1'", 'max_digits': '3', 'decimal_places': '0'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cached_events'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}),
'reputation': ('django.db.models.fields.DecimalField', [], {'default': "'1.00'", 'max_digits': '8', 'decimal_places': '2'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cached_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}),
'resource_type_rate': ('django.db.models.fields.DecimalField', [], {'default': "'1.0'", 'max_digits': '8', 'decimal_places': '2'}),
'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'})
},
'valueaccounting.commitment': {
'Meta': {'ordering': "('due_date',)", 'object_name': 'Commitment'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'commitment_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'due_date': ('django.db.models.fields.DateField', [], {}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments'", 'to': "orm['valueaccounting.EventType']"}),
'exchange': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Exchange']"}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'from_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'from_agent_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_commitments'", 'null': 'True', 'to': "orm['valueaccounting.AgentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'independent_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dependent_commitments'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}),
'process': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Process']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'quality': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '3', 'decimal_places': '0'}),
'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResource']"}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'to_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taken_commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitment_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitment_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'})
},
'valueaccounting.compensation': {
'Meta': {'ordering': "('compensation_date',)", 'object_name': 'Compensation'},
'compensating_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'compensations'", 'to': "orm['valueaccounting.EconomicEvent']"}),
'compensating_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'compensation_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initiating_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'initiated_compensations'", 'to': "orm['valueaccounting.EconomicEvent']"})
},
'valueaccounting.economicagent': {
'Meta': {'ordering': "('nick',)", 'object_name': 'EconomicAgent'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'agent_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agents'", 'to': "orm['valueaccounting.AgentType']"}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agents_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agents_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '96', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nick': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'reputation': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'valueaccounting.economicevent': {
'Meta': {'ordering': "('-event_date',)", 'object_name': 'EconomicEvent'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'commitment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'fulfillment_events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Commitment']"}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event_date': ('django.db.models.fields.DateField', [], {}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'events'", 'to': "orm['valueaccounting.EventType']"}),
'exchange': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Exchange']"}),
'from_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_contribution': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'process': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Process']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Project']"}),
'quality': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '3', 'decimal_places': '0'}),
'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResource']"}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'events'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'to_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taken_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'})
},
'valueaccounting.economicresource': {
'Meta': {'ordering': "('resource_type', 'identifier')", 'object_name': 'EconomicResource'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authored_resources'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'custodian': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'custody_resources'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'independent_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dependent_resources'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_resources'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'quality': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'null': 'True', 'max_digits': '3', 'decimal_places': '0', 'blank': 'True'}),
'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'1.00'", 'max_digits': '8', 'decimal_places': '2'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resources'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'valueaccounting.economicresourcetype': {
'Meta': {'ordering': "('name',)", 'object_name': 'EconomicResourceType'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rate': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '6', 'decimal_places': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'substitutable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'unit_of_use': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'units_of_use'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'valueaccounting.eventtype': {
'Meta': {'ordering': "('label',)", 'object_name': 'EventType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inverse_label': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'related_to': ('django.db.models.fields.CharField', [], {'default': "'process'", 'max_length': '12'}),
'relationship': ('django.db.models.fields.CharField', [], {'default': "'in'", 'max_length': '12'}),
'resource_effect': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'unit_type': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'})
},
'valueaccounting.exchange': {
'Meta': {'object_name': 'Exchange'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'use_case': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.UseCase']"})
},
'valueaccounting.facet': {
'Meta': {'ordering': "('name',)", 'object_name': 'Facet'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
'valueaccounting.facetvalue': {
'Meta': {'ordering': "('facet', 'value')", 'unique_together': "(('facet', 'value'),)", 'object_name': 'FacetValue'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'facet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'values'", 'to': "orm['valueaccounting.Facet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'valueaccounting.feature': {
'Meta': {'ordering': "('name',)", 'object_name': 'Feature'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'features'", 'to': "orm['valueaccounting.EventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'process_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'features'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}),
'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feature_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"})
},
'valueaccounting.help': {
'Meta': {'ordering': "('page',)", 'object_name': 'Help'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'})
},
'valueaccounting.option': {
'Meta': {'ordering': "('component',)", 'object_name': 'Option'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'options_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'component': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'options_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'feature': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.Feature']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'valueaccounting.order': {
'Meta': {'ordering': "('due_date',)", 'object_name': 'Order'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'due_date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'order_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'order_type': ('django.db.models.fields.CharField', [], {'default': "'customer'", 'max_length': '12'}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sales_orders'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'purchase_orders'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"})
},
'valueaccounting.patternfacetvalue': {
'Meta': {'ordering': "('pattern', 'event_type', 'facet_value')", 'unique_together': "(('pattern', 'facet_value', 'event_type'),)", 'object_name': 'PatternFacetValue'},
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'patterns'", 'to': "orm['valueaccounting.EventType']"}),
'facet_value': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'patterns'", 'to': "orm['valueaccounting.FacetValue']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pattern': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'facets'", 'to': "orm['valueaccounting.ProcessPattern']"})
},
'valueaccounting.patternusecase': {
'Meta': {'object_name': 'PatternUseCase'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pattern': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'use_cases'", 'to': "orm['valueaccounting.ProcessPattern']"}),
'use_case': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'patterns'", 'null': 'True', 'to': "orm['valueaccounting.UseCase']"})
},
'valueaccounting.process': {
'Meta': {'ordering': "('end_date',)", 'object_name': 'Process'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'managed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'managed_processes'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_processes'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub_processes'", 'null': 'True', 'to': "orm['valueaccounting.Process']"}),
'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}),
'process_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.ProcessType']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'started': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'valueaccounting.processpattern': {
'Meta': {'ordering': "('name',)", 'object_name': 'ProcessPattern'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'valueaccounting.processtype': {
'Meta': {'ordering': "('name',)", 'object_name': 'ProcessType'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'estimated_duration': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub_process_types'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}),
'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'valueaccounting.processtyperesourcetype': {
'Meta': {'ordering': "('resource_type',)", 'object_name': 'ProcessTypeResourceType'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ptrts_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ptrts_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'process_resource_types'", 'to': "orm['valueaccounting.EventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'process_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.ProcessType']"}),
'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'process_types'", 'to': "orm['valueaccounting.EconomicResourceType']"}),
'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_resource_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"})
},
'valueaccounting.project': {
'Meta': {'ordering': "('name',)", 'object_name': 'Project'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'projects_changed'", 'null': 'True', 'to': "orm['auth.User']"}),
'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'projects_created'", 'null': 'True', 'to': "orm['auth.User']"}),
'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '3', 'decimal_places': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub_projects'", 'null': 'True', 'to': "orm['valueaccounting.Project']"}),
'project_team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_team'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'valueaccounting.reciprocity': {
'Meta': {'ordering': "('reciprocity_date',)", 'object_name': 'Reciprocity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initiating_commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'initiated_commitments'", 'to': "orm['valueaccounting.Commitment']"}),
'reciprocal_commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reciprocal_commitments'", 'to': "orm['valueaccounting.Commitment']"}),
'reciprocity_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'})
},
'valueaccounting.resourcetypefacetvalue': {
'Meta': {'ordering': "('resource_type', 'facet_value')", 'unique_together': "(('resource_type', 'facet_value'),)", 'object_name': 'ResourceTypeFacetValue'},
'facet_value': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.FacetValue']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'facets'", 'to': "orm['valueaccounting.EconomicResourceType']"})
},
'valueaccounting.selectedoption': {
'Meta': {'ordering': "('commitment', 'option')", 'object_name': 'SelectedOption'},
'commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.Commitment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments'", 'to': "orm['valueaccounting.Option']"})
},
'valueaccounting.unit': {
'Meta': {'ordering': "('name',)", 'object_name': 'Unit'},
'abbrev': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'unit_type': ('django.db.models.fields.CharField', [], {'max_length': '12'})
},
'valueaccounting.usecase': {
'Meta': {'object_name': 'UseCase'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'restrict_to_one_pattern': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['valueaccounting'] | thierrymarianne/valuenetwork | valuenetwork/valueaccounting/migrations/0003_auto__add_field_order_name__add_field_economicresource_independent_dem.py | Python | agpl-3.0 | 47,400 |
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, models
class IRActionsWindow(models.Model):
_inherit = 'ir.actions.act_window'
@api.multi
def read(self, fields=None, context=None, load='_classic_read'):
actions = super(IRActionsWindow, self).read(fields=fields, load=load)
for action in actions:
if action.get('res_model', '') == 'res.partner':
# By default, only show standalone contact
action_context = action.get('context', '{}') or '{}'
if 'search_show_all_positions' not in action_context:
action['context'] = action_context.replace(
'{',
("{'search_show_all_positions': "
"{'is_set': True, 'set_value': False},"),
1)
return actions
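# Illustrative note (not part of the original module): given the replace()
# call above, a partner action whose context is, for example,
#     "{'default_customer': 1}"
# becomes
#     "{'search_show_all_positions': {'is_set': True, 'set_value': False},'default_customer': 1}"
# so contact "positions" are hidden by default; the example context is made up.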
| brain-tec/partner-contact | partner_contact_in_several_companies/models/ir_actions.py | Python | agpl-3.0 | 922 |
"""
Logistration API View Tests
"""
from unittest.mock import patch
from urllib.parse import urlencode
import socket
import ddt
from django.conf import settings
from django.urls import reverse
from rest_framework.test import APITestCase
from common.djangoapps.student.models import Registration
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangoapps.user_api.tests.test_views import UserAPITestCase
from openedx.core.djangolib.testing.utils import skip_unless_lms
from common.djangoapps.third_party_auth import pipeline
from common.djangoapps.third_party_auth.tests.testutil import ThirdPartyAuthTestMixin, simulate_running_pipeline
from openedx.core.djangoapps.geoinfo.api import country_code_from_ip
@skip_unless_lms
@ddt.ddt
class MFEContextViewTest(ThirdPartyAuthTestMixin, APITestCase):
"""
MFE context tests
"""
def setUp(self): # pylint: disable=arguments-differ
"""
Test Setup
"""
super().setUp()
self.url = reverse('mfe_context')
self.query_params = {'next': '/dashboard'}
hostname = socket.gethostname()
ip_address = socket.gethostbyname(hostname)
self.country_code = country_code_from_ip(ip_address)
# Several third party auth providers are created for these tests:
self.configure_google_provider(enabled=True, visible=True)
self.configure_facebook_provider(enabled=True, visible=True)
self.hidden_enabled_provider = self.configure_linkedin_provider(
visible=False,
enabled=True,
)
def _third_party_login_url(self, backend_name, auth_entry, params):
"""
Construct the login URL to start third party authentication
"""
return '{url}?auth_entry={auth_entry}&{param_str}'.format(
url=reverse('social:begin', kwargs={'backend': backend_name}),
auth_entry=auth_entry,
param_str=urlencode(params)
)
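        # Illustrative result (the exact path prefix depends on URL config):
        #     _third_party_login_url('google-oauth2', 'login',
        #                            {'next': '/dashboard'})
        #     -> '.../login/google-oauth2/?auth_entry=login&next=%2Fdashboard'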
def get_provider_data(self, params):
"""
Returns the expected provider data based on providers enabled in test setup
"""
return [
{
'id': 'oa2-facebook',
'name': 'Facebook',
'iconClass': 'fa-facebook',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('facebook', 'login', params),
'registerUrl': self._third_party_login_url('facebook', 'register', params)
},
{
'id': 'oa2-google-oauth2',
'name': 'Google',
'iconClass': 'fa-google-plus',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('google-oauth2', 'login', params),
'registerUrl': self._third_party_login_url('google-oauth2', 'register', params)
},
]
def get_context(self, params=None, current_provider=None, backend_name=None, add_user_details=False):
"""
Returns the MFE context
"""
return {
'currentProvider': current_provider,
'platformName': settings.PLATFORM_NAME,
'providers': self.get_provider_data(params) if params else [],
'secondaryProviders': [],
'finishAuthUrl': pipeline.get_complete_url(backend_name) if backend_name else None,
'errorMessage': None,
'registerFormSubmitButtonText': 'Create Account',
'syncLearnerProfileData': False,
'pipeline_user_details': {'email': '[email protected]'} if add_user_details else {},
'countryCode': self.country_code
}
@patch.dict(settings.FEATURES, {'ENABLE_THIRD_PARTY_AUTH': False})
def test_no_third_party_auth_providers(self):
"""
        Test that when third party auth is disabled, the context returned by
        the API does not contain any provider information
"""
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data == self.get_context()
def test_third_party_auth_providers(self):
"""
Test that api returns details of currently enabled third party auth providers
"""
response = self.client.get(self.url, self.query_params)
params = {
'next': self.query_params['next']
}
assert response.status_code == 200
assert response.data == self.get_context(params)
@ddt.data(
('google-oauth2', 'Google', False),
('facebook', 'Facebook', False),
('google-oauth2', 'Google', True)
)
@ddt.unpack
def test_running_pipeline(self, current_backend, current_provider, add_user_details):
"""
Test that when third party pipeline is running, the api returns details
of current provider
"""
email = '[email protected]' if add_user_details else None
params = {
'next': self.query_params['next']
}
# Simulate a running pipeline
pipeline_target = 'openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline'
with simulate_running_pipeline(pipeline_target, current_backend, email=email):
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data == self.get_context(params, current_provider, current_backend, add_user_details)
def test_tpa_hint(self):
"""
Test that if tpa_hint is provided, the context returns the third party auth provider
even if it is not visible on the login page
"""
params = {
'next': self.query_params['next']
}
tpa_hint = self.hidden_enabled_provider.provider_id
self.query_params.update({'tpa_hint': tpa_hint})
provider_data = self.get_provider_data(params)
provider_data.append({
'id': self.hidden_enabled_provider.provider_id,
'name': 'LinkedIn',
'iconClass': 'fa-linkedin',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('linkedin-oauth2', 'login', params),
'registerUrl': self._third_party_login_url('linkedin-oauth2', 'register', params)
})
response = self.client.get(self.url, self.query_params)
assert response.data['providers'] == provider_data
def test_user_country_code(self):
"""
Test api that returns country code of user
"""
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data['countryCode'] == self.country_code
@skip_unless_lms
class SendAccountActivationEmail(UserAPITestCase):
"""
Test for send activation email view
"""
def setUp(self):
"""
Create a user, then log in.
"""
super().setUp()
self.user = UserFactory()
Registration().register(self.user)
result = self.client.login(username=self.user.username, password="test")
assert result, 'Could not log in'
self.path = reverse('send_account_activation_email')
@patch('common.djangoapps.student.views.management.compose_activation_email')
def test_send_email_to_inactive_user_via_cta_dialog(self, email):
"""
        Tests that when the user clicks resend activation email on the CTA
        dialog box, the system sends an activation email to the user.
"""
self.user.is_active = False
self.user.save()
self.client.post(self.path)
assert email.called is True, 'method should have been called'
| edx/edx-platform | openedx/core/djangoapps/user_authn/api/tests/test_views.py | Python | agpl-3.0 | 7,808 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from erpnext.accounts.report.accounts_receivable.accounts_receivable import ReceivablePayableReport
def execute(filters=None):
args = {
"party_type": "Supplier",
"naming_by": ["Buying Settings", "supp_master_name"],
}
return ReceivablePayableReport(filters).run(args)
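# Illustrative note (not part of the original report): the Accounts Receivable
# report class is reused here with "Supplier" as the party type, so a call like
#     execute({"company": "Some Company", "report_date": "2013-12-31"})
# is expected to return the usual (columns, data) pair built by
# ReceivablePayableReport.run(); the filter values shown are made up.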
| gangadharkadam/v5_erp | erpnext/accounts/report/accounts_payable/accounts_payable.py | Python | agpl-3.0 | 461 |
default_app_config = 'apps.datasetmanager.apps.datasetmanagerConfig'
| almey/policycompass-services | apps/datasetmanager/__init__.py | Python | agpl-3.0 | 69 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Schedulers determine how workers' queues get filled. They control which
locations get scanned, in what order, at what time. This allows further
optimizations to be easily added, without having to modify the existing
overseer and worker thread code.
Schedulers will receive:
queues - A list of queues for the workers they control. For now, this is a
list containing a single queue.
status - A list of status dicts for the workers. Schedulers can use this
information to make more intelligent scheduling decisions.
Useful values include:
- last_scan_date: unix timestamp of when the last scan was
completed
- location: [lat,lng,alt] of the last scan
args - The configuration arguments. This may not include all of the arguments,
just ones that are relevant to this scheduler instance (eg. if
multiple locations become supported, the args passed to the
scheduler will only contain the parameters for the location
it handles)
Schedulers must fill the queues with items to search.
Queue items are a list containing:
    [step, (latitude, longitude, altitude),
    appears_seconds, disappears_seconds]
Where:
- step is the step number. Used only for display purposes.
- (latitude, longitude, altitude) is the location to be scanned.
- appears_seconds is the unix timestamp of when the pokemon next appears
- disappears_seconds is the unix timestamp of when the
pokemon next disappears
appears_seconds and disappears_seconds are used to skip scans that are too
late, and wait for scans the worker is early for. If a scheduler doesn't
have a specific time a location needs to be scanned, it should set
both to 0.
If implementing a new scheduler, place it before SchedulerFactory, and
add it to __scheduler_classes
'''
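# Illustrative sketch (not part of the original module) of the queue item
# format described above; the step number, coordinates and timestamps are
# made up:
#
#     from queue import Queue
#     q = Queue()
#     # step 1 at (lat, lng, alt); 0/0 means "no specific spawn window"
#     q.put((1, (40.7580, -73.9855, 10.0), 0, 0))
#     step, loc, appears_seconds, disappears_seconds = q.get()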
import itertools
import logging
import math
import geopy
import json
import time
import sys
from timeit import default_timer
from threading import Lock
from copy import deepcopy
import traceback
from collections import Counter
from queue import Empty
from operator import itemgetter
from datetime import datetime, timedelta
from .transform import get_new_coords
from .models import (hex_bounds, Pokemon, SpawnPoint, ScannedLocation,
ScanSpawnPoint)
from .utils import now, cur_sec, cellid, date_secs, equi_rect_distance
from .altitude import get_altitude
log = logging.getLogger(__name__)
# Simple base class that all other schedulers inherit from.
# Most of these functions should be overridden in the actual scheduler classes.
# Not all scheduler methods will need to use all of the functions.
class BaseScheduler(object):
def __init__(self, queues, status, args):
self.queues = queues
self.status = status
self.args = args
self.scan_location = False
self.size = None
self.ready = False
# Schedule function fills the queues with data.
def schedule(self):
log.warning('BaseScheduler does not schedule any items')
# location_changed function is called whenever the location being
# scanned changes.
# scan_location = (lat, lng, alt)
def location_changed(self, scan_location, dbq):
self.scan_location = scan_location
self.empty_queues()
    # scanning_paused function is called when scanning is paused from the UI.
# The default function will empty all the queues.
# Note: This function is called repeatedly while scanning is paused!
def scanning_paused(self):
self.empty_queues()
def getsize(self):
return self.size
def get_overseer_message(self):
nextitem = self.queues[0].queue[0]
message = 'Processing search queue, next item is {:6f},{:6f}'.format(
nextitem[1][0], nextitem[1][1])
# If times are specified, print the time of the next queue item, and
# how many seconds ahead/behind realtime
if nextitem[2]:
message += ' @ {}'.format(
time.strftime('%H:%M:%S', time.localtime(nextitem[2])))
if nextitem[2] > now():
message += ' ({}s ahead)'.format(nextitem[2] - now())
else:
message += ' ({}s behind)'.format(now() - nextitem[2])
return message
# check if time to refresh queue
def time_to_refresh_queue(self):
return self.queues[0].empty()
def task_done(self, *args):
return self.queues[0].task_done()
# Return the next item in the queue
def next_item(self, search_items_queue):
step, step_location, appears, leaves = self.queues[0].get()
remain = appears - now() + 10
messages = {
'wait': 'Waiting for item from queue.',
'early': 'Early for {:6f},{:6f}; waiting {}s...'.format(
step_location[0], step_location[1], remain),
'late': 'Too late for location {:6f},{:6f}; skipping.'.format(
step_location[0], step_location[1]),
'search': 'Searching at {:6f},{:6f},{:6f}.'.format(
step_location[0], step_location[1], step_location[2]),
'invalid': ('Invalid response at {:6f},{:6f}, ' +
'abandoning location.').format(step_location[0],
step_location[1])
}
return step, step_location, appears, leaves, messages
# How long to delay since last action
def delay(self, *args):
return self.args.scan_delay # always scan delay time
# Function to empty all queues in the queues list
def empty_queues(self):
self.ready = False
for queue in self.queues:
if not queue.empty():
try:
while True:
queue.get_nowait()
except Empty:
pass
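# A hedged sketch (not from the original code) of how an overseer is expected
# to drive a scheduler built on the base class above; the variable names and
# the use of HexSearch are illustrative only:
#
#     scheduler = HexSearch(queues, status, args)
#     scheduler.location_changed((40.7580, -73.9855, 10.0), db_update_queue)
#     if scheduler.time_to_refresh_queue():
#         scheduler.schedule()
#     step, loc, appears, leaves, messages = scheduler.next_item(queues[0])
#     # ... perform the scan ...
#     scheduler.task_done()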
# Hex Search is the classic search method, with the pokepath modification,
# searching in a hex grid around the center location.
class HexSearch(BaseScheduler):
# Call base initialization, set step_distance.
def __init__(self, queues, status, args):
BaseScheduler.__init__(self, queues, status, args)
# If we are only scanning for pokestops/gyms, the scan radius can be
# 450m. Otherwise 70m.
if self.args.no_pokemon:
self.step_distance = 0.450
else:
self.step_distance = 0.070
self.step_limit = args.step_limit
# This will hold the list of locations to scan so it can be reused,
# instead of recalculating on each loop.
self.locations = False
# On location change, empty the current queue and the locations list
def location_changed(self, scan_location, dbq):
self.scan_location = scan_location
self.empty_queues()
self.locations = False
# Generates the list of locations to scan.
def _generate_locations(self):
NORTH = 0
EAST = 90
SOUTH = 180
WEST = 270
# Dist between column centers.
xdist = math.sqrt(3) * self.step_distance
ydist = 3 * (self.step_distance / 2) # Dist between row centers.
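        # Worked example: with the default 0.070 km step distance,
        # xdist = sqrt(3) * 0.070 ~= 0.121 km and ydist = 0.105 km.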
results = []
results.append((self.scan_location[0], self.scan_location[1], 0))
if self.step_limit > 1:
loc = self.scan_location
# Upper part.
ring = 1
while ring < self.step_limit:
loc = get_new_coords(
loc, xdist, WEST if ring % 2 == 1 else EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist, NORTH)
loc = get_new_coords(
loc, xdist / 2, EAST if ring % 2 == 1 else WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(
loc, xdist, EAST if ring % 2 == 1 else WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist, SOUTH)
loc = get_new_coords(
loc, xdist / 2, EAST if ring % 2 == 1 else WEST)
results.append((loc[0], loc[1], 0))
ring += 1
# Lower part.
ring = self.step_limit - 1
loc = get_new_coords(loc, ydist, SOUTH)
loc = get_new_coords(
loc, xdist / 2, WEST if ring % 2 == 1 else EAST)
results.append((loc[0], loc[1], 0))
while ring > 0:
if ring == 1:
loc = get_new_coords(loc, xdist, WEST)
results.append((loc[0], loc[1], 0))
else:
for i in range(ring - 1):
loc = get_new_coords(loc, ydist, SOUTH)
loc = get_new_coords(
loc, xdist / 2, WEST if ring % 2 == 1 else EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(
loc, xdist, WEST if ring % 2 == 1 else EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring - 1):
loc = get_new_coords(loc, ydist, NORTH)
loc = get_new_coords(
loc, xdist / 2, WEST if ring % 2 == 1 else EAST)
results.append((loc[0], loc[1], 0))
loc = get_new_coords(
loc, xdist, EAST if ring % 2 == 1 else WEST)
results.append((loc[0], loc[1], 0))
ring -= 1
# This will pull the last few steps back to the front of the list,
# so you get a "center nugget" at the beginning of the scan, instead
        # of the entire northern area before the scan spots 70m to the south.
if self.step_limit >= 3:
if self.step_limit == 3:
results = results[-2:] + results[:-2]
else:
results = results[-7:] + results[:-7]
# Add the required appear and disappear times.
locationsZeroed = []
for step, location in enumerate(results, 1):
altitude = get_altitude(self.args, location)
locationsZeroed.append(
(step, (location[0], location[1], altitude), 0, 0))
return locationsZeroed
# Schedule the work to be done.
def schedule(self):
if not self.scan_location:
log.warning(
'Cannot schedule work until scan location has been set')
return
# Only generate the list of locations if we don't have it already
# calculated.
if not self.locations:
self.locations = self._generate_locations()
for location in self.locations:
# FUTURE IMPROVEMENT - For now, queues is assumed to have a single
# queue.
self.queues[0].put(location)
log.debug("Added location {}".format(location))
self.size = len(self.locations)
self.ready = True
# Spawn Only Hex Search works like Hex Search, but skips locations that
# have no known spawnpoints.
class HexSearchSpawnpoint(HexSearch):
def _any_spawnpoints_in_range(self, coords, spawnpoints):
return any(
geopy.distance.distance(coords, x).meters <= 70
for x in spawnpoints)
# Extend the generate_locations function to remove locations with no
# spawnpoints.
def _generate_locations(self):
n, e, s, w = hex_bounds(self.scan_location, self.step_limit)
spawnpoints = set((d['latitude'], d['longitude'])
for d in Pokemon.get_spawnpoints(s, w, n, e))
if len(spawnpoints) == 0:
log.warning('No spawnpoints found in the specified area! (Did ' +
'you forget to run a normal scan in this area first?)')
# Call the original _generate_locations.
locations = super(HexSearchSpawnpoint, self)._generate_locations()
# Remove items with no spawnpoints in range.
locations = [
coords for coords in locations
if self._any_spawnpoints_in_range(coords[1], spawnpoints)]
return locations
# Spawn Scan searches known spawnpoints at the specific time they spawn.
class SpawnScan(BaseScheduler):
def __init__(self, queues, status, args):
BaseScheduler.__init__(self, queues, status, args)
# On the first scan, we want to search the last 15 minutes worth of
# spawns to get existing pokemon onto the map.
self.firstscan = True
# If we are only scanning for pokestops/gyms, the scan radius can be
# 450m. Otherwise 70m.
if self.args.no_pokemon:
self.step_distance = 0.450
else:
self.step_distance = 0.070
self.step_limit = args.step_limit
self.locations = False
# Generate locations is called when the locations list is cleared - the
# first time it scans or after a location change.
def _generate_locations(self):
# Attempt to load spawns from file.
if self.args.spawnpoint_scanning != 'nofile':
log.debug('Loading spawn points from json file @ %s',
self.args.spawnpoint_scanning)
try:
with open(self.args.spawnpoint_scanning) as file:
self.locations = json.load(file)
except ValueError as e:
log.error('JSON error: %s; will fallback to database', repr(e))
except IOError as e:
log.error(
'Error opening json file: %s; will fallback to database',
repr(e))
# No locations yet? Try the database!
if not self.locations:
log.debug('Loading spawn points from database')
self.locations = Pokemon.get_spawnpoints_in_hex(
self.scan_location, self.args.step_limit)
# Well shit...
# if not self.locations:
        #     raise Exception('No available spawn points!')
# locations[]:
# {"lat": 37.53079079414139, "lng": -122.28811690874117,
# "spawnpoint_id": "808f9f1601d", "time": 511
log.info('Total of %d spawns to track', len(self.locations))
# locations.sort(key=itemgetter('time'))
if self.args.very_verbose:
for i in self.locations:
sec = i['time'] % 60
minute = (i['time'] / 60) % 60
m = 'Scan [{:02}:{:02}] ({}) @ {},{}'.format(
minute, sec, i['time'], i['lat'], i['lng'])
log.debug(m)
# 'time' from json and db alike has been munged to appearance time as
# seconds after the hour.
# Here we'll convert that to a real timestamp.
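        # Worked example (illustrative numbers): if cur_sec() is 1800
        # (half past the hour) and location['time'] is 600, the spawn was
        # missed by 1200 s this hour, so it next appears at
        # now() + 3600 - 1200 and leaves 900 s (15 min) later.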
for location in self.locations:
# For a scan which should cover all CURRENT pokemon, we can offset
# the comparison time by 15 minutes so that the "appears" time
# won't be rolled over to the next hour.
# TODO: Make it work. The original logic (commented out) was
# producing bogus results if your first scan was in the last
            #       15 minutes of the hour. Wrapping my head around this isn't
            #       worth it right now, so I'll just drop the feature for the time
# being. It does need to come back so that
# repositioning/pausing works more nicely, but we can live
# without it too.
# if sps_scan_current:
# cursec = (location['time'] + 900) % 3600
# else:
cursec = location['time']
if cursec > cur_sec():
# Hasn't spawn in the current hour.
from_now = location['time'] - cur_sec()
appears = now() + from_now
else:
# Won't spawn till next hour.
late_by = cur_sec() - location['time']
appears = now() + 3600 - late_by
location['appears'] = appears
location['leaves'] = appears + 900
# Put the spawn points in order of next appearance time.
self.locations.sort(key=itemgetter('appears'))
# Match expected structure:
# locations = [((lat, lng, alt), ts_appears, ts_leaves),...]
retset = []
for step, location in enumerate(self.locations, 1):
altitude = get_altitude(self.args, [location['lat'],
location['lng']])
retset.append((step, (location['lat'], location['lng'], altitude),
location['appears'], location['leaves']))
return retset
# Schedule the work to be done.
def schedule(self):
if not self.scan_location:
log.warning(
'Cannot schedule work until scan location has been set')
return
# SpawnScan needs to calculate the list every time, since the times
# will change.
self.locations = self._generate_locations()
for location in self.locations:
# FUTURE IMPROVEMENT - For now, queues is assumed to have a single
# queue.
self.queues[0].put(location)
log.debug("Added location {}".format(location))
# Clear the locations list so it gets regenerated next cycle.
self.size = len(self.locations)
self.locations = None
self.ready = True
# SpeedScan is a complete search method that initially does a spawnpoint
# search in each scan location by scanning five two-minute bands within
# an hour, with ten-minute intervals between bands.
# After finishing the spawnpoint search or if timing isn't right for any of
# the remaining search bands, workers will search the nearest scan location
# that has a new spawn.
class SpeedScan(HexSearch):
# Call base initialization, set step_distance
def __init__(self, queues, status, args):
super(SpeedScan, self).__init__(queues, status, args)
self.refresh_date = datetime.utcnow() - timedelta(days=1)
self.next_band_date = self.refresh_date
self.queues = [[]]
self.ready = False
self.spawns_found = 0
self.spawns_missed_delay = {}
self.scans_done = 0
self.scans_missed = 0
self.scans_missed_list = []
# Minutes between queue refreshes. Should be less than 10 to allow for
        # new bands during the initial scan.
self.minutes = 5
self.found_percent = []
self.scan_percent = []
self.spawn_percent = []
self.status_message = []
self.tth_found = 0
# Initiate special types.
self._stat_init()
self._locks_init()
def _stat_init(self):
self.spawns_found = 0
self.spawns_missed_delay = {}
self.scans_done = 0
self.scans_missed = 0
self.scans_missed_list = []
def _locks_init(self):
self.lock_next_item = Lock()
# On location change, empty the current queue and the locations list
def location_changed(self, scan_location, db_update_queue):
super(SpeedScan, self).location_changed(scan_location, db_update_queue)
self.locations = self._generate_locations()
scans = {}
initial = {}
all_scans = {}
for sl in ScannedLocation.select_in_hex(self.scan_location,
self.args.step_limit):
all_scans[cellid((sl['latitude'], sl['longitude']))] = sl
for i, e in enumerate(self.locations):
cell = cellid(e[1])
scans[cell] = {'loc': e[1], # Lat/long pair
'step': e[0]}
initial[cell] = all_scans[cell] if cell in all_scans.keys(
) else ScannedLocation.new_loc(e[1])
self.scans = scans
db_update_queue.put((ScannedLocation, initial))
log.info('%d steps created', len(scans))
self.band_spacing = int(10 * 60 / len(scans))
self.band_status()
spawnpoints = SpawnPoint.select_in_hex(
self.scan_location, self.args.step_limit)
if not spawnpoints:
log.info('No spawnpoints in hex found in SpawnPoint table. ' +
'Doing initial scan.')
log.info('Found %d spawn points within hex', len(spawnpoints))
log.info('Doing %s distance calcs to assign spawn points to scans',
"{:,}".format(len(spawnpoints) * len(scans)))
scan_spawn_point = {}
ScannedLocation.link_spawn_points(scans, initial, spawnpoints,
self.step_distance, scan_spawn_point,
force=True)
if len(scan_spawn_point):
log.info('%d relations found between the spawn points and steps',
len(scan_spawn_point))
db_update_queue.put((ScanSpawnPoint, scan_spawn_point))
else:
log.info('Spawn points assigned')
# Generates the list of locations to scan
# Created a new function, because speed scan requires fixed locations,
# even when increasing -st. With HexSearch locations, the location of
    # inner rings would change if -st was increased, requiring rescanning
    # since the locations would no longer match the ScannedLocation table.
def _generate_locations(self):
NORTH = 0
EAST = 90
SOUTH = 180
WEST = 270
# dist between column centers
xdist = math.sqrt(3) * self.step_distance
ydist = 3 * (self.step_distance / 2) # dist between row centers
results = []
loc = self.scan_location
results.append((loc[0], loc[1], 0))
# upper part
for ring in range(1, self.step_limit):
for i in range(max(ring - 1, 1)):
if ring > 1:
loc = get_new_coords(loc, ydist, NORTH)
loc = get_new_coords(loc, xdist / (1 + (ring > 1)), WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist, NORTH)
loc = get_new_coords(loc, xdist / 2, EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, xdist, EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist, SOUTH)
loc = get_new_coords(loc, xdist / 2, EAST)
results.append((loc[0], loc[1], 0))
for i in range(ring):
loc = get_new_coords(loc, ydist, SOUTH)
loc = get_new_coords(loc, xdist / 2, WEST)
results.append((loc[0], loc[1], 0))
for i in range(ring + (ring + 1 < self.step_limit)):
loc = get_new_coords(loc, xdist, WEST)
results.append((loc[0], loc[1], 0))
generated_locations = []
for step, location in enumerate(results):
altitude = get_altitude(self.args, location)
generated_locations.append(
(step, (location[0], location[1], altitude), 0, 0))
return generated_locations
def getsize(self):
return len(self.queues[0])
def get_overseer_message(self):
n = 0
ms = (datetime.utcnow() - self.refresh_date).total_seconds() + \
self.refresh_ms
counter = {
'TTH': 0,
'spawn': 0,
'band': 0,
}
for item in self.queues[0]:
if item.get('done', False):
continue
if ms > item['end']:
continue
if ms < item['start']:
break
n += 1
counter[item['kind']] += 1
message = ('Scanning status: {} total waiting, {} initial bands, ' +
'{} TTH searches, and {} new spawns').format(
n, counter['band'], counter['TTH'], counter['spawn'])
if self.status_message:
message += '\n' + self.status_message
return message
    # Refresh the queue every 5 minutes, or immediately if it is empty
    # (e.g. right after the first band of a scan is done).
def time_to_refresh_queue(self):
return ((datetime.utcnow() - self.refresh_date).total_seconds() >
self.minutes * 60 or self.queues == [[]])
# Function to empty all queues in the queues list
def empty_queues(self):
self.queues = [[]]
# How long to delay since last action
def delay(self, last_scan_date):
return max(
((last_scan_date - datetime.utcnow()).total_seconds() +
self.args.scan_delay),
2)
def band_status(self):
try:
bands_total = len(self.locations) * 5
bands_filled = ScannedLocation.get_bands_filled_by_cellids(
self.scans.keys())
percent = bands_filled * 100.0 / bands_total
if bands_total == bands_filled:
log.info('Initial spawnpoint scan is complete')
else:
log.info('Initial spawnpoint scan, %d of %d bands are done ' +
'or %.1f%% complete', bands_filled, bands_total,
percent)
return percent
except Exception as e:
log.error(
'Exception in band_status: Exception message: {}'.format(
repr(e)))
# Update the queue, and provide a report on performance of last minutes
def schedule(self):
log.info('Refreshing queue')
self.ready = False
now_date = datetime.utcnow()
self.refresh_date = now_date
self.refresh_ms = now_date.minute * 60 + now_date.second
old_q = deepcopy(self.queues[0])
queue = []
# Measure the time it takes to refresh the queue
start = time.time()
# prefetch all scanned locations
scanned_locations = ScannedLocation.get_by_cellids(self.scans.keys())
# extract all spawnpoints into a dict with spawnpoint
# id -> spawnpoint for easy access later
cell_to_linked_spawn_points = (
ScannedLocation.get_cell_to_linked_spawn_points(self.scans.keys()))
sp_by_id = {}
for sps in cell_to_linked_spawn_points.itervalues():
for sp in sps:
sp_by_id[sp['id']] = sp
for cell, scan in self.scans.iteritems():
queue += ScannedLocation.get_times(scan, now_date,
scanned_locations)
queue += SpawnPoint.get_times(cell, scan, now_date,
self.args.spawn_delay,
cell_to_linked_spawn_points,
sp_by_id)
end = time.time()
queue.sort(key=itemgetter('start'))
self.queues[0] = queue
self.ready = True
log.info('New queue created with %d entries in %f seconds', len(queue),
(end - start))
if old_q:
# Enclosing in try: to avoid divide by zero exceptions from
# killing overseer
try:
# Possible 'done' values are 'Missed', 'Scanned', None, or
# number
Not_none_list = filter(lambda e: e.get(
'done', None) is not None, old_q)
Missed_list = filter(lambda e: e.get(
'done', None) == 'Missed', Not_none_list)
Scanned_list = filter(lambda e: e.get(
'done', None) == 'Scanned', Not_none_list)
Timed_list = filter(lambda e: type(
e['done']) is not str, Not_none_list)
spawns_timed_list = filter(
lambda e: e['kind'] == 'spawn', Timed_list)
spawns_timed = len(spawns_timed_list)
bands_timed = len(
filter(lambda e: e['kind'] == 'band', Timed_list))
spawns_all = spawns_timed + \
len(filter(lambda e: e['kind'] == 'spawn', Scanned_list))
spawns_missed = len(
filter(lambda e: e['kind'] == 'spawn', Missed_list))
band_percent = self.band_status()
kinds = {}
tth_ranges = {}
self.tth_found = 0
self.active_sp = 0
found_percent = 100.0
good_percent = 100.0
spawns_reached = 100.0
spawnpoints = SpawnPoint.select_in_hex(
self.scan_location, self.args.step_limit)
for sp in spawnpoints:
if sp['missed_count'] > 5:
continue
self.active_sp += 1
self.tth_found += (sp['earliest_unseen'] ==
sp['latest_seen'])
kind = sp['kind']
kinds[kind] = kinds.get(kind, 0) + 1
tth_range = str(int(round(
((sp['earliest_unseen'] - sp['latest_seen']) % 3600) /
60.0)))
tth_ranges[tth_range] = tth_ranges.get(tth_range, 0) + 1
tth_ranges['0'] = tth_ranges.get('0', 0) - self.tth_found
len_spawnpoints = len(spawnpoints) + (not len(spawnpoints))
log.info('Total Spawn Points found in hex: %d',
len(spawnpoints))
log.info('Inactive Spawn Points found in hex: %d or %.1f%%',
len(spawnpoints) - self.active_sp,
(len(spawnpoints) -
self.active_sp) * 100.0 / len_spawnpoints)
log.info('Active Spawn Points found in hex: %d or %.1f%%',
self.active_sp,
self.active_sp * 100.0 / len_spawnpoints)
self.active_sp += self.active_sp == 0
for k in sorted(kinds.keys()):
log.info('%s kind spawns: %d or %.1f%%', k,
kinds[k], kinds[k] * 100.0 / self.active_sp)
log.info('Spawns with found TTH: %d or %.1f%% [%d missing]',
self.tth_found,
self.tth_found * 100.0 / self.active_sp,
self.active_sp - self.tth_found)
for k in sorted(tth_ranges.keys(), key=int):
log.info('Spawnpoints with a %sm range to find TTH: %d', k,
tth_ranges[k])
log.info('Over last %d minutes: %d new bands, %d Pokemon ' +
'found', self.minutes, bands_timed, spawns_all)
log.info('Of the %d total spawns, %d were targeted, and %d ' +
'found scanning for others', spawns_all, spawns_timed,
spawns_all - spawns_timed)
scan_total = spawns_timed + bands_timed
spm = scan_total / self.minutes
seconds_per_scan = self.minutes * 60 * \
self.args.workers / scan_total if scan_total else 0
log.info('%d scans over %d minutes, %d scans per minute, %d ' +
'secs per scan per worker', scan_total, self.minutes,
spm, seconds_per_scan)
sum = spawns_all + spawns_missed
if sum:
spawns_reached = spawns_all * 100.0 / \
(spawns_all + spawns_missed)
log.info('%d Pokemon found, and %d were not reached in ' +
'time for %.1f%% found', spawns_all,
spawns_missed, spawns_reached)
if spawns_timed:
average = reduce(
lambda x, y: x + y['done'],
spawns_timed_list,
0) / spawns_timed
log.info('%d Pokemon found, %d were targeted, with an ' +
'average delay of %d sec', spawns_all,
spawns_timed, average)
spawns_missed = reduce(
lambda x, y: x + len(y),
self.spawns_missed_delay.values(), 0)
sum = spawns_missed + self.spawns_found
found_percent = (
self.spawns_found * 100.0 / sum if sum else 0)
log.info('%d spawns scanned and %d spawns were not ' +
'there when expected for %.1f%%',
self.spawns_found, spawns_missed, found_percent)
self.spawn_percent.append(round(found_percent, 1))
if self.spawns_missed_delay:
log.warning('Missed spawn IDs with times after spawn:')
log.warning(self.spawns_missed_delay)
log.info('History: %s', str(
self.spawn_percent).strip('[]'))
sum = self.scans_done + len(self.scans_missed_list)
good_percent = self.scans_done * 100.0 / sum if sum else 0
log.info(
'%d scans successful and %d scans missed for %.1f%% found',
self.scans_done, len(self.scans_missed_list), good_percent)
self.scan_percent.append(round(good_percent, 1))
if self.scans_missed_list:
log.warning('Missed scans: %s', Counter(
self.scans_missed_list).most_common(3))
log.info('History: %s', str(self.scan_percent).strip('[]'))
self.status_message = ('Initial scan: {:.2f}%, TTH found: ' +
'{:.2f}% [{} missing], ').format(
band_percent, self.tth_found * 100.0 / self.active_sp,
self.active_sp - self.tth_found)
self.status_message += ('Spawns reached: {:.2f}%, Spawns ' +
'found: {:.2f}%, Good scans ' +
'{:.2f}%').format(spawns_reached,
found_percent,
good_percent)
self._stat_init()
except Exception as e:
log.error(
'Performance statistics had an Exception: {}'.format(
repr(e)))
traceback.print_exc(file=sys.stdout)
# Find the best item to scan next
def next_item(self, status):
# Thread safety: don't let multiple threads get the same "best item".
with self.lock_next_item:
# Score each item in the queue by # of due spawns or scan time
# bands can be filled.
while not self.ready:
time.sleep(1)
now_date = datetime.utcnow()
now_time = time.time()
n = 0 # count valid scans reviewed
q = self.queues[0]
ms = ((now_date - self.refresh_date).total_seconds() +
self.refresh_ms)
best = {'score': 0}
cant_reach = False
worker_loc = [status['latitude'], status['longitude']]
last_action = status['last_scan_date']
# Check all scan locations possible in the queue.
for i, item in enumerate(q):
# If already claimed by another worker or done, pass.
if item.get('done', False):
continue
# If the item is parked by a different thread (or by a
# different account, which should be on that one thread),
# pass.
our_parked_name = status['username']
if 'parked_name' in item:
# We use 'parked_last_update' to determine when the
# last time was since the thread passed the item with the
# same thread name & username. If it's been too long, unset
# the park so another worker can pick it up.
now = default_timer()
max_parking_idle_seconds = 3 * 60
if (now - item.get('parked_last_update', now)
> max_parking_idle_seconds):
# Unpark & don't skip it.
item.pop('parked_name', None)
item.pop('parked_last_update', None)
else:
# Still parked and not our item. Skip it.
if item.get('parked_name') != our_parked_name:
continue
# If already timed out, mark it as Missed and check next.
if ms > item['end']:
item['done'] = 'Missed' if not item.get(
'done', False) else item['done']
continue
# If we just did a fresh band recently, wait a few seconds to
# space out the band scans.
if now_date < self.next_band_date:
continue
# If the start time isn't yet, don't bother looking further,
# since queue sorted by start time.
if ms < item['start']:
break
loc = item['loc']
distance = equi_rect_distance(loc, worker_loc)
secs_to_arrival = distance / self.args.kph * 3600
# If we can't make it there before it disappears, don't bother
# trying.
if ms + secs_to_arrival > item['end']:
cant_reach = True
continue
n += 1
# Bands are top priority to find new spawns first
score = 1e12 if item['kind'] == 'band' else (
1e6 if item['kind'] == 'TTH' else 1)
# For spawns, score is purely based on how close they are to
# last worker position
score = score / (distance + .01)
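                # Worked example (illustrative numbers): a 'spawn' item 0.5 km
                # away scores 1 / 0.51 ~= 1.96, while a 'band' item at the same
                # distance scores 1e12 / 0.51, so bands always win until the
                # initial scan is complete.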
if score > best['score']:
best = {'score': score, 'i': i}
best.update(item)
prefix = 'Calc %.2f for %d scans:' % (time.time() - now_time, n)
loc = best.get('loc', [])
step = best.get('step', 0)
i = best.get('i', 0)
messages = {
'wait': 'Nothing to scan.',
'early': 'Early for step {}; waiting a few seconds...'.format(
step),
'late': ('API response on step {} delayed by {} seconds. ' +
'Possible causes: slow proxies, internet, or ' +
'Niantic servers.').format(
step,
int((now_date - last_action).total_seconds())),
'search': 'Searching at step {}.'.format(step),
'invalid': ('Invalid response at step {}, abandoning ' +
'location.').format(step)
}
try:
item = q[i]
except IndexError:
messages['wait'] = ('Search aborting.'
+ ' Overseer refreshing queue.')
return -1, 0, 0, 0, messages
if best['score'] == 0:
if cant_reach:
messages['wait'] = ('Not able to reach any scan'
+ ' under the speed limit.')
return -1, 0, 0, 0, messages
distance = equi_rect_distance(loc, worker_loc)
if (distance >
(now_date - last_action).total_seconds() *
self.args.kph / 3600):
# Flag item as "parked" by a specific thread, because
# we're waiting for it. This will avoid all threads "walking"
# to the same item.
our_parked_name = status['username']
item['parked_name'] = our_parked_name
# CTRL+F 'parked_last_update' in this file for more info.
item['parked_last_update'] = default_timer()
messages['wait'] = 'Moving {}m to step {} for a {}.'.format(
int(distance * 1000), step,
best['kind'])
return -1, 0, 0, 0, messages
prefix += ' Step %d,' % (step)
# Check again if another worker heading there.
# TODO: Check if this is still necessary. I believe this was
# originally a failed attempt at thread safety, which still
# resulted in a race condition (multiple workers heading to the
# same spot). A thread Lock has since been added.
if item.get('done', False):
messages['wait'] = ('Skipping step {}. Other worker already ' +
'scanned.').format(step)
return -1, 0, 0, 0, messages
if not self.ready:
messages['wait'] = ('Search aborting.'
+ ' Overseer refreshing queue.')
return -1, 0, 0, 0, messages
# If a new band, set the date to wait until for the next band.
if best['kind'] == 'band' and best['end'] - best['start'] > 5 * 60:
self.next_band_date = datetime.utcnow() + timedelta(
seconds=self.band_spacing)
# Mark scanned
item['done'] = 'Scanned'
status['index_of_queue_item'] = i
messages['search'] = 'Scanning step {} for a {}.'.format(
best['step'], best['kind'])
return best['step'], best['loc'], 0, 0, messages
def task_done(self, status, parsed=False):
if parsed:
# Record delay between spawn time and scanning for statistics
now_secs = date_secs(datetime.utcnow())
item = self.queues[0][status['index_of_queue_item']]
seconds_within_band = (
int((datetime.utcnow() - self.refresh_date).total_seconds()) +
self.refresh_ms)
enforced_delay = (self.args.spawn_delay if item['kind'] == 'spawn'
else 0)
start_delay = seconds_within_band - item['start'] + enforced_delay
safety_buffer = item['end'] - seconds_within_band
if safety_buffer < 0:
log.warning('Too late by %d sec for a %s at step %d', -
safety_buffer, item['kind'], item['step'])
# If we had a 0/0/0 scan, then unmark as done so we can retry, and
# save for Statistics
elif parsed['bad_scan']:
self.scans_missed_list.append(cellid(item['loc']))
# Only try for a set amount of times (BAD_SCAN_RETRY)
if self.args.bad_scan_retry > 0 and (
self.scans_missed_list.count(cellid(item['loc'])) >
self.args.bad_scan_retry):
log.info('Step %d failed scan for %d times! Giving up...',
item['step'], self.args.bad_scan_retry + 1)
else:
item['done'] = None
log.info('Putting back step %d in queue', item['step'])
else:
# Scan returned data
self.scans_done += 1
item['done'] = start_delay
# Were we looking for spawn?
if item['kind'] == 'spawn':
sp_id = item['sp']
# Did we find the spawn?
if sp_id in parsed['sp_id_list']:
self.spawns_found += 1
elif start_delay > 0: # not sure why this could be
# negative, but sometimes it is
# if not, record ID and put back in queue
self.spawns_missed_delay[
sp_id] = self.spawns_missed_delay.get(sp_id, [])
self.spawns_missed_delay[sp_id].append(start_delay)
item['done'] = 'Scanned'
# For existing spawn points, if in any other queue items, mark
# 'scanned'
for sp_id in parsed['sp_id_list']:
for item in self.queues[0]:
if (sp_id == item.get('sp', None) and
item.get('done', None) is None and
now_secs > item['start'] and
now_secs < item['end']):
item['done'] = 'Scanned'
# The SchedulerFactory returns an instance of the correct type of scheduler.
class SchedulerFactory():
__schedule_classes = {
"hexsearch": HexSearch,
"hexsearchspawnpoint": HexSearchSpawnpoint,
"spawnscan": SpawnScan,
"speedscan": SpeedScan,
}
@staticmethod
def get_scheduler(name, *args, **kwargs):
scheduler_class = SchedulerFactory.__schedule_classes.get(
name.lower(), None)
if scheduler_class:
return scheduler_class(*args, **kwargs)
raise NotImplementedError(
"The requested scheduler has not been implemented")
# The KeyScheduler returns a scheduler that cycles through the given hash
# server keys.
class KeyScheduler(object):
def __init__(self, keys):
self.keys = {}
for key in keys:
self.keys[key] = {
'remaining': 0,
'maximum': 0,
'peak': 0
}
self.key_cycle = itertools.cycle(keys)
self.curr_key = ''
def keys(self):
return self.keys
def current(self):
return self.curr_key
def next(self):
self.curr_key = self.key_cycle.next()
return self.curr_key
| slgphantom/RocketMap | pogom/schedulers.py | Python | agpl-3.0 | 46,757 |
# -*- coding: utf-8 -*-
#
# privacyIDEA is a fork of LinOTP
# May 08, 2014 Cornelius Kölbel
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# 2014-10-17 Fix the empty result problem
# Cornelius Kölbel, <[email protected]>
#
# Copyright (C) 2010 - 2014 LSE Leading Security Experts GmbH
# License: AGPLv3
# contact: http://www.linotp.org
# http://www.lsexperts.de
# [email protected]
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__="""This is the BaseClass for audit trails
The audit is supposed to work like this. First we need to create an audit
object. E.g. this can be done in the before_request:
g.audit_object = getAudit(file_config)
During the request, the g.audit_object can be used to add audit information:
g.audit_object.log({"client": "123.2.3.4", "action": "validate/check"})
Thus at many different places in the code, audit information can be added to
the audit object.
Finally the audit_object needs to be stored to the audit storage. So we call:
g.audit_object.finalize_log()
which creates a signature of the audit data and writes the data to the audit
storage.
"""
import logging
log = logging.getLogger(__name__)
from privacyidea.lib.log import log_with
import socket
from datetime import datetime, timedelta
class Paginate(object):
"""
This is a pagination object, that is used for searching audit trails.
"""
def __init__(self):
# The audit data
self.auditdata = []
# The number of the previous page
self.prev = None
# the number of the next page
self.next = None
# the number of the current page
self.current = 1
# the total entry numbers
self.total = 0
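# A minimal, illustrative use of Paginate as the container returned by
# search(); all field values below are made up:
#
#     page = Paginate()
#     page.auditdata = [{"action": "validate/check", "success": True}]
#     page.total = 1
#     page.current = 1
#     # page.prev / page.next stay None when there is only one page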
class Audit(object): # pragma: no cover
def __init__(self, config=None):
"""
Create a new audit object.
:param config: The web config is passed to the audit module, so that
the special module implementation can get its configuration.
:type config: dict
:return:
"""
self.name = "AuditBase"
self.audit_data = {}
self.private = ""
self.public = ""
@log_with(log)
def initialize(self):
# defaults
self.audit_data = {'action_detail': '',
'info': '',
'log_level': 'INFO',
'administrator': '',
'value': '',
'key': '',
'serial': '',
'token_type': '',
'clearance_level': 0,
'privacyidea_server': socket.gethostname(),
'realm': '',
'user': '',
'client': ''
}
#controller = request.environ['pylons.routes_dict']['controller']
#action = request.environ['pylons.routes_dict']['action']
#c.audit['action'] = "%s/%s" % (controller, action)
def log_token_num(self, count):
"""
Log the number of the tokens.
Can be passed like
log_token_num(get_tokens(count=True))
:param count: Number of tokens
:type count: int
:return:
"""
self.audit_data['action_detail'] = "tokennum = %s" % str(count)
@log_with(log)
def read_keys(self, pub, priv):
"""
        Set the private and public key for the audit class. This is done by
        passing the file names of the key files.
#priv = config.get("privacyideaAudit.key.private")
#pub = config.get("privacyideaAudit.key.public")
:param pub: Public key, used for verifying the signature
:type pub: string with filename
:param priv: Private key, used to sign the audit entry
:type priv: string with filename
:return: None
"""
try:
f = open(priv, "r")
self.private = f.read()
f.close()
except Exception as e:
log.error("Error reading private key %s: (%r)" % (priv, e))
raise e
try:
f = open(pub, "r")
self.public = f.read()
f.close()
except Exception as e:
log.error("Error reading public key %s: (%r)" % (pub, e))
raise e
def get_audit_id(self):
return self.name
def get_total(self, param, AND=True, display_error=True):
"""
This method returns the total number of audit entries
in the audit store
"""
return None
@log_with(log)
def log(self, param): # pragma: no cover
"""
This method is used to log the data.
During a request this method can be called several times to fill the
internal audit_data dictionary.
"""
pass
def add_to_log(self, param):
"""
Add to existing log entry
:param param:
:return:
"""
pass
def finalize_log(self):
"""
This method is called to finalize the audit_data. I.e. sign the data
and write it to the database.
        It should hash the data, chain it to the previous entry and sign it.
"""
pass
def initialize_log(self, param):
"""
        This method initializes the log state.
        The fact that the log state was initialized also needs to be logged.
        Therefore the same params are passed as in the log method.
"""
pass
def set(self):
"""
This function could be used to set certain things like the signing key.
But maybe it should only be read from privacyidea.ini?
"""
pass
def search(self, param, display_error=True, rp_dict=None):
"""
This function is used to search audit events.
param: Search parameters can be passed.
return: A pagination object
This function is deprecated.
"""
return Paginate()
def csv_generator(self, param):
"""
A generator that can be used to stream the audit log
:param param:
:return:
"""
pass
def search_query(self, search_dict, rp_dict):
"""
This function returns the audit log as an iterator on the result
"""
return None
def audit_entry_to_dict(self, audit_entry):
"""
If the search_query returns an iterator with elements that are not a
dictionary, the audit module needs
to provide this function, to convert the audit entry to a dictionary.
"""
return {}
def get_dataframe(self, start_time=datetime.now()-timedelta(days=7),
end_time=datetime.now()):
"""
The Audit module can handle its data the best. This function is used
to return a pandas.dataframe with all audit data in the given time
frame.
This dataframe then can be used for extracting statistics.
:param start_time: The start time of the data
:type start_time: datetime
:param end_time: The end time of the data
:type end_time: datetime
:return: Audit data
:rtype: dataframe
"""
return None
| woddx/privacyidea | privacyidea/lib/auditmodules/base.py | Python | agpl-3.0 | 7,831 |
"""
Django Admin pages for DiscountRestrictionConfig.
"""
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from openedx.core.djangoapps.config_model_utils.admin import StackedConfigModelAdmin
from .models import DiscountPercentageConfig, DiscountRestrictionConfig
class DiscountRestrictionConfigAdmin(StackedConfigModelAdmin):
"""
Admin to configure discount restrictions
"""
fieldsets = (
('Context', {
'fields': DiscountRestrictionConfig.KEY_FIELDS,
'description': _(
'These define the context to disable lms-controlled discounts on. '
'If no values are set, then the configuration applies globally. '
'If a single value is set, then the configuration applies to all courses '
'within that context. At most one value can be set at a time.<br>'
'If multiple contexts apply to a course (for example, if configuration '
'is specified for the course specifically, and for the org that the course '
                'is in), then the more specific context overrides the more general context.'
),
}),
('Configuration', {
'fields': ('disabled',),
'description': _(
'If any of these values is left empty or "Unknown", then their value '
'at runtime will be retrieved from the next most specific context that applies. '
'For example, if "Disabled" is left as "Unknown" in the course context, then that '
'course will be Disabled only if the org that it is in is Disabled.'
),
})
)
raw_id_fields = ('course',)
admin.site.register(DiscountRestrictionConfig, DiscountRestrictionConfigAdmin)
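# Illustrative reading of the override rules described above (values made up):
# if an org-level entry sets disabled=True and a course-level entry sets
# disabled=False for one course in that org, the course-level value wins for
# that course while the org-level value still applies to the org's other
# courses.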
class DiscountPercentageConfigAdmin(StackedConfigModelAdmin):
"""
Admin to configure discount percentage
"""
fieldsets = (
('Context', {
'fields': DiscountRestrictionConfig.KEY_FIELDS,
'description': _(
                'These define the context to configure the percentage for the first purchase discount. '
                'If multiple contexts apply to a course (for example, if configuration '
                'is specified for the course specifically, and for the org that the course '
                'is in), then the more specific context overrides the more general context.'
),
}),
('Configuration', {
'fields': ('percentage',),
})
)
raw_id_fields = ('course',)
admin.site.register(DiscountPercentageConfig, DiscountPercentageConfigAdmin)
| edx/edx-platform | openedx/features/discounts/admin.py | Python | agpl-3.0 | 2,665 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This module contains external, potentially separately licensed,
packages that are included in spack.
So far:
argparse: We include our own version to be Python 2.6 compatible.
distro: Provides a more stable linux distribution detection.
functools: Used for implementation of total_ordering.
jinja2: A modern and designer-friendly templating language for Python
jsonschema: An implementation of JSON Schema for Python.
ordereddict: We include our own version to be Python 2.6 compatible.
py: Needed by pytest. Library with cross-python path,
ini-parsing, io, code, and log facilities.
pyqver2: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
pytest: Testing framework used by Spack.
yaml: Used for config files.
"""
| TheTimmy/spack | lib/spack/external/__init__.py | Python | lgpl-2.1 | 2,139 |
from __future__ import absolute_import, unicode_literals
from case import Mock, patch
from amqp.five import text_t
from amqp.utils import (NullHandler, bytes_to_str, coro, get_errno, get_logger,
str_to_bytes)
class test_get_errno:
def test_has_attr(self):
exc = KeyError('foo')
exc.errno = 23
assert get_errno(exc) == 23
def test_in_args(self):
exc = KeyError(34, 'foo')
exc.args = (34, 'foo')
assert get_errno(exc) == 34
def test_args_short(self):
exc = KeyError(34)
assert not get_errno(exc)
def test_no_args(self):
assert not get_errno(object())
class test_coro:
def test_advances(self):
@coro
def x():
yield 1
yield 2
it = x()
assert next(it) == 2
class test_str_to_bytes:
def test_from_unicode(self):
assert isinstance(str_to_bytes(u'foo'), bytes)
def test_from_bytes(self):
assert isinstance(str_to_bytes(b'foo'), bytes)
def test_supports_surrogates(self):
bytes_with_surrogates = '\ud83d\ude4f'.encode('utf-8', 'surrogatepass')
assert str_to_bytes(u'\ud83d\ude4f') == bytes_with_surrogates
class test_bytes_to_str:
def test_from_unicode(self):
assert isinstance(bytes_to_str(u'foo'), text_t)
def test_from_bytes(self):
assert bytes_to_str(b'foo')
def test_support_surrogates(self):
assert bytes_to_str(u'\ud83d\ude4f') == u'\ud83d\ude4f'
class test_NullHandler:
def test_emit(self):
NullHandler().emit(Mock(name='record'))
class test_get_logger:
def test_as_str(self):
with patch('logging.getLogger') as getLogger:
x = get_logger('foo.bar')
getLogger.assert_called_with('foo.bar')
assert x is getLogger()
def test_as_logger(self):
with patch('amqp.utils.NullHandler') as _NullHandler:
m = Mock(name='logger')
m.handlers = None
x = get_logger(m)
assert x is m
x.addHandler.assert_called_with(_NullHandler())
| pexip/os-python-amqp | t/unit/test_utils.py | Python | lgpl-2.1 | 2,126 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Lzma(AutotoolsPackage):
"""LZMA Utils are legacy data compression software with high compression
ratio. LZMA Utils are no longer developed, although critical bugs may be
fixed as long as fixing them doesn't require huge changes to the code.
Users of LZMA Utils should move to XZ Utils. XZ Utils support the legacy
.lzma format used by LZMA Utils, and can also emulate the command line
tools of LZMA Utils. This should make transition from LZMA Utils to XZ
Utils relatively easy."""
homepage = "http://tukaani.org/lzma/"
url = "http://tukaani.org/lzma/lzma-4.32.7.tar.gz"
version('4.32.7', '2a748b77a2f8c3cbc322dbd0b4c9d06a')
| wscullin/spack | var/spack/repos/builtin/packages/lzma/package.py | Python | lgpl-2.1 | 1,935 |
#!/usr/bin/env python
import sys, math
if len(sys.argv) != 3:
print("Usage:")
print("%s [RA HH:MM:SS] [DEC Deg:Arcmin:Arcsec] " % sys.argv[0])
exit(0)
ra = sys.argv[1]
dec = sys.argv[2]
rai = ra.split(":")
deci = dec.split(":")
# RA is hours:minutes:seconds of time, so scale the whole value by 15 deg/hour.
radeg = (float(rai[0]) + float(rai[1]) / 60.0 + float(rai[2]) / 3600.0) * 15.0
# Keep the declination sign so that negative values such as -05:15:30 convert correctly.
decsign = -1.0 if deci[0].startswith("-") else 1.0
decdeg = decsign * (abs(float(deci[0])) + float(deci[1]) / 60.0 + float(deci[2]) / 3600.0)
print("RA,DEC: %f,%f deg" % (radeg, decdeg))
| ari-zah/gaiasky | core/scripts/radec-conv.py | Python | lgpl-3.0 | 472 |
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example deactivates all active placements. To determine which
placements exist, run get_all_placements.py."""
__author__ = '[email protected] (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
placement_service = client.GetService('PlacementService', version='v201306')
# Create query.
values = [{
'key': 'status',
'value': {
'xsi_type': 'TextValue',
'value': 'ACTIVE'
}
}]
query = 'WHERE status = :status'
# Get placements by statement.
placements = DfpUtils.GetAllEntitiesByStatementWithService(
placement_service, query=query, bind_vars=values)
for placement in placements:
print ('Placement with id \'%s\', name \'%s\', and status \'%s\' will be '
'deactivated.' % (placement['id'], placement['name'],
placement['status']))
print 'Number of placements to be deactivated: %s' % len(placements)
# Perform action.
result = placement_service.PerformPlacementAction(
{'type': 'DeactivatePlacements'}, {'query': query, 'values': values})[0]
# Display results.
if result and int(result['numChanges']) > 0:
print 'Number of placements deactivated: %s' % result['numChanges']
else:
print 'No placements were deactivated.'
| caioserra/apiAdwords | examples/adspygoogle/dfp/v201306/deactivate_placements.py | Python | apache-2.0 | 2,253 |
"""The tests for hls streams."""
from datetime import timedelta
from io import BytesIO
from unittest.mock import patch
from homeassistant.setup import async_setup_component
from homeassistant.components.stream.core import Segment
from homeassistant.components.stream.recorder import recorder_save_worker
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed
from tests.components.stream.common import (
generate_h264_video, preload_stream)
async def test_record_stream(hass, hass_client):
"""
Test record stream.
Purposefully not mocking anything here to test full
integration with the stream component.
"""
await async_setup_component(hass, 'stream', {
'stream': {}
})
with patch(
'homeassistant.components.stream.recorder.recorder_save_worker'):
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider('recorder')
stream.start()
segments = 0
while True:
segment = await recorder.recv()
if not segment:
break
segments += 1
stream.stop()
assert segments > 1
async def test_recorder_timeout(hass, hass_client):
"""Test recorder timeout."""
await async_setup_component(hass, 'stream', {
'stream': {}
})
with patch(
'homeassistant.components.stream.recorder.RecorderOutput.cleanup'
) as mock_cleanup:
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider('recorder')
stream.start()
await recorder.recv()
# Wait a minute
future = dt_util.utcnow() + timedelta(minutes=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert mock_cleanup.called
async def test_recorder_save():
"""Test recorder save."""
# Setup
source = generate_h264_video()
output = BytesIO()
output.name = 'test.mp4'
# Run
recorder_save_worker(output, [Segment(1, source, 4)])
# Assert
assert output.getvalue()
| auduny/home-assistant | tests/components/stream/test_recorder.py | Python | apache-2.0 | 2,235 |
# Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS-IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script runs the following tests in all cases.
- Javascript and Python Linting
- Backend Python tests
Only when frontend files are changed will it run Frontend Karma unit tests.
"""
from __future__ import annotations
import argparse
import subprocess
from . import common
from . import run_backend_tests
from . import run_frontend_tests
from .linters import pre_commit_linter
_PARSER = argparse.ArgumentParser(
description="""
Run this script from the oppia root folder prior to opening a PR:
python -m scripts.run_presubmit_checks
Set the origin branch to compare against by adding
--branch=your_branch or -b=your_branch
By default, if the current branch tip exists on remote origin,
the current branch is compared against its tip on GitHub.
Otherwise it's compared against 'develop'.
This script runs the following tests in all cases.
- Javascript and Python Linting
- Backend Python tests
Only when frontend files are changed will it run Frontend Karma unit tests.
If any of these tests result in errors, this script will terminate.
Note: The test scripts are arranged in increasing order of time taken. This
enables a broken build to be detected as quickly as possible.
""")
_PARSER.add_argument(
'--branch', '-b',
help='optional; if specified, the origin branch to compare against.')
def main(args=None):
"""Run the presubmit checks."""
parsed_args = _PARSER.parse_args(args=args)
# Run Javascript and Python linters.
print('Linting files since the last commit')
pre_commit_linter.main(args=[])
print('Linting passed.')
print('')
    current_branch = subprocess.check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8').strip()
    # If the current branch exists on remote origin, matched_branch_num='1',
    # else matched_branch_num='0'. The pipe to wc requires a shell.
    matched_branch_num = subprocess.check_output(
        'git ls-remote --heads origin %s | wc -l' % current_branch,
        shell=True).decode('utf-8').strip()
# Set the origin branch to develop if it's not specified.
if parsed_args.branch:
branch = parsed_args.branch
elif matched_branch_num == '1':
branch = 'origin/%s' % current_branch
else:
branch = 'develop'
print('Comparing the current branch with %s' % branch)
    all_changed_files = subprocess.check_output([
        'git', 'diff', '--cached', '--name-only', '--diff-filter=ACM',
        branch]).decode('utf-8')
if common.FRONTEND_DIR in all_changed_files:
# Run frontend unit tests.
print('Running frontend unit tests')
run_frontend_tests.main(args=['--run_minified_tests'])
print('Frontend tests passed.')
else:
# If files in common.FRONTEND_DIR were not changed, skip the tests.
common.print_each_string_after_two_new_lines([
'No frontend files were changed.',
'Skipped frontend tests'])
# Run backend tests.
print('Running backend tests')
run_backend_tests.main(args=[])
print('Backend tests passed.')
if __name__ == '__main__':
main()
| kevinlee12/oppia | scripts/run_presubmit_checks.py | Python | apache-2.0 | 3,619 |
# Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import json
import os
from dateutil import zoneinfo
from mock import mock
from .common import BaseTest, instance
from c7n.filters import FilterValidationError
from c7n.filters.offhours import OffHour, OnHour, ScheduleParser, Time
# Per http://blog.xelnor.net/python-mocking-datetime/
# naive implementation has issues with pypy
real_datetime_class = datetime.datetime
def mock_datetime_now(tgt, dt):
class DatetimeSubclassMeta(type):
@classmethod
def __instancecheck__(mcs, obj):
return isinstance(obj, real_datetime_class)
class BaseMockedDatetime(real_datetime_class):
target = tgt
@classmethod
def now(cls, tz=None):
return cls.target.replace(tzinfo=tz)
@classmethod
def utcnow(cls):
return cls.target
# Python2 & Python3 compatible metaclass
MockedDatetime = DatetimeSubclassMeta(
b'datetime' if str is bytes else 'datetime', # hack Python2/3 port
(BaseMockedDatetime,), {})
return mock.patch.object(dt, 'datetime', MockedDatetime)
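# Usage sketch (mirroring the tests below): freeze datetime.now()/utcnow() to a
# fixed instant so offhours evaluation is deterministic. The timestamp here is
# illustrative only.
#
#     t = datetime.datetime(2016, 8, 14, 19, 0,
#                           tzinfo=zoneinfo.gettz('America/New_York'))
#     with mock_datetime_now(t, datetime):
#         ...  # code under test now sees the frozen time t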
class OffHoursFilterTest(BaseTest):
"""[off|on] hours testing"""
def test_offhours_records(self):
session_factory = self.replay_flight_data('test_offhours_records')
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=8, day=14, hour=19, minute=00)
with mock_datetime_now(t, datetime):
p = self.load_policy({
'name': 'offhours-records',
'resource': 'ec2',
'filters': [
{'State.Name': 'running'},
{'type': 'offhour',
'offhour': 19,
'tag': 'custodian_downtime',
'default_tz': 'est',
'weekends': False}]
}, session_factory=session_factory)
resources = p.run()
self.assertEqual(resources, [])
with open(os.path.join(
p.options['output_dir'],
'offhours-records',
'parse_errors.json')) as fh:
data = json.load(fh)
self.assertEqual(len(data), 1)
self.assertEqual(data[0][0], 'i-0ee3a9bc2eeed269f')
self.assertEqual(data[0][1], 'off=[m-f,8];on=[n-f,5];pz=est')
with open(os.path.join(
p.options['output_dir'],
'offhours-records',
'opted_out.json')) as fh:
data = json.load(fh)
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['InstanceId'], 'i-0a619b58a7e704a9f')
def test_validate(self):
self.assertRaises(
FilterValidationError, OffHour({'default_tz': 'zmta'}).validate)
self.assertRaises(
FilterValidationError, OffHour({'offhour': 25}).validate)
i = OffHour({})
self.assertEqual(i.validate(), i)
def test_process(self):
f = OffHour({'opt-out': True})
instances = [
instance(Tags=[]),
instance(
Tags=[{'Key': 'maid_offhours', 'Value': ''}]),
instance(
Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]),
instance(
Tags=[{'Key': 'maid_offhours', 'Value': 'off'}]),
instance(
Tags=[
{'Key': 'maid_offhours',
'Value': "off=(m-f,5);zebrablue,on=(t-w,5)"}])]
t = datetime.datetime(
year=2015, month=12, day=1, hour=19, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
with mock_datetime_now(t, datetime):
self.assertEqual(
f.process(instances), [instances[0], instances[1], instances[2]]
)
def test_opt_out_behavior(self):
# Some users want to match based on policy filters to
# a resource subset with default opt out behavior
t = datetime.datetime(
year=2015, month=12, day=1, hour=19, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
f = OffHour({'opt-out': True})
with mock_datetime_now(t, datetime):
i = instance(Tags=[])
self.assertEqual(f(i), True)
i = instance(
Tags=[{'Key': 'maid_offhours', 'Value': ''}]
)
self.assertEqual(f(i), True)
i = instance(
Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]
)
self.assertEqual(f(i), True)
i = instance(
Tags=[{'Key': 'maid_offhours', 'Value': 'off'}])
self.assertEqual(f(i), False)
self.assertEqual(f.opted_out, [i])
def test_opt_in_behavior(self):
# Given the addition of opt out behavior, verify if its
# not configured that we don't touch an instance that
# has no downtime tag
i = instance(Tags=[])
i2 = instance(Tags=[{'Key': 'maid_offhours', 'Value': ''}])
i3 = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'on'}])
t = datetime.datetime(
year=2015, month=12, day=1, hour=19, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
f = OffHour({})
with mock_datetime_now(t, datetime):
self.assertEqual(f(i), False)
self.assertEqual(f(i2), True)
self.assertEqual(f(i3), True)
t = datetime.datetime(
year=2015, month=12, day=1, hour=7, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
f = OnHour({})
with mock_datetime_now(t, datetime):
self.assertEqual(f(i), False)
self.assertEqual(f(i2), True)
self.assertEqual(f(i3), True)
def xtest_time_match_stops_after_skew(self):
hour = 7
t = datetime.datetime(
year=2015, month=12, day=1, hour=hour, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OnHour({'skew': 1})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(0, 4):
dt.target = t.replace(hour=hour + n)
results.append(f(i))
self.assertEqual(results, [True, True, False, False])
def test_resource_schedule_error(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2015, month=12, day=1, hour=19, minute=5)
f = OffHour({})
f.process_resource_schedule = lambda: False
with mock_datetime_now(t, datetime):
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=est'}])
self.assertEqual(f(i), False)
def test_time_filter_usage_errors(self):
self.assertRaises(NotImplementedError, Time, {})
def test_everyday_onhour(self):
# weekends on means we match times on the weekend
start_day = 14 # sunday
t = datetime.datetime(
year=2016, day=start_day, month=8, hour=7, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OnHour({'weekends': False})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(7):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(results, [True] * 7)
def test_everyday_offhour(self):
# weekends on means we match times on the weekend
start_day = 14 # sunday
t = datetime.datetime(
year=2016, day=start_day, month=8, hour=19, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OffHour({'weekends': False})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(7):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(results, [True] * 7)
def test_weekends_only_onhour_support(self):
# start day is a sunday, weekend only means we only start
# on monday morning.
start_day = 14
t = datetime.datetime(
year=2016, day=start_day, month=8, hour=7, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OnHour({'weekends-only': True})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(7):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(results, [
False, True, False, False, False, False, False])
def test_weekends_only_offhour_support(self):
# start day is a sunday, weekend only means we only stop
# on friday evening.
start_day = 14
t = datetime.datetime(
year=2016, day=start_day, month=8, hour=7, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OnHour({'weekends-only': True})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(7):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(results, [
False, True, False, False, False, False, False])
def test_onhour_weekend_support(self):
start_day = 14
t = datetime.datetime(
year=2016, day=start_day, month=2, hour=19, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OffHour({'weekends-only': True})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(7):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(
results,
[False, False, False, False, False, True, False])
def test_offhour_weekend_support(self):
start_day = 26
t = datetime.datetime(
year=2016, day=start_day, month=2, hour=19, minute=20)
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OffHour({})
results = []
with mock_datetime_now(t, datetime) as dt:
for n in range(0, 4):
dt.target = t.replace(day=start_day + n)
results.append(f(i))
self.assertEqual(results, [True, False, False, True])
def test_current_time_test(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2015, month=12, day=1, hour=19, minute=5)
with mock_datetime_now(t, datetime):
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=est'}])
f = OffHour({})
p = f.get_tag_value(i)
self.assertEqual(p, 'tz=est')
tz = f.get_tz('est')
self.assertTrue(str(tz) in (
"tzfile('US/Eastern')",
"tzfile('America/New_York')"))
self.assertEqual(
datetime.datetime.now(tz), t)
self.assertEqual(t.hour, 19)
def test_offhours_real_world_values(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2015, month=12, day=1, hour=19, minute=5)
with mock_datetime_now(t, datetime):
results = [OffHour({})(i) for i in [
instance(Tags=[
{'Key': 'maid_offhours', 'Value': ''}]),
instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'on'}]),
instance(Tags=[
{'Key': 'maid_offhours', 'Value': '"Offhours tz=ET"'}]),
instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'Offhours tz=PT'}])]]
# unclear what this is really checking
self.assertEqual(results, [True, True, True, True])
def test_offhours_get_value(self):
off = OffHour({'default_tz': 'ct'})
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'Offhours tz=PT'}])
self.assertEqual(off.get_tag_value(i), "offhours tz=pt")
self.assertFalse(off.parser.has_resource_schedule(
off.get_tag_value(i), 'off'))
self.assertTrue(off.parser.keys_are_valid(
off.get_tag_value(i)))
self.assertEqual(off.parser.raw_data(
off.get_tag_value(i)), {'tz': 'pt'})
def test_offhours(self):
t = datetime.datetime(year=2015, month=12, day=1, hour=19, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
with mock_datetime_now(t, datetime):
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=est'}])
self.assertEqual(OffHour({})(i), True)
def test_onhour(self):
t = datetime.datetime(year=2015, month=12, day=1, hour=7, minute=5,
tzinfo=zoneinfo.gettz('America/New_York'))
with mock_datetime_now(t, datetime):
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=est'}])
self.assertEqual(OnHour({})(i), True)
self.assertEqual(OnHour({'onhour': 8})(i), False)
def test_cant_parse_tz(self):
i = instance(Tags=[
{'Key': 'maid_offhours', 'Value': 'tz=evt'}])
self.assertEqual(OffHour({})(i), False)
def test_custom_offhours(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=19, minute=00)
results = []
with mock_datetime_now(t, datetime):
for i in [instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}]),
instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,20);on=(m-f,7);tz=et'}])]:
results.append(OffHour({})(i))
self.assertEqual(results, [True, False])
def test_custom_onhours(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
results = []
with mock_datetime_now(t, datetime):
for i in [instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}]),
instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,20);on=(m-f,9);tz=et'}])]:
results.append(OnHour({})(i))
self.assertEqual(results, [True, False])
def test_arizona_tz(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,19);on=(m-f,7);tz=at'}])
self.assertEqual(OnHour({})(i), True)
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,20);on=(m-f,6);tz=ast'}])
self.assertEqual(OnHour({})(i), False)
def test_custom_bad_tz(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}])
self.assertEqual(OnHour({})(i), True)
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,20);on=(m-f,7);tz=abc'}])
self.assertEqual(OnHour({})(i), False)
def test_custom_bad_hours(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=19, minute=00)
# default error handling is to exclude the resource
with mock_datetime_now(t, datetime):
# This isn't considered a bad value, its basically omitted.
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=();tz=et'}])
self.assertEqual(OffHour({})(i), False)
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,90);on=(m-f,7);tz=et'}])
# malformed value
self.assertEqual(OffHour({})(i), False)
t = t.replace(year=2016, month=5, day=26, hour=13, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=();tz=et'}])
# will go to default values, but not work due to default time
self.assertEqual(OffHour({})(i), False)
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'off=(m-f,90);on=(m-f,7);tz=et'}])
self.assertEqual(OffHour({})(i), False)
def test_tz_only(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'tz=est'}])
self.assertEqual(OnHour({})(i), True)
def test_empty_tag(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': ''}])
self.assertEqual(OnHour({})(i), True)
def test_on_tag(self):
t = datetime.datetime.now(zoneinfo.gettz('America/New_York'))
t = t.replace(year=2016, month=5, day=26, hour=7, minute=00)
with mock_datetime_now(t, datetime):
i = instance(Tags=[{'Key': 'maid_offhours',
'Value': 'on'}])
self.assertEqual(OnHour({})(i), True)
class ScheduleParserTest(BaseTest):
# table style test
# list of (tag value, parse result)
table = [
################
# Standard cases
('off=(m-f,10);on=(m-f,7);tz=et',
{'off': [{'days': [0, 1, 2, 3, 4], 'hour': 10}],
'on': [{'days': [0, 1, 2, 3, 4], 'hour': 7}],
'tz': 'et'}),
("off=[(m-f,9)];on=(m-s,10);tz=pt",
{'off': [{'days': [0, 1, 2, 3, 4], 'hour': 9}],
'on': [{'days': [0, 1, 2, 3, 4, 5], 'hour': 10}],
'tz': 'pt'}),
("off=[(m-f,23)];on=(m-s,10);tz=pt",
{'off': [{'days': [0, 1, 2, 3, 4], 'hour': 23}],
'on': [{'days': [0, 1, 2, 3, 4, 5], 'hour': 10}],
'tz': 'pt'}),
('off=(m-f,19);on=(m-f,7);tz=pst',
{'off': [{'days': [0, 1, 2, 3, 4], 'hour': 19}],
'on': [{'days': [0, 1, 2, 3, 4], 'hour': 7}],
'tz': 'pst'}),
# wrap around days (saturday, sunday, monday)
('on=[(s-m,10)];off=(s-m,19)',
{'on': [{'days': [5, 6, 0], 'hour': 10}],
'off': [{'days': [5, 6, 0], 'hour': 19}],
'tz': 'et'}),
# multiple single days specified
('on=[(m,9),(t,10),(w,7)];off=(m-u,19)',
{'on': [{'days': [0], 'hour': 9},
{'days': [1], 'hour': 10},
{'days': [2], 'hour': 7}],
'off': [{'days': [0, 1, 2, 3, 4, 5, 6], 'hour': 19}],
'tz': 'et'}),
# using brackets also works, if only single time set
('off=[m-f,20];on=[m-f,5];tz=est',
{'on': [{'days': [0, 1, 2, 3, 4], 'hour': 5}],
'off': [{'days': [0, 1, 2, 3, 4], 'hour': 20}],
'tz': 'est'}),
# same string, exercise cache lookup.
('off=[m-f,20];on=[m-f,5];tz=est',
{'on': [{'days': [0, 1, 2, 3, 4], 'hour': 5}],
'off': [{'days': [0, 1, 2, 3, 4], 'hour': 20}],
'tz': 'est'}),
################
# Invalid Cases
('', None),
# invalid day
('off=(1-2,12);on=(m-f,10);tz=est', None),
# invalid hour
('off=(m-f,a);on=(m-f,10);tz=est', None),
('off=(m-f,99);on=(m-f,7);tz=pst', None),
# invalid day
('off=(x-f,10);on=(m-f,10);tz=est', None),
# no hour specified for on
('off=(m-f);on=(m-f,10);tz=est', None),
# invalid day spec
('off=(m-t-f,12);on=(m-f,10);tz=est', None),
# random extra
('off=(m-f,5);zebra=blue,on=(t-w,5)', None),
('off=(m-f,5);zebra=blue;on=(t-w,5)', None),
# random extra again
('off=(m-f,5);zebrablue,on=(t-w,5)', None),
('bar;off=(m-f,5);zebrablue,on=(t-w,5)', None),
]
def test_schedule_parser(self):
self.maxDiff = None
parser = ScheduleParser({'tz': 'et'})
for value, expected in self.table:
self.assertEqual(parser.parse(value), expected)
| jimmyraywv/cloud-custodian | tests/test_offhours.py | Python | apache-2.0 | 21,903 |
# rfkill tests
# Copyright (c) 2014, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import logging
logger = logging.getLogger()
import time
import hostapd
from hostapd import HostapdGlobal
import hwsim_utils
from wpasupplicant import WpaSupplicant
from rfkill import RFKill
from utils import HwsimSkip
def get_rfkill(dev):
phy = dev.get_driver_status_field("phyname")
try:
for r, s, h in RFKill.list():
if r.name == phy:
return r
except Exception, e:
raise HwsimSkip("No rfkill available: " + str(e))
raise HwsimSkip("No rfkill match found for the interface")
def test_rfkill_open(dev, apdev):
"""rfkill block/unblock during open mode connection"""
rfk = get_rfkill(dev[0])
hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
try:
logger.info("rfkill block")
rfk.block()
dev[0].wait_disconnected(timeout=10,
error="Missing disconnection event on rfkill block")
if "FAIL" not in dev[0].request("REASSOCIATE"):
raise Exception("REASSOCIATE accepted while disabled")
if "FAIL" not in dev[0].request("REATTACH"):
raise Exception("REATTACH accepted while disabled")
if "FAIL" not in dev[0].request("RECONNECT"):
raise Exception("RECONNECT accepted while disabled")
if "FAIL" not in dev[0].request("FETCH_OSU"):
raise Exception("FETCH_OSU accepted while disabled")
logger.info("rfkill unblock")
rfk.unblock()
dev[0].wait_connected(timeout=10,
error="Missing connection event on rfkill unblock")
hwsim_utils.test_connectivity(dev[0], hapd)
finally:
rfk.unblock()
def test_rfkill_wpa2_psk(dev, apdev):
"""rfkill block/unblock during WPA2-PSK connection"""
rfk = get_rfkill(dev[0])
ssid = "test-wpa2-psk"
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
hapd = hostapd.add_ap(apdev[0]['ifname'], params)
dev[0].connect(ssid, psk=passphrase, scan_freq="2412")
try:
logger.info("rfkill block")
rfk.block()
dev[0].wait_disconnected(timeout=10,
error="Missing disconnection event on rfkill block")
logger.info("rfkill unblock")
rfk.unblock()
dev[0].wait_connected(timeout=10,
error="Missing connection event on rfkill unblock")
hwsim_utils.test_connectivity(dev[0], hapd)
finally:
rfk.unblock()
def test_rfkill_autogo(dev, apdev):
"""rfkill block/unblock for autonomous P2P GO"""
rfk0 = get_rfkill(dev[0])
rfk1 = get_rfkill(dev[1])
dev[0].p2p_start_go()
dev[1].request("SET p2p_no_group_iface 0")
dev[1].p2p_start_go()
try:
logger.info("rfkill block 0")
rfk0.block()
ev = dev[0].wait_global_event(["P2P-GROUP-REMOVED"], timeout=10)
if ev is None:
raise Exception("Group removal not reported")
if "reason=UNAVAILABLE" not in ev:
raise Exception("Unexpected group removal reason: " + ev)
if "FAIL" not in dev[0].request("P2P_LISTEN 1"):
raise Exception("P2P_LISTEN accepted unexpectedly")
if "FAIL" not in dev[0].request("P2P_LISTEN"):
raise Exception("P2P_LISTEN accepted unexpectedly")
logger.info("rfkill block 1")
rfk1.block()
ev = dev[1].wait_global_event(["P2P-GROUP-REMOVED"], timeout=10)
if ev is None:
raise Exception("Group removal not reported")
if "reason=UNAVAILABLE" not in ev:
raise Exception("Unexpected group removal reason: " + ev)
logger.info("rfkill unblock 0")
rfk0.unblock()
logger.info("rfkill unblock 1")
rfk1.unblock()
time.sleep(1)
finally:
rfk0.unblock()
rfk1.unblock()
def test_rfkill_hostapd(dev, apdev):
"""rfkill block/unblock during and prior to hostapd operations"""
hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
rfk = get_rfkill(hapd)
try:
rfk.block()
ev = hapd.wait_event(["INTERFACE-DISABLED"], timeout=5)
if ev is None:
raise Exception("INTERFACE-DISABLED event not seen")
rfk.unblock()
ev = hapd.wait_event(["INTERFACE-ENABLED"], timeout=5)
if ev is None:
raise Exception("INTERFACE-ENABLED event not seen")
# hostapd does not current re-enable beaconing automatically
hapd.disable()
hapd.enable()
dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
rfk.block()
ev = hapd.wait_event(["INTERFACE-DISABLED"], timeout=5)
if ev is None:
raise Exception("INTERFACE-DISABLED event not seen")
dev[0].wait_disconnected(timeout=10)
dev[0].request("DISCONNECT")
hapd.disable()
hglobal = HostapdGlobal()
hglobal.flush()
hglobal.remove(apdev[0]['ifname'])
hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open2" },
no_enable=True)
if "FAIL" not in hapd.request("ENABLE"):
raise Exception("ENABLE succeeded unexpectedly (rfkill)")
finally:
rfk.unblock()
def test_rfkill_wpas(dev, apdev):
"""rfkill block prior to wpa_supplicant start"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5")
rfk = get_rfkill(wpas)
wpas.interface_remove("wlan5")
try:
rfk.block()
wpas.interface_add("wlan5")
time.sleep(0.5)
state = wpas.get_status_field("wpa_state")
if state != "INTERFACE_DISABLED":
raise Exception("Unexpected state with rfkill blocked: " + state)
rfk.unblock()
time.sleep(0.5)
state = wpas.get_status_field("wpa_state")
if state == "INTERFACE_DISABLED":
raise Exception("Unexpected state with rfkill unblocked: " + state)
finally:
rfk.unblock()
| wangybgit/Chameleon | hostapd-OpenWrt/tests/hwsim/test_rfkill.py | Python | apache-2.0 | 6,236 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Installation script for Oppia third-party libraries."""
import contextlib
import json
import os
import shutil
import StringIO
import sys
import tarfile
import urllib
import urllib2
import zipfile
import common
TOOLS_DIR = os.path.join('..', 'oppia_tools')
THIRD_PARTY_DIR = os.path.join('.', 'third_party')
THIRD_PARTY_STATIC_DIR = os.path.join(THIRD_PARTY_DIR, 'static')
MANIFEST_FILE_PATH = os.path.join(os.getcwd(), 'manifest.json')
# Place to download zip files for temporary storage.
TMP_UNZIP_PATH = os.path.join('.', 'tmp_unzip.zip')
# Check that the current directory is correct.
common.require_cwd_to_be_oppia(allow_deploy_dir=True)
TARGET_DOWNLOAD_DIRS = {
'frontend': THIRD_PARTY_STATIC_DIR,
'backend': THIRD_PARTY_DIR,
'oppiaTools': TOOLS_DIR
}
_DOWNLOAD_FORMAT_ZIP = 'zip'
_DOWNLOAD_FORMAT_TAR = 'tar'
_DOWNLOAD_FORMAT_FILES = 'files'
DOWNLOAD_FORMATS_TO_MANIFEST_KEYS = {
'zip': {
'mandatory_keys': ['version', 'url', 'downloadFormat'],
'optional_key_pairs': [
['rootDir', 'rootDirPrefix'], ['targetDir', 'targetDirPrefix']]
},
'files': {
'mandatory_keys': [
'version', 'url', 'files',
'targetDirPrefix', 'downloadFormat'],
'optional_key_pairs': []
},
'tar': {
'mandatory_keys': [
'version', 'url', 'tarRootDirPrefix',
'targetDirPrefix', 'downloadFormat'],
'optional_key_pairs': []
}
}
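# Illustrative manifest.json fragment (under the top-level "dependencies" key)
# for the 'zip' download format described above. The library name, version and
# URL are hypothetical; they only show the mandatory keys plus one member of
# each optional key pair.
#
#     "frontend": {
#         "exampleLib": {
#             "version": "1.2.3",
#             "downloadFormat": "zip",
#             "url": "https://example.com/exampleLib-1.2.3.zip",
#             "rootDirPrefix": "exampleLib-",
#             "targetDirPrefix": "example-lib-"
#         }
#     }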
def download_files(source_url_root, target_dir, source_filenames):
"""Downloads a group of files and saves them to a given directory.
Each file is downloaded only if it does not already exist.
Args:
source_url_root: the URL to prepend to all the filenames.
target_dir: the directory to save the files to.
source_filenames: a list of filenames. Each filename is appended to the
end of the source_url_root in order to give the URL from which to
download the file. The downloaded file is then placed in target_dir,
and retains the same filename.
"""
assert isinstance(source_filenames, list)
common.ensure_directory_exists(target_dir)
for filename in source_filenames:
if not os.path.exists(os.path.join(target_dir, filename)):
print 'Downloading file %s to %s' % (filename, target_dir)
urllib.urlretrieve(
'%s/%s' % (source_url_root, filename),
os.path.join(target_dir, filename))
def download_and_unzip_files(
source_url, target_parent_dir, zip_root_name, target_root_name):
"""Downloads a zip file, unzips it, and saves the result in a given dir.
The download occurs only if the target directory that the zip file unzips
to does not exist.
NB: This function assumes that the root level of the zip file has exactly
one folder.
Args:
source_url: the URL from which to download the zip file.
target_parent_dir: the directory to save the contents of the zip file to.
zip_root_name: the name of the top-level folder in the zip directory.
target_root_name: the name that the top-level folder should be renamed to
in the local directory.
"""
if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
print 'Downloading and unzipping file %s to %s' % (
zip_root_name, target_parent_dir)
common.ensure_directory_exists(target_parent_dir)
urllib.urlretrieve(source_url, TMP_UNZIP_PATH)
try:
with zipfile.ZipFile(TMP_UNZIP_PATH, 'r') as zfile:
zfile.extractall(target_parent_dir)
os.remove(TMP_UNZIP_PATH)
except Exception:
if os.path.exists(TMP_UNZIP_PATH):
os.remove(TMP_UNZIP_PATH)
# Some downloads (like jqueryui-themes) may require a user-agent.
req = urllib2.Request(source_url)
req.add_header('User-agent', 'python')
# This is needed to get a seekable filestream that can be used
# by zipfile.ZipFile.
file_stream = StringIO.StringIO(urllib2.urlopen(req).read())
with zipfile.ZipFile(file_stream, 'r') as zfile:
zfile.extractall(target_parent_dir)
# Rename the target directory.
os.rename(
os.path.join(target_parent_dir, zip_root_name),
os.path.join(target_parent_dir, target_root_name))
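# Example call (hypothetical URL and directory names), mirroring how 'zip'
# manifest entries are handled in download_manifest_files() below:
#
#     download_and_unzip_files(
#         'https://example.com/exampleLib-1.2.3.zip', THIRD_PARTY_STATIC_DIR,
#         'exampleLib-1.2.3', 'example-lib-1.2.3')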
def download_and_untar_files(
source_url, target_parent_dir, tar_root_name, target_root_name):
"""Downloads a tar file, untars it, and saves the result in a given dir.
The download occurs only if the target directory that the tar file untars
to does not exist.
NB: This function assumes that the root level of the tar file has exactly
one folder.
Args:
source_url: the URL from which to download the tar file.
target_parent_dir: the directory to save the contents of the tar file to.
tar_root_name: the name of the top-level folder in the tar directory.
target_root_name: the name that the top-level folder should be renamed to
in the local directory.
"""
if not os.path.exists(os.path.join(target_parent_dir, target_root_name)):
print 'Downloading and untarring file %s to %s' % (
tar_root_name, target_parent_dir)
common.ensure_directory_exists(target_parent_dir)
urllib.urlretrieve(source_url, TMP_UNZIP_PATH)
with contextlib.closing(tarfile.open(TMP_UNZIP_PATH, 'r:gz')) as tfile:
tfile.extractall(target_parent_dir)
os.remove(TMP_UNZIP_PATH)
# Rename the target directory.
os.rename(
os.path.join(target_parent_dir, tar_root_name),
os.path.join(target_parent_dir, target_root_name))
def get_file_contents(filepath, mode='r'):
"""Gets the contents of a file, given a relative filepath from oppia/."""
with open(filepath, mode) as f:
return f.read().decode('utf-8')
def return_json(filepath):
"""Return json object when provided url
Args:
filepath: the path to the json file.
Return:
a parsed json objects
"""
response = get_file_contents(filepath)
return json.loads(response)
def test_manifest_syntax(dependency_type, dependency_dict):
"""This checks syntax of the manifest.json dependencies.
Display warning message when there is an error and terminate the program.
Args:
dependency_type: dependency download format.
dependency_dict: manifest.json dependency dict
"""
keys = dependency_dict.keys()
mandatory_keys = DOWNLOAD_FORMATS_TO_MANIFEST_KEYS[
dependency_type]['mandatory_keys']
# Optional keys requires exactly one member of the pair
# to be available as a key in the dependency_dict
optional_key_pairs = DOWNLOAD_FORMATS_TO_MANIFEST_KEYS[
dependency_type]['optional_key_pairs']
for key in mandatory_keys:
if key not in keys:
print '------------------------------------------'
print 'There is syntax error in this dependency'
print dependency_dict
print 'This key is missing or misspelled: "%s".' % key
print 'Exiting'
sys.exit(1)
if optional_key_pairs:
for optional_keys in optional_key_pairs:
optional_keys_in_dict = [
key for key in optional_keys if key in keys]
if len(optional_keys_in_dict) != 1:
print '------------------------------------------'
print 'There is syntax error in this dependency'
print dependency_dict
print (
'Only one of these keys pair must be used: "%s".'
% str(optional_keys))
print 'Exiting'
sys.exit(1)
# Checks the validity of the URL corresponding to the file format.
dependency_url = dependency_dict['url']
if '#' in dependency_url:
dependency_url = dependency_url.rpartition('#')[0]
is_zip_file_format = dependency_type == _DOWNLOAD_FORMAT_ZIP
is_tar_file_format = dependency_type == _DOWNLOAD_FORMAT_TAR
if (dependency_url.endswith('.zip') and not is_zip_file_format or
is_zip_file_format and not dependency_url.endswith('.zip') or
dependency_url.endswith('.tar.gz') and not is_tar_file_format or
is_tar_file_format and not dependency_url.endswith('.tar.gz')):
print '------------------------------------------'
print 'There is syntax error in this dependency'
print dependency_dict
print 'This url %s is invalid for %s file format.' % (
dependency_url, dependency_type)
print 'Exiting.'
sys.exit(1)
def validate_manifest(filepath):
"""This validates syntax of the manifest.json
Args:
filepath: the path to the json file.
"""
manifest_data = return_json(filepath)
dependencies = manifest_data['dependencies']
for _, dependency in dependencies.items():
for _, dependency_contents in dependency.items():
download_format = dependency_contents['downloadFormat']
test_manifest_syntax(download_format, dependency_contents)
def download_manifest_files(filepath):
"""This download all files to the required folders
Args:
filepath: the path to the json file.
"""
validate_manifest(filepath)
manifest_data = return_json(filepath)
dependencies = manifest_data['dependencies']
for data, dependency in dependencies.items():
for _, dependency_contents in dependency.items():
dependency_rev = dependency_contents['version']
dependency_url = dependency_contents['url']
download_format = dependency_contents['downloadFormat']
if download_format == _DOWNLOAD_FORMAT_FILES:
dependency_files = dependency_contents['files']
target_dirname = (
dependency_contents['targetDirPrefix'] + dependency_rev)
dependency_dst = os.path.join(
TARGET_DOWNLOAD_DIRS[data], target_dirname)
download_files(dependency_url, dependency_dst, dependency_files)
elif download_format == _DOWNLOAD_FORMAT_ZIP:
if 'rootDir' in dependency_contents:
dependency_zip_root_name = dependency_contents['rootDir']
else:
dependency_zip_root_name = (
dependency_contents['rootDirPrefix'] + dependency_rev)
if 'targetDir' in dependency_contents:
dependency_target_root_name = (
dependency_contents['targetDir'])
else:
dependency_target_root_name = (
dependency_contents['targetDirPrefix'] + dependency_rev)
download_and_unzip_files(
dependency_url, TARGET_DOWNLOAD_DIRS[data],
dependency_zip_root_name, dependency_target_root_name)
elif download_format == _DOWNLOAD_FORMAT_TAR:
dependency_tar_root_name = (
dependency_contents['tarRootDirPrefix'] + dependency_rev)
dependency_target_root_name = (
dependency_contents['targetDirPrefix'] + dependency_rev)
download_and_untar_files(
dependency_url, TARGET_DOWNLOAD_DIRS[data],
dependency_tar_root_name, dependency_target_root_name)
MATHJAX_REV = '2.6.0'
MATHJAX_ROOT_NAME = 'MathJax-%s' % MATHJAX_REV
MATHJAX_TARGET_ROOT_NAME = MATHJAX_ROOT_NAME
MATHJAX_DIR_PREFIX = os.path.join(
THIRD_PARTY_STATIC_DIR, MATHJAX_TARGET_ROOT_NAME)
MATHJAX_SUBDIRS_TO_REMOVE = [
'unpacked', os.path.join('fonts', 'HTML-CSS', 'TeX', 'png')]
def _install_third_party_libs():
download_manifest_files(MANIFEST_FILE_PATH)
# MathJax is too big. Remove many unneeded files by following these
# instructions:
# https://github.com/mathjax/MathJax/wiki/Shrinking-MathJax-for-%22local%22-installation pylint: disable=line-too-long
for subdir in MATHJAX_SUBDIRS_TO_REMOVE:
full_dir = os.path.join(MATHJAX_DIR_PREFIX, subdir)
if os.path.isdir(full_dir):
print 'Removing unnecessary MathJax directory \'%s\'' % subdir
shutil.rmtree(full_dir)
if __name__ == '__main__':
_install_third_party_libs()
| kennho/oppia | scripts/install_third_party.py | Python | apache-2.0 | 13,118 |
# BEGIN_COPYRIGHT
#
# Copyright 2009-2015 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
"""
Test suite for top-level functions.
"""
import unittest
import os
import tempfile
import shutil
import pydoop
class TestPydoop(unittest.TestCase):
def setUp(self):
self.wd = tempfile.mkdtemp(prefix='pydoop_test_')
self.old_env = os.environ.copy()
def tearDown(self):
shutil.rmtree(self.wd)
os.environ = self.old_env
reload(pydoop)
def test_home(self):
old_home = pydoop.hadoop_home()
if os.path.isdir(old_home):
new_home = os.path.join(self.wd, 'hadoop')
os.symlink(old_home, new_home)
os.environ['HADOOP_HOME'] = new_home
reload(pydoop)
self.assertEqual(pydoop.hadoop_home(), new_home)
def test_conf(self):
os.environ['HADOOP_CONF_DIR'] = self.wd
reload(pydoop)
self.assertEqual(pydoop.hadoop_conf(), self.wd)
def test_pydoop_jar_path(self):
jar_path = pydoop.jar_path()
if jar_path is not None:
self.assertTrue(os.path.exists(jar_path))
directory, filename = os.path.split(jar_path)
self.assertEqual(filename, pydoop.jar_name())
self.assertEqual('pydoop', os.path.basename(directory))
def suite():
suite_ = unittest.TestSuite()
suite_.addTest(TestPydoop('test_home'))
suite_.addTest(TestPydoop('test_conf'))
suite_.addTest(TestPydoop('test_pydoop_jar_path'))
return suite_
if __name__ == '__main__':
_RUNNER = unittest.TextTestRunner(verbosity=2)
_RUNNER.run((suite()))
| ilveroluca/pydoop | test/common/test_pydoop.py | Python | apache-2.0 | 2,152 |