# Copyright 2021 DeepMind Technologies Limited and Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Drift utils test."""
from absl.testing import absltest
from absl.testing import parameterized
from dd_two_player_games import drift_utils
from dd_two_player_games import gan
LEARNING_RATE_TUPLES = [
(0.01, 0.01),
(0.01, 0.05),
(0.05, 0.01),
(0.0001, 0.5)]
class DriftUtilsTest(parameterized.TestCase):
"""Test class to ensure drift coefficients are computed correctly.
  Ensures that the drift coefficients in two-player games are
  computed according to the math for:
* simultaneous updates.
* alternating updates (for both player orders).
"""
@parameterized.parameters(LEARNING_RATE_TUPLES)
def test_sim_updates(self, disc_lr, gen_lr):
# player order does not matter.
# the number of updates does not matter for simultaneous updates.
learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr)
drift_coeffs = drift_utils.get_dd_coeffs(
None, True, learning_rates, num_updates=None)
self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr)
self.assertEqual(drift_coeffs.disc.other_norm, 0.0)
self.assertEqual(drift_coeffs.disc.other_dot_prod, 0.5 * disc_lr)
self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr)
self.assertEqual(drift_coeffs.gen.other_norm, 0.0)
self.assertEqual(drift_coeffs.gen.other_dot_prod, 0.5 * gen_lr)
@parameterized.parameters(LEARNING_RATE_TUPLES)
def test_alt_updates(self, disc_lr, gen_lr):
learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr)
num_updates = gan.GANTuple(disc=1, gen=1)
drift_coeffs = drift_utils.get_dd_coeffs(
drift_utils.PlayerOrder.disc_first, False, learning_rates,
num_updates=num_updates)
self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr)
self.assertEqual(drift_coeffs.disc.other_norm, 0.0)
self.assertEqual(drift_coeffs.disc.other_dot_prod, 0.5 * disc_lr)
self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr)
self.assertEqual(drift_coeffs.gen.other_norm, 0.0)
self.assertEqual(
drift_coeffs.gen.other_dot_prod,
0.5 * gen_lr * (1 - 2 * disc_lr / gen_lr))
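    # Sanity check on the math above (illustrative): the expression
    # 0.5 * gen_lr * (1 - 2 * disc_lr / gen_lr) simplifies to
    # 0.5 * gen_lr - disc_lr, i.e. the second player's drift coefficient
    # is reduced by the first player's learning rate.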
@parameterized.parameters(LEARNING_RATE_TUPLES)
def test_alt_updates_change_player_order(self, disc_lr, gen_lr):
learning_rates = gan.GANTuple(disc=disc_lr, gen=gen_lr)
num_updates = gan.GANTuple(disc=1, gen=1)
drift_coeffs = drift_utils.get_dd_coeffs(
drift_utils.PlayerOrder.gen_first, False, learning_rates,
num_updates=num_updates)
self.assertEqual(drift_coeffs.disc.self_norm, 0.5 * disc_lr)
self.assertEqual(drift_coeffs.disc.other_norm, 0.0)
self.assertEqual(
drift_coeffs.disc.other_dot_prod,
0.5 * disc_lr * (1 - 2 * gen_lr / disc_lr))
self.assertEqual(drift_coeffs.gen.self_norm, 0.5 * gen_lr)
self.assertEqual(drift_coeffs.gen.other_norm, 0.0)
self.assertEqual(drift_coeffs.gen.other_dot_prod, 0.5 * gen_lr)
if __name__ == '__main__':
absltest.main()
| deepmind/dd_two_player_games | dd_two_player_games/drift_utils_test.py | Python | apache-2.0 | 3,547 |
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deferred tasks for bootstrapping the GnG app."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import functools
import inspect
import logging
import os
import sys
from distutils import version
from google.appengine.ext import deferred
from loaner.web_app import constants
from loaner.web_app.backend.clients import bigquery
from loaner.web_app.backend.clients import directory
from loaner.web_app.backend.lib import datastore_yaml
from loaner.web_app.backend.lib import user
from loaner.web_app.backend.lib import utils
from loaner.web_app.backend.models import bootstrap_status_model
from loaner.web_app.backend.models import config_model
_ORG_UNIT_EXISTS_MSG = 'Org unit %s already exists, so cannot create.'
_TASK_DESCRIPTIONS = {
'bootstrap_datastore_yaml': 'Importing datastore YAML file',
'bootstrap_chrome_ous': 'Creating Chrome OUs in Directory',
'bootstrap_bq_history': 'Configuring datastore history tables in BigQuery',
'bootstrap_load_config_yaml': 'Loading config_defaults.yaml into datastore.'
}
# Tasks that should only be run for a new deployment, i.e. they are destructive.
_BOOTSTRAP_INIT_TASKS = (
'bootstrap_datastore_yaml',
'bootstrap_load_config_yaml'
)
# Tasks that should be run for an update or can rerun, i.e. they are idempotent.
_BOOTSTRAP_UPDATE_TASKS = tuple(
set(_TASK_DESCRIPTIONS.keys()) - set(_BOOTSTRAP_INIT_TASKS)
)
class Error(Exception):
"""Exception raised when master method called but ENABLE_BOOTSTRAP False."""
def managed_task(task_function):
"""Decorator to manage task methods.
This records the status of the task in an entity and raises the
deferred.PermanentTaskFailure exception to prevent tasks from repeating upon
failure. In such cases, the exception message is recorded to the entity.
Args:
task_function: function, to be managed by the decorator.
Returns:
Wrapped function.
Raises:
deferred.PermanentTaskFailure: if anything at all goes wrong.
"""
@functools.wraps(task_function)
def wrapper(*args, **kwargs):
"""Wrapper for managed task decorator."""
status_entity = bootstrap_status_model.BootstrapStatus.get_or_insert(
task_function.__name__)
status_entity.description = _TASK_DESCRIPTIONS.get(
task_function.__name__, task_function.__name__)
status_entity.timestamp = datetime.datetime.utcnow()
try:
task_function(*args, **kwargs)
status_entity.success = True
status_entity.details = None
status_entity.put()
except Exception as e:
status_entity.success = False
status_entity.details = '{} {}'.format(str(type(e)), str(e))
status_entity.put()
raise deferred.PermanentTaskFailure(
'Task {} failed; error: {}'.format(
task_function.__name__, status_entity.details))
return wrapper
@managed_task
def bootstrap_datastore_yaml(wipe=True, **kwargs):
"""Bootstraps arbitrary datastore entities from supplied YAML input.
Args:
wipe: bool, whether to wipe all existing datastore models for any model
contained in the YAML.
**kwargs: keyword args including a user_email with which to run the
datastore methods (required for BigQuery streaming).
"""
with open(
os.path.join(os.path.dirname(__file__), 'bootstrap.yaml')) as yaml_file:
datastore_yaml.import_yaml(yaml_file.read(), kwargs['user_email'], wipe)
@managed_task
def bootstrap_chrome_ous(**kwargs):
"""Bootstraps Chrome device OUs.
Args:
**kwargs: keyword args including a user_email with which to run the
Directory API client methods (required for BigQuery streaming).
"""
logging.info('Requesting delegated admin for bootstrap')
client = directory.DirectoryApiClient(user_email=kwargs['user_email'])
for org_unit_name, org_unit_path in constants.ORG_UNIT_DICT.iteritems():
logging.info(
'Creating org unit %s at path %s ...', org_unit_name, org_unit_path)
if client.get_org_unit(org_unit_path):
logging.warn(_ORG_UNIT_EXISTS_MSG, org_unit_name)
else:
client.insert_org_unit(org_unit_path)
@managed_task
def bootstrap_bq_history(**kwargs):
"""Bootstraps BigQuery history tables for archival purposes.
Args:
**kwargs: keyword args including a user_email with which to run the
Directory API client methods (required for BigQuery streaming).
"""
del kwargs # Unused, but comes by default.
client = bigquery.BigQueryClient()
client.initialize_tables()
@managed_task
def bootstrap_load_config_yaml(**kwargs):
"""Loads config_defaults.yaml into datastore.
Args:
**kwargs: Unused, but required for bootstrap tasks.
"""
del kwargs # Unused, but comes by default.
config_defaults = utils.load_config_from_yaml()
for name, value in config_defaults.iteritems():
if name == 'bootstrap_started':
config_model.Config.set(name, config_model.Config.get(name), False)
else:
config_model.Config.set(name, value, False)
def get_bootstrap_functions(get_all=False):
"""Gets all functions necessary for bootstrap.
This function collects only the functions necessary for the bootstrap
process. Specifically, it will collect tasks specific to a new or existing
deployment (an update). Additionally, it will collect any failed tasks so that
they can be attempted again.
Args:
get_all: bool, return all bootstrap tasks, defaults to False.
Returns:
Dict, all functions necessary for bootstrap.
"""
module_functions = inspect.getmembers(
sys.modules[__name__], inspect.isfunction)
bootstrap_functions = {
key: value
for key, value in dict(module_functions)
.iteritems() if key.startswith('bootstrap_')
}
if get_all or _is_new_deployment():
return bootstrap_functions
if is_update():
bootstrap_functions = {
key: value for key, value in bootstrap_functions.iteritems()
if key in _BOOTSTRAP_UPDATE_TASKS
}
else: # Collect all bootstrap functions that failed and all update tasks.
for function_name in bootstrap_functions.keys():
status_entity = bootstrap_status_model.BootstrapStatus.get_by_id(
function_name)
if (status_entity and
status_entity.success and
function_name not in _BOOTSTRAP_UPDATE_TASKS):
del bootstrap_functions[function_name]
return bootstrap_functions
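# Illustrative behaviour, derived from the task sets defined above: during
# an update, is_update() is True and only the idempotent tasks remain, i.e.
#   get_bootstrap_functions()  # -> {'bootstrap_chrome_ous': ...,
#                              #     'bootstrap_bq_history': ...}
# while get_bootstrap_functions(get_all=True) returns all four
# bootstrap_* functions.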
def _run_function_as_task(all_functions_list, function_name, kwargs=None):
"""Runs a specific function and its kwargs as an AppEngine task.
Args:
    all_functions_list: dict, A mapping of all function names that are
      registered as bootstrap functions on the Loaner app to the functions
      themselves.
    function_name: string, A specific function that should be run as a task.
kwargs: dict, Optional kwargs to be passed to the function that will run.
Returns:
The deferred task from AppEngine taskqueue.
Raises:
Error: if requested bootstrap method is not allowed or does not exist.
"""
logging.debug('Running %s as a task.', function_name)
function = all_functions_list.get(function_name)
if function is None:
raise Error(
'Requested bootstrap method {} does not exist.'.format(function_name))
if not kwargs:
kwargs = {}
kwargs['user_email'] = user.get_user_email()
return deferred.defer(function, **kwargs)
def run_bootstrap(requested_tasks=None):
"""Runs one or more bootstrap functions.
Args:
requested_tasks: dict, wherein the keys are function names and the
values are keyword arg dicts. If no functions are passed, runs all
necessary bootstrap functions with no specific kwargs.
Returns:
A dictionary of started tasks, with the task names as keys and the values
being task descriptions as found in _TASK_DESCRIPTIONS.
"""
config_model.Config.set('bootstrap_started', True)
bootstrap_functions = get_bootstrap_functions()
if _is_new_deployment():
logging.info('Running bootstrap for a new deployment.')
else:
logging.info(
'Running bootstrap for an update from version %s to %s.',
config_model.Config.get('running_version'),
constants.APP_VERSION)
run_status_dict = {}
if requested_tasks:
for function_name, kwargs in requested_tasks.iteritems():
_run_function_as_task(bootstrap_functions, function_name, kwargs)
run_status_dict[function_name] = _TASK_DESCRIPTIONS.get(
function_name, function_name)
else:
logging.debug('Running all functions as no specific function was passed.')
for function_name in bootstrap_functions:
_run_function_as_task(bootstrap_functions, function_name)
run_status_dict[function_name] = _TASK_DESCRIPTIONS.get(
function_name, function_name)
return run_status_dict
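# Illustrative (hypothetical) call on a new deployment, running a single
# task with explicit kwargs:
#   run_bootstrap({'bootstrap_datastore_yaml': {'wipe': False}})
# defers that one task and returns
#   {'bootstrap_datastore_yaml': 'Importing datastore YAML file'}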
def _is_new_deployment():
"""Checks whether this is a new deployment.
  A '0.0' version number and a missing bootstrap_datastore_yaml task
  status indicate that this is a new deployment. The latter check
is to support backward-compatibility with early alpha versions that did not
have a version number.
Returns:
True if this is a new deployment, else False.
"""
return (config_model.Config.get('running_version') == '0.0' and
not bootstrap_status_model.BootstrapStatus.get_by_id(
'bootstrap_datastore_yaml'))
def _is_latest_version():
"""Checks if the app is up to date and sets bootstrap to incomplete if not.
  Checks whether the running version is the same as the deployed version,
  because an app that is not up to date should trigger bootstrap moving back
  to an incomplete state, thus signaling that certain tasks need to be run
  again.
Returns:
True if running matches deployed version and not a new install, else False.
"""
if _is_new_deployment():
return False
up_to_date = version.LooseVersion(
constants.APP_VERSION) == version.LooseVersion(
config_model.Config.get('running_version'))
if not up_to_date and not is_bootstrap_started():
# Set the updates tasks to incomplete so that they run again.
config_model.Config.set('bootstrap_completed', False)
for task in _BOOTSTRAP_UPDATE_TASKS:
status_entity = bootstrap_status_model.BootstrapStatus.get_or_insert(task)
status_entity.success = False
status_entity.put()
return up_to_date
def is_update():
"""Checks whether the application is in a state requiring an update.
Returns:
True if an update is available and this is not a new installation.
"""
if _is_new_deployment():
return False
return version.LooseVersion(constants.APP_VERSION) > version.LooseVersion(
config_model.Config.get('running_version'))
def is_bootstrap_completed():
"""Gets the general status of the app bootstrap.
Ensures that the latest version is running and that bootstrap has completed.
Returns:
True if the bootstrap is complete, else False.
"""
return (_is_latest_version() and
config_model.Config.get('bootstrap_completed'))
def is_bootstrap_started():
"""Checks to see if bootstrap has started.
Returns:
True if the bootstrap has started, else False.
"""
if (config_model.Config.get('bootstrap_started') and
config_model.Config.get('bootstrap_completed')):
# If bootstrap was completed indicate that it is no longer in progress.
config_model.Config.set('bootstrap_started', False)
return config_model.Config.get('bootstrap_started')
def get_bootstrap_task_status():
"""Gets the status of the bootstrap tasks.
Additionally, this sets the overall completion status if the tasks were
successful and sets the running version number after bootstrap completion.
Returns:
Dictionary with task names as the keys and values being sub-dictionaries
containing data derived from the datastore entities. If there is no data
for any given task, its place is held by an empty dict.
"""
bootstrap_completed = True
bootstrap_task_status = {}
for function_name in get_bootstrap_functions(get_all=True):
status_entity = bootstrap_status_model.BootstrapStatus.get_by_id(
function_name)
if status_entity:
bootstrap_task_status[function_name] = status_entity.to_dict()
else:
bootstrap_task_status[function_name] = {}
if not bootstrap_task_status[function_name].get('success'):
bootstrap_completed = False
if bootstrap_completed:
config_model.Config.set(
'running_version', constants.APP_VERSION)
logging.info(
'Successfully bootstrapped application to version %s.',
constants.APP_VERSION)
config_model.Config.set('bootstrap_completed', bootstrap_completed)
return bootstrap_task_status
| google/loaner | loaner/web_app/backend/lib/bootstrap.py | Python | apache-2.0 | 13,356 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-17 19:24
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0047_remove_mapupdate_changed_geometries'),
]
operations = [
migrations.CreateModel(
name='Ramp',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('minx', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min x coordinate')),
('miny', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min y coordinate')),
('maxx', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='max x coordinate')),
('maxy', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='max y coordinate')),
('geometry', c3nav.mapdata.fields.GeometryField(default=None, geomtype='polygon')),
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ramps', to='mapdata.Space', verbose_name='space')),
],
options={
'verbose_name': 'Ramp',
'verbose_name_plural': 'Ramps',
'default_related_name': 'ramps',
},
),
]
| c3nav/c3nav | src/c3nav/mapdata/migrations/0048_ramp.py | Python | apache-2.0 | 1,532 |
from test.lib.testing import eq_
from sqlalchemy.orm import mapper, relationship, create_session, \
clear_mappers, sessionmaker, class_mapper
from sqlalchemy.orm.mapper import _mapper_registry
from sqlalchemy.orm.session import _sessions
import operator
from test.lib import testing, engines
from sqlalchemy import MetaData, Integer, String, ForeignKey, \
PickleType, create_engine, Unicode
from test.lib.schema import Table, Column
import sqlalchemy as sa
from sqlalchemy.sql import column
from sqlalchemy.processors import to_decimal_processor_factory, \
to_unicode_processor_factory
from test.lib.util import gc_collect
from sqlalchemy.util.compat import decimal
import gc
import weakref
from test.lib import fixtures
class A(fixtures.ComparableEntity):
pass
class B(fixtures.ComparableEntity):
pass
def profile_memory(func):
# run the test 50 times. if length of gc.get_objects()
# keeps growing, assert false
def profile(*args):
gc_collect()
samples = [0 for x in range(0, 50)]
for x in range(0, 50):
func(*args)
gc_collect()
samples[x] = len(gc.get_objects())
print "sample gc sizes:", samples
assert len(_sessions) == 0
for x in samples[-4:]:
if x != samples[-5]:
flatline = False
break
else:
flatline = True
# object count is bigger than when it started
if not flatline and samples[-1] > samples[0]:
for x in samples[1:-2]:
# see if a spike bigger than the endpoint exists
if x > samples[-1]:
break
else:
assert False, repr(samples) + " " + repr(flatline)
return profile
def assert_no_mappers():
clear_mappers()
gc_collect()
assert len(_mapper_registry) == 0
class EnsureZeroed(fixtures.ORMTest):
def setup(self):
_sessions.clear()
_mapper_registry.clear()
class MemUsageTest(EnsureZeroed):
__requires__ = 'cpython',
# ensure a pure growing test trips the assertion
@testing.fails_if(lambda: True)
def test_fixture(self):
class Foo(object):
pass
x = []
@profile_memory
def go():
x[-1:] = [Foo(), Foo(), Foo(), Foo(), Foo(), Foo()]
go()
def test_session(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)))
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)),
Column('col3', Integer, ForeignKey("mytable.col1")))
metadata.create_all()
m1 = mapper(A, table1, properties={
"bs":relationship(B, cascade="all, delete",
order_by=table2.c.col1)},
order_by=table1.c.col1)
m2 = mapper(B, table2)
m3 = mapper(A, table1, non_primary=True)
@profile_memory
def go():
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1,a2,a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")])
],
alist)
for a in alist:
sess.delete(a)
sess.flush()
go()
metadata.drop_all()
del m1, m2, m3
assert_no_mappers()
@testing.crashes('sqlite', ':memory: connection not suitable here')
def test_orm_many_engines(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)))
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)),
Column('col3', Integer, ForeignKey("mytable.col1")))
metadata.create_all()
m1 = mapper(A, table1, properties={
"bs":relationship(B, cascade="all, delete",
order_by=table2.c.col1)},
order_by=table1.c.col1,
_compiled_cache_size=10
)
m2 = mapper(B, table2,
_compiled_cache_size=10
)
m3 = mapper(A, table1, non_primary=True)
@profile_memory
def go():
engine = engines.testing_engine(
options={'logging_name':'FOO',
'pool_logging_name':'BAR',
'use_reaper':False}
)
sess = create_session(bind=engine)
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1,a2,a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")])
],
alist)
for a in alist:
sess.delete(a)
sess.flush()
sess.close()
engine.dispose()
go()
metadata.drop_all()
del m1, m2, m3
assert_no_mappers()
def test_ad_hoc_types(self):
"""test storage of bind processors, result processors
in dialect-wide registry."""
from sqlalchemy.dialects import mysql, postgresql, sqlite
from sqlalchemy import types
eng = engines.testing_engine()
for args in (
(types.Integer, ),
(types.String, ),
(types.PickleType, ),
(types.Enum, 'a', 'b', 'c'),
(sqlite.DATETIME, ),
(postgresql.ENUM, 'a', 'b', 'c'),
(types.Interval, ),
(postgresql.INTERVAL, ),
(mysql.VARCHAR, ),
):
@profile_memory
def go():
type_ = args[0](*args[1:])
bp = type_._cached_bind_processor(eng.dialect)
rp = type_._cached_result_processor(eng.dialect, 0)
go()
assert not eng.dialect._type_memos
def test_many_updates(self):
metadata = MetaData(testing.db)
wide_table = Table('t', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
*[Column('col%d' % i, Integer) for i in range(10)]
)
class Wide(object):
pass
mapper(Wide, wide_table, _compiled_cache_size=10)
metadata.create_all()
session = create_session()
w1 = Wide()
session.add(w1)
session.flush()
session.close()
del session
counter = [1]
@profile_memory
def go():
session = create_session()
w1 = session.query(Wide).first()
x = counter[0]
dec = 10
while dec > 0:
# trying to count in binary here,
# works enough to trip the test case
if pow(2, dec) < x:
setattr(w1, 'col%d' % dec, counter[0])
x -= pow(2, dec)
dec -= 1
session.flush()
session.close()
counter[0] += 1
try:
go()
finally:
metadata.drop_all()
@testing.fails_if(lambda : testing.db.dialect.name == 'sqlite' \
and testing.db.dialect.dbapi.version_info >= (2,
5),
'Newer pysqlites generate warnings here too and '
'have similar issues.')
def test_unicode_warnings(self):
metadata = MetaData(testing.db)
table1 = Table('mytable', metadata, Column('col1', Integer,
primary_key=True,
test_needs_autoincrement=True), Column('col2',
Unicode(30)))
metadata.create_all()
i = [1]
@testing.emits_warning()
@profile_memory
def go():
# execute with a non-unicode object. a warning is emitted,
# this warning shouldn't clog up memory.
testing.db.execute(table1.select().where(table1.c.col2
== 'foo%d' % i[0]))
i[0] += 1
try:
go()
finally:
metadata.drop_all()
def test_mapper_reset(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)))
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)),
Column('col3', Integer, ForeignKey("mytable.col1")))
@profile_memory
def go():
m1 = mapper(A, table1, properties={
"bs":relationship(B, order_by=table2.c.col1)
})
m2 = mapper(B, table2)
m3 = mapper(A, table1, non_primary=True)
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1,a2,a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")])
],
alist)
for a in alist:
sess.delete(a)
sess.flush()
sess.close()
clear_mappers()
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
def test_with_inheritance(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30))
)
table2 = Table("mytable2", metadata,
Column('col1', Integer, ForeignKey('mytable.col1'),
primary_key=True, test_needs_autoincrement=True),
Column('col3', String(30)),
)
@profile_memory
def go():
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
mapper(A, table1,
polymorphic_on=table1.c.col2,
polymorphic_identity='a')
mapper(B, table2,
inherits=A,
polymorphic_identity='b')
sess = create_session()
a1 = A()
a2 = A()
b1 = B(col3='b1')
b2 = B(col3='b2')
for x in [a1,a2,b1, b2]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(), A(), B(col3='b1'), B(col3='b2')
],
alist)
for a in alist:
sess.delete(a)
sess.flush()
            # don't need to clear_mappers()
del B
del A
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
def test_with_manytomany(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30))
)
table2 = Table("mytable2", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', String(30)),
)
table3 = Table('t1tot2', metadata,
Column('t1', Integer, ForeignKey('mytable.col1')),
Column('t2', Integer, ForeignKey('mytable2.col1')),
)
@profile_memory
def go():
class A(fixtures.ComparableEntity):
pass
class B(fixtures.ComparableEntity):
pass
mapper(A, table1, properties={
'bs':relationship(B, secondary=table3,
backref='as', order_by=table3.c.t1)
})
mapper(B, table2)
sess = create_session()
a1 = A(col2='a1')
a2 = A(col2='a2')
b1 = B(col2='b1')
b2 = B(col2='b2')
a1.bs.append(b1)
a2.bs.append(b2)
for x in [a1,a2]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(bs=[B(col2='b1')]), A(bs=[B(col2='b2')])
],
alist)
for a in alist:
sess.delete(a)
sess.flush()
            # don't need to clear_mappers()
del B
del A
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
@testing.fails_if(lambda : testing.db.dialect.name == 'sqlite' \
and testing.db.dialect.dbapi.version > '2.5')
@testing.provide_metadata
def test_key_fallback_result(self):
e = testing.db
m = self.metadata
t = Table('t', m, Column('x', Integer), Column('y', Integer))
m.create_all(e)
e.execute(t.insert(), {"x":1, "y":1})
@profile_memory
def go():
r = e.execute(t.alias().select())
for row in r:
row[t.c.x]
go()
    # fails on newer versions of pysqlite due to unusual memory behavior
# in pysqlite itself. background at:
# http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290
@testing.fails_if(lambda : testing.db.dialect.name == 'sqlite' \
and testing.db.dialect.dbapi.version > '2.5')
def test_join_cache(self):
metadata = MetaData(testing.db)
table1 = Table('table1', metadata, Column('id', Integer,
primary_key=True,
test_needs_autoincrement=True), Column('data',
String(30)))
table2 = Table('table2', metadata, Column('id', Integer,
primary_key=True,
test_needs_autoincrement=True), Column('data',
String(30)), Column('t1id', Integer,
ForeignKey('table1.id')))
class Foo(object):
pass
class Bar(object):
pass
mapper(Foo, table1, properties={'bars'
: relationship(mapper(Bar, table2))})
metadata.create_all()
session = sessionmaker()
@profile_memory
def go():
s = table2.select()
sess = session()
sess.query(Foo).join((s, Foo.bars)).all()
sess.rollback()
try:
go()
finally:
metadata.drop_all()
def test_mutable_identity(self):
metadata = MetaData(testing.db)
table1 = Table("mytable", metadata,
Column('col1', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('col2', PickleType(comparator=operator.eq, mutable=True))
)
class Foo(object):
def __init__(self, col2):
self.col2 = col2
mapper(Foo, table1)
metadata.create_all()
session = sessionmaker()()
def go():
obj = [
Foo({'a':1}),
Foo({'b':1}),
Foo({'c':1}),
Foo({'d':1}),
Foo({'e':1}),
Foo({'f':1}),
Foo({'g':1}),
Foo({'h':1}),
Foo({'i':1}),
Foo({'j':1}),
Foo({'k':1}),
Foo({'l':1}),
]
session.add_all(obj)
session.commit()
testing.eq_(len(session.identity_map._mutable_attrs), 12)
testing.eq_(len(session.identity_map), 12)
obj = None
gc_collect()
testing.eq_(len(session.identity_map._mutable_attrs), 0)
testing.eq_(len(session.identity_map), 0)
try:
go()
finally:
metadata.drop_all()
def test_type_compile(self):
from sqlalchemy.dialects.sqlite.base import dialect as SQLiteDialect
cast = sa.cast(column('x'), sa.Integer)
@profile_memory
def go():
dialect = SQLiteDialect()
cast.compile(dialect=dialect)
go()
@testing.requires.cextensions
def test_DecimalResultProcessor_init(self):
@profile_memory
def go():
to_decimal_processor_factory({}, 10)
go()
@testing.requires.cextensions
def test_DecimalResultProcessor_process(self):
@profile_memory
def go():
to_decimal_processor_factory(decimal.Decimal, 10)(1.2)
go()
@testing.requires.cextensions
def test_UnicodeResultProcessor_init(self):
@profile_memory
def go():
to_unicode_processor_factory('utf8')
go()
| ioram7/keystone-federado-pgid2013 | build/sqlalchemy/test/aaa_profiling/test_memusage.py | Python | apache-2.0 | 19,051 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import test # noqa
from tempest_lib.common.utils import data_utils # noqa
from tempest_lib import exceptions as lib_exc # noqa
from manila_tempest_tests import clients_share as clients
from manila_tempest_tests.tests.api import base
class ShareTypesAdminNegativeTest(base.BaseSharesAdminTest):
def _create_share_type(self):
name = data_utils.rand_name("unique_st_name")
extra_specs = self.add_required_extra_specs_to_dict({"key": "value"})
return self.create_share_type(name, extra_specs=extra_specs)
@classmethod
def resource_setup(cls):
super(ShareTypesAdminNegativeTest, cls).resource_setup()
cls.member_shares_client = clients.Manager().shares_client
@test.attr(type=["gate", "smoke", ])
def test_create_share_with_nonexistent_share_type(self):
self.assertRaises(lib_exc.NotFound,
self.create_share,
share_type_id=data_utils.rand_name("fake"))
@test.attr(type=["gate", "smoke", ])
def test_create_share_type_with_empty_name(self):
self.assertRaises(lib_exc.BadRequest, self.create_share_type, '')
@test.attr(type=["gate", "smoke", ])
def test_create_share_type_with_too_big_name(self):
self.assertRaises(lib_exc.BadRequest,
self.create_share_type,
"x" * 256)
@test.attr(type=["gate", "smoke", ])
def test_get_share_type_by_nonexistent_id(self):
self.assertRaises(lib_exc.NotFound,
self.shares_client.get_share_type,
data_utils.rand_name("fake"))
@test.attr(type=["gate", "smoke", ])
def test_try_delete_share_type_by_nonexistent_id(self):
self.assertRaises(lib_exc.NotFound,
self.shares_client.delete_share_type,
data_utils.rand_name("fake"))
@test.attr(type=["gate", "smoke", ])
def test_try_create_duplicate_of_share_type(self):
st = self._create_share_type()
self.assertRaises(lib_exc.Conflict,
self.create_share_type,
st["share_type"]["name"],
extra_specs=self.add_required_extra_specs_to_dict())
@test.attr(type=["gate", "smoke", ])
def test_add_share_type_allowed_for_public(self):
st = self._create_share_type()
self.assertRaises(lib_exc.Conflict,
self.shares_client.add_access_to_share_type,
st["share_type"]["id"],
self.shares_client.tenant_id)
@test.attr(type=["gate", "smoke", ])
def test_remove_share_type_allowed_for_public(self):
st = self._create_share_type()
self.assertRaises(lib_exc.Conflict,
self.shares_client.remove_access_from_share_type,
st["share_type"]["id"],
self.shares_client.tenant_id)
@test.attr(type=["gate", "smoke", ])
def test_add_share_type_by_nonexistent_id(self):
self.assertRaises(lib_exc.NotFound,
self.shares_client.add_access_to_share_type,
data_utils.rand_name("fake"),
self.shares_client.tenant_id)
@test.attr(type=["gate", "smoke", ])
def test_remove_share_type_by_nonexistent_id(self):
self.assertRaises(lib_exc.NotFound,
self.shares_client.remove_access_from_share_type,
data_utils.rand_name("fake"),
self.shares_client.tenant_id)
| scality/manila | manila_tempest_tests/tests/api/admin/test_share_types_negative.py | Python | apache-2.0 | 4,282 |
# Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cirq
import cirq_google
def test_equality():
assert cirq_google.PhysicalZTag() == cirq_google.PhysicalZTag()
assert hash(cirq_google.PhysicalZTag()) == hash(cirq_google.PhysicalZTag())
def test_syc_str_repr():
assert str(cirq_google.PhysicalZTag()) == 'PhysicalZTag()'
assert repr(cirq_google.PhysicalZTag()) == 'cirq_google.PhysicalZTag()'
cirq.testing.assert_equivalent_repr(
cirq_google.PhysicalZTag(), setup_code=('import cirq\nimport cirq_google\n')
)
| quantumlib/Cirq | cirq-google/cirq_google/ops/physical_z_tag_test.py | Python | apache-2.0 | 1,084 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import string
import textwrap
import six
from six.moves.configparser import ConfigParser
from swift.common.utils import (
config_true_value, SWIFT_CONF_FILE, whataremyips, list_from_csv)
from swift.common.ring import Ring, RingData
from swift.common.utils import quorum_size
from swift.common.exceptions import RingValidationError
from pyeclib.ec_iface import ECDriver, ECDriverError, VALID_EC_TYPES
LEGACY_POLICY_NAME = 'Policy-0'
VALID_CHARS = '-' + string.ascii_letters + string.digits
DEFAULT_POLICY_TYPE = REPL_POLICY = 'replication'
EC_POLICY = 'erasure_coding'
DEFAULT_EC_OBJECT_SEGMENT_SIZE = 1048576
class BindPortsCache(object):
def __init__(self, swift_dir, bind_ip):
self.swift_dir = swift_dir
self.mtimes_by_ring_path = {}
self.portsets_by_ring_path = {}
self.my_ips = set(whataremyips(bind_ip))
def all_bind_ports_for_node(self):
"""
Given an iterable of IP addresses identifying a storage backend server,
return a set of all bind ports defined in all rings for this storage
backend server.
The caller is responsible for not calling this method (which performs
at least a stat on all ring files) too frequently.
"""
# NOTE: we don't worry about disappearing rings here because you can't
# ever delete a storage policy.
for policy in POLICIES:
# NOTE: we must NOT use policy.load_ring to load the ring. Users
# of this utility function will not need the actual ring data, just
# the bind ports.
#
# This is duplicated with Ring.__init__ just a bit...
serialized_path = os.path.join(self.swift_dir,
policy.ring_name + '.ring.gz')
try:
new_mtime = os.path.getmtime(serialized_path)
except OSError:
continue
old_mtime = self.mtimes_by_ring_path.get(serialized_path)
if not old_mtime or old_mtime != new_mtime:
self.portsets_by_ring_path[serialized_path] = set(
dev['port']
for dev in RingData.load(serialized_path,
metadata_only=True).devs
if dev and dev['ip'] in self.my_ips)
self.mtimes_by_ring_path[serialized_path] = new_mtime
# No "break" here so that the above line will update the
# mtimes_by_ring_path entry for any ring that changes, not just
# the first one we notice.
# Return the requested set of ports from our (now-freshened) cache
return six.moves.reduce(set.union,
self.portsets_by_ring_path.values(), set())
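# Illustrative (hypothetical) usage of BindPortsCache: the cache only
# re-reads a ring file when its mtime changes, so calling it repeatedly is
# cheap. The directory, IP and ports below are made up.
#   cache = BindPortsCache('/etc/swift', '203.0.113.5')
#   ports = cache.all_bind_ports_for_node()  # e.g. set([6200, 6210])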
class PolicyError(ValueError):
def __init__(self, msg, index=None):
if index is not None:
msg += ', for index %r' % index
super(PolicyError, self).__init__(msg)
def _get_policy_string(base, policy_index):
if policy_index == 0 or policy_index is None:
return_string = base
else:
return_string = base + "-%d" % int(policy_index)
return return_string
def get_policy_string(base, policy_or_index):
"""
Helper function to construct a string from a base and the policy.
Used to encode the policy index into either a file name or a
directory name by various modules.
:param base: the base string
:param policy_or_index: StoragePolicy instance, or an index
(string or int), if None the legacy
storage Policy-0 is assumed.
:returns: base name with policy index added
:raises: PolicyError if no policy exists with the given policy_index
"""
if isinstance(policy_or_index, BaseStoragePolicy):
policy = policy_or_index
else:
policy = POLICIES.get_by_index(policy_or_index)
if policy is None:
raise PolicyError("Unknown policy", index=policy_or_index)
return _get_policy_string(base, int(policy))
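# Illustrative examples (assuming POLICIES contains a policy with index 1):
#   get_policy_string('objects', 0)     -> 'objects'
#   get_policy_string('objects', None)  -> 'objects'    (legacy Policy-0)
#   get_policy_string('objects', 1)     -> 'objects-1'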
def split_policy_string(policy_string):
"""
Helper function to convert a string representing a base and a
policy. Used to decode the policy from either a file name or
a directory name by various modules.
:param policy_string: base name with policy index added
:raises: PolicyError if given index does not map to a valid policy
:returns: a tuple, in the form (base, policy) where base is the base
string and policy is the StoragePolicy instance for the
index encoded in the policy_string.
"""
if '-' in policy_string:
base, policy_index = policy_string.rsplit('-', 1)
else:
base, policy_index = policy_string, None
policy = POLICIES.get_by_index(policy_index)
if get_policy_string(base, policy) != policy_string:
raise PolicyError("Unknown policy", index=policy_index)
return base, policy
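# Illustrative examples, the inverse of get_policy_string above:
#   split_policy_string('objects')    -> ('objects', <Policy-0>)
#   split_policy_string('objects-1')  -> ('objects', <policy with index 1>)
# A string that does not round-trip, e.g. 'objects-9' when no policy with
# index 9 is configured, raises PolicyError.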
class BaseStoragePolicy(object):
"""
Represents a storage policy. Not meant to be instantiated directly;
    implement a derived subclass (e.g. StoragePolicy, ECStoragePolicy, etc.)
or use :func:`~swift.common.storage_policy.reload_storage_policies` to
load POLICIES from ``swift.conf``.
The object_ring property is lazy loaded once the service's ``swift_dir``
is known via :meth:`~StoragePolicyCollection.get_object_ring`, but it may
be over-ridden via object_ring kwarg at create time for testing or
actively loaded with :meth:`~StoragePolicy.load_ring`.
"""
policy_type_to_policy_cls = {}
def __init__(self, idx, name='', is_default=False, is_deprecated=False,
object_ring=None, aliases=''):
# do not allow BaseStoragePolicy class to be instantiated directly
if type(self) == BaseStoragePolicy:
raise TypeError("Can't instantiate BaseStoragePolicy directly")
# policy parameter validation
try:
self.idx = int(idx)
except ValueError:
raise PolicyError('Invalid index', idx)
if self.idx < 0:
raise PolicyError('Invalid index', idx)
self.alias_list = []
if not name or not self._validate_policy_name(name):
raise PolicyError('Invalid name %r' % name, idx)
self.alias_list.append(name)
if aliases:
names_list = list_from_csv(aliases)
for alias in names_list:
if alias == name:
continue
self._validate_policy_name(alias)
self.alias_list.append(alias)
self.is_deprecated = config_true_value(is_deprecated)
self.is_default = config_true_value(is_default)
if self.policy_type not in BaseStoragePolicy.policy_type_to_policy_cls:
raise PolicyError('Invalid type', self.policy_type)
if self.is_deprecated and self.is_default:
raise PolicyError('Deprecated policy can not be default. '
'Invalid config', self.idx)
self.ring_name = _get_policy_string('object', self.idx)
self.object_ring = object_ring
@property
def name(self):
return self.alias_list[0]
@name.setter
    def name(self, name):
self._validate_policy_name(name)
self.alias_list[0] = name
@property
def aliases(self):
return ", ".join(self.alias_list)
def __int__(self):
return self.idx
def __cmp__(self, other):
return cmp(self.idx, int(other))
def __repr__(self):
return ("%s(%d, %r, is_default=%s, "
"is_deprecated=%s, policy_type=%r)") % \
(self.__class__.__name__, self.idx, self.alias_list,
self.is_default, self.is_deprecated, self.policy_type)
@classmethod
def register(cls, policy_type):
"""
Decorator for Storage Policy implementations to register
their StoragePolicy class. This will also set the policy_type
attribute on the registered implementation.
"""
def register_wrapper(policy_cls):
if policy_type in cls.policy_type_to_policy_cls:
raise PolicyError(
'%r is already registered for the policy_type %r' % (
cls.policy_type_to_policy_cls[policy_type],
policy_type))
cls.policy_type_to_policy_cls[policy_type] = policy_cls
policy_cls.policy_type = policy_type
return policy_cls
return register_wrapper
@classmethod
def _config_options_map(cls):
"""
Map config option name to StoragePolicy parameter name.
"""
return {
'name': 'name',
'aliases': 'aliases',
'policy_type': 'policy_type',
'default': 'is_default',
'deprecated': 'is_deprecated',
}
@classmethod
def from_config(cls, policy_index, options):
config_to_policy_option_map = cls._config_options_map()
policy_options = {}
for config_option, value in options.items():
try:
policy_option = config_to_policy_option_map[config_option]
except KeyError:
raise PolicyError('Invalid option %r in '
'storage-policy section' % config_option,
index=policy_index)
policy_options[policy_option] = value
return cls(policy_index, **policy_options)
def get_info(self, config=False):
"""
Return the info dict and conf file options for this policy.
:param config: boolean, if True all config options are returned
"""
info = {}
for config_option, policy_attribute in \
self._config_options_map().items():
info[config_option] = getattr(self, policy_attribute)
if not config:
# remove some options for public consumption
if not self.is_default:
info.pop('default')
if not self.is_deprecated:
info.pop('deprecated')
info.pop('policy_type')
return info
def _validate_policy_name(self, name):
"""
Helper function to determine the validity of a policy name. Used
to check policy names before setting them.
:param name: a name string for a single policy name.
:returns: true if the name is valid.
:raises: PolicyError if the policy name is invalid.
"""
# this is defensively restrictive, but could be expanded in the future
if not all(c in VALID_CHARS for c in name):
raise PolicyError('Names are used as HTTP headers, and can not '
'reliably contain any characters not in %r. '
'Invalid name %r' % (VALID_CHARS, name))
if name.upper() == LEGACY_POLICY_NAME.upper() and self.idx != 0:
msg = 'The name %s is reserved for policy index 0. ' \
'Invalid name %r' % (LEGACY_POLICY_NAME, name)
raise PolicyError(msg, self.idx)
if name.upper() in (existing_name.upper() for existing_name
in self.alias_list):
msg = 'The name %s is already assigned to this policy.' % name
raise PolicyError(msg, self.idx)
return True
def add_name(self, name):
"""
Adds an alias name to the storage policy. Shouldn't be called
directly from the storage policy but instead through the
storage policy collection class, so lookups by name resolve
correctly.
:param name: a new alias for the storage policy
"""
if self._validate_policy_name(name):
self.alias_list.append(name)
def remove_name(self, name):
"""
Removes an alias name from the storage policy. Shouldn't be called
directly from the storage policy but instead through the storage
policy collection class, so lookups by name resolve correctly. If
        the name removed is the primary name then the next available alias
will be adopted as the new primary name.
:param name: a name assigned to the storage policy
"""
if name not in self.alias_list:
raise PolicyError("%s is not a name assigned to policy %s"
% (name, self.idx))
if len(self.alias_list) == 1:
raise PolicyError("Cannot remove only name %s from policy %s. "
"Policies must have at least one name."
% (name, self.idx))
else:
self.alias_list.remove(name)
def change_primary_name(self, name):
"""
Changes the primary/default name of the policy to a specified name.
:param name: a string name to replace the current primary name.
"""
if name == self.name:
return
elif name in self.alias_list:
self.remove_name(name)
else:
self._validate_policy_name(name)
self.alias_list.insert(0, name)
def _validate_ring(self):
"""
Hook, called when the ring is loaded. Can be used to
validate the ring against the StoragePolicy configuration.
"""
pass
def load_ring(self, swift_dir):
"""
Load the ring for this policy immediately.
:param swift_dir: path to rings
"""
if self.object_ring:
return
self.object_ring = Ring(swift_dir, ring_name=self.ring_name)
# Validate ring to make sure it conforms to policy requirements
self._validate_ring()
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to
consider the client request successful.
"""
raise NotImplementedError()
@BaseStoragePolicy.register(REPL_POLICY)
class StoragePolicy(BaseStoragePolicy):
"""
Represents a storage policy of type 'replication'. Default storage policy
class unless otherwise overridden from swift.conf.
Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
@property
def quorum(self):
"""
Quorum concept in the replication case:
floor(number of replica / 2) + 1
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
return quorum_size(self.object_ring.replica_count)
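        # Worked example (illustrative): with the common 3-replica ring,
        # quorum_size(3) == 3 // 2 + 1 == 2, so two successful backend
        # responses are enough to consider the client request successful.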
@BaseStoragePolicy.register(EC_POLICY)
class ECStoragePolicy(BaseStoragePolicy):
"""
Represents a storage policy of type 'erasure_coding'.
Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
def __init__(self, idx, name='', aliases='', is_default=False,
is_deprecated=False, object_ring=None,
ec_segment_size=DEFAULT_EC_OBJECT_SEGMENT_SIZE,
ec_type=None, ec_ndata=None, ec_nparity=None):
super(ECStoragePolicy, self).__init__(
idx=idx, name=name, aliases=aliases, is_default=is_default,
is_deprecated=is_deprecated, object_ring=object_ring)
# Validate erasure_coding policy specific members
# ec_type is one of the EC implementations supported by PyEClib
if ec_type is None:
raise PolicyError('Missing ec_type')
if ec_type not in VALID_EC_TYPES:
raise PolicyError('Wrong ec_type %s for policy %s, should be one'
' of "%s"' % (ec_type, self.name,
', '.join(VALID_EC_TYPES)))
self._ec_type = ec_type
# Define _ec_ndata as the number of EC data fragments
# Accessible as the property "ec_ndata"
try:
value = int(ec_ndata)
if value <= 0:
raise ValueError
self._ec_ndata = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_data_fragments %r' %
ec_ndata, index=self.idx)
# Define _ec_nparity as the number of EC parity fragments
# Accessible as the property "ec_nparity"
try:
value = int(ec_nparity)
if value <= 0:
raise ValueError
self._ec_nparity = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_parity_fragments %r'
% ec_nparity, index=self.idx)
# Define _ec_segment_size as the encode segment unit size
# Accessible as the property "ec_segment_size"
try:
value = int(ec_segment_size)
if value <= 0:
raise ValueError
self._ec_segment_size = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_object_segment_size %r' %
ec_segment_size, index=self.idx)
# Initialize PyECLib EC backend
try:
self.pyeclib_driver = \
ECDriver(k=self._ec_ndata, m=self._ec_nparity,
ec_type=self._ec_type)
except ECDriverError as e:
raise PolicyError("Error creating EC policy (%s)" % e,
index=self.idx)
# quorum size in the EC case depends on the choice of EC scheme.
self._ec_quorum_size = \
self._ec_ndata + self.pyeclib_driver.min_parity_fragments_needed()
@property
def ec_type(self):
return self._ec_type
@property
def ec_ndata(self):
return self._ec_ndata
@property
def ec_nparity(self):
return self._ec_nparity
@property
def ec_segment_size(self):
return self._ec_segment_size
@property
def fragment_size(self):
"""
Maximum length of a fragment, including header.
NB: a fragment archive is a sequence of 0 or more max-length
fragments followed by one possibly-shorter fragment.
"""
# Technically pyeclib's get_segment_info signature calls for
# (data_len, segment_size) but on a ranged GET we don't know the
# ec-content-length header before we need to compute where in the
# object we should request to align with the fragment size. So we
        # tell pyeclib a lie - from its perspective, as long as data_len >=
# segment_size it'll give us the answer we want. From our
# perspective, because we only use this answer to calculate the
# *minimum* size we should read from an object body even if data_len <
# segment_size we'll still only read *the whole one and only last
        # fragment* and pass that into pyeclib, which will know what to do with
# it just as it always does when the last fragment is < fragment_size.
return self.pyeclib_driver.get_segment_info(
self.ec_segment_size, self.ec_segment_size)['fragment_size']
@property
def ec_scheme_description(self):
"""
This short hand form of the important parts of the ec schema is stored
in Object System Metadata on the EC Fragment Archives for debugging.
"""
return "%s %d+%d" % (self._ec_type, self._ec_ndata, self._ec_nparity)
def __repr__(self):
return ("%s, EC config(ec_type=%s, ec_segment_size=%d, "
"ec_ndata=%d, ec_nparity=%d)") % \
(super(ECStoragePolicy, self).__repr__(), self.ec_type,
self.ec_segment_size, self.ec_ndata, self.ec_nparity)
@classmethod
def _config_options_map(cls):
options = super(ECStoragePolicy, cls)._config_options_map()
options.update({
'ec_type': 'ec_type',
'ec_object_segment_size': 'ec_segment_size',
'ec_num_data_fragments': 'ec_ndata',
'ec_num_parity_fragments': 'ec_nparity',
})
return options
def get_info(self, config=False):
info = super(ECStoragePolicy, self).get_info(config=config)
if not config:
info.pop('ec_object_segment_size')
info.pop('ec_num_data_fragments')
info.pop('ec_num_parity_fragments')
info.pop('ec_type')
return info
def _validate_ring(self):
"""
EC specific validation
Replica count check - we need _at_least_ (#data + #parity) replicas
configured. Also if the replica count is larger than exactly that
number there's a non-zero risk of error for code that is considering
the number of nodes in the primary list from the ring.
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
nodes_configured = self.object_ring.replica_count
if nodes_configured != (self.ec_ndata + self.ec_nparity):
raise RingValidationError(
'EC ring for policy %s needs to be configured with '
'exactly %d nodes. Got %d.' % (
self.name, self.ec_ndata + self.ec_nparity,
nodes_configured))
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to consider
the client request successful.
The quorum size for EC policies defines the minimum number
of data + parity elements required to be able to guarantee
the desired fault tolerance, which is the number of data
elements supplemented by the minimum number of parity
elements required by the chosen erasure coding scheme.
        For example, for Reed-Solomon, the minimum number of parity
elements required is 1, and thus the quorum_size requirement
is ec_ndata + 1.
Given the number of parity elements required is not the same
for every erasure coding scheme, consult PyECLib for
min_parity_fragments_needed()
"""
return self._ec_quorum_size
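        # Worked example (illustrative): a Reed-Solomon policy with
        # ec_num_data_fragments = 10 and ec_num_parity_fragments = 4 needs
        # at least one parity fragment, so quorum == 10 + 1 == 11 even
        # though the ring holds 14 nodes.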
class StoragePolicyCollection(object):
"""
This class represents the collection of valid storage policies for the
cluster and is instantiated as :class:`StoragePolicy` objects are added to
the collection when ``swift.conf`` is parsed by
:func:`parse_storage_policies`.
When a StoragePolicyCollection is created, the following validation
is enforced:
* If a policy with index 0 is not declared and no other policies defined,
Swift will create one
* The policy index must be a non-negative integer
* If no policy is declared as the default and no other policies are
defined, the policy with index 0 is set as the default
* Policy indexes must be unique
* Policy names are required
* Policy names are case insensitive
* Policy names must contain only letters, digits or a dash
* Policy names must be unique
* The policy name 'Policy-0' can only be used for the policy with index 0
* If any policies are defined, exactly one policy must be declared default
* Deprecated policies can not be declared the default
"""
def __init__(self, pols):
self.default = []
self.by_name = {}
self.by_index = {}
self._validate_policies(pols)
def _add_policy(self, policy):
"""
Add pre-validated policies to internal indexes.
"""
for name in policy.alias_list:
self.by_name[name.upper()] = policy
self.by_index[int(policy)] = policy
def __repr__(self):
return (textwrap.dedent("""
StoragePolicyCollection([
%s
])
""") % ',\n '.join(repr(p) for p in self)).strip()
def __len__(self):
return len(self.by_index)
def __getitem__(self, key):
return self.by_index[key]
def __iter__(self):
return iter(self.by_index.values())
def _validate_policies(self, policies):
"""
:param policies: list of policies
"""
for policy in policies:
if int(policy) in self.by_index:
raise PolicyError('Duplicate index %s conflicts with %s' % (
policy, self.get_by_index(int(policy))))
for name in policy.alias_list:
if name.upper() in self.by_name:
raise PolicyError('Duplicate name %s conflicts with %s' % (
policy, self.get_by_name(name)))
if policy.is_default:
if not self.default:
self.default = policy
else:
raise PolicyError(
'Duplicate default %s conflicts with %s' % (
policy, self.default))
self._add_policy(policy)
# If a 0 policy wasn't explicitly given, or nothing was
# provided, create the 0 policy now
if 0 not in self.by_index:
if len(self) != 0:
raise PolicyError('You must specify a storage policy '
'section for policy index 0 in order '
'to define multiple policies')
self._add_policy(StoragePolicy(0, name=LEGACY_POLICY_NAME))
# at least one policy must be enabled
enabled_policies = [p for p in self if not p.is_deprecated]
if not enabled_policies:
raise PolicyError("Unable to find policy that's not deprecated!")
# if needed, specify default
if not self.default:
if len(self) > 1:
raise PolicyError("Unable to find default policy")
self.default = self[0]
self.default.is_default = True
def get_by_name(self, name):
"""
Find a storage policy by its name.
:param name: name of the policy
:returns: storage policy, or None
"""
return self.by_name.get(name.upper())
def get_by_index(self, index):
"""
Find a storage policy by its index.
An index of None will be treated as 0.
:param index: numeric index of the storage policy
:returns: storage policy, or None if no such policy
"""
# makes it easier for callers to just pass in a header value
if index in ('', None):
index = 0
else:
try:
index = int(index)
except ValueError:
return None
return self.by_index.get(index)
@property
def legacy(self):
return self.get_by_index(None)
def get_object_ring(self, policy_idx, swift_dir):
"""
Get the ring object to use to handle a request based on its policy.
An index of None will be treated as 0.
:param policy_idx: policy index as defined in swift.conf
:param swift_dir: swift_dir used by the caller
:returns: appropriate ring object
"""
policy = self.get_by_index(policy_idx)
if not policy:
raise PolicyError("No policy with index %s" % policy_idx)
if not policy.object_ring:
policy.load_ring(swift_dir)
return policy.object_ring
def get_policy_info(self):
"""
Build info about policies for the /info endpoint
:returns: list of dicts containing relevant policy information
"""
policy_info = []
for pol in self:
# delete from /info if deprecated
if pol.is_deprecated:
continue
policy_entry = pol.get_info()
policy_info.append(policy_entry)
return policy_info
def add_policy_alias(self, policy_index, *aliases):
"""
Adds a new name or names to a policy
:param policy_index: index of a policy in this policy collection.
:param *aliases: arbitrary number of string policy names to add.
"""
policy = self.get_by_index(policy_index)
for alias in aliases:
if alias.upper() in self.by_name:
raise PolicyError('Duplicate name %s in use '
'by policy %s' % (alias,
self.get_by_name(alias)))
else:
policy.add_name(alias)
self.by_name[alias.upper()] = policy
def remove_policy_alias(self, *aliases):
"""
        Removes a name or names from a policy. If the name removed is the
        primary name then the next available alias will be adopted
        as the new primary name.
:param *aliases: arbitrary number of existing policy names to remove.
"""
for alias in aliases:
policy = self.get_by_name(alias)
if not policy:
raise PolicyError('No policy with name %s exists.' % alias)
if len(policy.alias_list) == 1:
raise PolicyError('Policy %s with name %s has only one name. '
'Policies must have at least one name.' % (
policy, alias))
else:
policy.remove_name(alias)
del self.by_name[alias.upper()]
def change_policy_primary_name(self, policy_index, new_name):
"""
Changes the primary or default name of a policy. The new primary
name can be an alias that already belongs to the policy or a
completely new name.
:param policy_index: index of a policy in this policy collection.
:param new_name: a string name to set as the new default name.
"""
policy = self.get_by_index(policy_index)
name_taken = self.get_by_name(new_name)
# if the name belongs to some other policy in the collection
if name_taken and name_taken != policy:
raise PolicyError('Other policy %s with name %s exists.' %
(self.get_by_name(new_name).idx, new_name))
else:
policy.change_primary_name(new_name)
self.by_name[new_name.upper()] = policy
def parse_storage_policies(conf):
"""
Parse storage policies in ``swift.conf`` - note that validation
is done when the :class:`StoragePolicyCollection` is instantiated.
:param conf: ConfigParser parser object for swift.conf
"""
policies = []
for section in conf.sections():
if not section.startswith('storage-policy:'):
continue
policy_index = section.split(':', 1)[1]
config_options = dict(conf.items(section))
policy_type = config_options.pop('policy_type', DEFAULT_POLICY_TYPE)
policy_cls = BaseStoragePolicy.policy_type_to_policy_cls[policy_type]
policy = policy_cls.from_config(policy_index, config_options)
policies.append(policy)
return StoragePolicyCollection(policies)
class StoragePolicySingleton(object):
"""
An instance of this class is the primary interface to storage policies
exposed as a module level global named ``POLICIES``. This global
reference wraps ``_POLICIES`` which is normally instantiated by parsing
``swift.conf`` and will result in an instance of
:class:`StoragePolicyCollection`.
You should never patch this instance directly, instead patch the module
level ``_POLICIES`` instance so that swift code which imported
``POLICIES`` directly will reference the patched
:class:`StoragePolicyCollection`.
"""
def __iter__(self):
return iter(_POLICIES)
def __len__(self):
return len(_POLICIES)
def __getitem__(self, key):
return _POLICIES[key]
def __getattribute__(self, name):
return getattr(_POLICIES, name)
def __repr__(self):
return repr(_POLICIES)
def reload_storage_policies():
"""
Reload POLICIES from ``swift.conf``.
"""
global _POLICIES
policy_conf = ConfigParser()
policy_conf.read(SWIFT_CONF_FILE)
try:
_POLICIES = parse_storage_policies(policy_conf)
except PolicyError as e:
raise SystemExit('ERROR: Invalid Storage Policy Configuration '
'in %s (%s)' % (SWIFT_CONF_FILE, e))
# parse configuration and setup singleton
_POLICIES = None
reload_storage_policies()
POLICIES = StoragePolicySingleton()
| levythu/swift | swift/common/storage_policy.py | Python | apache-2.0 | 33,131 |
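# A minimal, standard-library sketch of the "storage-policy:<index>" section
# convention that parse_storage_policies() above relies on; the sample
# swift.conf text is hypothetical, and Python 3's configparser is assumed.
from configparser import ConfigParser

SAMPLE_CONF = """
[storage-policy:0]
name = gold
default = yes
[storage-policy:1]
name = silver
"""

def sketch_parse(conf_text):
    parser = ConfigParser()
    parser.read_string(conf_text)
    policies = []
    for section in parser.sections():
        if not section.startswith('storage-policy:'):
            continue  # ignore unrelated sections, as the real parser does
        index = int(section.split(':', 1)[1])
        policies.append((index, dict(parser.items(section))))
    return sorted(policies)

# sketch_parse(SAMPLE_CONF) ->
# [(0, {'name': 'gold', 'default': 'yes'}), (1, {'name': 'silver'})]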
# (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_utils import excutils
import paramiko
from cinder import exception
from cinder.i18n import _, _LE
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.zonemanager.drivers.brocade import brcd_fabric_opts as fabric_opts
import cinder.zonemanager.drivers.brocade.fc_zone_constants as zone_constant
from cinder.zonemanager import fc_san_lookup_service as fc_service
LOG = logging.getLogger(__name__)
class BrcdFCSanLookupService(fc_service.FCSanLookupService):
"""The SAN lookup service that talks to Brocade switches.
Version History:
1.0.0 - Initial version
"""
VERSION = "1.0.0"
def __init__(self, **kwargs):
"""Initializing the client."""
super(BrcdFCSanLookupService, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.create_configuration()
self.client = self.create_ssh_client(**kwargs)
def create_configuration(self):
"""Configuration specific to SAN context values."""
config = self.configuration
fabric_names = [x.strip() for x in config.fc_fabric_names.split(',')]
LOG.debug('Fabric Names: %s', fabric_names)
# There can be more than one SAN in the network and we need to
# get credentials for each for SAN context lookup later.
if len(fabric_names) > 0:
self.fabric_configs = fabric_opts.load_fabric_configurations(
fabric_names)
def create_ssh_client(self, **kwargs):
ssh_client = paramiko.SSHClient()
known_hosts_file = kwargs.get('known_hosts_file', None)
if known_hosts_file is None:
ssh_client.load_system_host_keys()
else:
ssh_client.load_host_keys(known_hosts_file)
missing_key_policy = kwargs.get('missing_key_policy', None)
if missing_key_policy is None:
missing_key_policy = paramiko.WarningPolicy()
ssh_client.set_missing_host_key_policy(missing_key_policy)
return ssh_client
def get_device_mapping_from_network(self,
initiator_wwn_list,
target_wwn_list):
"""Provides the initiator/target map for available SAN contexts.
Looks up nameserver of each fc SAN configured to find logged in devices
and returns a map of initiator and target port WWNs for each fabric.
:param initiator_wwn_list: List of initiator port WWN
:param target_wwn_list: List of target port WWN
        :returns: List -- device wwn map in the following format
{
<San name>: {
'initiator_port_wwn_list':
('200000051e55a100', '200000051e55a121'..)
'target_port_wwn_list':
('100000051e55a100', '100000051e55a121'..)
}
}
        :raises: Exception when connection to the fabric fails
"""
device_map = {}
formatted_target_list = []
formatted_initiator_list = []
fabric_map = {}
fabric_names = self.configuration.fc_fabric_names
fabrics = None
if not fabric_names:
raise exception.InvalidParameterValue(
err=_("Missing Fibre Channel SAN configuration "
"param - fc_fabric_names"))
fabrics = [x.strip() for x in fabric_names.split(',')]
LOG.debug("FC Fabric List: %s", fabrics)
if fabrics:
for t in target_wwn_list:
formatted_target_list.append(self.get_formatted_wwn(t))
            for i in initiator_wwn_list:
                formatted_initiator_list.append(self.get_formatted_wwn(i))
for fabric_name in fabrics:
fabric_ip = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_address')
fabric_user = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_user')
fabric_pwd = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_password')
fabric_port = self.fabric_configs[fabric_name].safe_get(
'fc_fabric_port')
# Get name server data from fabric and find the targets
# logged in
nsinfo = ''
try:
LOG.debug("Getting name server data for "
"fabric %s", fabric_ip)
self.client.connect(
fabric_ip, fabric_port, fabric_user, fabric_pwd)
nsinfo = self.get_nameserver_info()
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting name server info from"
" fabric %s") % fabric_ip)
except Exception as e:
msg = _("SSH connection failed "
"for %(fabric)s with error: %(err)s"
) % {'fabric': fabric_ip, 'err': e}
LOG.error(msg)
raise exception.FCSanLookupServiceException(message=msg)
finally:
self.client.close()
LOG.debug("Lookup service:nsinfo-%s", nsinfo)
LOG.debug("Lookup service:initiator list from "
"caller-%s", formatted_initiator_list)
LOG.debug("Lookup service:target list from "
"caller-%s", formatted_target_list)
visible_targets = filter(lambda x: x in formatted_target_list,
nsinfo)
visible_initiators = filter(lambda x: x in
formatted_initiator_list, nsinfo)
if visible_targets:
LOG.debug("Filtered targets is: %s", visible_targets)
# getting rid of the : before returning
for idx, elem in enumerate(visible_targets):
elem = str(elem).replace(':', '')
visible_targets[idx] = elem
else:
LOG.debug("No targets are in the nameserver for SAN %s",
fabric_name)
if visible_initiators:
                    # getting rid of the : before returning
for idx, elem in enumerate(visible_initiators):
elem = str(elem).replace(':', '')
visible_initiators[idx] = elem
else:
LOG.debug("No initiators are in the nameserver "
"for SAN %s", fabric_name)
fabric_map = {
'initiator_port_wwn_list': visible_initiators,
'target_port_wwn_list': visible_targets
}
device_map[fabric_name] = fabric_map
LOG.debug("Device map for SAN context: %s", device_map)
return device_map
def get_nameserver_info(self):
"""Get name server data from fabric.
        This method will return the connected node port wwn list (local
        and remote) for the given switch fabric
"""
cli_output = None
nsinfo_list = []
try:
cli_output = self._get_switch_data(zone_constant.NS_SHOW)
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting nsshow info for fabric"))
if cli_output:
nsinfo_list = self._parse_ns_output(cli_output)
try:
cli_output = self._get_switch_data(zone_constant.NS_CAM_SHOW)
except exception.FCSanLookupServiceException:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed collecting nscamshow"))
if cli_output:
nsinfo_list.extend(self._parse_ns_output(cli_output))
LOG.debug("Connector returning nsinfo-%s", nsinfo_list)
return nsinfo_list
def _get_switch_data(self, cmd):
stdin, stdout, stderr = None, None, None
utils.check_ssh_injection([cmd])
try:
stdin, stdout, stderr = self.client.exec_command(cmd)
switch_data = stdout.readlines()
except paramiko.SSHException as e:
msg = (_("SSH Command failed with error '%(err)s' "
"'%(command)s'") % {'err': e,
'command': cmd})
LOG.error(msg)
raise exception.FCSanLookupServiceException(message=msg)
finally:
if (stdin):
stdin.flush()
stdin.close()
if (stdout):
stdout.close()
if (stderr):
stderr.close()
return switch_data
def _parse_ns_output(self, switch_data):
"""Parses name server data.
Parses nameserver raw data and adds the device port wwns to the list
        :returns: list of device port wwns from the ns info
"""
nsinfo_list = []
for line in switch_data:
if not(" NL " in line or " N " in line):
continue
linesplit = line.split(';')
if len(linesplit) > 2:
node_port_wwn = linesplit[2]
nsinfo_list.append(node_port_wwn)
else:
msg = _("Malformed nameserver string: %s") % line
LOG.error(msg)
raise exception.InvalidParameterValue(err=msg)
return nsinfo_list
def get_formatted_wwn(self, wwn_str):
"""Utility API that formats WWN to insert ':'."""
if (len(wwn_str) != 16):
return wwn_str.lower()
else:
return (':'.join([wwn_str[i:i + 2]
for i in range(0, len(wwn_str), 2)])).lower()
| Akrog/cinder | cinder/zonemanager/drivers/brocade/brcd_fc_san_lookup_service.py | Python | apache-2.0 | 10,817 |
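# A standalone sketch of the WWN formatting that get_formatted_wwn() above
# implements: a bare 16-hex-digit WWN is split into byte pairs joined by
# colons. The sample WWN is made up.
def format_wwn(wwn_str):
    if len(wwn_str) != 16:
        return wwn_str.lower()  # already formatted (or unexpected input)
    return ':'.join(wwn_str[i:i + 2] for i in range(0, len(wwn_str), 2)).lower()

assert format_wwn('200000051E55A100') == '20:00:00:05:1e:55:a1:00'
assert format_wwn('20:00:00:05:1e:55:a1:00') == '20:00:00:05:1e:55:a1:00'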
#!/usr/bin/env python
# Copyright 2017, Major Hayden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle all shell commands/arguments/options."""
import importlib
import os
import pkgutil
import sys
import click
context_settings = dict(auto_envvar_prefix='MonitorStack')
class Context(object):
"""Set up a context object that we can pass."""
def __init__(self):
"""Initialize class."""
self.verbose = False
self.home = os.getcwd()
def log(self, msg, *args):
"""Log a message to stderr."""
click.echo(msg, file=sys.stderr)
def vlog(self, msg, *args):
"""Log a message to stderr only if verbose is enabled."""
if self.verbose:
self.log(msg, *args)
pass_context = click.make_pass_decorator(Context, ensure=True)
class MonitorStackCLI(click.MultiCommand):
"""Create a complex command finder."""
@property
def cmd_folder(self):
"""Get the path to the plugin directory."""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'plugins'
)
)
    def list_commands(self, ctx):
        """Get a list of all available commands."""
        rv = list()
        for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
            rv.append(pkg_name)
        return sorted(rv)
    def get_command(self, ctx, name):
        """Load a command and run it."""
        for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
            if pkg_name == name:
                mod = importlib.import_module(
                    'monitorstack.plugins.{}'.format(name)
                )
                return getattr(mod, 'cli')
        raise SystemExit('Module "{}" Not Found.'.format(name))
VALID_OUTPUT_FORMATS = [
'json',
'line',
'telegraf',
'rax-maas'
]
@click.command(cls=MonitorStackCLI, context_settings=context_settings)
@click.option(
'-f', '--format', 'output_format',
type=click.Choice(VALID_OUTPUT_FORMATS),
default='json',
help="Output format (valid options: {}".format(
', '.join(VALID_OUTPUT_FORMATS)
),
)
@click.option('-v', '--verbose', is_flag=True, help='Enables verbose mode.')
@pass_context
def cli(*args, **kwargs):
"""A complex command line interface."""
try:
args[0].verbose = kwargs.get('verbose', False)
except IndexError: # pragma: no cover
pass
@cli.resultcallback(replace=True)
def process_result(results, output_format, **kwargs):
"""Render the output into the proper format."""
module_name = 'monitorstack.common.formatters'
method_name = 'write_{}'.format(output_format.replace('-', '_'))
output_formatter = getattr(
importlib.import_module(module_name),
method_name
)
# Force the output formatter into a list
if not isinstance(results, list): # pragma: no cover
results = [results]
    exit_code = 0
    for result in results:
        output_formatter(result)
        if result['exit_code'] != 0:
            exit_code = result['exit_code']
    sys.exit(exit_code)
if __name__ == '__main__': # pragma: no cover
topdir = os.path.normpath(
os.path.join(
os.path.abspath(
sys.argv[0]
),
os.pardir,
os.pardir
)
)
sys.path.insert(0, topdir)
cli()
| major/monitorstack | monitorstack/cli.py | Python | apache-2.0 | 3,975 |
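# A minimal sketch of the pkgutil-based plugin discovery MonitorStackCLI
# uses above; 'plugin_dir' and the package name are placeholders, and only
# the standard library is assumed.
import importlib
import pkgutil

def discover(plugin_dir):
    """Return the sorted module names found directly under plugin_dir."""
    return sorted(name for _, name, _ in pkgutil.iter_modules([plugin_dir]))

def load(package, name):
    """Import <package>.<name> and return its 'cli' attribute, mirroring
    get_command() above."""
    module = importlib.import_module('{}.{}'.format(package, name))
    return getattr(module, 'cli')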
from lxml import etree
from nxpy.interface import Interface
from nxpy.vlan import Vlan
from nxpy.flow import Flow
from util import tag_pattern
class Device(object):
# Singleton
_instance = None
    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # object.__new__ accepts no extra arguments on Python 3
            cls._instance = super(Device, cls).__new__(cls)
        return cls._instance
def __init__(self):
self.name = ''
self.domain_name = ''
self.interfaces = []
self.vlans = []
self.routing_options = []
def export(self, netconf_config=False):
config = etree.Element("configuration")
device = etree.Element('system')
if self.name:
etree.SubElement(device, "host-name").text = self.name
if self.domain_name:
etree.SubElement(device, "domain-name").text = self.domain_name
if len(device.getchildren()):
config.append(device)
interfaces = etree.Element('interfaces')
if len(self.interfaces):
for interface in self.interfaces:
if (interface):
interfaces.append(interface.export())
config.append(interfaces)
vlans = etree.Element('vlans')
if len(self.vlans):
for vlan in self.vlans:
if (vlan):
vlans.append(vlan.export())
config.append(vlans)
routing_options = etree.Element('routing-options')
if len(self.routing_options):
for ro in self.routing_options:
if (ro):
routing_options.append(ro.export())
config.append(routing_options)
if netconf_config:
conf = etree.Element("config")
conf.append(config)
config = conf
if len(config.getchildren()):
return config
else:
return False
def build(self, node):
for child in node:
nodeName_ = tag_pattern.match(child.tag).groups()[-1]
self.buildChildren(child, nodeName_)
def buildChildren(self, child_, nodeName_, from_subclass=False):
if nodeName_ == 'interfaces':
for node in child_:
obj_ = Interface()
obj_.build(node)
self.interfaces.append(obj_)
if nodeName_ == 'vlans':
for node in child_:
obj_ = Vlan()
obj_.build(node)
self.vlans.append(obj_)
if nodeName_ == 'routing-options':
for node in child_:
childName_ = tag_pattern.match(node.tag).groups()[-1]
# *************** FLOW ****************
if childName_ == 'flow':
obj_ = Flow()
obj_.build(node)
self.routing_options.append(obj_)
| Kent1/nxpy | nxpy/device.py | Python | apache-2.0 | 2,884 |
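# A small sketch of the lxml element-building pattern Device.export() uses
# above: build subtrees, then attach them only when they carry content.
# It assumes lxml is installed; the host name is invented.
from lxml import etree

config = etree.Element('configuration')
system = etree.Element('system')
etree.SubElement(system, 'host-name').text = 'lab-router'
if len(system):  # only append non-empty subtrees
    config.append(system)
print(etree.tostring(config, pretty_print=True).decode())
# <configuration>
#   <system>
#     <host-name>lab-router</host-name>
#   </system>
# </configuration>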
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy models for heat data."""
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
import six
import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy.orm import session as orm_session
from heat.db.sqlalchemy import types
BASE = declarative.declarative_base()
def get_session():
from heat.db.sqlalchemy import api as db_api
return db_api.get_session()
class HeatBase(models.ModelBase, models.TimestampMixin):
"""Base class for Heat Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
def expire(self, session=None, attrs=None):
"""Expire this object ()."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.expire(self, attrs)
def refresh(self, session=None, attrs=None):
"""Refresh this object."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.refresh(self, attrs)
def delete(self, session=None):
"""Delete this object."""
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.begin(subtransactions=True)
session.delete(self)
session.commit()
def update_and_save(self, values, session=None):
if not session:
session = orm_session.Session.object_session(self)
if not session:
session = get_session()
session.begin(subtransactions=True)
for k, v in six.iteritems(values):
setattr(self, k, v)
session.commit()
class SoftDelete(object):
deleted_at = sqlalchemy.Column(sqlalchemy.DateTime)
def soft_delete(self, session=None):
"""Mark this object as deleted."""
self.update_and_save({'deleted_at': timeutils.utcnow()},
session=session)
class StateAware(object):
action = sqlalchemy.Column('action', sqlalchemy.String(255))
status = sqlalchemy.Column('status', sqlalchemy.String(255))
status_reason = sqlalchemy.Column('status_reason', sqlalchemy.Text)
class RawTemplate(BASE, HeatBase):
"""Represents an unparsed template which should be in JSON format."""
__tablename__ = 'raw_template'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
template = sqlalchemy.Column(types.Json)
files = sqlalchemy.Column(types.Json)
environment = sqlalchemy.Column('environment', types.Json)
class StackTag(BASE, HeatBase):
"""Key/value store of arbitrary stack tags."""
__tablename__ = 'stack_tag'
id = sqlalchemy.Column('id',
sqlalchemy.Integer,
primary_key=True,
nullable=False)
tag = sqlalchemy.Column('tag', sqlalchemy.Unicode(80))
stack_id = sqlalchemy.Column('stack_id',
sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
class SyncPoint(BASE, HeatBase):
"""Represents a syncpoint for a stack that is being worked on."""
__tablename__ = 'sync_point'
__table_args__ = (
sqlalchemy.PrimaryKeyConstraint('entity_id',
'traversal_id',
'is_update'),
sqlalchemy.ForeignKeyConstraint(['stack_id'], ['stack.id'])
)
entity_id = sqlalchemy.Column(sqlalchemy.String(36))
traversal_id = sqlalchemy.Column(sqlalchemy.String(36))
is_update = sqlalchemy.Column(sqlalchemy.Boolean)
# integer field for atomic update operations
atomic_key = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
nullable=False)
input_data = sqlalchemy.Column(types.Json)
class Stack(BASE, HeatBase, SoftDelete, StateAware):
"""Represents a stack created by the heat engine."""
__tablename__ = 'stack'
__table_args__ = (
sqlalchemy.Index('ix_stack_name', 'name', mysql_length=255),
sqlalchemy.Index('ix_stack_tenant', 'tenant', mysql_length=255),
)
id = sqlalchemy.Column(sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = sqlalchemy.Column(sqlalchemy.String(255))
raw_template_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'),
nullable=False)
raw_template = relationship(RawTemplate, backref=backref('stack'),
foreign_keys=[raw_template_id])
prev_raw_template_id = sqlalchemy.Column(
'prev_raw_template_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'))
prev_raw_template = relationship(RawTemplate,
foreign_keys=[prev_raw_template_id])
username = sqlalchemy.Column(sqlalchemy.String(256))
tenant = sqlalchemy.Column(sqlalchemy.String(256))
user_creds_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('user_creds.id'))
owner_id = sqlalchemy.Column(sqlalchemy.String(36), index=True)
parent_resource_name = sqlalchemy.Column(sqlalchemy.String(255))
timeout = sqlalchemy.Column(sqlalchemy.Integer)
disable_rollback = sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
backup = sqlalchemy.Column('backup', sqlalchemy.Boolean)
nested_depth = sqlalchemy.Column('nested_depth', sqlalchemy.Integer)
convergence = sqlalchemy.Column('convergence', sqlalchemy.Boolean)
tags = relationship(StackTag, cascade="all,delete",
backref=backref('stack'))
current_traversal = sqlalchemy.Column('current_traversal',
sqlalchemy.String(36))
current_deps = sqlalchemy.Column('current_deps', types.Json)
# Override timestamp column to store the correct value: it should be the
# time the create/update call was issued, not the time the DB entry is
# created/modified. (bug #1193269)
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class StackLock(BASE, HeatBase):
"""Store stack locks for deployments with multiple-engines."""
__tablename__ = 'stack_lock'
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
primary_key=True)
engine_id = sqlalchemy.Column(sqlalchemy.String(36))
class UserCreds(BASE, HeatBase):
"""Represents user credentials.
Also, mirrors the 'context' handed in by wsgi.
"""
__tablename__ = 'user_creds'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
username = sqlalchemy.Column(sqlalchemy.String(255))
password = sqlalchemy.Column(sqlalchemy.String(255))
region_name = sqlalchemy.Column(sqlalchemy.String(255))
decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
tenant = sqlalchemy.Column(sqlalchemy.String(1024))
auth_url = sqlalchemy.Column(sqlalchemy.Text)
tenant_id = sqlalchemy.Column(sqlalchemy.String(256))
trust_id = sqlalchemy.Column(sqlalchemy.String(255))
trustor_user_id = sqlalchemy.Column(sqlalchemy.String(64))
stack = relationship(Stack, backref=backref('user_creds'),
cascade_backrefs=False)
class Event(BASE, HeatBase):
"""Represents an event generated by the heat engine."""
__tablename__ = 'event'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('events'))
uuid = sqlalchemy.Column(sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True)
resource_action = sqlalchemy.Column(sqlalchemy.String(255))
resource_status = sqlalchemy.Column(sqlalchemy.String(255))
resource_name = sqlalchemy.Column(sqlalchemy.String(255))
physical_resource_id = sqlalchemy.Column(sqlalchemy.String(255))
_resource_status_reason = sqlalchemy.Column(
'resource_status_reason', sqlalchemy.String(255))
resource_type = sqlalchemy.Column(sqlalchemy.String(255))
resource_properties = sqlalchemy.Column(sqlalchemy.PickleType)
@property
def resource_status_reason(self):
return self._resource_status_reason
@resource_status_reason.setter
def resource_status_reason(self, reason):
self._resource_status_reason = reason and reason[:255] or ''
class ResourceData(BASE, HeatBase):
"""Key/value store of arbitrary, resource-specific data."""
__tablename__ = 'resource_data'
id = sqlalchemy.Column('id',
sqlalchemy.Integer,
primary_key=True,
nullable=False)
key = sqlalchemy.Column('key', sqlalchemy.String(255))
value = sqlalchemy.Column('value', sqlalchemy.Text)
redact = sqlalchemy.Column('redact', sqlalchemy.Boolean)
decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
resource_id = sqlalchemy.Column('resource_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('resource.id'),
nullable=False)
class Resource(BASE, HeatBase, StateAware):
"""Represents a resource created by the heat engine."""
__tablename__ = 'resource'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
uuid = sqlalchemy.Column(sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
physical_resource_id = sqlalchemy.Column('nova_instance',
sqlalchemy.String(255))
# odd name as "metadata" is reserved
rsrc_metadata = sqlalchemy.Column('rsrc_metadata', types.Json)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('resources'))
root_stack_id = sqlalchemy.Column(sqlalchemy.String(36), index=True)
data = relationship(ResourceData,
cascade="all,delete",
backref=backref('resource'))
# Override timestamp column to store the correct value: it should be the
# time the create/update call was issued, not the time the DB entry is
# created/modified. (bug #1193269)
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
properties_data = sqlalchemy.Column('properties_data', types.Json)
properties_data_encrypted = sqlalchemy.Column('properties_data_encrypted',
sqlalchemy.Boolean)
engine_id = sqlalchemy.Column(sqlalchemy.String(36))
atomic_key = sqlalchemy.Column(sqlalchemy.Integer)
needed_by = sqlalchemy.Column('needed_by', types.List)
requires = sqlalchemy.Column('requires', types.List)
replaces = sqlalchemy.Column('replaces', sqlalchemy.Integer,
default=None)
replaced_by = sqlalchemy.Column('replaced_by', sqlalchemy.Integer,
default=None)
current_template_id = sqlalchemy.Column(
'current_template_id',
sqlalchemy.Integer,
sqlalchemy.ForeignKey('raw_template.id'))
class WatchRule(BASE, HeatBase):
"""Represents a watch_rule created by the heat engine."""
__tablename__ = 'watch_rule'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
rule = sqlalchemy.Column('rule', types.Json)
state = sqlalchemy.Column('state', sqlalchemy.String(255))
last_evaluated = sqlalchemy.Column(sqlalchemy.DateTime,
default=timeutils.utcnow)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('watch_rule'))
class WatchData(BASE, HeatBase):
"""Represents a watch_data created by the heat engine."""
__tablename__ = 'watch_data'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
data = sqlalchemy.Column('data', types.Json)
watch_rule_id = sqlalchemy.Column(
sqlalchemy.Integer,
sqlalchemy.ForeignKey('watch_rule.id'),
nullable=False)
watch_rule = relationship(WatchRule, backref=backref('watch_data'))
class SoftwareConfig(BASE, HeatBase):
"""Represents a software configuration resource.
Represents a software configuration resource to be applied to one or more
servers.
"""
__tablename__ = 'software_config'
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = sqlalchemy.Column('name', sqlalchemy.String(255))
group = sqlalchemy.Column('group', sqlalchemy.String(255))
config = sqlalchemy.Column('config', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
class SoftwareDeployment(BASE, HeatBase, StateAware):
"""Represents a software deployment resource.
Represents applying a software configuration resource to a single server
resource.
"""
__tablename__ = 'software_deployment'
__table_args__ = (
sqlalchemy.Index('ix_software_deployment_created_at', 'created_at'),)
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
config_id = sqlalchemy.Column(
'config_id',
sqlalchemy.String(36),
sqlalchemy.ForeignKey('software_config.id'),
nullable=False)
config = relationship(SoftwareConfig, backref=backref('deployments'))
server_id = sqlalchemy.Column('server_id', sqlalchemy.String(36),
nullable=False, index=True)
input_values = sqlalchemy.Column('input_values', types.Json)
output_values = sqlalchemy.Column('output_values', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class Snapshot(BASE, HeatBase):
__tablename__ = 'snapshot'
id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
name = sqlalchemy.Column('name', sqlalchemy.String(255))
data = sqlalchemy.Column('data', types.Json)
tenant = sqlalchemy.Column(
'tenant', sqlalchemy.String(64), nullable=False, index=True)
status = sqlalchemy.Column('status', sqlalchemy.String(255))
status_reason = sqlalchemy.Column('status_reason', sqlalchemy.String(255))
stack = relationship(Stack, backref=backref('snapshot'))
class Service(BASE, HeatBase, SoftDelete):
__tablename__ = 'service'
id = sqlalchemy.Column('id',
sqlalchemy.String(36),
primary_key=True,
default=lambda: str(uuid.uuid4()))
engine_id = sqlalchemy.Column('engine_id',
sqlalchemy.String(36),
nullable=False)
host = sqlalchemy.Column('host',
sqlalchemy.String(255),
nullable=False)
hostname = sqlalchemy.Column('hostname',
sqlalchemy.String(255),
nullable=False)
binary = sqlalchemy.Column('binary',
sqlalchemy.String(255),
nullable=False)
topic = sqlalchemy.Column('topic',
sqlalchemy.String(255),
nullable=False)
report_interval = sqlalchemy.Column('report_interval',
sqlalchemy.Integer,
nullable=False)
| steveb/heat | heat/db/sqlalchemy/models.py | Python | apache-2.0 | 17,625 |
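# A self-contained sketch of the declarative-base-plus-mixin pattern used
# above, runnable against in-memory SQLite; it assumes SQLAlchemy 1.x (the
# API imported here) and uses a toy Widget table rather than a Heat model.
from datetime import datetime

import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import sessionmaker

Base = declarative.declarative_base()

class SoftDeleteMixin(object):
    deleted_at = sqlalchemy.Column(sqlalchemy.DateTime)

    def soft_delete(self):
        self.deleted_at = datetime.utcnow()  # mark deleted, keep the row

class Widget(Base, SoftDeleteMixin):
    __tablename__ = 'widget'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column(sqlalchemy.String(255))

engine = sqlalchemy.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
widget = Widget(name='demo')
session.add(widget)
widget.soft_delete()
session.commit()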
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import copy
from datetime import datetime, timedelta
import json
from nose.tools import (
assert_equal,
assert_false,
assert_is_instance,
assert_not_in,
assert_raises,
assert_true
)
from streamalert.shared.alert import Alert, AlertCreationError
class TestAlert:
"""Test shared Alert class."""
# pylint: disable=no-self-use,protected-access,too-many-public-methods
@staticmethod
def _basic_alert():
return Alert('test_rule', {'abc': 123}, {'aws-firehose:alerts', 'aws-sns:test-output'})
@staticmethod
def _customized_alert():
return Alert(
'test_rule',
{'abc': 123},
{'aws-firehose:alerts', 'aws-sns:test-output', 'aws-s3:other-output'},
alert_id='abc-123',
attempts=1,
cluster='',
context={'rule': 'context'},
created=datetime.utcnow(),
dispatched=datetime.utcnow(),
log_source='source',
log_type='csv',
merge_by_keys=['abc'],
merge_window=timedelta(minutes=5),
outputs_sent={'aws-sns:test-output'},
rule_description='A Test Rule',
source_entity='entity',
source_service='s3',
staged=True
)
def test_alert_encoder_invalid_json(self):
"""Alert Class - Alert Encoder - Invalid JSON raises parent exception"""
assert_raises(TypeError, json.dumps, RuntimeWarning, default=list)
def test_init_invalid_kwargs(self):
"""Alert Class - Init With Invalid Kwargs"""
assert_raises(AlertCreationError, Alert, '', {}, set(), cluster='test', invalid='nonsense')
def test_ordering(self):
"""Alert Class - Alerts Are Sorted By Creation"""
alerts = [self._basic_alert() for _ in range(5)]
assert_equal(alerts, sorted([alerts[0], alerts[3], alerts[1], alerts[4], alerts[2]]))
def test_repr(self):
"""Alert Class - Complete Alert Representation"""
assert_is_instance(repr(self._basic_alert()), str)
assert_is_instance(repr(self._customized_alert()), str)
def test_str(self):
"""Alert Class - To String"""
alert = self._customized_alert()
assert_equal('<Alert abc-123 triggered from test_rule>', str(alert))
def test_dynamo_key(self):
"""Alert Class - Dynamo Key"""
alert = self._customized_alert()
assert_equal({'RuleName': 'test_rule', 'AlertID': 'abc-123'}, alert.dynamo_key)
def test_remaining_outputs_merge_disabled(self):
"""Alert Class - Remaining Outputs - No Merge Information"""
alert = self._basic_alert()
assert_equal(alert.outputs, alert.remaining_outputs)
# One output sent successfully
alert.outputs_sent = {'aws-sns:test-output'}
assert_equal({'aws-firehose:alerts'}, alert.remaining_outputs)
# All outputs sent successfully
alert.outputs_sent = {'aws-firehose:alerts', 'aws-sns:test-output'}
assert_equal(set(), alert.remaining_outputs)
def test_remaining_outputs_merge_enabled(self):
"""Alert Class - Remaining Outputs - With Merge Config"""
# Only the required firehose output shows as remaining
assert_equal({'aws-firehose:alerts'}, self._customized_alert().remaining_outputs)
def test_dynamo_record(self):
"""Alert Class - Dynamo Record"""
# Make sure there are no empty strings nor sets (not allowed in Dynamo)
alert = Alert(
'test_rule', {}, {'aws-sns:test-output'},
cluster='',
created='',
log_source='',
log_type='',
outputs_sent=set(),
rule_description='',
source_entity='',
source_service=''
)
record = alert.dynamo_record()
assert_not_in('', list(record.values()))
assert_not_in(set(), list(record.values()))
def test_create_from_dynamo_record(self):
"""Alert Class - Create Alert from Dynamo Record"""
alert = self._customized_alert()
# Converting to a Dynamo record and back again should result in the exact same alert
record = alert.dynamo_record()
new_alert = Alert.create_from_dynamo_record(record)
assert_equal(alert.dynamo_record(), new_alert.dynamo_record())
def test_create_from_dynamo_record_invalid(self):
"""Alert Class - AlertCreationError raised for an invalid Dynamo Record"""
assert_raises(AlertCreationError, Alert.create_from_dynamo_record, {})
def test_output_dict(self):
"""Alert Class - Output Dict"""
alert = self._basic_alert()
result = alert.output_dict()
# Ensure result is JSON-serializable (no sets)
assert_is_instance(json.dumps(result), str)
# Ensure result is Athena compatible (no None values)
assert_not_in(None, list(result.values()))
def test_can_merge_no_config(self):
"""Alert Class - Can Merge - False if Either Alert Does Not Have Merge Config"""
assert_false(self._basic_alert().can_merge(self._customized_alert()))
assert_false(self._customized_alert().can_merge(self._basic_alert()))
def test_can_merge_too_far_apart(self):
"""Alert Class - Can Merge - False if Outside Merge Window"""
alert1 = Alert(
'', {'key': True}, set(),
created=datetime(year=2000, month=1, day=1, minute=0),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
alert2 = Alert(
'', {'key': True}, set(),
created=datetime(year=2000, month=1, day=1, minute=11),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
assert_false(alert1.can_merge(alert2))
assert_false(alert2.can_merge(alert1))
def test_can_merge_different_merge_keys(self):
"""Alert Class - Can Merge - False if Different Merge Keys Defined"""
alert1 = Alert(
'', {'key': True}, set(),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
alert2 = Alert(
'', {'key': True}, set(),
merge_by_keys=['other'],
merge_window=timedelta(minutes=10)
)
assert_false(alert1.can_merge(alert2))
assert_false(alert2.can_merge(alert1))
def test_can_merge_key_not_common(self):
"""Alert Class - Can Merge - False if Merge Key Not Present in Both Records"""
alert1 = Alert(
'', {'key': True}, set(),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
alert2 = Alert(
'', {'other': True}, set(),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
assert_false(alert1.can_merge(alert2))
assert_false(alert2.can_merge(alert1))
def test_can_merge_different_values(self):
"""Alert Class - Can Merge - False if Merge Key has Different Values"""
alert1 = Alert(
'', {'key': True}, set(),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
alert2 = Alert(
'', {'key': False}, set(),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
assert_false(alert1.can_merge(alert2))
assert_false(alert2.can_merge(alert1))
def test_can_merge_merge_keys_absent(self):
"""Alert Class - Can Merge - True if Merge Keys Do Not Exist in Either Record"""
alert1 = Alert('', {}, set(), merge_by_keys=['key'], merge_window=timedelta(minutes=10))
alert2 = Alert('', {}, set(), merge_by_keys=['key'], merge_window=timedelta(minutes=10))
assert_true(alert1.can_merge(alert2))
assert_true(alert2.can_merge(alert1))
def test_can_merge_true(self):
"""Alert Class - Can Merge - True Result"""
alert1 = Alert(
'', {'key': True}, set(),
created=datetime(year=2000, month=1, day=1, minute=0),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
alert2 = Alert(
'', {'key': True, 'other': True}, set(),
created=datetime(year=2000, month=1, day=1, minute=10),
merge_by_keys=['key'],
merge_window=timedelta(minutes=10)
)
assert_true(alert1.can_merge(alert2))
assert_true(alert2.can_merge(alert1))
def test_compute_common_empty_record(self):
"""Alert Class - Compute Common - Empty Record List"""
assert_equal({}, Alert._compute_common([]))
def test_compute_common_single_record(self):
"""Alert Class - Compute Common - Single Record"""
# The greatest common subset of a single record is itself
record = {'a': 1, 'b': 2, 'c': {'d': {'e': 3}}}
assert_equal(record, Alert._compute_common([record]))
def test_compute_common_top_level(self):
"""Alert Class - Compute Common - No Nested Dictionaries"""
record1 = {'a': 1, 'b': 2, 'c': 3}
record2 = {'b': 2, 'c': 3, 'd': 4}
record3 = {'c': 3, 'd': 4, 'e': 5}
assert_equal({'c': 3}, Alert._compute_common([record1, record2, record3]))
def test_compute_common_no_similarities(self):
"""Alert Class - Compute Common - Empty Common Set"""
record1 = {'a': -1, 'b': -2, 'c': -3, 'd': {'e': 0}}
record2 = {'a': 1, 'b': 2, 'c': 3}
assert_equal({}, Alert._compute_common([record1, record2]))
def test_compute_common_partial_nested(self):
"""Alert Class - Compute Common - Some Common Features in Nested Dictionary"""
# This is the example given in the docstring
record1 = {'abc': 123, 'nested': {'A': 1, 'B': 2}}
record2 = {'abc': 123, 'def': 456, 'nested': {'A': 1}}
assert_equal({'abc': 123, 'nested': {'A': 1}}, Alert._compute_common([record1, record2]))
def test_compute_common_different_types(self):
"""Alert Class - Compute Common - Same Keys, Different Types"""
record1 = {'a': 1, 'b': None, 'c': {'d': {'e': 5}, 'f': {'g': 6}}}
record2 = {'a': '1', 'b': 0, 'c': []}
assert_equal({}, Alert._compute_common([record1, record2]))
def test_compute_common_many_nested(self):
"""Alert Class - Compute Common - Multiple Levels of Nesting"""
record1 = {
'a': {
'b': {
'c': 3,
'd': 4
},
'e': {
'h': {
'i': 9
}
},
'j': {}
}
}
record2 = {
'a': {
'b': {
'c': 3,
},
'e': {
'f': {
'g': 8
},
'h': {}
},
'j': {}
}
}
expected = {
'a': {
'b': {
'c': 3
},
'j': {}
}
}
assert_equal(expected, Alert._compute_common([record1, record2]))
def test_compute_common_all_identical(self):
"""Alert Class - Compute Common - Identical Records"""
record = {'a': 1, 'b': 2, 'c': {'d': {'e': 3}}}
assert_equal(record, Alert._compute_common([record] * 4))
def test_compute_diff_no_common(self):
"""Alert Class - Compute Diff - No Common Set"""
record = {'a': 1, 'b': 2, 'c': {'d': {'e': 3}}}
assert_equal(record, Alert._compute_diff({}, record))
def test_compute_diff_no_diff(self):
"""Alert Class - Compute Diff - Record Identical to Common"""
record = {'a': 1, 'b': 2, 'c': {'d': {'e': 3}}}
common = record
assert_equal({}, Alert._compute_diff(common, record))
def test_compute_diff_top_level(self):
"""Alert Class - Compute Diff - Top Level Keys"""
common = {'c': 3}
record = {'a': 1, 'b': 2, 'c': 3}
assert_equal({'a': 1, 'b': 2}, Alert._compute_diff(common, record))
def test_compute_diff_different_types(self):
"""Alert Class - Compute Diff - Type Mismatch Short-Circuits Recursion"""
common = {'b': 2}
record = {'a': 1, 'b': {'nested': 'stuff'}}
assert_equal(record, Alert._compute_diff(common, record))
def test_compute_diff_nested(self):
"""Alert Class - Compute Diff - Difference in Nested Dictionary"""
# This is the example given in the docstring
common = {'abc': 123, 'nested': {'A': 1}}
record = {'abc': 123, 'nested': {'A': 1, 'B': 2}}
assert_equal({'nested': {'B': 2}}, Alert._compute_diff(common, record))
def test_compute_diff_many_nested(self):
"""Alert Class - Compute Diff - Multiple Levels of Nesting"""
# These values are the same as those from test_compute_common_many_nested
record1 = {
'a': {
'b': {
'c': 3,
'd': 4
},
'e': {
'h': {
'i': 9
}
},
'j': {}
}
}
record2 = {
'a': {
'b': {
'c': 3,
},
'e': {
'f': {
'g': 8
},
'h': {}
},
'j': {}
}
}
common = {
'a': {
'b': {
'c': 3
},
'j': {}
}
}
expected_diff1 = {
'a': {
'b': {
'd': 4
},
'e': {
'h': {
'i': 9
}
}
}
}
assert_equal(expected_diff1, Alert._compute_diff(common, record1))
expected_diff2 = {
'a': {
'e': {
'f': {
'g': 8
},
'h': {}
}
}
}
assert_equal(expected_diff2, Alert._compute_diff(common, record2))
def test_merge(self):
"""Alert Class - Merge - Create Merged Alert"""
# Example based on a CarbonBlack log
record1 = {
'alliance_data_virustotal': [],
'alliance_link_virustotal': '',
'alliance_score_virustotal': 0,
'cmdline': 'whoami',
'comms_ip': '1.2.3.4',
'hostname': 'my-computer-name',
'path': '/usr/bin/whoami',
'streamalert:ioc': {
'hello': 'world'
},
'timestamp': 1234.5678,
'username': 'user'
}
alert1 = Alert(
'RuleName', record1, {'aws-sns:topic'},
created=datetime(year=2000, month=1, day=1),
merge_by_keys=['hostname', 'username'],
merge_window=timedelta(minutes=5)
)
# Second alert has slightly different record and different outputs
record2 = copy.deepcopy(record1)
record2['streamalert:ioc'] = {'goodbye': 'world'}
record2['timestamp'] = 9999
alert2 = Alert(
'RuleName', record2, {'slack:channel'},
created=datetime(year=2000, month=1, day=2),
merge_by_keys=['hostname', 'username'],
merge_window=timedelta(minutes=5)
)
merged = Alert.merge([alert1, alert2])
assert_is_instance(merged, Alert)
assert_equal({'slack:channel'}, merged.outputs) # Most recent outputs were used
expected_record = {
'AlertCount': 2,
'AlertTimeFirst': '2000-01-01T00:00:00.000000Z',
'AlertTimeLast': '2000-01-02T00:00:00.000000Z',
'MergedBy': {
'hostname': 'my-computer-name',
'username': 'user'
},
'OtherCommonKeys': {
'alliance_data_virustotal': [],
'alliance_link_virustotal': '',
'alliance_score_virustotal': 0,
'cmdline': 'whoami',
'comms_ip': '1.2.3.4',
'path': '/usr/bin/whoami',
},
'ValueDiffs': {
'2000-01-01T00:00:00.000000Z': {
'streamalert:ioc': {'hello': 'world'},
'timestamp': 1234.5678
},
'2000-01-02T00:00:00.000000Z': {
'streamalert:ioc': {'goodbye': 'world'},
'timestamp': 9999
}
}
}
assert_equal(expected_record, merged.record)
def test_merge_nested(self):
"""Alert Class - Merge - Merge with Nested Keys"""
record1 = {
'NumMatchedRules': 1,
'FileInfo': {
'Deleted': None,
'Nested': [1, 2, 'three']
},
'MatchedRules': {
'Rule1': 'MatchedStrings'
}
}
alert1 = Alert(
'RuleName', record1, {'slack:channel'},
created=datetime(year=2000, month=1, day=1),
merge_by_keys=['Nested'],
merge_window=timedelta(minutes=5)
)
record2 = {
'NumMatchedRules': 2,
'FileInfo': {
'Deleted': None,
'Nested': [1, 2, 'three']
},
'MatchedRules': {
'Rule1': 'MatchedStrings'
}
}
alert2 = Alert(
'RuleName', record2, {'slack:channel'},
created=datetime(year=2000, month=1, day=2),
merge_by_keys=['Nested'],
merge_window=timedelta(minutes=5)
)
record3 = {
'MatchedRules': {
'Rule1': 'MatchedStrings'
},
'Nested': [1, 2, 'three'] # This is in a different place in the record
}
alert3 = Alert(
'RuleName', record3, {'slack:channel'},
created=datetime(year=2000, month=1, day=3),
merge_by_keys=['Nested'],
merge_window=timedelta(minutes=5)
)
merged = Alert.merge([alert1, alert2, alert3])
expected_record = {
'AlertCount': 3,
'AlertTimeFirst': '2000-01-01T00:00:00.000000Z',
'AlertTimeLast': '2000-01-03T00:00:00.000000Z',
'MergedBy': {
'Nested': [1, 2, 'three']
},
'OtherCommonKeys': {
'MatchedRules': {
'Rule1': 'MatchedStrings'
}
},
'ValueDiffs': {
'2000-01-01T00:00:00.000000Z': {
'NumMatchedRules': 1,
'FileInfo': {
'Deleted': None
}
},
'2000-01-02T00:00:00.000000Z': {
'NumMatchedRules': 2,
'FileInfo': {
'Deleted': None
}
},
'2000-01-03T00:00:00.000000Z': {}
}
}
assert_equal(expected_record, merged.record)
| airbnb/streamalert | tests/unit/streamalert/shared/test_alert.py | Python | apache-2.0 | 20,111 |
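# An illustrative re-implementation of the "greatest common subset"
# behaviour the Alert._compute_common tests above pin down; this is a
# sketch consistent with those tests, not StreamAlert's actual code.
def compute_common(records):
    """Return the nested key/values shared by every dict in records."""
    if not records:
        return {}
    first, rest = records[0], records[1:]
    common = {}
    for key, value in first.items():
        others = [r[key] for r in rest if key in r]
        if len(others) != len(rest):
            continue  # the key is missing from at least one record
        if isinstance(value, dict) and all(isinstance(o, dict) for o in others):
            nested = compute_common([value] + others)
            # an empty result survives only if every input dict was empty,
            # matching test_compute_common_many_nested above
            if nested or all(not d for d in [value] + others):
                common[key] = nested
        elif all(o == value for o in others):
            common[key] = value
    return common

assert compute_common([
    {'abc': 123, 'nested': {'A': 1, 'B': 2}},
    {'abc': 123, 'def': 456, 'nested': {'A': 1}},
]) == {'abc': 123, 'nested': {'A': 1}}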
#coding=utf-8
from PIL import Image  # requires the Pillow library
import glob, os
in_dir = 'background'  # source image directory
out_dir = in_dir + '_out'  # output directory for converted images
if not os.path.exists(out_dir): os.mkdir(out_dir)
# batch-process the images
def main():
for files in glob.glob(in_dir+'/*'):
filepath,filename = os.path.split(files)
im = Image.open(files)
w,h = im.size
im = im.resize((int(1920), int(1.0*h/w*1920)))
im.save(os.path.join(out_dir,filename))
if __name__=='__main__':
main() | cleverZY/Helloworld | jizhidezy.py | Python | apache-2.0 | 513 |
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 27 09:30:47 2015
@author: martin
"""
from Bio.Seq import Seq
from Bio.Alphabet import IUPAC
from Bio.SubsMat.MatrixInfo import blosum62
# Note: this is not an improvement of modifDoublon; it is an alternative
# version that does not apply to the same duplicates.
def score_match(a,b, matrix):
if b == '*' :
return -500000000000000000000000000
elif a == '-' :
return 0
elif (a,b) not in matrix:
return matrix[(tuple(reversed((a,b))))]
else:
return matrix[(a,b)]
def modifDoublon2(x,y,z,ldna,offset,doublon,i,q,patternX,patternY):
ATCG = ["A","T","C","G"]
# on commence par déterminer quels acides aminés de x et y sont "ciblés"
aaX = Seq("", IUPAC.protein)
aaY = Seq("", IUPAC.protein)
q_bis = 0.
q_bis += q
if(z<=0):
aaX += x[i]
q_bis /= patternX[i]
else:
aaX += x[i+1+z//3]
q_bis /=patternX[i+1+z//3]
if(z>0):
aaY += y[-i]
q_bis*=patternY[-i]
else:
aaY +=y[-i + z//3]
q_bis*=patternY[-i + z//3]
scores = []
for a in ATCG:
for b in ATCG:
currentDNAx = Seq("", IUPAC.unambiguous_dna)
currentDNAy = Seq("", IUPAC.unambiguous_dna)
currentDNAx += a + b + ldna[doublon+2]
currentaaX = currentDNAx.translate()
currentDNAy += ldna[doublon-1] +a + b
currentaaY = currentDNAy.reverse_complement().translate()
score = score_match(aaX[0].upper(),currentaaX[0].upper(),blosum62)
score += q_bis*score_match(aaY[0].upper(),currentaaY[0].upper(),blosum62)
scores.append(score)
result = scores.index(max(scores))
ldna[doublon] = ATCG[result//4]
ldna[doublon+1]= ATCG[result%4]
| MartinSilvert/overlappingGenesCreator | src/modifDoublon2.py | Python | apache-2.0 | 1,846 |
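# A small sketch of the symmetric BLOSUM62 lookup that score_match() above
# wraps: Biopython's MatrixInfo stores each residue pair only once, so the
# reversed tuple must be tried as well. Assumes an older Biopython release
# (Bio.SubsMat was removed in recent versions).
from Bio.SubsMat.MatrixInfo import blosum62

def pair_score(a, b, matrix=blosum62):
    return matrix.get((a, b), matrix.get((b, a)))

assert pair_score('A', 'W') == pair_score('W', 'A') == -3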
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateContext
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1_generated_MetadataService_CreateContext_sync]
from google.cloud import aiplatform_v1
def sample_create_context():
# Create a client
client = aiplatform_v1.MetadataServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.CreateContextRequest(
parent="parent_value",
)
# Make the request
response = client.create_context(request=request)
# Handle the response
print(response)
# [END aiplatform_v1_generated_MetadataService_CreateContext_sync]
| googleapis/python-aiplatform | samples/generated_samples/aiplatform_v1_generated_metadata_service_create_context_sync.py | Python | apache-2.0 | 1,468 |
import sys
from CTFd import create_app
app = create_app()
app.run(debug=True, host="0.0.0.0", port=int(sys.argv[1]))
| rosatolen/CTFd | serve.py | Python | apache-2.0 | 117 |
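# A slightly more defensive variant of the launcher above: it defaults the
# port when no argument is given (4000 is an arbitrary choice) instead of
# raising IndexError.
import sys
from CTFd import create_app

port = int(sys.argv[1]) if len(sys.argv) > 1 else 4000
create_app().run(debug=True, host="0.0.0.0", port=port)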
# -*- coding: utf-8 -*-
from optparse import make_option
from django.core.management.base import BaseCommand
from messytables import XLSTableSet, headers_guess, headers_processor, offset_processor
from data.models import Source, Course, MerlotCategory
class Command(BaseCommand):
help = "Utilities to merge our database with MERLOT"
args = "--file"
option_list = BaseCommand.option_list + (
make_option("--file", action="store", dest="filename", help="Source filename"),
make_option("--source", action="store", dest="source_id", help="Source ID"),
make_option("--provider", action="store", dest="provider_tag", help="Provider Tag"),
)
def handle(self, *args, **options):
if options.get('filename'):
self.ku_openlearning(options.get('filename'), options.get('source_id'))
def ku_openlearning(self, filename, source_id):
CATEGORY_MAPPING = {
'Assessment of learning': 2298, #Assessment,
'Finance': 2235,
'Public Service': 'Criminal Justice',
'Health Science': 'Health Sciences',
'Management': 2248,
'Online Instruction': 'Hybrid and Online Course Development',
'Early Childhood': ['Career Counseling and Services', 'Childhood and Adolescence'],
'Law, Legal': 'Law',
'Psychology': 'Psychology',
'Customer Service': 2246,
'Communications': 'Communications',
'Professionalism': 'Personal Development'
}
source = Source.objects.get(pk=source_id)
fh = open(filename, 'rb')
table_set = XLSTableSet(fh)
row_set = table_set.tables[0]
offset, headers = headers_guess(row_set.sample)
row_set.register_processor(headers_processor(headers))
row_set.register_processor(offset_processor(offset + 1))
for row in row_set:
url = row[0].value
title = row[1].value
description = row[2].value
# language = row[4].value
# material_type = row[5].value
license = row[6].value
categories = row[7].value
keywords = row[8].value
# audience = row[9].value
course, is_created = Course.objects.get_or_create(
linkurl = url,
provider = source.provider,
source = source,
defaults = {
'title': title,
'description': description,
'tags': keywords,
'language': 'English',
'license': license,
'content_medium': 'text',
'creative_commons': 'Yes',
'creative_commons_commercial': 'No',
'creative_commons_derivatives': 'No'
}
)
merlot_cat = CATEGORY_MAPPING[categories]
if type(merlot_cat) != list:
merlot_cat = [merlot_cat,]
for item in merlot_cat:
try:
m = MerlotCategory.objects.get(merlot_id=item)
course.merlot_categories.add(m)
except ValueError:
m = MerlotCategory.objects.get(name=item)
course.merlot_categories.add(m)
| ocwc/ocwc-data | search/data/management/commands/courses.py | Python | apache-2.0 | 3,372 |
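# A condensed sketch of the messytables pipeline ku_openlearning() above
# drives: guess the header row, then register processors so iteration
# yields header-keyed rows. 'report.xls' is a placeholder filename and
# messytables is assumed to be installed.
from messytables import (XLSTableSet, headers_guess, headers_processor,
                         offset_processor)

with open('report.xls', 'rb') as fh:
    row_set = XLSTableSet(fh).tables[0]
    offset, headers = headers_guess(row_set.sample)
    row_set.register_processor(headers_processor(headers))
    row_set.register_processor(offset_processor(offset + 1))
    for row in row_set:
        print([cell.value for cell in row])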
################################################################################
# nc.py
#
# Base class for NC code creation
# And global functions for calling current creator
#
# Hirutso Enni, 2009-01-13
# altered by Dan Falck 2010-08-04
# added tap() arguments Michael Haberler 2010-10-07
################################################################################
ncOFF = 0
ncLEFT = -1
ncRIGHT = +1
ncCW = -1
ncCCW = +1
ncMIST = 1
ncFLOOD = 2
################################################################################
class Creator:
def __init__(self):
pass
############################################################################
## Internals
def file_open(self, name):
self.file = open(name, 'w')
self.filename = name
def file_close(self):
self.file.close()
def write(self, s):
self.file.write(s)
############################################################################
## Programs
def program_begin(self, id, name=''):
"""Begin a program"""
pass
def add_stock(self, type_name, params):
pass
def program_stop(self, optional=False):
"""Stop the machine"""
pass
def program_end(self):
"""End the program"""
pass
def flush_nc(self):
"""Flush all pending codes"""
pass
############################################################################
## Subprograms
def sub_begin(self, id, name=''):
"""Begin a subprogram"""
pass
def sub_call(self, id):
"""Call a subprogram"""
pass
def sub_end(self):
"""Return from a subprogram"""
pass
############################################################################
## Settings
def imperial(self):
"""Set imperial units"""
pass
def metric(self):
"""Set metric units"""
pass
def absolute(self):
"""Set absolute coordinates"""
pass
def incremental(self):
"""Set incremental coordinates"""
pass
def polar(self, on=True):
"""Set polar coordinates"""
pass
def set_plane(self, plane):
"""Set plane"""
pass
def set_temporary_origin(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Set temporary origin G92"""
pass
def remove_temporary_origin(self):
"""Remote temporary origin G92.1"""
pass
############################################################################
## Tools
def tool_change(self, id):
"""Change the tool"""
pass
def tool_defn(self, id, name='', params=None):
"""Define a tool"""
pass
def offset_radius(self, id, radius=None):
"""Set tool radius offsetting"""
pass
def offset_length(self, id, length=None):
"""Set tool length offsetting"""
pass
def current_tool(self):
return None
############################################################################
## Datums
def datum_shift(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Shift the datum"""
pass
def datum_set(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Set the datum"""
pass
def workplane(self, id):
"""Set the workplane"""
pass
def clearanceplane(self,z=None):
"""set clearance plane"""
pass
############################################################################
## APT360 like Transformation Definitions
    ## These definitions were created while looking at Irvin Kraal's book on APT
    ## - Numerical Control Programming in APT - page 211
def matrix(self,a1=None,b1=None,c1=None,a2=None,b2=None,c2=None,a3=None,b3=None,c3=None):
"""Create a matrix for transformations"""
pass
def translate(self,x=None,y=None,z=None):
"""Translate in x,y,z direction"""
pass
def rotate(self,xyrot=None,yzrot=None,zxrot=None,angle=None):
"""Rotate about a coordinate axis"""
pass
def scale(self,k=None):
"""Scale by factor k"""
pass
def matrix_product(self,matrix1=None,matrix2=None):
"""Create matrix that is the product of two other matrices"""
pass
def mirror_plane(self,plane1=None,plane2=None,plane3=None):
"""Mirror image about one or more coordinate planes"""
pass
def mirror_line(self,line=None):
"""Mirror about a line"""
pass
############################################################################
## Rates + Modes
def feedrate(self, f):
"""Set the feedrate"""
pass
def feedrate_hv(self, fh, fv):
"""Set the horizontal and vertical feedrates"""
pass
def spindle(self, s, clockwise=True):
"""Set the spindle speed"""
pass
def coolant(self, mode=0):
"""Set the coolant mode"""
pass
def gearrange(self, gear=0):
"""Set the gear range"""
pass
############################################################################
## Moves
def rapid(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Rapid move"""
pass
def feed(self, x=None, y=None, z=None, a = None, b = None, c = None):
"""Feed move"""
pass
def arc_cw(self, x=None, y=None, z=None, i=None, j=None, k=None, r=None):
"""Clockwise arc move"""
pass
def arc_ccw(self, x=None, y=None, z=None, i=None, j=None, k=None, r=None):
"""Counterclockwise arc move"""
pass
def dwell(self, t):
"""Dwell"""
pass
def rapid_home(self, x=None, y=None, z=None, a=None, b=None, c=None):
"""Rapid relative to home position"""
pass
def rapid_unhome(self):
"""Return from rapid home"""
pass
def set_machine_coordinates(self):
"""Set machine coordinates"""
pass
############################################################################
## Cutter radius compensation
    def use_CRC(self):
        """CRC"""
        return False
    def CRC_nominal_path(self):
        """Check whether CRC output should use the nominal path"""
        return False
    def start_CRC(self, left=True, radius=0.0):
        """Begin cutter radius compensation"""
        pass
    def end_CRC(self):
        """End cutter radius compensation"""
        pass
############################################################################
## Cycles
def pattern(self):
"""Simple pattern eg. circle, rect"""
pass
def pocket(self):
"""Pocket routine"""
pass
def profile(self):
"""Profile routine"""
pass
def drill(self, x=None, y=None, dwell=None, depthparams = None, retract_mode=None, spindle_mode=None, internal_coolant_on=None, rapid_to_clearance=None):
"""Drilling routines"""
pass
# original prototype was:
# def tap(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None):
#
# current call is like so:
# tap(x=10, y=10, z=0, tap_mode=0, depth=12.7, standoff=6.35, direction=0, pitch=1.25)
# just add tap_mode & direction parameters
def tap(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None, tap_mode=None, direction=None):
"""Tapping routines"""
pass
def bore(self, x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, feed_in=None, feed_out=None, stoppos=None, shift_back=None, shift_right=None, backbore=False, stop=False):
"""Boring routines"""
pass
def end_canned_cycle(self):
pass
############################################################################
## Misc
def comment(self, text):
"""Insert a comment"""
pass
def insert(self, text):
"""APT style INSERT statement"""
pass
def block_delete(self, on=False):
"""block to ignore if block delete switch is on"""
pass
def variable(self, id):
"""Insert a variable"""
pass
def variable_set(self, id, value):
"""Set a variable"""
pass
def probe_linear_centre_outside(self, x1=None, y1=None, depth=None, x2=None, y2=None ):
pass
def probe_single_point(self, point_along_edge_x=None, point_along_edge_y=None, depth=None, retracted_point_x=None, retracted_point_y=None, destination_point_x=None, destination_point_y=None, intersection_variable_x=None, intersection_variable_y=None, probe_offset_x_component=None, probe_offset_y_component=None ):
pass
def probe_downward_point(self, x=None, y=None, depth=None, intersection_variable_z=None):
pass
def report_probe_results(self, x1=None, y1=None, z1=None, x2=None, y2=None, z2=None, x3=None, y3=None, z3=None, x4=None, y4=None, z4=None, x5=None, y5=None, z5=None, x6=None, y6=None, z6=None, xml_file_name=None ):
pass
def open_log_file(self, xml_file_name=None ):
pass
def log_coordinate(self, x=None, y=None, z=None):
pass
def log_message(self, message=None):
pass
def close_log_file(self):
pass
def rapid_to_midpoint(self, x1=None, y1=None, z1=None, x2=None, y2=None, z2=None):
pass
def rapid_to_intersection(self, x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub):
pass
def rapid_to_rotated_coordinate(self, x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final):
pass
def set_path_control_mode(self, mode, motion_blending_tolerance, naive_cam_tolerance ):
pass
############################################################################
## NC code creator for additive machines like RepRap
def wipe(self):
"""wipe routine"""
pass
def extruder_on(self):
"""Turn on the extruder"""
pass
def extruder_off(self):
"""turn off the extruder"""
pass
def set_extruder_flowrate(self, flowrate):
"""Set the flowrate for the extruder"""
pass
def extruder_temp(self, temp):
"""Set the extruder temp in celsius"""
pass
def fan_on(self):
"""turn on the cooling fan"""
pass
def fan_off(self):
"""turn off the cooling fan"""
pass
def build_bed_temp(self, temp):
"""Set the bed temp in celsius"""
pass
def chamber_temp(self, temp):
"""Set the chamber temp in celsius"""
pass
def begin_ncblock(self):
# if the moves have come from backplotting nc code, then the nc code text can be given with these three functions
pass
def end_ncblock(self):
pass
def add_text(self, s, col, cdata):
pass
################################################################################
creator = Creator()
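# A machine-specific post-processor is expected to subclass Creator, override
# the methods it needs, and replace this module-level instance. A minimal
# hypothetical sketch (the class name and G-code format below are illustrative,
# not part of this module):
#
#   class MyMachineCreator(Creator):
#       def rapid(self, x=None, y=None, z=None, a=None, b=None, c=None):
#           self.write('G00 X%s Y%s Z%s\n' % (x, y, z))
#
#   creator = MyMachineCreator()
#
# The module-level functions below then all delegate to that instance.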
############################################################################
## Internals
def write(s):
creator.write(s)
def output(filename):
creator.file_open(filename)
############################################################################
## Programs
def program_begin(id, name=''):
creator.program_begin(id, name)
def add_stock(type_name, params):
creator.add_stock(type_name, params)
def program_stop(optional=False):
creator.program_stop(optional)
def program_end():
creator.program_end()
def flush_nc():
creator.flush_nc()
############################################################################
## Subprograms
def sub_begin(id, name=''):
creator.sub_begin(id, name)
def sub_call(id):
creator.sub_call(id)
def sub_end():
creator.sub_end()
############################################################################
## Settings
def imperial():
creator.imperial()
def metric():
creator.metric()
def absolute():
creator.absolute()
def incremental():
creator.incremental()
def polar(on=True):
creator.polar(on)
def set_plane(plane):
creator.set_plane(plane)
def set_temporary_origin(x=None, y=None, z=None, a=None, b=None, c=None):
creator.set_temporary_origin(x,y,z,a,b,c)
def remove_temporary_origin():
creator.remove_temporary_origin()
############################################################################
## Tools
def tool_change(id):
creator.tool_change(id)
def tool_defn(id, name='', params=None):
creator.tool_defn(id, name, params)
def offset_radius(id, radius=None):
creator.offset_radius(id, radius)
def offset_length(id, length=None):
creator.offset_length(id, length)
def current_tool():
    return creator.current_tool()
############################################################################
## Datums
def datum_shift(x=None, y=None, z=None, a=None, b=None, c=None):
creator.datum_shift(x, y, z, a, b, c)
def datum_set(x=None, y=None, z=None, a=None, b=None, c=None):
creator.datum_set(x, y, z, a, b, c)
def workplane(id):
creator.workplane(id)
def clearanceplane(z=None):
creator.clearanceplane(z)
############################################################################
## APT360 like Transformation Definitions
## These definitions were created while looking at Irvin Kraal's book on APT
## - Numerical Control Programming in APT - page 211
def matrix(a1=None,b1=None,c1=None,a2=None,b2=None,c2=None,a3=None,b3=None,c3=None):
creator.matrix(a1,b1,c1,a2,b2,c2,a3,b3,c3)
def translate(x=None,y=None,z=None):
creator.translate(x,y,z)
def rotate(xyrot=None,yzrot=None,zxrot=None,angle=None):
creator.rotate(xyrot,yzrot,zxrot,angle)
def scale(k=None):
creator.scale(k)
def matrix_product(matrix1=None,matrix2=None):
creator.matrix_product(matrix1,matrix2)
def mirror_plane(plane1=None,plane2=None,plane3=None):
creator.mirror_plane(plane1,plane2,plane3)
def mirror_line(line=None):
creator.mirror_line(line)
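# Hypothetical usage sketch of the APT-style wrappers above; the emitted
# output (if any) depends entirely on the active creator's implementation:
#
#   translate(x=10.0, y=0.0, z=0.0)
#   rotate(xyrot=1, angle=45.0)
#   scale(k=2.0)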
############################################################################
## Rates + Modes
def feedrate(f):
creator.feedrate(f)
def feedrate_hv(fh, fv):
creator.feedrate_hv(fh, fv)
def spindle(s, clockwise=True):
creator.spindle(s, clockwise)
def coolant(mode=0):
creator.coolant(mode)
def gearrange(gear=0):
creator.gearrange(gear)
############################################################################
## Moves
def rapid(x=None, y=None, z=None, a=None, b=None, c=None):
creator.rapid(x, y, z, a, b, c)
def feed(x=None, y=None, z=None, a=None, b=None, c=None):
    creator.feed(x, y, z, a, b, c)
def arc_cw(x=None, y=None, z=None, i=None, j=None, k=None, r=None):
creator.arc_cw(x, y, z, i, j, k, r)
def arc_ccw(x=None, y=None, z=None, i=None, j=None, k=None, r=None):
creator.arc_ccw(x, y, z, i, j, k, r)
def dwell(t):
creator.dwell(t)
def rapid_home(x=None, y=None, z=None, a=None, b=None, c=None):
creator.rapid_home(x, y, z, a, b, c)
def rapid_unhome():
creator.rapid_unhome()
def set_machine_coordinates():
creator.set_machine_coordinates()
############################################################################
## Cutter radius compensation
def use_CRC():
return creator.use_CRC()
def CRC_nominal_path():
return creator.CRC_nominal_path()
def start_CRC(left = True, radius = 0.0):
creator.start_CRC(left, radius)
def end_CRC():
creator.end_CRC()
############################################################################
## Cycles
def pattern():
creator.pattern()
def pocket():
creator.pocket()
def profile():
creator.profile()
def drill(x=None, y=None, dwell=None, depthparams = None, retract_mode=None, spindle_mode=None, internal_coolant_on=None, rapid_to_clearance=None):
creator.drill(x, y, dwell, depthparams, retract_mode, spindle_mode, internal_coolant_on, rapid_to_clearance)
def tap(x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, pitch=None, stoppos=None, spin_in=None, spin_out=None, tap_mode=None, direction=None):
creator.tap(x, y, z, zretract, depth, standoff, dwell_bottom, pitch, stoppos, spin_in, spin_out, tap_mode, direction)
def bore(x=None, y=None, z=None, zretract=None, depth=None, standoff=None, dwell_bottom=None, feed_in=None, feed_out=None, stoppos=None, shift_back=None, shift_right=None, backbore=False, stop=False):
    creator.bore(x, y, z, zretract, depth, standoff, dwell_bottom, feed_in, feed_out, stoppos, shift_back, shift_right, backbore, stop)
def end_canned_cycle():
creator.end_canned_cycle()
def peck(count, first, last=None, step=0.0):
    """Return a list of 'count' peck depths, starting at 'first' and
    reducing by 'step' per peck while staying above 'last'."""
    pecks = []
    peck = first
    if last is None:
        last = first
    for _ in range(count):
        pecks.append(peck)
        if peck - step > last:
            peck -= step
    return pecks
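# Example: peck(3, 5.0, last=2.0, step=1.0) returns [5.0, 4.0, 3.0] -- each
# successive peck is one 'step' shallower, never going past 'last'.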
############################################################################
## Misc
def comment(text):
creator.comment(text)
def insert(text):
creator.insert(text)
def block_delete(on=False):
creator.block_delete(on)
def variable(id):
creator.variable(id)
def variable_set(id, value):
creator.variable_set(id, value)
def probe_single_point(point_along_edge_x=None, point_along_edge_y=None, depth=None, retracted_point_x=None, retracted_point_y=None, destination_point_x=None, destination_point_y=None, intersection_variable_x=None, intersection_variable_y=None, probe_offset_x_component=None, probe_offset_y_component=None ):
creator.probe_single_point(point_along_edge_x, point_along_edge_y, depth, retracted_point_x, retracted_point_y, destination_point_x, destination_point_y, intersection_variable_x, intersection_variable_y, probe_offset_x_component, probe_offset_y_component )
def probe_downward_point(x=None, y=None, depth=None, intersection_variable_z=None):
creator.probe_downward_point(x, y, depth, intersection_variable_z)
def report_probe_results(x1=None, y1=None, z1=None, x2=None, y2=None, z2=None, x3=None, y3=None, z3=None, x4=None, y4=None, z4=None, x5=None, y5=None, z5=None, x6=None, y6=None, z6=None, xml_file_name=None ):
creator.report_probe_results(x1, y1, z1, x2, y2, z2, x3, y3, z3, x4, y4, z4, x5, y5, z5, x6, y6, z6, xml_file_name)
def open_log_file(xml_file_name=None ):
creator.open_log_file(xml_file_name)
def log_coordinate(x=None, y=None, z=None):
creator.log_coordinate(x, y, z)
def log_message(message=None):
creator.log_message(message)
def close_log_file():
creator.close_log_file()
def rapid_to_midpoint(x1=None, y1=None, z1=None, x2=None, y2=None, z2=None):
creator.rapid_to_midpoint(x1, y1, z1, x2, y2, z2)
def rapid_to_intersection(x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub):
creator.rapid_to_intersection(x1, y1, x2, y2, x3, y3, x4, y4, intersection_x, intersection_y, ua_numerator, ua_denominator, ua, ub_numerator, ub)
def rapid_to_rotated_coordinate(x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final):
creator.rapid_to_rotated_coordinate(x1, y1, x2, y2, ref_x, ref_y, x_current, y_current, x_final, y_final)
def set_path_control_mode(mode, motion_blending_tolerance, naive_cam_tolerance ):
creator.set_path_control_mode(mode, motion_blending_tolerance, naive_cam_tolerance )
############################################################################
## NC code creator for additive machines like RepRap
def wipe():
creator.wipe()
def extruder_on():
creator.extruder_on()
def extruder_off():
creator.extruder_off()
def set_extruder_flowrate(flowrate):
creator.set_extruder_flowrate(flowrate)
def extruder_temp(temp=None):
creator.extruder_temp(temp)
def fan_on():
creator.fan_on()
def fan_off():
creator.fan_off()
def build_bed_temp(temp=None):
creator.build_bed_temp(temp)
def chamber_temp(temp=None):
creator.chamber_temp(temp)
| JohnyEngine/CNC | heekscnc/nc/nc.py | Python | apache-2.0 | 20,018 |
# -*- coding: utf-8 -*-
"""
Given an array of integers, every element appears twice except for one. Find
that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it
without using extra memory?
"""
__author__ = '[email protected]'
class Solution(object):
def __init__(self, nums):
self.nums = nums
    def single_number(self):
        # Count occurrences of each number, then return the one seen once.
        num_count = {}
        for num in self.nums:
            num_count.setdefault(num, 0)
            num_count[num] = num_count.get(num) + 1
        print num_count  # debug: show the full occurrence map
        for key, value in num_count.items():
            if value == 1:
                return key
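# A hedged alternative sketch: XOR-ing all values also runs in linear time
# but needs no extra memory, since duplicate pairs cancel (a ^ a == 0) and
# XOR is commutative. It is not wired into main() below.
def single_number_xor(nums):
    result = 0
    for num in nums:
        result ^= num
    return result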
def main():
solution = Solution([2, 3, 5, 7, 9, 3, 5, 7, 9])
print solution.single_number()
if __name__ == '__main__':
main()
| yelongyu/leetcode | 136.single_number.py | Python | apache-2.0 | 801 |
import subprocess, os, sys
from reverseZone_naming import reverseZone_name
from netaddr import *
zone_files_path="/etc/bind/zones"
# Usage: python dns_remove_reverse_record.py <host-name>
def remove_reverse_record():
    # The host name to remove is taken from the first command-line argument.
    host_name_to_be_removed = sys.argv[1]
    reverse_zone_file_name, reverse_zone_name = reverseZone_name()
    os.chdir(zone_files_path)
    # Read the zone file once as a single string (for the membership test) ...
    readFiles = open(reverse_zone_file_name, 'r')
    reverse_zone_file_content = readFiles.read()
    readFiles.close()
    # ... and once as a list of lines (for the rewrite below).
    readFiles = open(reverse_zone_file_name, 'r')
    lines = readFiles.readlines()
    readFiles.close()
    if host_name_to_be_removed in reverse_zone_file_content:
        # Rewrite the zone file, skipping every line that mentions the host.
        file_content = open(reverse_zone_file_name, 'w')
        for line in lines:
            if host_name_to_be_removed not in line:
                file_content.write(line)
        file_content.close()
        print "\nThe reverse record that you entered has been removed!\n"
    else:
        print "\nThe record you wanted to remove is already absent in the database!\n"
def main():
remove_reverse_record()
main()
| anilveeramalli/cloudify-azure-plugin | blueprints/clustered-dns/dns/dns_remove_reverse_record.py | Python | apache-2.0 | 927 |
__all__ = ["gauth", "gcalendar", "lectio", "lesson", "run"]
| Hanse00/LecToCal | lectocal/__init__.py | Python | apache-2.0 | 60 |
#!/usr/bin/env python
"""beanstalkc - A beanstalkd Client Library for Python"""
import logging
import socket
import sys
__license__ = '''
Copyright (C) 2008-2016 Andreas Bolka
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
__version__ = '0.4.0'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 11300
DEFAULT_PRIORITY = 2 ** 31
DEFAULT_TTR = 120
DEFAULT_TUBE_NAME = 'default'
class BeanstalkcException(Exception): pass
class UnexpectedResponse(BeanstalkcException): pass
class CommandFailed(BeanstalkcException): pass
class DeadlineSoon(BeanstalkcException): pass
class SocketError(BeanstalkcException):
@staticmethod
def wrap(wrapped_function, *args, **kwargs):
try:
return wrapped_function(*args, **kwargs)
except socket.error:
err = sys.exc_info()[1]
raise SocketError(err)
class Connection(object):
def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, parse_yaml=True,
connect_timeout=socket.getdefaulttimeout()):
if parse_yaml is True:
try:
parse_yaml = __import__('yaml').load
except ImportError:
logging.error('Failed to load PyYAML, will not parse YAML')
parse_yaml = False
self._connect_timeout = connect_timeout
self._parse_yaml = parse_yaml or (lambda x: x)
self.host = host
self.port = port
self.connect()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def connect(self):
"""Connect to beanstalkd server."""
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.settimeout(self._connect_timeout)
SocketError.wrap(self._socket.connect, (self.host, self.port))
self._socket.settimeout(None)
self._socket_file = self._socket.makefile('rb')
def close(self):
"""Close connection to server."""
try:
self._socket.sendall('quit\r\n')
except socket.error:
pass
try:
self._socket.close()
except socket.error:
pass
def reconnect(self):
"""Re-connect to server."""
self.close()
self.connect()
def _interact(self, command, expected_ok, expected_err=[]):
SocketError.wrap(self._socket.sendall, command)
status, results = self._read_response()
if status in expected_ok:
return results
elif status in expected_err:
raise CommandFailed(command.split()[0], status, results)
else:
raise UnexpectedResponse(command.split()[0], status, results)
def _read_response(self):
line = SocketError.wrap(self._socket_file.readline)
if not line:
raise SocketError()
response = line.split()
return response[0], response[1:]
def _read_body(self, size):
body = SocketError.wrap(self._socket_file.read, size)
SocketError.wrap(self._socket_file.read, 2) # trailing crlf
if size > 0 and not body:
raise SocketError()
return body
def _interact_value(self, command, expected_ok, expected_err=[]):
return self._interact(command, expected_ok, expected_err)[0]
def _interact_job(self, command, expected_ok, expected_err, reserved=True):
jid, size = self._interact(command, expected_ok, expected_err)
body = self._read_body(int(size))
return Job(self, int(jid), body, reserved)
def _interact_yaml(self, command, expected_ok, expected_err=[]):
size, = self._interact(command, expected_ok, expected_err)
body = self._read_body(int(size))
return self._parse_yaml(body)
def _interact_peek(self, command):
try:
return self._interact_job(command, ['FOUND'], ['NOT_FOUND'], False)
except CommandFailed:
return None
# -- public interface --
def put(self, body, priority=DEFAULT_PRIORITY, delay=0, ttr=DEFAULT_TTR):
"""Put a job into the current tube. Returns job id."""
assert isinstance(body, str), 'Job body must be a str instance'
jid = self._interact_value('put %d %d %d %d\r\n%s\r\n' % (
priority, delay, ttr, len(body), body),
['INSERTED'],
['JOB_TOO_BIG', 'BURIED', 'DRAINING'])
return int(jid)
def reserve(self, timeout=None):
"""Reserve a job from one of the watched tubes, with optional timeout
in seconds. Returns a Job object, or None if the request times out."""
if timeout is not None:
command = 'reserve-with-timeout %d\r\n' % timeout
else:
command = 'reserve\r\n'
try:
return self._interact_job(command,
['RESERVED'],
['DEADLINE_SOON', 'TIMED_OUT'])
except CommandFailed:
exc = sys.exc_info()[1]
_, status, results = exc.args
if status == 'TIMED_OUT':
return None
elif status == 'DEADLINE_SOON':
raise DeadlineSoon(results)
def kick(self, bound=1):
"""Kick at most bound jobs into the ready queue."""
return int(self._interact_value('kick %d\r\n' % bound, ['KICKED']))
def kick_job(self, jid):
"""Kick a specific job into the ready queue."""
self._interact('kick-job %d\r\n' % jid, ['KICKED'], ['NOT_FOUND'])
def peek(self, jid):
"""Peek at a job. Returns a Job, or None."""
return self._interact_peek('peek %d\r\n' % jid)
def peek_ready(self):
"""Peek at next ready job. Returns a Job, or None."""
return self._interact_peek('peek-ready\r\n')
def peek_delayed(self):
"""Peek at next delayed job. Returns a Job, or None."""
return self._interact_peek('peek-delayed\r\n')
def peek_buried(self):
"""Peek at next buried job. Returns a Job, or None."""
return self._interact_peek('peek-buried\r\n')
def tubes(self):
"""Return a list of all existing tubes."""
return self._interact_yaml('list-tubes\r\n', ['OK'])
def using(self):
"""Return the tube currently being used."""
return self._interact_value('list-tube-used\r\n', ['USING'])
def use(self, name):
"""Use a given tube."""
return self._interact_value('use %s\r\n' % name, ['USING'])
def watching(self):
"""Return a list of all tubes being watched."""
return self._interact_yaml('list-tubes-watched\r\n', ['OK'])
def watch(self, name):
"""Watch a given tube."""
return int(self._interact_value('watch %s\r\n' % name, ['WATCHING']))
def ignore(self, name):
"""Stop watching a given tube."""
try:
return int(self._interact_value('ignore %s\r\n' % name,
['WATCHING'],
['NOT_IGNORED']))
except CommandFailed:
# Tried to ignore the only tube in the watchlist, which failed.
return 0
def stats(self):
"""Return a dict of beanstalkd statistics."""
return self._interact_yaml('stats\r\n', ['OK'])
def stats_tube(self, name):
"""Return a dict of stats about a given tube."""
return self._interact_yaml('stats-tube %s\r\n' % name,
['OK'],
['NOT_FOUND'])
def pause_tube(self, name, delay):
"""Pause a tube for a given delay time, in seconds."""
self._interact('pause-tube %s %d\r\n' % (name, delay),
['PAUSED'],
['NOT_FOUND'])
# -- job interactors --
def delete(self, jid):
"""Delete a job, by job id."""
self._interact('delete %d\r\n' % jid, ['DELETED'], ['NOT_FOUND'])
def release(self, jid, priority=DEFAULT_PRIORITY, delay=0):
"""Release a reserved job back into the ready queue."""
self._interact('release %d %d %d\r\n' % (jid, priority, delay),
['RELEASED', 'BURIED'],
['NOT_FOUND'])
def bury(self, jid, priority=DEFAULT_PRIORITY):
"""Bury a job, by job id."""
self._interact('bury %d %d\r\n' % (jid, priority),
['BURIED'],
['NOT_FOUND'])
def touch(self, jid):
"""Touch a job, by job id, requesting more time to work on a reserved
job before it expires."""
self._interact('touch %d\r\n' % jid, ['TOUCHED'], ['NOT_FOUND'])
def stats_job(self, jid):
"""Return a dict of stats about a job, by job id."""
return self._interact_yaml('stats-job %d\r\n' % jid,
['OK'],
['NOT_FOUND'])
class Job(object):
def __init__(self, conn, jid, body, reserved=True):
self.conn = conn
self.jid = jid
self.body = body
self.reserved = reserved
def _priority(self):
stats = self.stats()
if isinstance(stats, dict):
return stats['pri']
return DEFAULT_PRIORITY
# -- public interface --
def delete(self):
"""Delete this job."""
self.conn.delete(self.jid)
self.reserved = False
def release(self, priority=None, delay=0):
"""Release this job back into the ready queue."""
if self.reserved:
self.conn.release(self.jid, priority or self._priority(), delay)
self.reserved = False
def bury(self, priority=None):
"""Bury this job."""
if self.reserved:
self.conn.bury(self.jid, priority or self._priority())
self.reserved = False
def kick(self):
"""Kick this job alive."""
self.conn.kick_job(self.jid)
def touch(self):
"""Touch this reserved job, requesting more time to work on it before
it expires."""
if self.reserved:
self.conn.touch(self.jid)
def stats(self):
"""Return a dict of stats about this job."""
return self.conn.stats_job(self.jid)
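# Hedged usage sketch (assumes a beanstalkd server reachable on the default
# host/port; kept as comments so importing this module has no side effects):
#
#   beanstalk = Connection(host='localhost', port=11300)
#   jid = beanstalk.put('hello')
#   job = beanstalk.reserve()
#   print job.body      # -> 'hello'
#   job.delete()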
if __name__ == '__main__':
import nose
nose.main(argv=['nosetests', '-c', '.nose.cfg'])
| earl/beanstalkc | beanstalkc.py | Python | apache-2.0 | 10,921 |
""".. Ignore pydocstyle D400.
===============
Signal Handlers
===============
"""
from asgiref.sync import async_to_sync
from django.conf import settings
from django.db import transaction
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from resolwe.flow.managers import manager
from resolwe.flow.models import Data, Relation
from resolwe.flow.models.entity import RelationPartition
def commit_signal(data_id):
"""Nudge manager at the end of every Data object save event."""
if not getattr(settings, "FLOW_MANAGER_DISABLE_AUTO_CALLS", False):
immediate = getattr(settings, "FLOW_MANAGER_SYNC_AUTO_CALLS", False)
async_to_sync(manager.communicate)(data_id=data_id, run_sync=immediate)
@receiver(post_save, sender=Data)
def manager_post_save_handler(sender, instance, created, **kwargs):
"""Run newly created (spawned) processes."""
if (
instance.status == Data.STATUS_DONE
or instance.status == Data.STATUS_ERROR
or created
):
# Run manager at the end of the potential transaction. Otherwise
# tasks are send to workers before transaction ends and therefore
# workers cannot access objects created inside transaction.
transaction.on_commit(lambda: commit_signal(instance.id))
# NOTE: m2m_changed signal cannot be used because of a bug:
# https://code.djangoproject.com/ticket/17688
@receiver(post_delete, sender=RelationPartition)
def delete_relation(sender, instance, **kwargs):
"""Delete the Relation object when the last Entity is removed."""
def process_signal(relation_id):
"""Get the relation and delete it if it has no entities left."""
try:
relation = Relation.objects.get(pk=relation_id)
except Relation.DoesNotExist:
return
if relation.entities.count() == 0:
relation.delete()
# Wait for partitions to be recreated.
transaction.on_commit(lambda: process_signal(instance.relation_id))
| genialis/resolwe | resolwe/flow/signals.py | Python | apache-2.0 | 2,030 |
#!/usr/bin/env python3
#coding:utf8
# *** TROUBLESHOOTING ***
# 1) If you get the error "ImportError: No module named zope.interface" then add an empty __init__.py file to the PYTHONDIR/Lib/site-packages/zope directory
# 2) It is expected that you will have NSIS 3 NSIS from http://nsis.sourceforge.net installed.
import codecs
import sys
# try:
# if (sys.version_info.major != 2) or (sys.version_info.minor < 7):
# raise Exception("You must build Syncplay with Python 2.7!")
# except AttributeError:
# import warnings
# warnings.warn("You must build Syncplay with Python 2.7!")
from glob import glob
import os
import subprocess
from string import Template
from distutils.core import setup
try:
from py2exe.build_exe import py2exe
except ImportError:
from py2exe.distutils_buildexe import py2exe
import syncplay
from syncplay.messages import getMissingStrings
missingStrings = getMissingStrings()
if missingStrings is not None and missingStrings != "":
import warnings
warnings.warn("MISSING/UNUSED STRINGS DETECTED:\n{}".format(missingStrings))
def get_nsis_path():
bin_name = "makensis.exe"
from winreg import HKEY_LOCAL_MACHINE as HKLM
from winreg import KEY_READ, KEY_WOW64_32KEY, OpenKey, QueryValueEx
try:
nsisreg = OpenKey(HKLM, "Software\\NSIS", 0, KEY_READ | KEY_WOW64_32KEY)
if QueryValueEx(nsisreg, "VersionMajor")[0] >= 3:
return "{}\\{}".format(QueryValueEx(nsisreg, "")[0], bin_name)
else:
raise Exception("You must install NSIS 3 or later.")
except WindowsError:
return bin_name
NSIS_COMPILE = get_nsis_path()
OUT_DIR = "syncplay_v{}".format(syncplay.version)
SETUP_SCRIPT_PATH = "syncplay_setup.nsi"
NSIS_SCRIPT_TEMPLATE = r"""
!include LogicLib.nsh
!include nsDialogs.nsh
!include FileFunc.nsh
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\English.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Polish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Russian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\German.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Italian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Spanish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\PortugueseBR.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Portuguese.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Turkish.nlf"
Unicode true
Name "Syncplay $version"
OutFile "Syncplay-$version-Setup.exe"
InstallDir $$PROGRAMFILES\Syncplay
RequestExecutionLevel admin
ManifestDPIAware false
XPStyle on
Icon syncplay\resources\icon.ico ;Change DIR
SetCompressor /SOLID lzma
VIProductVersion "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_POLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_SPANISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_TURKISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileDescription" "Syncplay"
LangString ^SyncplayLanguage $${LANG_ENGLISH} "en"
LangString ^Associate $${LANG_ENGLISH} "Associate Syncplay with multimedia files."
LangString ^Shortcut $${LANG_ENGLISH} "Create Shortcuts in following locations:"
LangString ^StartMenu $${LANG_ENGLISH} "Start Menu"
LangString ^Desktop $${LANG_ENGLISH} "Desktop"
LangString ^QuickLaunchBar $${LANG_ENGLISH} "Quick Launch Bar"
LangString ^AutomaticUpdates $${LANG_ENGLISH} "Check for updates automatically"
LangString ^UninstConfig $${LANG_ENGLISH} "Delete configuration file."
LangString ^SyncplayLanguage $${LANG_POLISH} "pl"
LangString ^Associate $${LANG_POLISH} "Skojarz Syncplaya z multimediami"
LangString ^Shortcut $${LANG_POLISH} "Utworz skroty w nastepujacych miejscach:"
LangString ^StartMenu $${LANG_POLISH} "Menu Start"
LangString ^Desktop $${LANG_POLISH} "Pulpit"
LangString ^QuickLaunchBar $${LANG_POLISH} "Pasek szybkiego uruchamiania"
LangString ^UninstConfig $${LANG_POLISH} "Usun plik konfiguracyjny."
LangString ^SyncplayLanguage $${LANG_RUSSIAN} "ru"
LangString ^Associate $${LANG_RUSSIAN} "Ассоциировать Syncplay с видеофайлами"
LangString ^Shortcut $${LANG_RUSSIAN} "Создать ярлыки:"
LangString ^StartMenu $${LANG_RUSSIAN} "в меню Пуск"
LangString ^Desktop $${LANG_RUSSIAN} "на рабочем столе"
LangString ^QuickLaunchBar $${LANG_RUSSIAN} "в меню быстрого запуска"
LangString ^AutomaticUpdates $${LANG_RUSSIAN} "Проверять обновления автоматически"; TODO: Confirm Russian translation ("Check for updates automatically")
LangString ^UninstConfig $${LANG_RUSSIAN} "Удалить файл настроек."
LangString ^SyncplayLanguage $${LANG_GERMAN} "de"
LangString ^Associate $${LANG_GERMAN} "Syncplay als Standardprogramm für Multimedia-Dateien verwenden."
LangString ^Shortcut $${LANG_GERMAN} "Erstelle Verknüpfungen an folgenden Orten:"
LangString ^StartMenu $${LANG_GERMAN} "Startmenü"
LangString ^Desktop $${LANG_GERMAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_GERMAN} "Schnellstartleiste"
LangString ^AutomaticUpdates $${LANG_GERMAN} "Automatisch nach Updates suchen";
LangString ^UninstConfig $${LANG_GERMAN} "Konfigurationsdatei löschen."
LangString ^SyncplayLanguage $${LANG_ITALIAN} "it"
LangString ^Associate $${LANG_ITALIAN} "Associa Syncplay con i file multimediali."
LangString ^Shortcut $${LANG_ITALIAN} "Crea i collegamenti nei percorsi seguenti:"
LangString ^StartMenu $${LANG_ITALIAN} "Menu Start"
LangString ^Desktop $${LANG_ITALIAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_ITALIAN} "Barra di avvio rapido"
LangString ^AutomaticUpdates $${LANG_ITALIAN} "Controllo automatico degli aggiornamenti"
LangString ^UninstConfig $${LANG_ITALIAN} "Cancella i file di configurazione."
LangString ^SyncplayLanguage $${LANG_SPANISH} "es"
LangString ^Associate $${LANG_SPANISH} "Asociar Syncplay con archivos multimedia."
LangString ^Shortcut $${LANG_SPANISH} "Crear accesos directos en las siguientes ubicaciones:"
LangString ^StartMenu $${LANG_SPANISH} "Menú de inicio"
LangString ^Desktop $${LANG_SPANISH} "Escritorio"
LangString ^QuickLaunchBar $${LANG_SPANISH} "Barra de acceso rápido"
LangString ^AutomaticUpdates $${LANG_SPANISH} "Buscar actualizaciones automáticamente"
LangString ^UninstConfig $${LANG_SPANISH} "Borrar archivo de configuración."
LangString ^SyncplayLanguage $${LANG_PORTUGUESEBR} "pt_BR"
LangString ^Associate $${LANG_PORTUGUESEBR} "Associar Syncplay aos arquivos multimídia."
LangString ^Shortcut $${LANG_PORTUGUESEBR} "Criar atalhos nos seguintes locais:"
LangString ^StartMenu $${LANG_PORTUGUESEBR} "Menu Iniciar"
LangString ^Desktop $${LANG_PORTUGUESEBR} "Área de trabalho"
LangString ^QuickLaunchBar $${LANG_PORTUGUESEBR} "Barra de acesso rápido"
LangString ^AutomaticUpdates $${LANG_PORTUGUESEBR} "Verificar atualizações automaticamente"
LangString ^UninstConfig $${LANG_PORTUGUESEBR} "Deletar arquivo de configuração."
LangString ^SyncplayLanguage $${LANG_PORTUGUESE} "pt_PT"
LangString ^Associate $${LANG_PORTUGUESE} "Associar Syncplay aos ficheiros multimédia."
LangString ^Shortcut $${LANG_PORTUGUESE} "Criar atalhos nos seguintes locais:"
LangString ^StartMenu $${LANG_PORTUGUESE} "Menu Iniciar"
LangString ^Desktop $${LANG_PORTUGUESE} "Área de trabalho"
LangString ^QuickLaunchBar $${LANG_PORTUGUESE} "Barra de acesso rápido"
LangString ^AutomaticUpdates $${LANG_PORTUGUESE} "Verificar atualizações automaticamente"
LangString ^UninstConfig $${LANG_PORTUGUESE} "Apagar ficheiro de configuração."
LangString ^SyncplayLanguage $${LANG_TURKISH} "tr"
LangString ^Associate $${LANG_TURKISH} "Syncplay'i ortam dosyalarıyla ilişkilendirin."
LangString ^Shortcut $${LANG_TURKISH} "Aşağıdaki konumlarda kısayollar oluşturun:"
LangString ^StartMenu $${LANG_TURKISH} "Başlangıç menüsü"
LangString ^Desktop $${LANG_TURKISH} "Masaüstü"
LangString ^QuickLaunchBar $${LANG_TURKISH} "Hızlı Başlatma Çubuğu"
LangString ^AutomaticUpdates $${LANG_TURKISH} "Güncellemeleri otomatik denetle"
LangString ^UninstConfig $${LANG_TURKISH} "Yapılandırma dosyasını silin."
; Remove text to save space
LangString ^ClickInstall $${LANG_GERMAN} " "
PageEx license
LicenseData syncplay\resources\license.rtf
PageExEnd
Page custom DirectoryCustom DirectoryCustomLeave
Page instFiles
UninstPage custom un.installConfirm un.installConfirmLeave
UninstPage instFiles
Var Dialog
Var Icon_Syncplay
Var Icon_Syncplay_Handle
;Var CheckBox_Associate
Var CheckBox_AutomaticUpdates
Var CheckBox_StartMenuShortcut
Var CheckBox_DesktopShortcut
Var CheckBox_QuickLaunchShortcut
;Var CheckBox_Associate_State
Var CheckBox_AutomaticUpdates_State
Var CheckBox_StartMenuShortcut_State
Var CheckBox_DesktopShortcut_State
Var CheckBox_QuickLaunchShortcut_State
Var Button_Browse
Var Directory
Var GroupBox_DirSub
Var Label_Text
Var Label_Shortcut
Var Label_Size
Var Label_Space
Var Text_Directory
Var Uninst_Dialog
Var Uninst_Icon
Var Uninst_Icon_Handle
Var Uninst_Label_Directory
Var Uninst_Label_Text
Var Uninst_Text_Directory
Var Uninst_CheckBox_Config
Var Uninst_CheckBox_Config_State
Var Size
Var SizeHex
Var AvailibleSpace
Var AvailibleSpaceGiB
Var Drive
Var VLC_Directory
;!macro APP_ASSOCIATE EXT FileCLASS DESCRIPTION COMMANDTEXT COMMAND
; WriteRegStr HKCR ".$${EXT}" "" "$${FileCLASS}"
; WriteRegStr HKCR "$${FileCLASS}" "" `$${DESCRIPTION}`
; WriteRegStr HKCR "$${FileCLASS}\shell" "" "open"
; WriteRegStr HKCR "$${FileCLASS}\shell\open" "" `$${COMMANDTEXT}`
; WriteRegStr HKCR "$${FileCLASS}\shell\open\command" "" `$${COMMAND}`
;!macroend
!macro APP_UNASSOCIATE EXT FileCLASS
; Backup the previously associated File class
ReadRegStr $$R0 HKCR ".$${EXT}" `$${FileCLASS}_backup`
WriteRegStr HKCR ".$${EXT}" "" "$$R0"
DeleteRegKey HKCR `$${FileCLASS}`
!macroend
;!macro ASSOCIATE EXT
; !insertmacro APP_ASSOCIATE "$${EXT}" "Syncplay.$${EXT}" "$$INSTDIR\Syncplay.exe,%1%" \
; "Open with Syncplay" "$$INSTDIR\Syncplay.exe $$\"%1$$\""
;!macroend
!macro UNASSOCIATE EXT
!insertmacro APP_UNASSOCIATE "$${EXT}" "Syncplay.$${EXT}"
!macroend
;Prevents from running more than one instance of installer and sets default state of checkboxes
Function .onInit
System::Call 'kernel32::CreateMutexA(i 0, i 0, t "SyncplayMutex") i .r1 ?e'
Pop $$R0
StrCmp $$R0 0 +3
MessageBox MB_OK|MB_ICONEXCLAMATION "The installer is already running."
Abort
;StrCpy $$CheckBox_Associate_State $${BST_CHECKED}
StrCpy $$CheckBox_StartMenuShortcut_State $${BST_CHECKED}
Call GetSize
Call DriveSpace
Call Language
FunctionEnd
;Language selection dialog
Function Language
Push ""
Push $${LANG_ENGLISH}
Push English
Push $${LANG_POLISH}
Push Polski
Push $${LANG_RUSSIAN}
Push Русский
Push $${LANG_GERMAN}
Push Deutsch
Push $${LANG_ITALIAN}
Push Italiano
Push $${LANG_SPANISH}
Push Español
Push $${LANG_PORTUGUESEBR}
Push 'Português do Brasil'
Push $${LANG_PORTUGUESE}
Push 'Português de Portugal'
Push $${LANG_TURKISH}
Push 'Türkçe'
Push A ; A means auto count languages
LangDLL::LangDialog "Language Selection" "Please select the language of Syncplay and the installer"
Pop $$LANGUAGE
StrCmp $$LANGUAGE "cancel" 0 +2
Abort
FunctionEnd
Function DirectoryCustom
nsDialogs::Create 1018
Pop $$Dialog
GetFunctionAddress $$R8 DirectoryCustomLeave
nsDialogs::OnBack $$R8
$${NSD_CreateIcon} 0u 0u 22u 20u ""
Pop $$Icon_Syncplay
$${NSD_SetIconFromInstaller} $$Icon_Syncplay $$Icon_Syncplay_Handle
$${NSD_CreateLabel} 25u 0u 241u 34u "$$(^DirText)"
Pop $$Label_Text
$${NSD_CreateText} 8u 38u 187u 12u "$$INSTDIR"
Pop $$Text_Directory
$${NSD_SetFocus} $$Text_Directory
$${NSD_CreateBrowseButton} 202u 37u 55u 14u "$$(^BrowseBtn)"
Pop $$Button_Browse
$${NSD_OnClick} $$Button_Browse DirectoryBrowseDialog
$${NSD_CreateGroupBox} 1u 27u 264u 30u "$$(^DirSubText)"
Pop $$GroupBox_DirSub
$${NSD_CreateLabel} 0u 122u 132 8u "$$(^SpaceRequired)$$SizeMB"
Pop $$Label_Size
$${NSD_CreateLabel} 321u 122u 132 8u "$$(^SpaceAvailable)$$AvailibleSpaceGiB.$$AvailibleSpaceGB"
Pop $$Label_Space
;$${NSD_CreateCheckBox} 8u 59u 187u 10u "$$(^Associate)"
;Pop $$CheckBox_Associate
$${NSD_CreateCheckBox} 8u 72u 250u 10u "$$(^AutomaticUpdates)"
Pop $$CheckBox_AutomaticUpdates
$${NSD_Check} $$CheckBox_AutomaticUpdates
$${NSD_CreateLabel} 8u 95u 187u 10u "$$(^Shortcut)"
Pop $$Label_Shortcut
$${NSD_CreateCheckbox} 8u 105u 70u 10u "$$(^StartMenu)"
Pop $$CheckBox_StartMenuShortcut
$${NSD_CreateCheckbox} 78u 105u 70u 10u "$$(^Desktop)"
Pop $$CheckBox_DesktopShortcut
$${NSD_CreateCheckbox} 158u 105u 130u 10u "$$(^QuickLaunchBar)"
Pop $$CheckBox_QuickLaunchShortcut
;$${If} $$CheckBox_Associate_State == $${BST_CHECKED}
; $${NSD_Check} $$CheckBox_Associate
;$${EndIf}
$${If} $$CheckBox_StartMenuShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_StartMenuShortcut
$${EndIf}
$${If} $$CheckBox_DesktopShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_DesktopShortcut
$${EndIf}
$${If} $$CheckBox_QuickLaunchShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_QuickLaunchShortcut
$${EndIf}
$${If} $$CheckBox_AutomaticUpdates_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_AutomaticUpdates
$${EndIf}
nsDialogs::Show
$${NSD_FreeIcon} $$Icon_Syncplay_Handle
FunctionEnd
Function DirectoryCustomLeave
$${NSD_GetText} $$Text_Directory $$INSTDIR
;$${NSD_GetState} $$CheckBox_Associate $$CheckBox_Associate_State
$${NSD_GetState} $$CheckBox_AutomaticUpdates $$CheckBox_AutomaticUpdates_State
$${NSD_GetState} $$CheckBox_StartMenuShortcut $$CheckBox_StartMenuShortcut_State
$${NSD_GetState} $$CheckBox_DesktopShortcut $$CheckBox_DesktopShortcut_State
$${NSD_GetState} $$CheckBox_QuickLaunchShortcut $$CheckBox_QuickLaunchShortcut_State
FunctionEnd
Function DirectoryBrowseDialog
nsDialogs::SelectFolderDialog $$(^DirBrowseText)
Pop $$Directory
$${If} $$Directory != error
StrCpy $$INSTDIR $$Directory
$${NSD_SetText} $$Text_Directory $$INSTDIR
Call DriveSpace
$${NSD_SetText} $$Label_Space "$$(^SpaceAvailable)$$AvailibleSpaceGiB.$$AvailibleSpaceGB"
$${EndIf}
Abort
FunctionEnd
Function GetSize
StrCpy $$Size "$totalSize"
IntOp $$Size $$Size / 1024
IntFmt $$SizeHex "0x%08X" $$Size
IntOp $$Size $$Size / 1024
FunctionEnd
;Calculates Free Space on HDD
Function DriveSpace
StrCpy $$Drive $$INSTDIR 1
$${DriveSpace} "$$Drive:\" "/D=F /S=M" $$AvailibleSpace
IntOp $$AvailibleSpaceGiB $$AvailibleSpace / 1024
IntOp $$AvailibleSpace $$AvailibleSpace % 1024
IntOp $$AvailibleSpace $$AvailibleSpace / 102
FunctionEnd
Function InstallOptions
;$${If} $$CheckBox_Associate_State == $${BST_CHECKED}
; Call Associate
; DetailPrint "Associated Syncplay with multimedia files"
;$${EndIf}
$${If} $$CheckBox_StartMenuShortcut_State == $${BST_CHECKED}
CreateDirectory $$SMPROGRAMS\Syncplay
SetOutPath "$$INSTDIR"
CreateShortCut "$$SMPROGRAMS\Syncplay\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
CreateShortCut "$$SMPROGRAMS\Syncplay\Syncplay Server.lnk" "$$INSTDIR\syncplayServer.exe" ""
CreateShortCut "$$SMPROGRAMS\Syncplay\Uninstall.lnk" "$$INSTDIR\Uninstall.exe" ""
WriteINIStr "$$SMPROGRAMS\Syncplay\SyncplayWebsite.url" "InternetShortcut" "URL" "https://syncplay.pl"
$${EndIf}
$${If} $$CheckBox_DesktopShortcut_State == $${BST_CHECKED}
SetOutPath "$$INSTDIR"
CreateShortCut "$$DESKTOP\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
$${EndIf}
$${If} $$CheckBox_QuickLaunchShortcut_State == $${BST_CHECKED}
SetOutPath "$$INSTDIR"
CreateShortCut "$$QUICKLAUNCH\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
$${EndIf}
FunctionEnd
;Associates extensions with Syncplay
;Function Associate
; !insertmacro ASSOCIATE avi
; !insertmacro ASSOCIATE mpg
; !insertmacro ASSOCIATE mpeg
; !insertmacro ASSOCIATE mpe
; !insertmacro ASSOCIATE m1v
; !insertmacro ASSOCIATE m2v
; !insertmacro ASSOCIATE mpv2
; !insertmacro ASSOCIATE mp2v
; !insertmacro ASSOCIATE mkv
; !insertmacro ASSOCIATE mp4
; !insertmacro ASSOCIATE m4v
; !insertmacro ASSOCIATE mp4v
; !insertmacro ASSOCIATE 3gp
; !insertmacro ASSOCIATE 3gpp
; !insertmacro ASSOCIATE 3g2
; !insertmacro ASSOCIATE 3pg2
; !insertmacro ASSOCIATE flv
; !insertmacro ASSOCIATE f4v
; !insertmacro ASSOCIATE rm
; !insertmacro ASSOCIATE wmv
; !insertmacro ASSOCIATE swf
; !insertmacro ASSOCIATE rmvb
; !insertmacro ASSOCIATE divx
; !insertmacro ASSOCIATE amv
;FunctionEnd
Function WriteRegistry
Call GetSize
WriteRegStr HKLM SOFTWARE\Syncplay "Install_Dir" "$$INSTDIR"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayName" "Syncplay"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "InstallLocation" "$$INSTDIR"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "UninstallString" '"$$INSTDIR\uninstall.exe"'
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayIcon" "$$INSTDIR\resources\icon.ico"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "Publisher" "Syncplay"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayVersion" "$version"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "URLInfoAbout" "https://syncplay.pl/"
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "NoModify" 1
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "NoRepair" 1
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "EstimatedSize" "$$SizeHex"
WriteINIStr $$APPDATA\syncplay.ini general language $$(^SyncplayLanguage)
$${If} $$CheckBox_AutomaticUpdates_State == $${BST_CHECKED}
WriteINIStr $$APPDATA\syncplay.ini general CheckForUpdatesAutomatically "True"
$${Else}
WriteINIStr $$APPDATA\syncplay.ini general CheckForUpdatesAutomatically "False"
$${EndIf}
FunctionEnd
Function un.installConfirm
nsDialogs::Create 1018
Pop $$Uninst_Dialog
$${NSD_CreateIcon} 0u 1u 22u 20u ""
Pop $$Uninst_Icon
$${NSD_SetIconFromInstaller} $$Uninst_Icon $$Uninst_Icon_Handle
$${NSD_CreateLabel} 0u 45u 55u 8u "$$(^UninstallingSubText)"
Pop $$Uninst_Label_Directory
$${NSD_CreateLabel} 25u 0u 241u 34u "$$(^UninstallingText)"
Pop $$Uninst_Label_Text
ReadRegStr $$INSTDIR HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "InstallLocation"
$${NSD_CreateText} 56u 43u 209u 12u "$$INSTDIR"
Pop $$Uninst_Text_Directory
EnableWindow $$Uninst_Text_Directory 0
$${NSD_CreateCheckBox} 0u 60u 250u 10u "$$(^UninstConfig)"
Pop $$Uninst_CheckBox_Config
nsDialogs::Show
$${NSD_FreeIcon} $$Uninst_Icon_Handle
FunctionEnd
Function un.installConfirmLeave
$${NSD_GetState} $$Uninst_CheckBox_Config $$Uninst_CheckBox_Config_State
FunctionEnd
Function un.AssociateDel
!insertmacro UNASSOCIATE avi
!insertmacro UNASSOCIATE mpg
!insertmacro UNASSOCIATE mpeg
!insertmacro UNASSOCIATE mpe
!insertmacro UNASSOCIATE m1v
!insertmacro UNASSOCIATE m2v
!insertmacro UNASSOCIATE mpv2
!insertmacro UNASSOCIATE mp2v
!insertmacro UNASSOCIATE mkv
!insertmacro UNASSOCIATE mp4
!insertmacro UNASSOCIATE m4v
!insertmacro UNASSOCIATE mp4v
!insertmacro UNASSOCIATE 3gp
!insertmacro UNASSOCIATE 3gpp
!insertmacro UNASSOCIATE 3g2
!insertmacro UNASSOCIATE 3pg2
!insertmacro UNASSOCIATE flv
!insertmacro UNASSOCIATE f4v
!insertmacro UNASSOCIATE rm
!insertmacro UNASSOCIATE wmv
!insertmacro UNASSOCIATE swf
!insertmacro UNASSOCIATE rmvb
!insertmacro UNASSOCIATE divx
!insertmacro UNASSOCIATE amv
FunctionEnd
Function un.InstallOptions
Delete $$SMPROGRAMS\Syncplay\Syncplay.lnk
Delete "$$SMPROGRAMS\Syncplay\Syncplay Server.lnk"
Delete $$SMPROGRAMS\Syncplay\Uninstall.lnk
Delete $$SMPROGRAMS\Syncplay\SyncplayWebsite.url
RMDir $$SMPROGRAMS\Syncplay
Delete $$DESKTOP\Syncplay.lnk
Delete $$QUICKLAUNCH\Syncplay.lnk
ReadRegStr $$VLC_Directory HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "VLCInstallLocation"
IfFileExists "$$VLC_Directory\lua\intf\syncplay.lua" 0 +2
Delete $$VLC_Directory\lua\intf\syncplay.lua
FunctionEnd
Section "Install"
SetOverwrite on
SetOutPath $$INSTDIR
WriteUninstaller uninstall.exe
$installFiles
Call InstallOptions
Call WriteRegistry
SectionEnd
Section "Uninstall"
Call un.AssociateDel
Call un.InstallOptions
$uninstallFiles
DeleteRegKey HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay"
DeleteRegKey HKLM SOFTWARE\Syncplay
Delete $$INSTDIR\uninstall.exe
RMDir $$INSTDIR\Syncplay\\resources\lua\intf
RMDir $$INSTDIR\Syncplay\\resources\lua
RMDir $$INSTDIR\Syncplay\\resources
RMDir $$INSTDIR\resources
RMDir $$INSTDIR\lib
RMDir $$INSTDIR
$${If} $$Uninst_CheckBox_Config_State == $${BST_CHECKED}
IfFileExists "$$APPDATA\.syncplay" 0 +2
Delete $$APPDATA\.syncplay
IfFileExists "$$APPDATA\syncplay.ini" 0 +2
Delete $$APPDATA\syncplay.ini
$${EndIf}
SectionEnd
"""
class NSISScript(object):
def create(self):
fileList, totalSize = self.getBuildDirContents(OUT_DIR)
print("Total size eq: {}".format(totalSize))
installFiles = self.prepareInstallListTemplate(fileList)
uninstallFiles = self.prepareDeleteListTemplate(fileList)
if os.path.isfile(SETUP_SCRIPT_PATH):
raise RuntimeError("Cannot create setup script, file exists at {}".format(SETUP_SCRIPT_PATH))
contents = Template(NSIS_SCRIPT_TEMPLATE).substitute(
version=syncplay.version,
uninstallFiles=uninstallFiles,
installFiles=installFiles,
totalSize=totalSize,
)
with codecs.open(SETUP_SCRIPT_PATH, "w", "utf-8-sig") as outfile:
outfile.write(contents)
def compile(self):
if not os.path.isfile(NSIS_COMPILE):
return "makensis.exe not found, won't create the installer"
subproc = subprocess.Popen([NSIS_COMPILE, SETUP_SCRIPT_PATH], env=os.environ)
subproc.communicate()
retcode = subproc.returncode
os.remove(SETUP_SCRIPT_PATH)
if retcode:
raise RuntimeError("NSIS compilation return code: %d" % retcode)
def getBuildDirContents(self, path):
fileList = {}
totalSize = 0
for root, _, files in os.walk(path):
totalSize += sum(os.path.getsize(os.path.join(root, file_)) for file_ in files)
for file_ in files:
new_root = root.replace(OUT_DIR, "").strip("\\")
if new_root not in fileList:
fileList[new_root] = []
fileList[new_root].append(file_)
return fileList, totalSize
def prepareInstallListTemplate(self, fileList):
create = []
for dir_ in fileList.keys():
create.append('SetOutPath "$INSTDIR\\{}"'.format(dir_))
for file_ in fileList[dir_]:
create.append('FILE "{}\\{}\\{}"'.format(OUT_DIR, dir_, file_))
return "\n".join(create)
    def prepareDeleteListTemplate(self, fileList):
        delete = []
        for dir_ in fileList.keys():
            for file_ in fileList[dir_]:
                delete.append('DELETE "$INSTDIR\\{}\\{}"'.format(dir_, file_))
            # Remove the directory itself once all of its files are deleted.
            delete.append('RMdir "$INSTDIR\\{}"'.format(dir_))
        return "\n".join(delete)
def pruneUnneededLibraries():
from pathlib import Path
cwd = os.getcwd()
libDir = cwd + '\\' + OUT_DIR + '\\lib\\'
unneededModules = ['PySide2.Qt3D*', 'PySide2.QtAxContainer.pyd', 'PySide2.QtCharts.pyd', 'PySide2.QtConcurrent.pyd',
'PySide2.QtDataVisualization.pyd', 'PySide2.QtHelp.pyd', 'PySide2.QtLocation.pyd',
'PySide2.QtMultimedia.pyd', 'PySide2.QtMultimediaWidgets.pyd', 'PySide2.QtOpenGL.pyd',
'PySide2.QtPositioning.pyd', 'PySide2.QtPrintSupport.pyd', 'PySide2.QtQml.pyd',
'PySide2.QtQuick.pyd', 'PySide2.QtQuickWidgets.pyd', 'PySide2.QtScxml.pyd', 'PySide2.QtSensors.pyd',
'PySide2.QtSql.pyd', 'PySide2.QtSvg.pyd', 'PySide2.QtTest.pyd', 'PySide2.QtTextToSpeech.pyd',
'PySide2.QtUiTools.pyd', 'PySide2.QtWebChannel.pyd', 'PySide2.QtWebEngine.pyd',
'PySide2.QtWebEngineCore.pyd', 'PySide2.QtWebEngineWidgets.pyd', 'PySide2.QtWebSockets.pyd',
'PySide2.QtWinExtras.pyd', 'PySide2.QtXml.pyd', 'PySide2.QtXmlPatterns.pyd']
unneededLibs = ['Qt53D*', 'Qt5Charts.dll', 'Qt5Concurrent.dll', 'Qt5DataVisualization.dll', 'Qt5Gamepad.dll', 'Qt5Help.dll',
'Qt5Location.dll', 'Qt5Multimedia.dll', 'Qt5MultimediaWidgets.dll', 'Qt5OpenGL.dll', 'Qt5Positioning.dll',
'Qt5PrintSupport.dll', 'Qt5Quick.dll', 'Qt5QuickWidgets.dll', 'Qt5Scxml.dll', 'Qt5Sensors.dll', 'Qt5Sql.dll',
'Qt5Svg.dll', 'Qt5Test.dll', 'Qt5TextToSpeech.dll', 'Qt5WebChannel.dll', 'Qt5WebEngine.dll',
'Qt5WebEngineCore.dll', 'Qt5WebEngineWidgets.dll', 'Qt5WebSockets.dll', 'Qt5WinExtras.dll', 'Qt5Xml.dll',
'Qt5XmlPatterns.dll']
windowsDLL = ['MSVCP140.dll', 'VCRUNTIME140.dll']
deleteList = unneededModules + unneededLibs + windowsDLL
deleteList.append('api-*')
for filename in deleteList:
for p in Path(libDir).glob(filename):
p.unlink()
def copyQtPlugins(paths):
import shutil
from PySide2 import QtCore
basePath = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.PluginsPath)
basePath = basePath.replace('/', '\\')
destBase = os.getcwd() + '\\' + OUT_DIR
for elem in paths:
elemDir, elemName = os.path.split(elem)
source = basePath + '\\' + elem
dest = destBase + '\\' + elem
destDir = destBase + '\\' + elemDir
os.makedirs(destDir, exist_ok=True)
shutil.copy(source, dest)
class build_installer(py2exe):
def run(self):
py2exe.run(self)
print('*** deleting unnecessary libraries and modules ***')
pruneUnneededLibraries()
print('*** copying qt plugins ***')
copyQtPlugins(qt_plugins)
script = NSISScript()
script.create()
print("*** compiling the NSIS setup script ***")
script.compile()
print("*** DONE ***")
guiIcons = glob('syncplay/resources/*.ico') + glob('syncplay/resources/*.png') + ['syncplay/resources/spinner.mng']
resources = [
"syncplay/resources/syncplayintf.lua",
"syncplay/resources/license.rtf",
"syncplay/resources/third-party-notices.rtf"
]
resources.extend(guiIcons)
intf_resources = ["syncplay/resources/lua/intf/syncplay.lua"]
qt_plugins = ['platforms\\qwindows.dll', 'styles\\qwindowsvistastyle.dll']
common_info = dict(
name='Syncplay',
version=syncplay.version,
author='Uriziel',
author_email='[email protected]',
description='Syncplay',
)
info = dict(
common_info,
windows=[{
"script": "syncplayClient.py",
"icon_resources": [(1, "syncplay\\resources\\icon.ico")],
'dest_base': "Syncplay"},
],
console=['syncplayServer.py'],
# *** If you wish to make the Syncplay client use console mode (for --no-gui to work) then comment out the above two lines and uncomment the following line:
# console=['syncplayServer.py', {"script":"syncplayClient.py", "icon_resources":[(1, "resources\\icon.ico")], 'dest_base': "Syncplay"}],
options={
'py2exe': {
'dist_dir': OUT_DIR,
'packages': 'PySide2, cffi, OpenSSL, certifi',
'includes': 'twisted, sys, encodings, datetime, os, time, math, urllib, ast, unicodedata, _ssl, win32pipe, win32file',
'excludes': 'venv, doctest, pdb, unittest, win32clipboard, win32pdh, win32security, win32trace, win32ui, winxpgui, win32process, Tkinter',
'dll_excludes': 'msvcr71.dll, MSVCP90.dll, POWRPROF.dll',
'optimize': 2,
'compressed': 1
}
},
data_files=[("resources", resources), ("resources/lua/intf", intf_resources)],
zipfile="lib/libsync",
cmdclass={"py2exe": build_installer},
)
sys.argv.extend(['py2exe'])
setup(**info)
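# Typical invocation: run "python buildPy2exe.py" from the repository root
# (the 'py2exe' command is appended to sys.argv above); this assumes py2exe
# and NSIS 3 are installed, as noted in the troubleshooting comments at the
# top of this file.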
| alby128/syncplay | buildPy2exe.py | Python | apache-2.0 | 31,233 |
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Selection import *
from Alphas.RsiAlphaModel import RsiAlphaModel
from Portfolio.EqualWeightingPortfolioConstructionModel import EqualWeightingPortfolioConstructionModel
from Execution.StandardDeviationExecutionModel import StandardDeviationExecutionModel
from datetime import timedelta
### <summary>
### Regression algorithm for the StandardDeviationExecutionModel.
### This algorithm shows how the execution model works to split up orders and submit them
### only when the price is 2 standard deviations from the 60min mean (default model settings).
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="using quantconnect" />
### <meta name="tag" content="trading and orders" />
class StandardDeviationExecutionModelRegressionAlgorithm(QCAlgorithm):
'''Regression algorithm for the StandardDeviationExecutionModel.
This algorithm shows how the execution model works to split up orders and submit them
only when the price is 2 standard deviations from the 60min mean (default model settings).'''
def Initialize(self):
        ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
# Set requested data resolution
self.UniverseSettings.Resolution = Resolution.Minute
self.SetStartDate(2013,10,7)
self.SetEndDate(2013,10,11)
self.SetCash(1000000)
self.SetUniverseSelection(ManualUniverseSelectionModel([
Symbol.Create('AIG', SecurityType.Equity, Market.USA),
Symbol.Create('BAC', SecurityType.Equity, Market.USA),
Symbol.Create('IBM', SecurityType.Equity, Market.USA),
Symbol.Create('SPY', SecurityType.Equity, Market.USA)
]))
self.SetAlpha(RsiAlphaModel(14, Resolution.Hour))
self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel())
self.SetExecution(StandardDeviationExecutionModel())
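        # Framework pipeline: the manual universe feeds the RSI alpha model,
        # whose insights are sized equally by the portfolio construction model
        # and then worked into the market piecemeal by the execution model.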
def OnOrderEvent(self, orderEvent):
self.Log(f"{self.Time}: {orderEvent}") | Jay-Jay-D/LeanSTP | Algorithm.Python/StandardDeviationExecutionModelRegressionAlgorithm.py | Python | apache-2.0 | 3,090 |
# -*- coding: utf-8 -*-
'''
fantastic Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['sceper.ws','sceper.unblocked.pro']
self.base_link = 'https://sceper.unblocked.pro'
self.search_link = '/search/%s/feed/rss2/'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() == False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'item')
hostDict = hostprDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
c = client.parseDOM(post, 'content.+?')[0]
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', c)
s = s[0] if s else '0'
u = zip(client.parseDOM(c, 'a', ret='href'), client.parseDOM(c, 'a'))
u = [(i[0], i[1], re.findall('PT(\d+)$', i[1])) for i in u]
u = [(i[0], i[1]) for i in u if not i[2]]
                    if 'tvshowtitle' in data:
                        # Use the last URL path segment as the release name and
                        # keep the size so item[2] is valid for episodes too.
                        u = [([x for x in i[0].strip('//').split('/')][-1], i[0], s) for i in u]
                    else:
                        u = [(t, i[0], s) for i in u]
items += u
except:
pass
for item in items:
try:
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
fmt = re.sub('(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)', '', name.upper())
fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
fmt = [i.lower() for i in fmt]
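                    # fmt now holds the lowercased release-name tokens
                    # (e.g. '1080p', 'x265', 'dvdscr') used below to infer
                    # quality and to reject dubbed/extras releases.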
if any(i.endswith(('subs', 'sub', 'dubbed', 'dub')) for i in fmt): raise Exception()
if any(i in ['extras'] for i in fmt): raise Exception()
if '1080p' in fmt: quality = '1080p'
elif '720p' in fmt: quality = 'HD'
else: quality = 'SD'
if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt): quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts'] for i in fmt): quality = 'CAM'
info = []
if '3d' in fmt: info.append('3D')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', item[2])[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
if any(i in ['hevc', 'h265', 'x265'] for i in fmt): info.append('HEVC')
info = ' | '.join(info)
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
return sources
def resolve(self, url):
return url
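# --- Illustrative sketch (not part of the original add-on) ---
# sources() above condenses scraped size strings ("1.4 GB", "700 MB", ...)
# into a uniform "<n.nn> GB" label. A standalone version of that logic, under
# the same "<number> <unit>" input assumption, looks like this:
def _example_normalise_size(size_text):
    match = re.search('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', size_text)
    if not match:
        return None
    size = match.group(1)
    div = 1 if size.endswith(('GB', 'GiB')) else 1024
    value = float(re.sub('[^0-9.]', '', size.replace(',', '.'))) / div
    return '%.2f GB' % value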
| TheWardoctor/Wardoctors-repo | script.module.fantastic/lib/resources/lib/sources/en/sceper.py | Python | apache-2.0 | 6,854 |
# #######
# Copyright (c) 2018-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client.client import GoogleCredentials
from .. import gcp
from .. import constants
class CloudResourcesBase(gcp.GoogleCloudApi):
def __init__(self,
config,
logger,
scope=constants.COMPUTE_SCOPE,
discovery=constants.CLOUDRESOURCES_DISCOVERY,
api_version=constants.API_V1):
super(CloudResourcesBase, self).__init__(
config,
logger,
scope,
discovery,
api_version)
def get_credentials(self, scope):
        # To obtain a refresh token for local testing:
        #   run: gcloud beta auth application-default login
        # and look at ~/.config/gcloud/application_default_credentials.json
credentials = GoogleCredentials(
access_token=None,
client_id=self.auth['client_id'],
client_secret=self.auth['client_secret'],
refresh_token=self.auth['refresh_token'],
token_expiry=None,
token_uri=GOOGLE_TOKEN_URI,
user_agent='Python client library'
)
return credentials
def get(self):
raise NotImplementedError()
def create(self):
raise NotImplementedError()
def delete(self):
raise NotImplementedError()
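# --- Illustrative note (not part of the plugin) ---
# get_credentials() above assumes `self.auth` carries an OAuth2 client with a
# refresh token, e.g. (all values are placeholders):
#
#   auth = {
#       'client_id': '<oauth2-client-id>',
#       'client_secret': '<oauth2-client-secret>',
#       'refresh_token': '<token obtained via the gcloud login above>',
#   }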
| cloudify-cosmo/cloudify-gcp-plugin | cloudify_gcp/admin/__init__.py | Python | apache-2.0 | 1,945 |
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: simple_tomo_test
:platform: Unix
:synopsis: runner for tests using the MPI framework
.. moduleauthor:: Mark Basham <[email protected]>
"""
import unittest
import tempfile
from savu.test import test_utils as tu
from savu.test.plugin_runner_test import run_protected_plugin_runner
class SimpleTomoTest(unittest.TestCase):
def test_process(self):
options = {
"transport": "hdf5",
"process_names": "CPU0",
"data_file": tu.get_test_data_path('24737.nxs'),
"process_file": tu.get_test_data_path('simple_recon_test_process.nxs'),
"out_path": tempfile.mkdtemp()
}
run_protected_plugin_runner(options)
if __name__ == "__main__":
unittest.main()
| mjn19172/Savu | savu/test/simple_tomo_test.py | Python | apache-2.0 | 1,360 |
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for xquad dataset module."""
from tensorflow_datasets import testing
from tensorflow_datasets.question_answering import xquad
class XquadTest(testing.DatasetBuilderTestCase):
DATASET_CLASS = xquad.Xquad
BUILDER_CONFIG_NAMES_TO_TEST = ["ar"]
DL_EXTRACT_RESULT = {
"translate-train": "translate-train.json",
"translate-dev": "translate-dev.json",
"translate-test": "translate-test.json",
"test": "test.json",
}
SPLITS = {
"translate-train": 3,
"translate-dev": 2,
"translate-test": 3,
"test": 1,
}
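  # DL_EXTRACT_RESULT maps each download key to a fake file under the
  # dataset's dummy_data directory, and SPLITS gives the number of examples
  # the builder is expected to yield from those files.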
if __name__ == "__main__":
testing.test_main()
| tensorflow/datasets | tensorflow_datasets/question_answering/xquad_test.py | Python | apache-2.0 | 1,233 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.appengine_admin_v1.types import appengine
from google.cloud.appengine_admin_v1.types import instance
from google.longrunning import operations_pb2 # type: ignore
from .base import InstancesTransport, DEFAULT_CLIENT_INFO
class InstancesGrpcTransport(InstancesTransport):
"""gRPC backend transport for Instances.
Manages instances of a version.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "appengine.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client: Optional[operations_v1.OperationsClient] = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "appengine.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
return self._operations_client
@property
def list_instances(
self,
) -> Callable[[appengine.ListInstancesRequest], appengine.ListInstancesResponse]:
r"""Return a callable for the list instances method over gRPC.
Lists the instances of a version.
Tip: To aggregate details about instances over time, see the
`Stackdriver Monitoring
API <https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list>`__.
Returns:
Callable[[~.ListInstancesRequest],
~.ListInstancesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instances" not in self._stubs:
self._stubs["list_instances"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/ListInstances",
request_serializer=appengine.ListInstancesRequest.serialize,
response_deserializer=appengine.ListInstancesResponse.deserialize,
)
return self._stubs["list_instances"]
@property
def get_instance(
self,
) -> Callable[[appengine.GetInstanceRequest], instance.Instance]:
r"""Return a callable for the get instance method over gRPC.
Gets instance information.
Returns:
Callable[[~.GetInstanceRequest],
~.Instance]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance" not in self._stubs:
self._stubs["get_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/GetInstance",
request_serializer=appengine.GetInstanceRequest.serialize,
response_deserializer=instance.Instance.deserialize,
)
return self._stubs["get_instance"]
@property
def delete_instance(
self,
) -> Callable[[appengine.DeleteInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the delete instance method over gRPC.
Stops a running instance.
The instance might be automatically recreated based on the
scaling settings of the version. For more information, see "How
Instances are Managed" (`standard
environment <https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed>`__
\| `flexible
environment <https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed>`__).
To ensure that instances are not re-created and avoid getting
billed, you can stop all instances within the target version by
changing the serving status of the version to ``STOPPED`` with
the
```apps.services.versions.patch`` <https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch>`__
method.
Returns:
Callable[[~.DeleteInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_instance" not in self._stubs:
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/DeleteInstance",
request_serializer=appengine.DeleteInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"]
@property
def debug_instance(
self,
) -> Callable[[appengine.DebugInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the debug instance method over gRPC.
Enables debugging on a VM instance. This allows you
to use the SSH command to connect to the virtual machine
where the instance lives. While in "debug mode", the
instance continues to serve live traffic. You should
delete the instance when you are done debugging and then
allow the system to take over and determine if another
instance should be started.
Only applicable for instances in App Engine flexible
environment.
Returns:
Callable[[~.DebugInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "debug_instance" not in self._stubs:
self._stubs["debug_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/DebugInstance",
request_serializer=appengine.DebugInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["debug_instance"]
def close(self):
self.grpc_channel.close()
__all__ = ("InstancesGrpcTransport",)
| googleapis/python-appengine-admin | google/cloud/appengine_admin_v1/services/instances/transports/grpc.py | Python | apache-2.0 | 16,909 |
"""Support for RainMachine devices."""
import asyncio
from datetime import timedelta
import logging
from regenmaschine import Client
from regenmaschine.errors import RainMachineError
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control
from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_CURRENT,
DATA_RESTRICTIONS_UNIVERSAL,
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
ZONE_UPDATE_TOPIC,
)
_LOGGER = logging.getLogger(__name__)
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
DATA_LISTENER = "listener"
DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})
SERVICE_ALTER_ZONE = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
SERVICE_PAUSE_WATERING = vol.Schema({vol.Required(CONF_SECONDS): cv.positive_int})
SERVICE_START_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_START_ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONE_ID): cv.positive_int,
vol.Optional(CONF_ZONE_RUN_TIME, default=DEFAULT_ZONE_RUN): cv.positive_int,
}
)
SERVICE_STOP_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_STOP_ZONE_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.119")
async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up RainMachine as config entry."""
entry_updates = {}
if not config_entry.unique_id:
# If the config entry doesn't already have a unique ID, set one:
entry_updates["unique_id"] = config_entry.data[CONF_IP_ADDRESS]
if CONF_ZONE_RUN_TIME in config_entry.data:
# If a zone run time exists in the config entry's data, pop it and move it to
# options:
data = {**config_entry.data}
entry_updates["data"] = data
entry_updates["options"] = {
**config_entry.options,
CONF_ZONE_RUN_TIME: data.pop(CONF_ZONE_RUN_TIME),
}
if entry_updates:
hass.config_entries.async_update_entry(config_entry, **entry_updates)
_verify_domain_control = verify_domain_control(hass, DOMAIN)
websession = aiohttp_client.async_get_clientsession(hass)
client = Client(session=websession)
try:
await client.load_local(
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
raise ConfigEntryNotReady from err
else:
# regenmaschine can load multiple controllers at once, but we only grab the one
# we loaded above:
controller = next(iter(client.controllers.values()))
rainmachine = RainMachine(hass, config_entry, controller)
# Update the data object, which at this point (prior to any sensors registering
# "interest" in the API), will focus on grabbing the latest program and zone data:
await rainmachine.async_update()
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = rainmachine
for component in ("binary_sensor", "sensor", "switch"):
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
@_verify_domain_control
async def disable_program(call):
"""Disable a program."""
await rainmachine.controller.programs.disable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def disable_zone(call):
"""Disable a zone."""
await rainmachine.controller.zones.disable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_program(call):
"""Enable a program."""
await rainmachine.controller.programs.enable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_zone(call):
"""Enable a zone."""
await rainmachine.controller.zones.enable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def pause_watering(call):
"""Pause watering for a set number of seconds."""
await rainmachine.controller.watering.pause_all(call.data[CONF_SECONDS])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_program(call):
"""Start a particular program."""
await rainmachine.controller.programs.start(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_zone(call):
"""Start a particular zone for a certain amount of time."""
await rainmachine.controller.zones.start(
call.data[CONF_ZONE_ID], call.data[CONF_ZONE_RUN_TIME]
)
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_all(call):
"""Stop all watering."""
await rainmachine.controller.watering.stop_all()
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_program(call):
"""Stop a program."""
await rainmachine.controller.programs.stop(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_zone(call):
"""Stop a zone."""
await rainmachine.controller.zones.stop(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def unpause_watering(call):
"""Unpause watering."""
await rainmachine.controller.watering.unpause_all()
await rainmachine.async_update_programs_and_zones()
for service, method, schema in [
("disable_program", disable_program, SERVICE_ALTER_PROGRAM),
("disable_zone", disable_zone, SERVICE_ALTER_ZONE),
("enable_program", enable_program, SERVICE_ALTER_PROGRAM),
("enable_zone", enable_zone, SERVICE_ALTER_ZONE),
("pause_watering", pause_watering, SERVICE_PAUSE_WATERING),
("start_program", start_program, SERVICE_START_PROGRAM_SCHEMA),
("start_zone", start_zone, SERVICE_START_ZONE_SCHEMA),
("stop_all", stop_all, {}),
("stop_program", stop_program, SERVICE_STOP_PROGRAM_SCHEMA),
("stop_zone", stop_zone, SERVICE_STOP_ZONE_SCHEMA),
("unpause_watering", unpause_watering, {}),
]:
hass.services.async_register(DOMAIN, service, method, schema=schema)
hass.data[DOMAIN][DATA_LISTENER] = config_entry.add_update_listener(
async_reload_entry
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
cancel_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
cancel_listener()
tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")
]
await asyncio.gather(*tasks)
return True
async def async_reload_entry(hass, config_entry):
"""Handle an options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
class RainMachine:
"""Define a generic RainMachine object."""
def __init__(self, hass, config_entry, controller):
"""Initialize."""
self._async_cancel_time_interval_listener = None
self.config_entry = config_entry
self.controller = controller
self.data = {}
self.device_mac = controller.mac
self.hass = hass
self._api_category_count = {
DATA_PROVISION_SETTINGS: 0,
DATA_RESTRICTIONS_CURRENT: 0,
DATA_RESTRICTIONS_UNIVERSAL: 0,
}
self._api_category_locks = {
DATA_PROVISION_SETTINGS: asyncio.Lock(),
DATA_RESTRICTIONS_CURRENT: asyncio.Lock(),
DATA_RESTRICTIONS_UNIVERSAL: asyncio.Lock(),
}
async def _async_update_listener_action(self, now):
"""Define an async_track_time_interval action to update data."""
await self.async_update()
@callback
def async_deregister_sensor_api_interest(self, api_category):
"""Decrement the number of entities with data needs from an API category."""
# If this deregistration should leave us with no registration at all, remove the
# time interval:
if sum(self._api_category_count.values()) == 0:
if self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener()
self._async_cancel_time_interval_listener = None
return
self._api_category_count[api_category] -= 1
async def async_fetch_from_api(self, api_category):
"""Execute the appropriate coroutine to fetch particular data from the API."""
if api_category == DATA_PROGRAMS:
data = await self.controller.programs.all(include_inactive=True)
elif api_category == DATA_PROVISION_SETTINGS:
data = await self.controller.provisioning.settings()
elif api_category == DATA_RESTRICTIONS_CURRENT:
data = await self.controller.restrictions.current()
elif api_category == DATA_RESTRICTIONS_UNIVERSAL:
data = await self.controller.restrictions.universal()
elif api_category == DATA_ZONES:
data = await self.controller.zones.all(include_inactive=True)
elif api_category == DATA_ZONES_DETAILS:
# This API call needs to be separate from the DATA_ZONES one above because,
# maddeningly, the DATA_ZONES_DETAILS API call doesn't include the current
# state of the zone:
data = await self.controller.zones.all(details=True, include_inactive=True)
self.data[api_category] = data
async def async_register_sensor_api_interest(self, api_category):
"""Increment the number of entities with data needs from an API category."""
# If this is the first registration we have, start a time interval:
if not self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener = async_track_time_interval(
self.hass,
self._async_update_listener_action,
DEFAULT_SCAN_INTERVAL,
)
self._api_category_count[api_category] += 1
# If a sensor registers interest in a particular API call and the data doesn't
# exist for it yet, make the API call and grab the data:
async with self._api_category_locks[api_category]:
if api_category not in self.data:
await self.async_fetch_from_api(api_category)
async def async_update(self):
"""Update all RainMachine data."""
tasks = [self.async_update_programs_and_zones(), self.async_update_sensors()]
await asyncio.gather(*tasks)
async def async_update_sensors(self):
"""Update sensor/binary sensor data."""
_LOGGER.debug("Updating sensor data for RainMachine")
# Fetch an API category if there is at least one interested entity:
tasks = {}
for category, count in self._api_category_count.items():
if count == 0:
continue
tasks[category] = self.async_fetch_from_api(category)
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
continue
async_dispatcher_send(self.hass, SENSOR_UPDATE_TOPIC)
async def async_update_programs_and_zones(self):
"""Update program and zone data.
Program and zone updates always go together because of how linked they are:
programs affect zones and certain combinations of zones affect programs.
Note that this call does not take into account interested entities when making
the API calls; we make the reasonable assumption that switches will always be
enabled.
"""
_LOGGER.debug("Updating program and zone data for RainMachine")
tasks = {
DATA_PROGRAMS: self.async_fetch_from_api(DATA_PROGRAMS),
DATA_ZONES: self.async_fetch_from_api(DATA_ZONES),
DATA_ZONES_DETAILS: self.async_fetch_from_api(DATA_ZONES_DETAILS),
}
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
async_dispatcher_send(self.hass, PROGRAM_UPDATE_TOPIC)
async_dispatcher_send(self.hass, ZONE_UPDATE_TOPIC)
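# --- Illustrative note (not part of the integration) ---
# Sensor entities pair the register/deregister interest calls above with their
# Home Assistant lifecycle hooks, roughly like this (a sketch; the API
# category attribute name is an assumption):
#
#   async def async_added_to_hass(self):
#       await self.rainmachine.async_register_sensor_api_interest(
#           self._api_category)
#
#   async def async_will_remove_from_hass(self):
#       self.rainmachine.async_deregister_sensor_api_interest(
#           self._api_category)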
class RainMachineEntity(Entity):
"""Define a generic RainMachine entity."""
def __init__(self, rainmachine):
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._device_class = None
self._name = None
self.rainmachine = rainmachine
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def device_info(self):
"""Return device registry information for this entity."""
return {
"identifiers": {(DOMAIN, self.rainmachine.controller.mac)},
"name": self.rainmachine.controller.name,
"manufacturer": "RainMachine",
"model": (
f"Version {self.rainmachine.controller.hardware_version} "
f"(API: {self.rainmachine.controller.api_version})"
),
"sw_version": self.rainmachine.controller.software_version,
}
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return self._attrs
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def should_poll(self):
"""Disable polling."""
return False
@callback
def _update_state(self):
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
@callback
def update_from_latest_data(self):
"""Update the entity."""
raise NotImplementedError
| balloob/home-assistant | homeassistant/components/rainmachine/__init__.py | Python | apache-2.0 | 15,955 |
"""
Installs and configures Cinder
"""
import os
import re
import uuid
import logging
from packstack.installer import exceptions
from packstack.installer import processors
from packstack.installer import validators
from packstack.installer import basedefs
from packstack.installer import utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
from packstack.installer import output_messages
# Controller object will
# be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-Cinder"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding OpenStack Cinder configuration")
paramsList = [
{"CMD_OPTION" : "cinder-host",
"USAGE" : "The IP address of the server on which to install Cinder",
"PROMPT" : "Enter the IP address of the Cinder server",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-db-passwd",
"USAGE" : "The password to use for the Cinder to access DB",
"PROMPT" : "Enter the password for the Cinder DB access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_DB_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-ks-passwd",
"USAGE" : "The password to use for the Cinder to authenticate with Keystone",
"PROMPT" : "Enter the password for the Cinder Keystone access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_KS_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-backend",
"USAGE" : ("The Cinder backend to use, valid options are: "
"lvm, gluster, nfs"),
"PROMPT" : "Enter the Cinder backend to be configured",
"OPTION_LIST" : ["lvm", "gluster", "nfs"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "lvm",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_BACKEND",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDER",
"DESCRIPTION" : "Cinder Config parameters",
"PRE_CONDITION" : "CONFIG_CINDER_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_lvm_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm')
paramsList = [
{"CMD_OPTION" : "cinder-volumes-create",
"USAGE" : ("Create Cinder's volumes group. This should only be done for "
"testing on a proof-of-concept installation of Cinder. This "
"will create a file-backed volume group and is not suitable "
"for production usage."),
"PROMPT" : ("Should Cinder's volumes group be created (for proof-of-concept "
"installation)?"),
"OPTION_LIST" : ["y", "n"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "y",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_VOLUMES_CREATE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERVOLUMECREATE",
"DESCRIPTION" : "Cinder volume create Config parameters",
"PRE_CONDITION" : check_lvm_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_lvm_vg_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm' and
config.get('CONFIG_CINDER_VOLUMES_CREATE', 'y') == 'y')
paramsList = [
{"CMD_OPTION" : "cinder-volumes-size",
"USAGE" : ("Cinder's volumes group size. Note that actual volume size "
"will be extended with 3% more space for VG metadata."),
"PROMPT" : "Enter Cinder's volumes group usable size",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : "20G",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_VOLUMES_SIZE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERVOLUMESIZE",
"DESCRIPTION" : "Cinder volume size Config parameters",
"PRE_CONDITION" : check_lvm_vg_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_gluster_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'gluster')
paramsList = [
{"CMD_OPTION" : "cinder-gluster-mounts",
"USAGE" : ("A single or comma separated list of gluster volume shares "
"to mount, eg: ip-address:/vol-name "),
"PROMPT" : ("Enter a single or comma separated list of gluster volume "
"shares to use with Cinder"),
"OPTION_LIST" : ["^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
"VALIDATORS" : [validators.validate_multi_regexp],
"PROCESSORS" : [processors.process_add_quotes_around_values],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_GLUSTER_MOUNTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERGLUSTERMOUNTS",
"DESCRIPTION" : "Cinder gluster Config parameters",
"PRE_CONDITION" : check_gluster_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_nfs_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'nfs')
paramsList = [
{"CMD_OPTION" : "cinder-nfs-mounts",
"USAGE" : ("A single or comma seprated list of NFS exports to mount, "
"eg: ip-address:/export-name "),
"PROMPT" : ("Enter a single or comma seprated list of NFS exports to "
"use with Cinder"),
"OPTION_LIST" : ["^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
"VALIDATORS" : [validators.validate_multi_regexp],
"PROCESSORS" : [processors.process_add_quotes_around_values],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_NFS_MOUNTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERNFSMOUNTS",
"DESCRIPTION" : "Cinder NFS Config parameters",
"PRE_CONDITION" : check_nfs_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def initSequences(controller):
if controller.CONF['CONFIG_CINDER_INSTALL'] != 'y':
return
cinder_steps = [
{'title': 'Installing dependencies for Cinder', 'functions':[install_cinder_deps]},
{'title': 'Adding Cinder Keystone manifest entries', 'functions':[create_keystone_manifest]},
{'title': 'Adding Cinder manifest entries', 'functions':[create_manifest]}
]
if controller.CONF['CONFIG_CINDER_BACKEND'] == 'lvm':
cinder_steps.append({'title': 'Checking if the Cinder server has a cinder-volumes vg', 'functions':[check_cinder_vg]})
controller.addSequence("Installing OpenStack Cinder", [], [], cinder_steps)
def install_cinder_deps(config):
server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
pkgs = []
if config['CONFIG_CINDER_BACKEND'] == 'lvm':
pkgs.append('lvm2')
for p in pkgs:
server.append("rpm -q %(package)s || yum install -y %(package)s" % dict(package=p))
server.execute()
def check_cinder_vg(config):
cinders_volume = 'cinder-volumes'
# Do we have a cinder-volumes vg?
have_cinders_volume = False
server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
server.append('vgdisplay %s' % cinders_volume)
try:
server.execute()
have_cinders_volume = True
except exceptions.ScriptRuntimeError:
pass
# Configure system LVM settings (snapshot_autoextend)
server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
server.append('sed -i -r "s/^ *snapshot_autoextend_threshold +=.*/'
' snapshot_autoextend_threshold = 80/" '
'/etc/lvm/lvm.conf')
server.append('sed -i -r "s/^ *snapshot_autoextend_percent +=.*/'
' snapshot_autoextend_percent = 20/" '
'/etc/lvm/lvm.conf')
try:
server.execute()
except exceptions.ScriptRuntimeError:
logging.info("Warning: Unable to set system LVM settings.")
if config["CONFIG_CINDER_VOLUMES_CREATE"] != "y":
if not have_cinders_volume:
raise exceptions.MissingRequirements("The cinder server should"
" contain a cinder-volumes volume group")
else:
if have_cinders_volume:
controller.MESSAGES.append(
output_messages.INFO_CINDER_VOLUMES_EXISTS)
return
server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
server.append('systemctl')
try:
server.execute()
rst_cmd = 'systemctl restart openstack-cinder-volume.service'
except exceptions.ScriptRuntimeError:
rst_cmd = 'service openstack-cinder-volume restart'
server.clear()
logging.info("A new cinder volumes group will be created")
err = "Cinder's volume group '%s' could not be created" % \
cinders_volume
cinders_volume_path = '/var/lib/cinder'
server.append('mkdir -p %s' % cinders_volume_path)
logging.debug("Volume's path: %s" % cinders_volume_path)
match = re.match('^(?P<size>\d+)G$',
config['CONFIG_CINDER_VOLUMES_SIZE'].strip())
if not match:
msg = 'Invalid Cinder volumes VG size.'
raise exceptions.ParamValidationError(msg)
cinders_volume_size = int(match.group('size')) * 1024
cinders_reserve = int(cinders_volume_size * 0.03)
cinders_volume_size = cinders_volume_size + cinders_reserve
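    # Worked example: CONFIG_CINDER_VOLUMES_SIZE=20G gives 20 * 1024 = 20480 MB
    # plus a 3% metadata reserve (614 MB), i.e. a 21094 MB backing file.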
cinders_volume_path = os.path.join(cinders_volume_path, cinders_volume)
server.append('dd if=/dev/zero of=%s bs=1 count=0 seek=%sM'
% (cinders_volume_path, cinders_volume_size))
server.append('LOFI=$(losetup --show -f %s)' % cinders_volume_path)
server.append('pvcreate $LOFI')
server.append('vgcreate %s $LOFI' % cinders_volume)
# Add the loop device on boot
server.append('grep %(volume)s /etc/rc.d/rc.local || '
'echo "losetup -f %(path)s && '
'vgchange -a y %(volume)s && '
'%(restart_cmd)s" '
'>> /etc/rc.d/rc.local' %
{'volume': cinders_volume, 'restart_cmd': rst_cmd,
'path': cinders_volume_path})
server.append('grep "#!" /etc/rc.d/rc.local || '
'sed -i \'1i#!/bin/sh\' /etc/rc.d/rc.local')
server.append('chmod +x /etc/rc.d/rc.local')
# Let's make sure it exists
server.append('vgdisplay %s' % cinders_volume)
try:
server.execute()
except exceptions.ScriptRuntimeError:
# Release loop device if cinder's volume creation
# fails.
try:
logging.debug("Release loop device, volume creation failed")
server = utils.ScriptRunner(controller.CONF['CONFIG_CINDER_HOST'])
server.append('losetup -d $(losetup -j %s | cut -d : -f 1)' %
cinders_volume_path
)
server.execute()
except:
pass
raise exceptions.MissingRequirements(err)
def create_keystone_manifest(config):
manifestfile = "%s_keystone.pp" % controller.CONF['CONFIG_KEYSTONE_HOST']
manifestdata = getManifestTemplate("keystone_cinder.pp")
appendManifestFile(manifestfile, manifestdata)
def create_manifest(config):
manifestfile = "%s_cinder.pp" % controller.CONF['CONFIG_CINDER_HOST']
manifestdata = getManifestTemplate("cinder.pp")
if config['CONFIG_CINDER_BACKEND'] == "gluster":
manifestdata += getManifestTemplate("cinder_gluster.pp")
if config['CONFIG_CINDER_BACKEND'] == "nfs":
manifestdata += getManifestTemplate("cinder_nfs.pp")
if config['CONFIG_CEILOMETER_INSTALL'] == 'y':
manifestdata += getManifestTemplate('cinder_ceilometer.pp')
hosts = config['CONFIG_NOVA_COMPUTE_HOSTS'].split(",")
config['FIREWALL_ALLOWED'] = ",".join(["'%s'" % i.strip() for i in hosts if i.strip()])
config['FIREWALL_SERVICE_NAME'] = "cinder"
config['FIREWALL_PORTS'] = "'3260', '8776'"
manifestdata += getManifestTemplate("firewall.pp")
appendManifestFile(manifestfile, manifestdata)
| radez/packstack | packstack/plugins/cinder_250.py | Python | apache-2.0 | 16,938 |
from models.tridentnet.builder import TridentFasterRcnn as Detector
from models.tridentnet.builder_v2 import TridentResNetV1bC4 as Backbone
from models.tridentnet.builder import TridentRpnHead as RpnHead
from models.tridentnet.builder import process_branch_outputs, process_branch_rpn_outputs
from symbol.builder import Neck
from symbol.builder import RoiAlign as RoiExtractor
from symbol.builder import BboxC5V1Head as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
class General:
log_frequency = 10
name = __name__.rsplit("/")[-1].rsplit(".")[-1]
batch_image = 1 if is_train else 1
fp16 = False
class Trident:
num_branch = 3
train_scaleaware = True
test_scaleaware = True
branch_ids = range(num_branch)
branch_dilates = [1, 2, 3]
valid_ranges = [(0, 90), (30, 160), (90, -1)]
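        # Scale-aware ranges in pixels per branch: each branch only trains on
        # objects whose scale falls in its range; -1 means no upper bound.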
valid_ranges_on_origin = True
branch_bn_shared = True
branch_conv_shared = True
branch_deform = False
assert num_branch == len(branch_ids)
assert num_branch == len(valid_ranges)
class KvstoreParam:
kvstore = "local"
batch_image = General.batch_image
gpus = [0, 1, 2, 3, 4, 5, 6, 7]
fp16 = General.fp16
class NormalizeParam:
# normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
normalizer = normalizer_factory(type="fixbn")
class BackboneParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
depth = 152
num_branch = Trident.num_branch
branch_ids = Trident.branch_ids
branch_dilates = Trident.branch_dilates
branch_bn_shared = Trident.branch_bn_shared
branch_conv_shared = Trident.branch_conv_shared
branch_deform = Trident.branch_deform
class NeckParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class RpnParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
batch_image = General.batch_image * Trident.num_branch
class anchor_generate:
scale = (2, 4, 8, 16, 32)
ratio = (0.5, 1.0, 2.0)
stride = 16
image_anchor = 256
class head:
conv_channel = 512
mean = (0, 0, 0, 0)
std = (1, 1, 1, 1)
class proposal:
pre_nms_top_n = 12000 if is_train else 6000
post_nms_top_n = 500 if is_train else 300
nms_thr = 0.7
min_bbox_side = 0
class subsample_proposal:
proposal_wo_gt = True
image_roi = 128
fg_fraction = 0.5
fg_thr = 0.5
bg_thr_hi = 0.5
bg_thr_lo = 0.0
class bbox_target:
num_reg_class = 2
class_agnostic = True
weight = (1.0, 1.0, 1.0, 1.0)
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class BboxParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
num_class = 1 + 80
image_roi = 128
batch_image = General.batch_image * Trident.num_branch
class regress_target:
class_agnostic = True
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class RoiParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
out_size = 7
stride = 16
class DatasetParam:
if is_train:
image_set = ("coco_train2017", )
else:
image_set = ("coco_val2017", )
backbone = Backbone(BackboneParam)
neck = Neck(NeckParam)
rpn_head = RpnHead(RpnParam)
roi_extractor = RoiExtractor(RoiParam)
bbox_head = BboxHead(BboxParam)
detector = Detector()
if is_train:
train_sym = detector.get_train_symbol(
backbone, neck, rpn_head, roi_extractor, bbox_head,
num_branch=Trident.num_branch, scaleaware=Trident.train_scaleaware)
rpn_test_sym = None
test_sym = None
else:
train_sym = None
rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head, Trident.num_branch)
test_sym = detector.get_test_symbol(
backbone, neck, rpn_head, roi_extractor, bbox_head, num_branch=Trident.num_branch)
class ModelParam:
train_symbol = train_sym
test_symbol = test_sym
rpn_test_symbol = rpn_test_sym
from_scratch = False
random = True
memonger = False
memonger_until = "stage3_unit21_plus"
class pretrain:
prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
epoch = 0
fixed_param = ["conv0", "stage1", "gamma", "beta"]
class OptimizeParam:
class optimizer:
type = "sgd"
lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
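            # Linear LR scaling: base 0.01 for 8 images per batch, scaled by
            # the actual total batch size (num GPUs * images per GPU).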
momentum = 0.9
wd = 0.0001
clip_gradient = 5
class schedule:
begin_epoch = 0
end_epoch = 12
lr_iter = [120000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
160000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
class warmup:
type = "gradual"
lr = 0.0
iter = 3000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)
class TestParam:
min_det_score = 0.001
max_det_per_image = 100
process_roidb = lambda x: x
if Trident.test_scaleaware:
process_output = lambda x, y: process_branch_outputs(
x, Trident.num_branch, Trident.valid_ranges, Trident.valid_ranges_on_origin)
else:
process_output = lambda x, y: x
process_rpn_output = lambda x, y: process_branch_rpn_outputs(x, Trident.num_branch)
class model:
prefix = "experiments/{}/checkpoint".format(General.name)
epoch = OptimizeParam.schedule.end_epoch
class nms:
type = "nms"
thr = 0.5
class coco:
annotation = "data/coco/annotations/instances_minival2014.json"
# data processing
class NormParam:
mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order
std = tuple(i * 255 for i in (0.229, 0.224, 0.225))
class ResizeParam:
short = 800
long = 1200 if is_train else 2000
class PadParam:
short = 800
long = 1200 if is_train else 2000
max_num_gt = 100
class ScaleRange:
valid_ranges = Trident.valid_ranges
        cal_on_origin = Trident.valid_ranges_on_origin # True: valid_ranges are measured on the original image scale; False: on the resized image scale
class AnchorTarget2DParam:
class generate:
short = 800 // 16
long = 1200 // 16
stride = 16
scales = (2, 4, 8, 16, 32)
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class trident:
invalid_anchor_threshd = 0.3
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, Norm2DImage
from models.tridentnet.input import ScaleAwareRange, TridentAnchorTarget2D
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Flip2DImageBbox(),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
ScaleAwareRange(ScaleRange),
TridentAnchorTarget2D(AnchorTarget2DParam),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "gt_bbox"]
if Trident.train_scaleaware:
data_name.append("valid_ranges")
label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output"],
["rpn_cls_label"]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output"],
["rpn_cls_label"]
)
# for bbox, the label is generated in network so it is an output
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
| TuSimple/simpledet | config/resnet_v1b/tridentnet_r152v1bc4_c5_2x.py | Python | apache-2.0 | 9,529 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from oslo_config import cfg
from st2common.constants.system import VERSION_STRING
def do_register_opts(opts, group=None, ignore_errors=False):
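    """Register ``opts`` with oslo.config under ``group``, optionally
    swallowing any registration error (e.g. duplicate registration)."""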
try:
cfg.CONF.register_opts(opts, group=group)
    except Exception:
if not ignore_errors:
raise
def do_register_cli_opts(opt, ignore_errors=False):
    # TODO: the name is misleading; this function accepts a single opt or a list of opts
if not isinstance(opt, (list, tuple)):
opts = [opt]
else:
opts = opt
try:
cfg.CONF.register_cli_opts(opts)
    except Exception:
if not ignore_errors:
raise
def register_opts(ignore_errors=False):
auth_opts = [
cfg.BoolOpt('enable', default=True, help='Enable authentication middleware.'),
cfg.IntOpt('token_ttl', default=86400, help='Access token ttl in seconds.')
]
do_register_opts(auth_opts, 'auth', ignore_errors)
rbac_opts = [
cfg.BoolOpt('enable', default=False, help='Enable RBAC.'),
]
do_register_opts(rbac_opts, 'rbac', ignore_errors)
system_user_opts = [
cfg.StrOpt('user',
default='stanley',
help='Default system user.'),
cfg.StrOpt('ssh_key_file',
default='/home/vagrant/.ssh/stanley_rsa',
help='SSH private key for the system user.')
]
do_register_opts(system_user_opts, 'system_user', ignore_errors)
schema_opts = [
cfg.IntOpt('version', default=4, help='Version of JSON schema to use.'),
cfg.StrOpt('draft', default='http://json-schema.org/draft-04/schema#',
help='URL to the JSON schema draft.')
]
do_register_opts(schema_opts, 'schema', ignore_errors)
system_opts = [
cfg.StrOpt('base_path', default='/opt/stackstorm',
help='Base path to all st2 artifacts.'),
cfg.ListOpt('admin_users', default=[],
help='A list of usernames for users which should have admin privileges')
]
do_register_opts(system_opts, 'system', ignore_errors)
system_packs_base_path = os.path.join(cfg.CONF.system.base_path, 'packs')
content_opts = [
cfg.StrOpt('system_packs_base_path', default=system_packs_base_path,
help='Path to the directory which contains system packs.'),
cfg.StrOpt('packs_base_paths', default=None,
help='Paths which will be searched for integration packs.')
]
do_register_opts(content_opts, 'content', ignore_errors)
db_opts = [
cfg.StrOpt('host', default='0.0.0.0', help='host of db server'),
cfg.IntOpt('port', default=27017, help='port of db server'),
cfg.StrOpt('db_name', default='st2', help='name of database'),
cfg.StrOpt('username', help='username for db login'),
cfg.StrOpt('password', help='password for db login'),
cfg.IntOpt('connection_retry_max_delay_m', help='Connection retry total time (minutes).',
default=3),
cfg.IntOpt('connection_retry_backoff_max_s', help='Connection retry backoff max (seconds).',
default=10),
cfg.IntOpt('connection_retry_backoff_mul', help='Backoff multiplier (seconds).',
default=1)
]
do_register_opts(db_opts, 'database', ignore_errors)
messaging_opts = [
        # It would be nice to deprecate one of "url" and "cluster_urls" and
        # settle on a single option. However, that would be a breaking change
        # with real impact, so both are allowed for now.
cfg.StrOpt('url', default='amqp://guest:[email protected]:5672//',
help='URL of the messaging server.'),
cfg.ListOpt('cluster_urls', default=[],
help='URL of all the nodes in a messaging service cluster.')
]
do_register_opts(messaging_opts, 'messaging', ignore_errors)
syslog_opts = [
cfg.StrOpt('host', default='127.0.0.1',
help='Host for the syslog server.'),
cfg.IntOpt('port', default=514,
help='Port for the syslog server.'),
cfg.StrOpt('facility', default='local7',
help='Syslog facility level.'),
cfg.StrOpt('protocol', default='udp',
help='Transport protocol to use (udp / tcp).')
]
do_register_opts(syslog_opts, 'syslog', ignore_errors)
log_opts = [
        cfg.ListOpt('excludes', default=[],
                    help='List of loggers to exclude from logging.'),
cfg.BoolOpt('redirect_stderr', default=False,
help='Controls if stderr should be redirected to the logs.'),
cfg.BoolOpt('mask_secrets', default=True,
help='True to mask secrets in the log files.')
]
do_register_opts(log_opts, 'log', ignore_errors)
# Common API options
api_opts = [
cfg.StrOpt('host', default='0.0.0.0', help='StackStorm API server host'),
cfg.IntOpt('port', default=9101, help='StackStorm API server port')
]
do_register_opts(api_opts, 'api', ignore_errors)
# Common auth options
auth_opts = [
cfg.StrOpt('api_url', default=None,
help='Base URL to the API endpoint excluding the version')
]
do_register_opts(auth_opts, 'auth', ignore_errors)
# Common options (used by action runner and sensor container)
action_sensor_opts = [
cfg.BoolOpt('enable', default=True,
help='Whether to enable or disable the ability to post a trigger on action.'),
]
do_register_opts(action_sensor_opts, group='action_sensor')
# Coordination options
coord_opts = [
cfg.StrOpt('url', default=None, help='Endpoint for the coordination server.'),
        cfg.IntOpt('lock_timeout', default=60, help='TTL for the lock if backend supports it.')
]
do_register_opts(coord_opts, 'coordination', ignore_errors)
# Mistral options
mistral_opts = [
cfg.StrOpt('v2_base_url', default='http://127.0.0.1:8989/v2', help='v2 API root endpoint.'),
cfg.IntOpt('max_attempts', default=180, help='Max attempts to reconnect.'),
cfg.IntOpt('retry_wait', default=5, help='Seconds to wait before reconnecting.'),
cfg.StrOpt('keystone_username', default=None, help='Username for authentication.'),
cfg.StrOpt('keystone_password', default=None, help='Password for authentication.'),
cfg.StrOpt('keystone_project_name', default=None, help='OpenStack project scope.'),
cfg.StrOpt('keystone_auth_url', default=None, help='Auth endpoint for Keystone.')
]
do_register_opts(mistral_opts, group='mistral', ignore_errors=ignore_errors)
# Common CLI options
debug = cfg.BoolOpt('debug', default=False,
help='Enable debug mode. By default this will set all log levels to DEBUG.')
profile = cfg.BoolOpt('profile', default=False,
help=('Enable profile mode. In the profile mode all the MongoDB queries and related '
'profile data are logged.'))
use_debugger = cfg.BoolOpt('use-debugger', default=True,
help='Enables debugger. Note that using this option changes how the '
'eventlet library is used to support async IO. This could result in '
'failures that do not occur under normal operation.')
cli_opts = [debug, profile, use_debugger]
do_register_cli_opts(cli_opts, ignore_errors=ignore_errors)
def parse_args(args=None):
register_opts()
cfg.CONF(args=args, version=VERSION_STRING)
| alfasin/st2 | st2common/st2common/config.py | Python | apache-2.0 | 8,282 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import unittest
import sys
from cloudbaseinit import init
from cloudbaseinit.plugins import base
from cloudbaseinit.openstack.common import cfg
CONF = cfg.CONF
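# The modules mocked below are Windows-only; stubbing them out with MagicMock
# lets this test suite import and run on non-Windows platforms.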
_win32com_mock = mock.MagicMock()
_comtypes_mock = mock.MagicMock()
_pywintypes_mock = mock.MagicMock()
_ctypes_mock = mock.MagicMock()
_ctypes_util_mock = mock.MagicMock()
mock_dict = {'ctypes.util': _ctypes_util_mock,
'win32com': _win32com_mock,
'comtypes': _comtypes_mock,
'pywintypes': _pywintypes_mock,
'ctypes': _ctypes_mock}
class InitManagerTest(unittest.TestCase):
@mock.patch.dict(sys.modules, mock_dict)
def setUp(self):
self.osutils = mock.MagicMock()
self.plugin = mock.MagicMock()
self._init = init.InitManager()
def tearDown(self):
reload(sys)
reload(init)
def test_get_plugin_status(self):
self.osutils.get_config_value.return_value = 1
response = self._init._get_plugin_status(self.osutils, 'fake plugin')
self.osutils.get_config_value.assert_called_once_with(
'fake plugin', self._init._PLUGINS_CONFIG_SECTION)
        self.assertEqual(response, 1)
def test_set_plugin_status(self):
self._init._set_plugin_status(self.osutils, 'fake plugin', 'status')
self.osutils.set_config_value.assert_called_once_with(
'fake plugin', 'status', self._init._PLUGINS_CONFIG_SECTION)
@mock.patch('cloudbaseinit.init.InitManager._get_plugin_status')
@mock.patch('cloudbaseinit.init.InitManager._set_plugin_status')
def _test_exec_plugin(self, status, mock_set_plugin_status,
mock_get_plugin_status):
fake_name = 'fake name'
self.plugin.get_name.return_value = fake_name
self.plugin.execute.return_value = (status, True)
mock_get_plugin_status.return_value = status
response = self._init._exec_plugin(osutils=self.osutils,
service='fake service',
plugin=self.plugin,
shared_data='shared data')
mock_get_plugin_status.assert_called_once_with(self.osutils,
fake_name)
if status is base.PLUGIN_EXECUTE_ON_NEXT_BOOT:
self.plugin.execute.assert_called_once_with('fake service',
'shared data')
mock_set_plugin_status.assert_called_once_with(self.osutils,
fake_name, status)
self.assertTrue(response)
    def test_exec_plugin_execution_done(self):
self._test_exec_plugin(base.PLUGIN_EXECUTION_DONE)
    def test_exec_plugin(self):
self._test_exec_plugin(base.PLUGIN_EXECUTE_ON_NEXT_BOOT)
def _test_check_plugin_os_requirements(self, requirements):
sys.platform = 'win32'
fake_name = 'fake name'
self.plugin.get_name.return_value = fake_name
self.plugin.get_os_requirements.return_value = requirements
response = self._init._check_plugin_os_requirements(self.osutils,
self.plugin)
self.plugin.get_name.assert_called_once_with()
self.plugin.get_os_requirements.assert_called_once_with()
if requirements[0] == 'win32':
self.assertTrue(response)
else:
self.assertFalse(response)
def test_check_plugin_os_requirements(self):
self._test_check_plugin_os_requirements(('win32', (5, 2)))
    def test_check_plugin_os_requirements_other_requirements(self):
self._test_check_plugin_os_requirements(('linux', (5, 2)))
@mock.patch('cloudbaseinit.init.InitManager'
'._check_plugin_os_requirements')
@mock.patch('cloudbaseinit.init.InitManager._exec_plugin')
@mock.patch('cloudbaseinit.plugins.factory.PluginFactory.load_plugins')
@mock.patch('cloudbaseinit.osutils.factory.OSUtilsFactory.get_os_utils')
@mock.patch('cloudbaseinit.metadata.factory.MetadataServiceFactory.'
'get_metadata_service')
def test_configure_host(self, mock_get_metadata_service,
mock_get_os_utils, mock_load_plugins,
mock_exec_plugin,
mock_check_os_requirements):
fake_service = mock.MagicMock()
fake_plugin = mock.MagicMock()
mock_load_plugins.return_value = [fake_plugin]
mock_get_os_utils.return_value = self.osutils
mock_get_metadata_service.return_value = fake_service
fake_service.get_name.return_value = 'fake name'
self._init.configure_host()
        self.osutils.wait_for_boot_completion.assert_called_once_with()
mock_get_metadata_service.assert_called_once_with()
fake_service.get_name.assert_called_once_with()
mock_check_os_requirements.assert_called_once_with(self.osutils,
fake_plugin)
mock_exec_plugin.assert_called_once_with(self.osutils, fake_service,
fake_plugin, {})
fake_service.cleanup.assert_called_once_with()
self.osutils.reboot.assert_called_once_with()
| telerik/cloudbase-init | cloudbaseinit/tests/test_init.py | Python | apache-2.0 | 6,047 |
# Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""TensorFlow op that scales gradient for backwards pass."""
from typing import Tuple
from sonnet.src import types
import tensorflow as tf
@tf.custom_gradient
def scale_gradient(
t: tf.Tensor, scale: types.FloatLike
) -> Tuple[tf.Tensor, types.GradFn]:
"""Scales gradients for the backwards pass.
Args:
t: A Tensor.
scale: The scale factor for the gradient on the backwards pass.
Returns:
A Tensor same as input, with scaled backward gradient.
"""
def grad(dy: tf.Tensor) -> Tuple[tf.Tensor, None]:
"""Scaled gradient."""
return scale * dy, None
return t, grad
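if __name__ == "__main__":
  # Minimal usage sketch (illustrative, not part of the library): the forward
  # value is unchanged, while the gradient flowing back through the wrapped
  # tensor is multiplied by `scale`.
  x = tf.constant(3.0)
  with tf.GradientTape() as tape:
    tape.watch(x)
    y = scale_gradient(x, 0.5) ** 2
  # d(x**2)/dx = 2 * x = 6.0; with scale=0.5 the reported gradient is 3.0,
  # so this prints "9.0 3.0".
  print(y.numpy(), tape.gradient(y, x).numpy())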
| deepmind/sonnet | sonnet/src/scale_gradient.py | Python | apache-2.0 | 1,288 |
# -*- coding: utf-8 -*-
'''
Tests for the Git state
'''
# Import python libs
from __future__ import absolute_import
import os
import shutil
import socket
import subprocess
import tempfile
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath, skip_if_binaries_missing
ensure_in_syspath('../../')
# Import salt libs
import integration
import salt.utils
class GitTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
'''
Validate the git state
'''
def setUp(self):
super(GitTest, self).setUp()
self.__domain = 'github.com'
try:
if hasattr(socket, 'setdefaulttimeout'):
# 10 second dns timeout
socket.setdefaulttimeout(10)
socket.gethostbyname(self.__domain)
except socket.error:
msg = 'error resolving {0}, possible network issue?'
self.skipTest(msg.format(self.__domain))
def test_latest(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_with_rev_and_submodules(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_failure(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://youSpelledGitHubWrong.com/saltstack/salt-test-repo.git',
rev='develop',
target=name,
submodules=True
)
self.assertSaltFalseReturn(ret)
self.assertFalse(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_empty_dir(self):
'''
git.latest
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_unless_no_cwd_issue_6800(self):
'''
cwd=target was being passed to _run_check which blew up if
target dir did not already exist.
'''
name = os.path.join(integration.TMP, 'salt_repo')
if os.path.isdir(name):
shutil.rmtree(name)
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev='develop',
target=name,
unless='test -e {0}'.format(name),
submodules=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_numeric_rev(self):
'''
git.latest with numeric revision
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
rev=0.11,
target=name,
submodules=True,
timeout=120
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_latest_with_local_changes(self):
'''
Ensure that we fail the state when there are local changes and succeed
when force_reset is True.
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
# Clone repo
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
# Make change to LICENSE file.
with salt.utils.fopen(os.path.join(name, 'LICENSE'), 'a') as fp_:
fp_.write('Lorem ipsum dolor blah blah blah....\n')
# Make sure that we now have uncommitted changes
self.assertTrue(self.run_function('git.diff', [name, 'HEAD']))
# Re-run state with force_reset=False, this should fail
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name,
force_reset=False
)
self.assertSaltFalseReturn(ret)
# Now run the state with force_reset=True, this should succeed
ret = self.run_state(
'git.latest',
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
target=name,
force_reset=True
)
self.assertSaltTrueReturn(ret)
# Make sure that we no longer have uncommitted changes
self.assertFalse(self.run_function('git.diff', [name, 'HEAD']))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
try:
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present_failure(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
fname = os.path.join(name, 'stoptheprocess')
with salt.utils.fopen(fname, 'a') as fh_:
fh_.write('')
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltFalseReturn(ret)
self.assertFalse(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
def test_present_empty_dir(self):
'''
git.present
'''
name = os.path.join(integration.TMP, 'salt_repo')
if not os.path.isdir(name):
os.mkdir(name)
try:
ret = self.run_state(
'git.present',
name=name,
bare=True
)
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
finally:
shutil.rmtree(name, ignore_errors=True)
@skip_if_binaries_missing('git')
def test_config_set_value_with_space_character(self):
'''
git.config
'''
name = tempfile.mkdtemp(dir=integration.TMP)
self.addCleanup(shutil.rmtree, name, ignore_errors=True)
subprocess.check_call(['git', 'init', '--quiet', name])
ret = self.run_state(
'git.config_set',
name='user.name',
value='foo bar',
repo=name,
**{'global': False})
self.assertSaltTrueReturn(ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(GitTest)
| stephane-martin/salt-debian-packaging | salt-2016.3.2/tests/integration/states/git.py | Python | apache-2.0 | 8,866 |
# Copyright 2013 IBM Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from tempest.common.rest_client import RestClientXML
from tempest.services.compute.xml.common import xml_to_json
class HypervisorClientXML(RestClientXML):
def __init__(self, config, username, password, auth_url, tenant_name=None):
super(HypervisorClientXML, self).__init__(config, username,
password, auth_url,
tenant_name)
self.service = self.config.compute.catalog_type
def _parse_array(self, node):
return [xml_to_json(x) for x in node]
def get_hypervisor_list(self):
"""List hypervisors information."""
resp, body = self.get('os-hypervisors', self.headers)
hypervisors = self._parse_array(etree.fromstring(body))
return resp, hypervisors
def get_hypervisor_list_details(self):
"""Show detailed hypervisors information."""
resp, body = self.get('os-hypervisors/detail', self.headers)
hypervisors = self._parse_array(etree.fromstring(body))
return resp, hypervisors
def get_hypervisor_show_details(self, hyper_id):
"""Display the details of the specified hypervisor."""
resp, body = self.get('os-hypervisors/%s' % hyper_id,
self.headers)
hypervisor = xml_to_json(etree.fromstring(body))
return resp, hypervisor
def get_hypervisor_servers(self, hyper_name):
"""List instances belonging to the specified hypervisor."""
resp, body = self.get('os-hypervisors/%s/servers' % hyper_name,
self.headers)
hypervisors = self._parse_array(etree.fromstring(body))
return resp, hypervisors
def get_hypervisor_stats(self):
"""Get hypervisor statistics over all compute nodes."""
resp, body = self.get('os-hypervisors/statistics', self.headers)
stats = xml_to_json(etree.fromstring(body))
return resp, stats
def get_hypervisor_uptime(self, hyper_id):
"""Display the uptime of the specified hypervisor."""
resp, body = self.get('os-hypervisors/%s/uptime' % hyper_id,
self.headers)
uptime = xml_to_json(etree.fromstring(body))
return resp, uptime
def search_hypervisor(self, hyper_name):
"""Search specified hypervisor."""
resp, body = self.get('os-hypervisors/%s/search' % hyper_name,
self.headers)
hypervisors = self._parse_array(etree.fromstring(body))
return resp, hypervisors
| BeenzSyed/tempest | tempest/services/compute/xml/hypervisor_client.py | Python | apache-2.0 | 3,235 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import re
import mock
import novaclient.exceptions as nova_ex
import six
from sahara.conductor import resource as r
from sahara.plugins.vanilla import plugin
import sahara.service.validation as v
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
m = {}
_types_checks = {
"string": [1, (), {}, True],
"integer": ["a", (), {}, True],
"uuid": ["z550e8400-e29b-41d4-a716-446655440000", 1, "a", (), {}, True],
"array": [{}, 'a', 1, True],
"boolean": [1, 'a', (), {}]
}
def _update_data(data, update):
data.update(update)
return data
def _get_plugins():
vanilla = plugin.VanillaProvider
vanilla.name = 'vanilla'
return [vanilla]
def _get_plugin(name):
if name == 'vanilla':
vanilla = plugin.VanillaProvider
vanilla.name = 'vanilla'
return vanilla
return None
def _get_keypair(name):
if name != "test_keypair":
raise nova_ex.NotFound("")
def _get_network(**kwargs):
if 'id' in kwargs and (
kwargs['id'] != "d9a3bebc-f788-4b81-9a93-aa048022c1ca"):
raise nova_ex.NotFound("")
return 'OK'
def _get_fl_ip_pool_list():
return [FakeNetwork("d9a3bebc-f788-4b81-9a93-aa048022c1ca")]
def _get_availability_zone_list(detailed=True):
return [FakeAvailabilityZone('nova')]
def _get_heat_stack_list(**kwargs):
if (kwargs.get('filters') and
kwargs.get('filters').get('name') == 'test-heat'):
return [FakeStack('test-heat')]
return []
class FakeStack(object):
def __init__(self, name):
self.stack_name = name
class FakeNetwork(object):
def __init__(self, name):
self.name = name
class FakeAvailabilityZone(object):
def __init__(self, name):
self.zoneName = name
class FakeFlavor(object):
def __init__(self, id):
self.id = id
class FakeSecurityGroup(object):
def __init__(self, id, name):
self.id = id
self.name = name
def _get_flavors_list():
return [FakeFlavor("42")]
def _get_security_groups_list():
return [FakeSecurityGroup("1", "default"),
FakeSecurityGroup("2", "group1"),
FakeSecurityGroup("3", "group2")]
def start_patch(patch_templates=True):
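    """Patch sahara.service.api and the OpenStack client factories with mocks
    and return the list of started patchers so callers can stop_patch() them."""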
get_clusters_p = mock.patch("sahara.service.api.get_clusters")
get_cluster_p = mock.patch("sahara.service.api.get_cluster")
if patch_templates:
get_ng_templates_p = mock.patch(
"sahara.service.api.get_node_group_templates")
get_ng_template_p = mock.patch(
"sahara.service.api.get_node_group_template")
if patch_templates:
get_cl_templates_p = mock.patch(
"sahara.service.api.get_cluster_templates")
get_cl_template_p = mock.patch(
"sahara.service.api.get_cluster_template")
nova_p = mock.patch("sahara.utils.openstack.nova.client")
heat_p = mock.patch("sahara.utils.openstack.heat.client")
cinder_p = mock.patch("sahara.utils.openstack.cinder.client")
cinder_exists_p = mock.patch(
"sahara.utils.openstack.cinder.check_cinder_exists")
get_image_p = mock.patch("sahara.service.api.get_image")
get_image = get_image_p.start()
get_clusters = get_clusters_p.start()
get_cluster = get_cluster_p.start()
if patch_templates:
get_ng_templates = get_ng_templates_p.start()
get_ng_template = get_ng_template_p.start()
if patch_templates:
get_cl_templates = get_cl_templates_p.start()
get_cl_template_p.start()
nova = nova_p.start()
if patch_templates:
get_cl_templates.return_value = []
nova().flavors.list.side_effect = _get_flavors_list
nova().security_groups.list.side_effect = _get_security_groups_list
nova().keypairs.get.side_effect = _get_keypair
nova().networks.find.side_effect = _get_network
nova().networks.find.__name__ = 'find'
nova().floating_ip_pools.list.side_effect = _get_fl_ip_pool_list
nova().availability_zones.list.side_effect = _get_availability_zone_list
heat = heat_p.start()
heat().stacks.list.side_effect = _get_heat_stack_list
cinder = cinder_p.start()
cinder().availability_zones.list.side_effect = _get_availability_zone_list
cinder_exists = cinder_exists_p.start()
cinder_exists.return_value = True
class Image(object):
def __init__(self, name='test'):
self.name = name
@property
def id(self):
if self.name == 'test':
return '550e8400-e29b-41d4-a716-446655440000'
else:
return '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
@property
def tags(self):
if self.name == 'test':
return ['vanilla', '1.2.1']
else:
return ['vanilla', 'wrong_tag']
def _get_image(id):
if id == '550e8400-e29b-41d4-a716-446655440000':
return Image()
else:
return Image('wrong_test')
get_image.side_effect = _get_image
nova().images.list_registered.return_value = [Image(),
Image(name='wrong_name')]
ng_dict = tu.make_ng_dict('ng', '42', ['namenode'], 1)
cluster = tu.create_cluster('test', 't', 'vanilla', '1.2.1', [ng_dict],
id=1, status='Active')
# stub clusters list
get_clusters.return_value = [cluster]
get_cluster.return_value = cluster
# stub node templates
if patch_templates:
ngt_dict = {'name': 'test', 'tenant_id': 't', 'flavor_id': '42',
'plugin_name': 'vanilla', 'hadoop_version': '1.2.1',
'id': '550e8400-e29b-41d4-a716-446655440000',
'node_processes': ['namenode']}
get_ng_templates.return_value = [r.NodeGroupTemplateResource(ngt_dict)]
ct_dict = {'name': 'test', 'tenant_id': 't',
'plugin_name': 'vanilla', 'hadoop_version': '1.2.1'}
get_cl_templates.return_value = [r.ClusterTemplateResource(ct_dict)]
def _get_ng_template(id):
for template in get_ng_templates():
if template.id == id:
return template
return None
if patch_templates:
get_ng_template.side_effect = _get_ng_template
# request data to validate
patchers = [get_clusters_p, get_cluster_p,
nova_p, get_image_p, heat_p, cinder_p,
cinder_exists_p]
if patch_templates:
patchers.extend([get_ng_template_p, get_ng_templates_p,
get_cl_template_p, get_cl_templates_p])
return patchers
def stop_patch(patchers):
for patcher in reversed(patchers):
patcher.stop()
class ValidationTestCase(base.SaharaTestCase):
def setUp(self):
super(ValidationTestCase, self).setUp()
self._create_object_fun = None
self.scheme = None
def tearDown(self):
self._create_object_fun = None
super(ValidationTestCase, self).tearDown()
def _assert_calls(self, mock, call_info):
if not call_info:
self.assertEqual(0, mock.call_count, "Unexpected call to %s: %s"
% (mock.name, str(mock.call_args)))
else:
self.assertEqual(call_info[0], mock.call_count)
self.assertEqual(call_info[1], mock.call_args[0][0].code)
possible_messages = ([call_info[2]] if isinstance(
call_info[2], six.string_types) else call_info[2])
match = False
check = mock.call_args[0][0].message
if check.find('Error ID:') != -1:
check = check.split('\n')[0]
for message in possible_messages:
if self._check_match(message, check):
match = True
break
if not match:
self.assertIn(check, possible_messages)
def _check_match(self, expected, actual):
d1, r1 = self._extract_printed_dict(expected)
d2, r2 = self._extract_printed_dict(actual)
# Note(slukjanov): regex needed because of different
# versions of jsonschema generate different
# messages.
return (r1 == r2 or re.match(r1, r2)) and (d1 == d2)
def _extract_printed_dict(self, s):
start = s.find('{')
if start == -1:
return None, s
end = s.rfind('}')
if end == -1:
return None, s
return ast.literal_eval(s[start:end+1]), s[0:start+1] + s[end]
@mock.patch("sahara.utils.api.request_data")
@mock.patch("sahara.utils.api.bad_request")
def _assert_create_object_validation(
self, bad_req=None, request_data=None,
data=None, bad_req_i=None):
request_data.return_value = data
# mock function that should be validated
patchers = start_patch()
m_func = mock.Mock()
m_func.__name__ = "m_func"
v.validate(self.scheme, self._create_object_fun)(m_func)(data=data)
self.assertEqual(1, request_data.call_count)
self._assert_calls(bad_req, bad_req_i)
stop_patch(patchers)
def _assert_valid_name_hostname_validation(self, data):
data.update({'name': None})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"None is not of type 'string'")
)
data.update({'name': ""})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'' is too short")
)
data.update({'name': ('a' * 51)})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'%s' is too long" % ('a' * 51))
)
data.update({'name': 'a-!'})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'a-!' is not a 'valid_name_hostname'")
)
def _prop_types_str(self, prop_types):
return ", ".join(["'%s'" % prop for prop in prop_types])
def _assert_types(self, default_data):
for p_name in self.scheme['properties']:
prop = self.scheme['properties'][p_name]
prop_types = prop["type"]
if type(prop_types) is not list:
prop_types = [prop_types]
for prop_type in prop_types:
if prop_type in _types_checks:
for type_ex in _types_checks[prop_type]:
data = default_data.copy()
value = type_ex
value_str = str(value)
if isinstance(value, str):
value_str = "'%s'" % value_str
data.update({p_name: value})
message = ("%s is not of type %s" %
(value_str,
self._prop_types_str(prop_types)))
if "enum" in prop:
message = [message, "%s is not one of %s" %
(value_str, prop["enum"])]
self._assert_create_object_validation(
data=data,
bad_req_i=(1, 'VALIDATION_ERROR', message)
)
def _assert_cluster_configs_validation(self, require_image_id=False):
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'cluster_configs': {
'HDFS': {
u'hadoop.tmp.dir': '/temp/'
}
},
'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
}
if require_image_id:
data_without_image = data.copy()
data_without_image.pop('default_image_id')
self._assert_create_object_validation(
data=data_without_image,
bad_req_i=(1, 'NOT_FOUND',
"'default_image_id' field is not found")
)
self._assert_create_object_validation(
data=_update_data(data.copy(), {
'cluster_configs': {
'wrong_target': {
u'hadoop.tmp.dir': '/temp/'
}
}}),
bad_req_i=(1, 'INVALID_REFERENCE',
"Plugin doesn't contain applicable "
"target 'wrong_target'")
)
self._assert_create_object_validation(
data=_update_data(data.copy(), {
'cluster_configs': {
'HDFS': {
u's': '/temp/'
}
}
}),
bad_req_i=(1, 'INVALID_REFERENCE',
"Plugin's applicable target 'HDFS' doesn't "
"contain config with name 's'")
)
def _assert_cluster_default_image_tags_validation(self):
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
}
self._assert_create_object_validation(data=data)
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'default_image_id': '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
}
self._assert_create_object_validation(
data=data,
bad_req_i=(1, 'INVALID_REFERENCE',
"Requested image "
"'813fe450-40d2-4acc-ade5-ea753a1bd5bc' "
"doesn't contain required tags: "
"['1.2.1']"))
def assert_protected_resource_exception(self, ex):
self.assertIn("marked as protected", six.text_type(ex))
def assert_created_in_another_tenant_exception(self, ex):
self.assertIn("wasn't created in this tenant", six.text_type(ex))
| crobby/sahara | sahara/tests/unit/service/validation/utils.py | Python | apache-2.0 | 14,870 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import neighbor
class neighbors(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/isis-neighbor-attribute/neighbors. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS neighbors.
"""
__slots__ = ("_path_helper", "_extmethods", "__neighbor")
_yang_name = "neighbors"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__neighbor = YANGDynClass(
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"isis-neighbor-attribute",
"neighbors",
]
def _get_neighbor(self):
"""
Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
YANG Description: This list defines ISIS extended reachability neighbor
attributes.
"""
return self.__neighbor
def _set_neighbor(self, v, load=False):
"""
Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor() directly.
YANG Description: This list defines ISIS extended reachability neighbor
attributes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """neighbor must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__neighbor = t
if hasattr(self, "_set"):
self._set()
def _unset_neighbor(self):
self.__neighbor = YANGDynClass(
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
neighbor = __builtin__.property(_get_neighbor)
_pyangbind_elements = OrderedDict([("neighbor", neighbor)])
from . import neighbor
class neighbors(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/isis-neighbor-attribute/neighbors. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container describes IS neighbors.
"""
__slots__ = ("_path_helper", "_extmethods", "__neighbor")
_yang_name = "neighbors"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__neighbor = YANGDynClass(
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"isis-neighbor-attribute",
"neighbors",
]
def _get_neighbor(self):
"""
Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
YANG Description: This list defines ISIS extended reachability neighbor
attributes.
"""
return self.__neighbor
def _set_neighbor(self, v, load=False):
"""
Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_neighbor is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_neighbor() directly.
YANG Description: This list defines ISIS extended reachability neighbor
attributes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """neighbor must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__neighbor = t
if hasattr(self, "_set"):
self._set()
def _unset_neighbor(self):
self.__neighbor = YANGDynClass(
base=YANGListType(
False,
neighbor.neighbor,
yang_name="neighbor",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="neighbor",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
neighbor = __builtin__.property(_get_neighbor)
_pyangbind_elements = OrderedDict([("neighbor", neighbor)])
| napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/__init__.py | Python | apache-2.0 | 14,460 |
from mosaic import app
if __name__ == '__main__':
app.run()
| BumagniyPacket/mosaic | run.py | Python | apache-2.0 | 65 |
'''
Created on Nov 2, 2012
@author: maodouzi
'''
import logging
from keystoneclient.v2_0 import client as keystone_client
from novaclient.v1_1 import client as nova_client
from cinderclient.v1 import client as cinder_client
from keystoneclient.exceptions import BadRequest
from openstack_dashboard.local.local_settings import OPENSTACK_HOST
LOG = logging.getLogger(__name__)
DEFAULT_ROLE = None
MEMBER_ROLE = "_member_"
ENDPOINT_URL = "http://%s:35357/v2.0" % OPENSTACK_HOST
ERR_MSG = {"accountExist": "Account already exist"
}
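# Wrap each message in a uniform "ERROR: ... !" banner.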
ERR_MSG = {key: "ERROR: %s !" % value for key, value in ERR_MSG.items()}
class RequestException(Exception):
def __init__(self, message=None):
self.message = str(message) or self.__class__.message
def __str__(self):
return self.message
class RequestClient(object):
def __init__(self, username, password, email, phoneNum, realName, corpName,
applyReason, quota, token, adminTenant, adminUser, adminPasswd,
endpoint=ENDPOINT_URL):
self.token = token
self.endpoint = endpoint
self.conn = keystone_client.Client(token=self.token, endpoint=self.endpoint)
self._fetchInfo()
self.novaConn = nova_client.Client(username=adminUser,
api_key=adminPasswd,
project_id=adminTenant,
auth_url=endpoint)
self.cinderConn = cinder_client.Client(username=adminUser,
api_key=adminPasswd,
project_id=adminTenant,
auth_url=endpoint)
self.quota = quota
self.username = username
self.password = password
self.email = email
self.realName = realName
self.phoneNum = phoneNum
self.corpName = corpName
self.applyReason = applyReason
self.description = "==".join((self.email, self.phoneNum, self.realName, self.corpName, self.applyReason))
if self._isAccountExist():
raise RequestException(ERR_MSG["accountExist"])
def createAccount(self):
try:
self._createTenant()
self._updateQuota()
self._createUser()
self._addRole()
except Exception as e:
self.deleteAccount()
raise RequestException(e)
def deleteAccount(self):
self._deleteTenant()
self._deleteUser()
def _checkRequestArgs(self):
return self._isRequestValid() and (not self._isAccountExist())
def _fetchInfo(self):
try:
self.tenantList = self.conn.tenants.list()
self.userList = self.conn.users.list()
self.roleList = self.conn.roles.list()
self.tenantDict = {str(item.name):str(item.id) for item in self.tenantList}
self.userDict = {str(item.name):str(item.id) for item in self.userList}
self.memberRoleId = [str(item.id) for item in self.roleList
if str(item.name) == MEMBER_ROLE][0]
try:
self.username
except AttributeError:
pass
else:
self.tenantId = self.tenantDict.get(self.username, False)
self.userId = self.userDict.get(self.username, False)
if self.tenantId and self.userId:
self.boundRoleList = self.conn.roles.roles_for_user(user=self.userId,
tenant=self.tenantId)
self.boundRoleDict = {str(item.name):str(item.id) for item in self.boundRoleList}
else:
self.boundRoleDict = {}
except BadRequest as e:
LOG.debug(e)
raise RequestException(e)
except IndexError as e:
LOG.debug(e)
raise RequestException("No role named %s" % MEMBER_ROLE)
def _isRequestValid(self):
return True
def _isAccountExist(self):
return self._isTenantNameExist() or self._isUserNameExist()
def _isTenantNameExist(self):
return self.username in self.tenantDict
def _isUserNameExist(self):
return self.username in self.userDict
def _isBound2Role(self):
return MEMBER_ROLE in self.boundRoleDict
def _createTenant(self):
if not self._isTenantNameExist():
self.conn.tenants.create(tenant_name=self.username,
description=self.description,
enabled=True)
self._fetchInfo()
def _deleteTenant(self):
if self._isTenantNameExist():
self.conn.tenants.delete(tenant=self.tenantId)
self._fetchInfo()
def _createUser(self):
self._createTenant()
if not self._isUserNameExist():
self.conn.users.create(name=self.username,
password=self.password,
email=self.email,
tenant_id=self.tenantId,
enabled=False)
self._fetchInfo()
def _deleteUser(self):
if self._isUserNameExist():
self.conn.users.delete(user=self.userId)
self._fetchInfo()
def _addRole(self):
if not self._isBound2Role():
self.conn.roles.add_user_role(self.userId, self.memberRoleId, self.tenantId)
self._fetchInfo()
def _getQuota(self):
quotaDict = {}
quotaDict["nova"] = self.novaConn.quotas.get(tenant_id=self.tenantId)
quotaDict["cinder"] = self.cinderConn.quotas.get(tenant_id=self.tenantId)
return quotaDict
def _updateQuota(self):
nova_quota = self.quota.copy()
del nova_quota["volumes"]
del nova_quota["gigabytes"]
self.novaConn.quotas.update(tenant_id=self.tenantId, **nova_quota)
self.cinderConn.quotas.update(tenant_id=self.tenantId,
volumes=self.quota["volumes"],
gigabytes=self.quota["gigabytes"]
)
| 99cloud/keystone_register | openstack_dashboard/register/register.py | Python | apache-2.0 | 6,455 |
# -*- coding: utf-8 -*-
'''
Use a git repository as a Pillar source
---------------------------------------
.. note::
This external pillar has been rewritten for the :doc:`2015.8.0
</topics/releases/2015.8.0>` release. The old method of configuring this
external pillar will be maintained for a couple releases, allowing time for
configurations to be updated to reflect the new usage.
This external pillar allows for a Pillar top file and Pillar SLS files to be
sourced from a git repository.
However, since git_pillar does not have an equivalent to the
:conf_master:`pillar_roots` parameter, configuration is slightly different. The
Pillar top file must still contain the relevant environment, like so:
.. code-block:: yaml
base:
'*':
- foo
The branch/tag which maps to that environment must then be specified along with
the repo's URL. Configuration details can be found below.
.. _git-pillar-pre-2015-8-0:
Configuring git_pillar for Salt releases before 2015.8.0
========================================================
For Salt releases earlier than :doc:`2015.8.0 </topics/releases/2015.8.0>`,
GitPython is the only supported provider for git_pillar. Individual
repositories can be configured under the :conf_master:`ext_pillar`
configuration parameter like so:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git root=subdirectory
The repository is specified in the format ``<branch> <repo_url>``, with an
optional ``root`` parameter (added in the :doc:`2014.7.0
</topics/releases/2014.7.0>` release) which allows the pillar SLS files to be
served up from a subdirectory (similar to :conf_master:`gitfs_root` in gitfs).
To use more than one branch from the same repo, multiple lines must be
specified under :conf_master:`ext_pillar`:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git
- git: dev https://gitserver/git-pillar.git
To remap a specific branch to a specific Pillar environment, use the format
``<branch>:<env>``:
.. code-block:: yaml
ext_pillar:
- git: develop:dev https://gitserver/git-pillar.git
- git: master:prod https://gitserver/git-pillar.git
In this case, the ``develop`` branch would need its own ``top.sls`` with a
``dev`` section in it, like this:
.. code-block:: yaml
dev:
'*':
- bar
The ``master`` branch would need its own ``top.sls`` with a ``prod`` section in
it:
.. code-block:: yaml
prod:
'*':
- bar
If ``__env__`` is specified as the branch name, then git_pillar will use the
branch specified by :conf_master:`gitfs_base`:
.. code-block:: yaml
ext_pillar:
- git: __env__ https://gitserver/git-pillar.git root=pillar
The corresponding Pillar top file would look like this:
.. code-block:: yaml
{{env}}:
'*':
- bar
.. _git-pillar-2015-8-0-and-later:
Configuring git_pillar for Salt releases 2015.8.0 and later
===========================================================
.. note::
In version 2015.8.0, the method of configuring git external pillars has
changed, and now more closely resembles that of the :ref:`Git Fileserver
Backend <tutorial-gitfs>`. If Salt detects the old configuration schema, it
will use the pre-2015.8.0 code to compile the external pillar. A warning
will also be logged.
Beginning with Salt version 2015.8.0, pygit2_ is now supported in addition to
GitPython_ (Dulwich_ will not be supported for the foreseeable future). The
requirements for GitPython_ and pygit2_ are the same as for gitfs, as described
:ref:`here <gitfs-dependencies>`.
.. important::
git_pillar has its own set of global configuration parameters. While it may
seem intuitive to use the global gitfs configuration parameters
(:conf_master:`gitfs_base`, etc.) to manage git_pillar, this will not work.
The main difference for this is the fact that the different components
which use Salt's git backend code do not all function identically. For
instance, in git_pillar it is necessary to specify which branch/tag to be
used for git_pillar remotes. This is the reverse behavior from gitfs, where
branches/tags make up your environments.
See :ref:`here <git_pillar-config-opts>` for documentation on the
git_pillar configuration options and their usage.
Here is an example git_pillar configuration:
.. code-block:: yaml
ext_pillar:
- git:
# Use 'prod' instead of the branch name 'production' as the environment
- production https://gitserver/git-pillar.git:
- env: prod
# Use 'dev' instead of the branch name 'develop' as the environment
- develop https://gitserver/git-pillar.git:
- env: dev
# No per-remote config parameters (and no trailing colon), 'qa' will
# be used as the environment
- qa https://gitserver/git-pillar.git
# SSH key authentication
- master git@other-git-server:pillardata-ssh.git:
# Pillar SLS files will be read from the 'pillar' subdirectory in
# this repository
- root: pillar
- privkey: /path/to/key
- pubkey: /path/to/key.pub
- passphrase: CorrectHorseBatteryStaple
# HTTPS authentication
- master https://other-git-server/pillardata-https.git:
- user: git
- password: CorrectHorseBatteryStaple
The main difference between this and the old way of configuring git_pillar is
that multiple remotes can be configured under one ``git`` section under
:conf_master:`ext_pillar`. More than one ``git`` section can be used, but it is
not necessary. Remotes will be evaluated sequentially.
Per-remote configuration parameters are supported (similar to :ref:`gitfs
<gitfs-per-remote-config>`), and global versions of the git_pillar
configuration parameters can also be set.
With the addition of pygit2_ support, git_pillar can now interact with
authenticated remotes. Authentication works just like in gitfs (as outlined in
the :ref:`Git Fileserver Backend Walkthrough <gitfs-authentication>`), only
with the global authenication parameter names prefixed with ``git_pillar``
instead of ``gitfs`` (e.g. :conf_master:`git_pillar_pubkey`,
:conf_master:`git_pillar_privkey`, :conf_master:`git_pillar_passphrase`, etc.).
.. _GitPython: https://github.com/gitpython-developers/GitPython
.. _pygit2: https://github.com/libgit2/pygit2
.. _Dulwich: https://www.samba.org/~jelmer/dulwich/
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import hashlib
import os
from distutils.version import LooseVersion
# Import salt libs
import salt.utils.gitfs
import salt.utils.dictupdate
from salt.exceptions import FileserverConfigError
from salt.pillar import Pillar
# Import third party libs
import salt.ext.six as six
# pylint: disable=import-error
try:
import git
HAS_GITPYTHON = True
except ImportError:
HAS_GITPYTHON = False
# pylint: enable=import-error
PER_REMOTE_OVERRIDES = ('env', 'root', 'ssl_verify')
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'git'
def __virtual__():
'''
Only load if gitpython is available
'''
git_ext_pillars = [x for x in __opts__['ext_pillar'] if 'git' in x]
if not git_ext_pillars:
# No git external pillars were configured
return False
for ext_pillar in git_ext_pillars:
if isinstance(ext_pillar['git'], six.string_types):
# Verification of legacy git pillar configuration
if not HAS_GITPYTHON:
log.error(
'Git-based ext_pillar is enabled in configuration but '
'could not be loaded, is GitPython installed?'
)
return False
            # Compare versions numerically; plain string comparison would
            # wrongly reject GitPython versions like '0.10.2'.
            from distutils.version import LooseVersion
            if LooseVersion(git.__version__) <= LooseVersion('0.3.0'):
                return False
return __virtualname__
else:
# Verification of new git pillar configuration
try:
salt.utils.gitfs.GitPillar(__opts__)
# Initialization of the GitPillar object did not fail, so we
# know we have valid configuration syntax and that a valid
# provider was detected.
return __virtualname__
except FileserverConfigError:
pass
return False
def ext_pillar(minion_id, repo, pillar_dirs):
'''
Checkout the ext_pillar sources and compile the resulting pillar SLS
'''
if isinstance(repo, six.string_types):
return _legacy_git_pillar(minion_id, repo, pillar_dirs)
else:
opts = copy.deepcopy(__opts__)
opts['pillar_roots'] = {}
pillar = salt.utils.gitfs.GitPillar(opts)
pillar.init_remotes(repo, PER_REMOTE_OVERRIDES)
pillar.checkout()
ret = {}
merge_strategy = __opts__.get(
'pillar_source_merging_strategy',
'smart'
)
merge_lists = __opts__.get(
'pillar_merge_lists',
False
)
for pillar_dir, env in six.iteritems(pillar.pillar_dirs):
log.debug(
'git_pillar is processing pillar SLS from {0} for pillar '
'env \'{1}\''.format(pillar_dir, env)
)
all_dirs = [d for (d, e) in six.iteritems(pillar.pillar_dirs)
if env == e]
# Ensure that the current pillar_dir is first in the list, so that
# the pillar top.sls is sourced from the correct location.
pillar_roots = [pillar_dir]
pillar_roots.extend([x for x in all_dirs if x != pillar_dir])
opts['pillar_roots'] = {env: pillar_roots}
local_pillar = Pillar(opts, __grains__, minion_id, env)
ret = salt.utils.dictupdate.merge(
ret,
local_pillar.compile_pillar(ext=False),
strategy=merge_strategy,
merge_lists=merge_lists
)
return ret
# Legacy git_pillar code
class _LegacyGitPillar(object):
'''
Deal with the remote git repository for Pillar
'''
def __init__(self, branch, repo_location, opts):
'''
Try to initialize the Git repo object
'''
self.branch = self.map_branch(branch, opts)
self.rp_location = repo_location
self.opts = opts
self._envs = set()
self.working_dir = ''
self.repo = None
hash_type = getattr(hashlib, opts.get('hash_type', 'md5'))
hash_str = '{0} {1}'.format(self.branch, self.rp_location)
repo_hash = hash_type(hash_str).hexdigest()
rp_ = os.path.join(self.opts['cachedir'], 'pillar_gitfs', repo_hash)
if not os.path.isdir(rp_):
os.makedirs(rp_)
try:
self.repo = git.Repo.init(rp_)
except (git.exc.NoSuchPathError,
git.exc.InvalidGitRepositoryError) as exc:
log.error('GitPython exception caught while '
'initializing the repo: {0}. Maybe '
'git is not available.'.format(exc))
# Git directory we are working on
# Should be the same as self.repo.working_dir
self.working_dir = rp_
if isinstance(self.repo, git.Repo):
if not self.repo.remotes:
try:
self.repo.create_remote('origin', self.rp_location)
# ignore git ssl verification if requested
if self.opts.get('pillar_gitfs_ssl_verify', True):
self.repo.git.config('http.sslVerify', 'true')
else:
self.repo.git.config('http.sslVerify', 'false')
                except os.error:
                    # This exception occurs when two processes try to write
                    # to the git config at once; pass over it since this is
                    # the only write.
                    # TODO: serialise this write with a lock instead.
                    pass
else:
if self.repo.remotes.origin.url != self.rp_location:
self.repo.remotes.origin.config_writer.set(
'url', self.rp_location)
def map_branch(self, branch, opts=None):
opts = __opts__ if opts is None else opts
if branch == '__env__':
branch = opts.get('environment') or 'base'
if branch == 'base':
branch = opts.get('gitfs_base') or 'master'
elif ':' in branch:
branch = branch.split(':', 1)[0]
return branch
def update(self):
'''
        Ensure we are following the latest changes on the remote.
        Return a boolean indicating whether it worked.
'''
try:
log.debug('Updating fileserver for git_pillar module')
self.repo.git.fetch()
except git.exc.GitCommandError as exc:
log.error('Unable to fetch the latest changes from remote '
'{0}: {1}'.format(self.rp_location, exc))
return False
try:
self.repo.git.checkout('origin/{0}'.format(self.branch))
except git.exc.GitCommandError as exc:
log.error('Unable to checkout branch '
'{0}: {1}'.format(self.branch, exc))
return False
return True
def envs(self):
'''
Return a list of refs that can be used as environments
'''
if isinstance(self.repo, git.Repo):
remote = self.repo.remote()
for ref in self.repo.refs:
parted = ref.name.partition('/')
short = parted[2] if parted[2] else parted[0]
if isinstance(ref, git.Head):
if short == 'master':
short = 'base'
if ref not in remote.stale_refs:
self._envs.add(short)
elif isinstance(ref, git.Tag):
self._envs.add(short)
return list(self._envs)
def _legacy_git_pillar(minion_id, repo_string, pillar_dirs):
'''
Support pre-Beryllium config schema
'''
if pillar_dirs is None:
return
# split the branch, repo name and optional extra (key=val) parameters.
options = repo_string.strip().split()
branch_env = options[0]
repo_location = options[1]
root = ''
for extraopt in options[2:]:
# Support multiple key=val attributes as custom parameters.
DELIM = '='
        if DELIM not in extraopt:
            log.error('Incorrectly formatted extra parameter. '
                      'Missing \'{0}\': {1}'.format(DELIM, extraopt))
            continue
        key, val = _extract_key_val(extraopt, DELIM)
if key == 'root':
root = val
else:
log.warning('Unrecognized extra parameter: {0}'.format(key))
# environment is "different" from the branch
cfg_branch, _, environment = branch_env.partition(':')
gitpil = _LegacyGitPillar(cfg_branch, repo_location, __opts__)
branch = gitpil.branch
if environment == '':
if branch == 'master':
environment = 'base'
else:
environment = branch
# normpath is needed to remove appended '/' if root is empty string.
pillar_dir = os.path.normpath(os.path.join(gitpil.working_dir, root))
pillar_dirs.setdefault(pillar_dir, {})
if cfg_branch == '__env__' and branch not in ['master', 'base']:
gitpil.update()
elif pillar_dirs[pillar_dir].get(branch, False):
return {} # we've already seen this combo
pillar_dirs[pillar_dir].setdefault(branch, True)
# Don't recurse forever-- the Pillar object will re-call the ext_pillar
# function
if __opts__['pillar_roots'].get(branch, []) == [pillar_dir]:
return {}
opts = copy.deepcopy(__opts__)
opts['pillar_roots'][environment] = [pillar_dir]
pil = Pillar(opts, __grains__, minion_id, branch)
return pil.compile_pillar(ext=False)
def _update(branch, repo_location):
'''
    Ensure we are following the latest changes on the remote.
    Return a boolean indicating whether it worked.
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.update()
def _envs(branch, repo_location):
'''
Return a list of refs that can be used as environments
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.envs()
def _extract_key_val(kv, delimiter='='):
'''Extract key and value from key=val string.
Example:
>>> _extract_key_val('foo=bar')
('foo', 'bar')
'''
pieces = kv.split(delimiter)
key = pieces[0]
val = delimiter.join(pieces[1:])
return key, val
| smallyear/linuxLearn | salt/salt/pillar/git_pillar.py | Python | apache-2.0 | 16,836 |
import os
import re
import asyncio
import logging
from collections import OrderedDict
from pypeman.message import Message
from pypeman.errors import PypemanConfigError
logger = logging.getLogger("pypeman.store")
DATE_FORMAT = '%Y%m%d_%H%M'
class MessageStoreFactory():
""" Message store factory class can generate Message store instance for specific store_id. """
def get_store(self, store_id):
"""
:param store_id: identifier of corresponding message store.
:return: A MessageStore corresponding to correct store_id.
"""
class MessageStore():
""" A MessageStore keep an history of processed messages. Mainly used in channels. """
async def start(self):
"""
Called at startup to initialize store.
"""
async def store(self, msg):
"""
Store a message in the store.
:param msg: The message to store.
:return: Id for this specific message.
"""
async def change_message_state(self, id, new_state):
"""
Change the `id` message state.
:param id: Message specific store id.
:param new_state: Target state.
"""
async def get(self, id):
"""
        Return one message corresponding to the given `id` with its status.
:param id: Message id. Message store dependant.
:return: A dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
"""
async def search(self, start=0, count=10, order_by='timestamp'):
"""
        Return a list of messages with store-specific `id` and processed status.
:param start: First element.
:param count: Count of elements since first element.
:param order_by: Message order. Allowed values : ['timestamp', 'status'].
:return: A list of dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
"""
async def total(self):
"""
:return: total count of messages
"""
class NullMessageStoreFactory(MessageStoreFactory):
""" Return an NullMessageStore that do nothing at all. """
def get_store(self, store_id):
return NullMessageStore()
class NullMessageStore(MessageStore):
""" For testing purpose """
async def store(self, msg):
return None
async def get(self, id):
return None
async def search(self, **kwargs):
return None
async def total(self):
return 0
class FakeMessageStoreFactory(MessageStoreFactory):
""" Return an Fake message store """
def get_store(self, store_id):
return FakeMessageStore()
class FakeMessageStore(MessageStore):
""" For testing purpose """
async def store(self, msg):
logger.debug("Should store message %s", msg)
return 'fake_id'
async def get(self, id):
return {'id':id, 'state': 'processed', 'message': None}
async def search(self, **kwargs):
return []
async def total(self):
return 0
class MemoryMessageStoreFactory(MessageStoreFactory):
""" Return a Memory message store. All message are lost at pypeman stop. """
def __init__(self):
self.base_dict = {}
def get_store(self, store_id):
return MemoryMessageStore(self.base_dict, store_id)
class MemoryMessageStore(MessageStore):
""" Store messages in memory """
def __init__(self, base_dict, store_id):
super().__init__()
self.messages = base_dict.setdefault(store_id, OrderedDict())
async def store(self, msg):
msg_id = msg.uuid
self.messages[msg_id] = {'id': msg_id, 'state': Message.PENDING, 'timestamp': msg.timestamp, 'message': msg.to_dict()}
return msg_id
async def change_message_state(self, id, new_state):
self.messages[id]['state'] = new_state
async def get(self, id):
resp = dict(self.messages[id])
resp['message'] = Message.from_dict(resp['message'])
return resp
async def search(self, start=0, count=10, order_by='timestamp'):
if order_by.startswith('-'):
reverse = True
sort_key = order_by[1:]
else:
reverse = False
sort_key = order_by
result = []
for value in sorted(self.messages.values(), key=lambda x: x[sort_key], reverse=reverse):
resp = dict(value)
resp['message'] = Message.from_dict(resp['message'])
result.append(resp)
return result[start: start + count]
async def total(self):
return len(self.messages)
class FileMessageStoreFactory(MessageStoreFactory):
"""
Generate a FileMessageStore message store instance.
    Store files in a `<base_path>/<store_id>/<year>/<month>/<day>/` hierarchy.
"""
    # TODO add an option to regularly archive old files or delete them
def __init__(self, path):
super().__init__()
if path is None:
raise PypemanConfigError('file message store requires a path')
self.base_path = path
def get_store(self, store_id):
return FileMessageStore(self.base_path, store_id)
class FileMessageStore(MessageStore):
""" Store a file in `<base_path>/<store_id>/<month>/<day>/` hierachy."""
# TODO file access should be done in another thread. Waiting for file backend.
def __init__(self, path, store_id):
super().__init__()
self.base_path = os.path.join(path, store_id)
# Match msg file name
self.msg_re = re.compile(r'^([0-9]{8})_([0-9]{2})([0-9]{2})_[0-9abcdef]*$')
try:
# Try to make dirs if necessary
os.makedirs(os.path.join(self.base_path))
except FileExistsError:
pass
self._total = 0
async def start(self):
self._total = await self.count_msgs()
async def store(self, msg):
""" Store a file in `<base_path>/<store_id>/<month>/<day>/` hierachy."""
# TODO implement a safer store to avoid broken messages
# The filename is the file id
filename = "{}_{}".format(msg.timestamp.strftime(DATE_FORMAT), msg.uuid)
dirs = os.path.join(str(msg.timestamp.year), "%02d" % msg.timestamp.month, "%02d" % msg.timestamp.day)
try:
# Try to make dirs if necessary
os.makedirs(os.path.join(self.base_path, dirs))
except FileExistsError:
pass
file_path = os.path.join(dirs, filename)
# Write message to file
with open(os.path.join(self.base_path, file_path), "w") as f:
f.write(msg.to_json())
await self.change_message_state(file_path, Message.PENDING)
self._total += 1
return file_path
async def change_message_state(self, id, new_state):
with open(os.path.join(self.base_path, id + '.meta'), "w") as f:
f.write(new_state)
async def get_message_state(self, id):
with open(os.path.join(self.base_path, id + '.meta'), "r") as f:
state = f.read()
return state
async def get(self, id):
if not os.path.exists(os.path.join(self.base_path, id)):
raise IndexError
with open(os.path.join(self.base_path, id), "rb") as f:
msg = Message.from_json(f.read().decode('utf-8'))
return {'id': id, 'state': await self.get_message_state(id), 'message': msg}
async def sorted_list_directories(self, path, reverse=True):
"""
:param path: Base path
:param reverse: reverse order
:return: List of directories in specified path ordered
"""
return sorted([d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))], reverse=reverse)
async def count_msgs(self):
"""
        Count messages by listing all directories. To be used at startup.
"""
count = 0
for year in await self.sorted_list_directories(os.path.join(self.base_path)):
for month in await self.sorted_list_directories(os.path.join(self.base_path, year)):
for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month)):
for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day))):
found = self.msg_re.match(msg_name)
if found:
                            count += 1
return count
async def search(self, start=0, count=10, order_by='timestamp'):
# TODO better performance for slicing by counting file in dirs ?
if order_by.startswith('-'):
reverse = True
sort_key = order_by[1:]
else:
reverse = False
sort_key = order_by
# TODO handle sort_key
result = []
end = start + count
position = 0
for year in await self.sorted_list_directories(os.path.join(self.base_path), reverse=reverse):
for month in await self.sorted_list_directories(os.path.join(self.base_path, year), reverse=reverse):
for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month), reverse=reverse):
for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day)), reverse=reverse):
found = self.msg_re.match(msg_name)
if found:
if start <= position < end:
mid = os.path.join(year, month, day, msg_name)
result.append(await self.get(mid))
position += 1
return result
async def total(self):
return self._total
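# Illustrative sketch: obtaining a file-backed store. The path is a
# placeholder; each store_id gets its own subtree on disk.
#
#     factory = FileMessageStoreFactory(path='/var/lib/pypeman/store')
#     store = factory.get_store('my_channel')
#     await store.start()  # counts messages already on disk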
| jrmi/pypeman | pypeman/msgstore.py | Python | apache-2.0 | 9,722 |
#!/usr/bin/env python3.9
# -*- coding: utf-8 -*-
"""Eris Codebase Monitor
Eris maintains an up-to-date set of reports for every file in a codebase.
A status indicator summarises the state of each report, and a report is viewed
by selecting this status indicator with the cursor.
The reports are cached in the codebase's root directory in a ".eris"
directory.
"""
import asyncio
import contextlib
import functools
import gzip
import importlib
import importlib.resources
import itertools
import math
import multiprocessing
import os
import pickle
import shutil
import signal
import subprocess
import sys
import time
import docopt
import pygments.styles
import pyinotify
import eris
import eris.fill3 as fill3
import eris.sorted_collection as sorted_collection
import eris.terminal as terminal
import eris.termstr as termstr
import eris.tools as tools
import eris.worker as worker
import eris.paged_list as paged_list
USAGE = """
Usage:
eris [options] <directory>
eris -h | --help
eris -i | --info
Example:
# eris my_project
Options:
-h, --help Show the full help.
-i, --info Show information about the available tools.
-w COUNT, --workers=COUNT The number of processes working in parallel.
By default it is the number of cpus minus 1.
-e "COMMAND", --editor="COMMAND" The command used to start the editor, in
the *edit command. It may contain options.
-t THEME, --theme=THEME The pygment theme used for syntax
highlighting. Defaults to "native".
-c TYPE, --compression=TYPE The type of compression used in the cache:
gzip, lzma, bz2, or none. Defaults to gzip.
"""
KEYS_DOC = """Keys:
arrow keys, page up/down, mouse - Move the cursor or scroll the result pane.
tab - Change the focus between summary and result pane.
q, esc - Quit.
h - Show the help screen. (toggle)
o - Orient the result pane as portrait or landscape. (toggle)
l - Show the activity log. (toggle)
e - Edit the current file with an editor defined by -e, $EDITOR or $VISUAL.
n - Move to the next issue.
N - Move to the next issue of the current tool.
s - Sort files by type, or by directory location. (toggle)
r - Refresh the currently selected report.
R - Refresh all reports of the current tool.
f - Resize the focused pane to the full screen. (toggle)
x - Open the current file with xdg-open.
"""
class Entry:
MAX_WIDTH = 0
def __init__(self, path, results, change_time, highlighted=None,
set_results=True):
self.path = path
self.change_time = change_time
self.highlighted = highlighted
self.results = results
if set_results:
# FIX: this is missed for entries appended later
for result in results:
result.entry = self
self.widget = fill3.Row(results)
self.appearance_cache = None
self.last_width = None
def __eq__(self, other):
return self.path == other.path
def __len__(self):
return len(self.results)
def __getitem__(self, index):
return self.results[index]
def appearance_min(self):
if self.appearance_cache is None \
or self.last_width != Entry.MAX_WIDTH:
self.last_width = Entry.MAX_WIDTH
if self.highlighted is not None:
self.results[self.highlighted].is_highlighted = True
row_appearance = self.widget.appearance_min()
path = tools.path_colored(self.path)
padding = " " * (self.last_width - len(self.results) + 1)
self.appearance_cache = [row_appearance[0] + padding + path]
if self.highlighted is not None:
self.results[self.highlighted].is_highlighted = False
return self.appearance_cache
def as_html(self):
html_parts = []
styles = set()
for result in self.widget:
result_html, result_styles = result.as_html()
html_parts.append(result_html)
styles.update(result_styles)
path = tools.path_colored(self.path)
padding = " " * (Entry.MAX_WIDTH - len(self.widget) + 1)
path_html, path_styles = termstr.TermStr(padding + path).as_html()
return "".join(html_parts) + path_html, styles.union(path_styles)
def is_path_excluded(path):
return any(part.startswith(".") for part in path.split(os.path.sep))
def codebase_files(path, skip_hidden_directories=True):
for (dirpath, dirnames, filenames) in os.walk(path):
if skip_hidden_directories:
filtered_dirnames = [dirname for dirname in dirnames
if not is_path_excluded(dirname)]
dirnames[:] = filtered_dirnames
for filename in filenames:
if not is_path_excluded(filename):
yield os.path.join(dirpath, filename)
def fix_paths(root_path, paths):
return (os.path.join(".", os.path.relpath(path, root_path))
for path in paths)
def blend_color(a_color, b_color, transparency):
a_r, a_g, a_b = a_color
b_r, b_g, b_b = b_color
complement = 1 - transparency
return (int(a_r * transparency + b_r * complement),
int(a_g * transparency + b_g * complement),
int(a_b * transparency + b_b * complement))
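# Worked example: an even blend of pure red into pure blue.
# blend_color((255, 0, 0), (0, 0, 255), 0.5) == (127, 0, 127)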
def highlight_str(line, highlight_color, transparency):
@functools.lru_cache()
def blend_style(style):
fg_color = (style.fg_color if type(style.fg_color) == tuple
else termstr.xterm_color_to_rgb(style.fg_color))
bg_color = (style.bg_color if type(style.bg_color) == tuple
else termstr.xterm_color_to_rgb(style.bg_color))
return termstr.CharStyle(
blend_color(fg_color, highlight_color, transparency),
blend_color(bg_color, highlight_color, transparency),
is_bold=style.is_bold, is_italic=style.is_italic,
is_underlined=style.is_underlined)
return termstr.TermStr(line).transform_style(blend_style)
def in_green(str_):
return termstr.TermStr(str_, termstr.CharStyle(termstr.Color.lime))
_UP, _DOWN, _LEFT, _RIGHT = (0, -1), (0, 1), (-1, 0), (1, 0)
def directory_sort(entry):
path = entry.path
return (os.path.dirname(path), tools.splitext(path)[1],
os.path.basename(path))
def type_sort(entry):
path = entry.path
return (tools.splitext(path)[1], os.path.dirname(path),
os.path.basename(path))
class Summary:
def __init__(self, root_path, jobs_added_event):
self._root_path = root_path
self._jobs_added_event = jobs_added_event
self._view_widget = fill3.View.from_widget(self)
self.is_directory_sort = True
self._old_entries = []
self.__cursor_position = (0, 0)
self.reset()
def reset(self):
Entry.MAX_WIDTH = 0
self._max_path_length = 0
self.result_total = 0
self.completed_total = 0
self.is_loaded = False
self.closest_placeholder_generator = None
sort_func = directory_sort if self.is_directory_sort else type_sort
self._entries = sorted_collection.SortedCollection([], key=sort_func)
def __getstate__(self):
state = self.__dict__.copy()
state["closest_placeholder_generator"] = None
state["_jobs_added_event"] = None
summary_path = os.path.join(tools.CACHE_PATH, "summary_dir")
open_compressed = functools.partial(gzip.open, compresslevel=1)
x, y = self.cursor_position()
if y == 0:
entries = []
else:
entries = itertools.chain(
[self._entries[y]], itertools.islice(self._entries, y),
itertools.islice(self._entries, y+1, None))
state["_old_entries"] = paged_list.PagedList(
entries, summary_path, 2000, 1, exist_ok=True,
open_func=open_compressed)
state["_entries"] = None
state["__cursor_position"] = (x, 0)
return state
def __setstate__(self, state):
self.__dict__ = state
self.reset()
@property
def _cursor_position(self):
return self.__cursor_position
@_cursor_position.setter
def _cursor_position(self, new_position):
if new_position != self.__cursor_position:
self.__cursor_position = new_position
self.closest_placeholder_generator = None
def sort_entries(self):
key_func = directory_sort if self.is_directory_sort else type_sort
self._entries = sorted_collection.SortedCollection(
self._entries, key=key_func)
self.closest_placeholder_generator = None
def add_entry(self, entry):
if entry in self._entries:
return
for result in entry:
self.result_total += 1
if result.is_completed:
self.completed_total += 1
Entry.MAX_WIDTH = max(len(entry), Entry.MAX_WIDTH)
self._max_path_length = max(len(entry.path) - len("./"),
self._max_path_length)
entry_index = self._entries.insert(entry)
x, y = self._cursor_position
if entry_index <= y:
self.scroll(0, -1)
self._jobs_added_event.set()
if self.is_loaded:
self.closest_placeholder_generator = None
def on_file_added(self, path):
full_path = os.path.join(self._root_path, path)
try:
change_time = os.stat(full_path).st_ctime
except OSError:
return
row = [tools.Result(path, tool) for tool in tools.tools_for_path(path)]
entry = Entry(path, row, change_time)
self.add_entry(entry)
def on_file_deleted(self, path):
if os.path.exists(os.path.join(self._root_path, path)):
return
entry = Entry(path, [], None)
try:
index = self._entries.index(entry)
except ValueError:
return
x, y = self._cursor_position
if index < y:
self.scroll(0, 1)
for result in self._entries[index]:
if result.is_completed:
self.completed_total -= 1
self.result_total -= 1
result.delete()
row = self._entries[index]
del self._entries._keys[index]
del self._entries._items[index]
if len(row) == Entry.MAX_WIDTH:
Entry.MAX_WIDTH = max((len(entry) for entry in self._entries),
default=0)
if (len(path) - 2) == self._max_path_length:
self._max_path_length = max(((len(entry.path) - 2)
for entry in self._entries), default=0)
x, y = self._cursor_position
if y == len(self._entries):
self._cursor_position = x, y - 1
self.closest_placeholder_generator = None
def on_file_modified(self, path):
entry = Entry(path, [], None)
try:
entry_index = self._entries.index(entry)
except ValueError:
return
entry = self._entries[entry_index]
for result in entry:
self.refresh_result(result, only_completed=False)
self.closest_placeholder_generator = None
return entry
@contextlib.contextmanager
def keep_selection(self):
try:
cursor_path = self.get_selection().path
except AttributeError:
yield
return
x, y = self._cursor_position
yield
for index, row in enumerate(self._entries):
if row.path == cursor_path:
self._cursor_position = (x, index)
return
if y >= len(self._entries):
self._cursor_position = (x, len(self._entries) - 1)
async def sync_with_filesystem(self, appearance_changed_event, log=None):
start_time = time.time()
cache = {}
log.log_message("Started loading summary…")
for index, entry in enumerate(self._old_entries):
if index != 0 and index % 5000 == 0:
log.log_message(f"Loaded {index} files…")
await asyncio.sleep(0)
self.add_entry(entry)
if index % 1000 == 0:
appearance_changed_event.set()
cache[entry.path] = entry.change_time
duration = time.time() - start_time
log.log_message(f"Finished loading summary. {round(duration, 2)} secs")
self.is_loaded = True
self.closest_placeholder_generator = None
log.log_message("Started sync with filesystem…")
start_time = time.time()
all_paths = set()
for path in fix_paths(self._root_path, codebase_files(self._root_path)):
await asyncio.sleep(0)
all_paths.add(path)
if path in cache:
full_path = os.path.join(self._root_path, path)
change_time = os.stat(full_path).st_ctime
if change_time != cache[path]:
cache[path] = change_time
entry = self.on_file_modified(path)
entry.change_time = change_time
else:
self.on_file_added(path)
appearance_changed_event.set()
for path in cache.keys() - all_paths:
await asyncio.sleep(0)
self.on_file_deleted(path)
duration = time.time() - start_time
log.log_message(f"Finished sync with filesystem. {round(duration, 2)} secs")
def _sweep_up(self, x, y):
yield from reversed(self._entries[y][:x])
while True:
y = (y - 1) % len(self._entries)
yield from reversed(self._entries[y])
def _sweep_down(self, x, y):
yield from self._entries[y][x:]
while True:
y = (y + 1) % len(self._entries)
yield from self._entries[y]
def _sweep_combined(self, x, y):
for up_result, down_result in zip(self._sweep_up(x, y),
self._sweep_down(x, y)):
yield down_result
yield up_result
def _placeholder_sweep(self):
x, y = self.cursor_position()
for index, result in enumerate(self._sweep_combined(x, y)):
if index > self.result_total:
break
if result.status == tools.Status.pending:
yield result
async def get_closest_placeholder(self):
if self.closest_placeholder_generator is None:
self.closest_placeholder_generator = self._placeholder_sweep()
try:
return self.closest_placeholder_generator.send(None)
except StopIteration:
raise StopAsyncIteration
def appearance_dimensions(self):
return self._max_path_length + 1 + Entry.MAX_WIDTH, len(self._entries)
def appearance_interval(self, interval):
start_y, end_y = interval
x, y = self.cursor_position()
self._entries[y].highlighted = x
self._entries[y].appearance_cache = None
appearance = fill3.Column(self._entries).appearance_interval(interval)
self._entries[y].highlighted = None
self._entries[y].appearance_cache = None
return appearance
def _set_scroll_position(self, cursor_x, cursor_y, summary_height):
scroll_x, scroll_y = new_scroll_x, new_scroll_y = \
self._view_widget.position
if cursor_y < scroll_y:
new_scroll_y = max(cursor_y - summary_height + 1, 0)
if (scroll_y + summary_height - 1) < cursor_y:
new_scroll_y = cursor_y
self._view_widget.position = new_scroll_x, new_scroll_y
def _highlight_cursor_row(self, appearance, cursor_y):
scroll_x, scroll_y = self._view_widget.position
highlighted_y = cursor_y - scroll_y
appearance[highlighted_y] = (highlight_str(
appearance[highlighted_y][:-1], termstr.Color.white, 0.8)
+ appearance[highlighted_y][-1])
return appearance
def appearance(self, dimensions):
width, height = dimensions
if len(self._entries) == 0:
return [" " * width for row in range(height)]
cursor_x, cursor_y = self.cursor_position()
width, height = width - 1, height - 1 # Minus one for the scrollbars
self._set_scroll_position(cursor_x, cursor_y, height)
return self._highlight_cursor_row(
self._view_widget.appearance(dimensions), cursor_y)
def scroll(self, dx, dy):
scroll_x, scroll_y = self._view_widget.position
dy = min(dy, scroll_y)
self._view_widget.position = scroll_x, scroll_y - dy
self._move_cursor((0, -dy))
def cursor_position(self):
x, y = self._cursor_position
try:
return min(x, len(self._entries[y])-1), y
except IndexError:
return 0, 0
def get_selection(self):
x, y = self.cursor_position()
return self._entries[y][x]
def _move_cursor(self, vector):
if vector == (0, 0):
return
dx, dy = vector
if dy == 0:
x, y = self.cursor_position()
self._cursor_position = ((x + dx) % len(self._entries[y]), y)
elif dx == 0:
x, y = self._cursor_position
self._cursor_position = (x, (y + dy) % len(self._entries))
else:
raise ValueError
def cursor_right(self):
self._move_cursor(_RIGHT)
def cursor_left(self):
self._move_cursor(_LEFT)
def cursor_up(self):
self._move_cursor(_UP)
def cursor_down(self):
self._move_cursor(_DOWN)
def cursor_page_up(self):
view_width, view_height = self._view_widget.portal.last_dimensions
self.scroll(0, view_height)
def cursor_page_down(self):
view_width, view_height = self._view_widget.portal.last_dimensions
self.scroll(0, -view_height)
def cursor_home(self):
x, y = self._cursor_position
self._cursor_position = x, 0
def cursor_end(self):
x, y = self._cursor_position
self._cursor_position = x, len(self._entries) - 1
def _issue_generator(self):
x, y = self.cursor_position()
for index in range(len(self._entries) + 1):
row_index = (index + y) % len(self._entries)
row = self._entries[row_index]
for index_x, result in enumerate(row):
if (result.status == tools.Status.problem and
not (row_index == y and index_x <= x and
index != len(self._entries))):
yield result, (index_x, row_index)
def move_to_next_issue(self):
with contextlib.suppress(StopIteration):
issue, self._cursor_position = self._issue_generator().send(None)
def move_to_next_issue_of_tool(self):
current_tool = self.get_selection().tool
for issue, position in self._issue_generator():
if issue.tool == current_tool:
self._cursor_position = position
return
def refresh_result(self, result, only_completed=True):
if result.is_completed or not only_completed:
if result.is_completed:
self.completed_total -= 1
result.reset()
result.delete()
self.closest_placeholder_generator = None
self._jobs_added_event.set()
def refresh_tool(self, tool):
for row in self._entries:
for result in row:
if result.tool == tool:
self.refresh_result(result)
def clear_running(self):
for row in self._entries:
for result in row:
if result.status == tools.Status.running:
self.refresh_result(result)
def as_html(self):
html_parts = []
styles = set()
for row in self._entries:
html_row, styles_row = row.as_html()
html_parts.append(html_row)
styles.update(styles_row)
return ("<style>a { text-decoration:none; }</style><pre>" +
"<br>".join(html_parts) + "</pre>"), styles
class Log:
_GREY_BOLD_STYLE = termstr.CharStyle(termstr.Color.grey_100, is_bold=True)
_GREEN_STYLE = termstr.CharStyle(termstr.Color.lime)
def __init__(self, appearance_changed_event):
self._appearance_changed_event = appearance_changed_event
self.lines = []
self._appearance = None
def __getstate__(self):
state = self.__dict__.copy()
state["_appearance_changed_event"] = None
return state
def log_message(self, message, timestamp=None, char_style=None):
if isinstance(message, list):
message = [part[1] if isinstance(part, tuple) else part
for part in message]
message = fill3.join("", message)
if char_style is not None:
message = termstr.TermStr(message, char_style)
timestamp = (time.strftime("%H:%M:%S", time.localtime())
if timestamp is None else timestamp)
line = termstr.TermStr(timestamp, Log._GREY_BOLD_STYLE) + " " + message
if not sys.stdout.isatty():
print(line, flush=True)
return
self.lines.append(line)
self._appearance = None
self._appearance_changed_event.set()
def log_command(self, message, timestamp=None):
self.log_message(message, char_style=Log._GREEN_STYLE)
def appearance(self, dimensions):
if self._appearance is None or \
fill3.appearance_dimensions(self._appearance) != dimensions:
width, height = dimensions
del self.lines[:-height]
self._appearance = fill3.appearance_resize(self.lines, dimensions)
return self._appearance
def highlight_chars(str_, style, marker="*"):
parts = str_.split(marker)
highlighted_parts = [termstr.TermStr(part[0], style) + part[1:]
for part in parts[1:] if part != ""]
return fill3.join("", [parts[0]] + highlighted_parts)
def get_status_help():
return fill3.join("\n", ["Statuses:"] +
[" " + tools.STATUS_TO_TERMSTR[status] + " " + meaning
for status, meaning in tools.STATUS_MEANINGS])
class Help:
def __init__(self, summary, screen):
self.summary = summary
self.screen = screen
help_text = fill3.join("\n", [__doc__, KEYS_DOC, get_status_help()])
self.view = fill3.View.from_widget(fill3.Text(help_text))
self.widget = fill3.Border(self.view, title="Help")
portal = self.view.portal
self.key_map = {"h": self._exit_help, terminal.UP_KEY: portal.scroll_up,
terminal.DOWN_KEY: portal.scroll_down,
terminal.LEFT_KEY: portal.scroll_left,
terminal.RIGHT_KEY: portal.scroll_right,
"q": self._exit_help, terminal.ESC: self._exit_help}
def _exit_help(self):
self.screen._is_help_visible = False
def on_mouse_input(self, term_code, appearance_changed_event):
event = terminal.decode_mouse_input(term_code)
if event[1] == terminal.WHEEL_UP_MOUSE:
self.view.portal.scroll_up()
appearance_changed_event.set()
elif event[1] == terminal.WHEEL_DOWN_MOUSE:
self.view.portal.scroll_down()
appearance_changed_event.set()
def on_keyboard_input(self, term_code, appearance_changed_event):
action = (self.key_map.get(term_code) or
self.key_map.get(term_code.lower()))
if action is not None:
action()
appearance_changed_event.set()
def appearance(self, dimensions):
return self.widget.appearance(dimensions)
class Listing:
def __init__(self, view):
self.view = view
self.last_dimensions = None
def appearance(self, dimensions):
self.last_dimensions = dimensions
return self.view.appearance(dimensions)
class Screen:
def __init__(self, summary, log, appearance_changed_event, main_loop):
self._summary = summary
self._log = log
self._appearance_changed_event = appearance_changed_event
self._main_loop = main_loop
self._is_summary_focused = True
self.workers = None
self._is_listing_portrait = True
self._is_log_visible = True
self._is_help_visible = False
self._is_fullscreen = False
self._make_widgets()
self._last_mouse_position = 0, 0
def __getstate__(self):
state = self.__dict__.copy()
state["_appearance_changed_event"] = None
state["_main_loop"] = None
state["workers"] = None
return state
def make_workers(self, worker_count, is_being_tested, compression):
workers = []
for index in range(worker_count):
worker_ = worker.Worker(is_being_tested, compression)
workers.append(worker_)
future = worker_.job_runner(self, self._summary, self._log,
self._summary._jobs_added_event,
self._appearance_changed_event)
worker_.future = future
self.workers = workers
def stop_workers(self):
for worker_ in self.workers:
if worker_.result is not None:
worker_.result.reset()
worker_.kill()
def _partition(self, percentage, widgets, length):
smaller_length = max(int(length * (percentage / 100)), 10)
return [smaller_length, length - smaller_length]
def _make_widgets(self):
self._help_widget = Help(self._summary, self)
root_path = os.path.basename(self._summary._root_path)
summary = fill3.Border(self._summary, title="Summary of " + root_path)
self._summary_border = summary
try:
selected_widget = self._summary.get_selection()
result_widget = selected_widget.result
except IndexError:
result_widget = fill3.Text("Nothing selected")
self._view = fill3.View.from_widget(result_widget)
self._listing = fill3.Border(Listing(self._view))
log = fill3.Border(self._log, title="Log",
characters=Screen._DIMMED_BORDER)
quarter_cut = functools.partial(self._partition, 25)
golden_cut = functools.partial(self._partition, 38.198)
three_quarter_cut = functools.partial(self._partition, 75)
port_log = fill3.Row([fill3.Column([summary, log], three_quarter_cut),
self._listing], golden_cut)
land_log = fill3.Column([fill3.Row([summary, log]), self._listing],
quarter_cut)
port_no_log = fill3.Row([summary, self._listing], golden_cut)
land_no_log = fill3.Column([summary, self._listing], quarter_cut)
self._layouts = [[land_no_log, port_no_log], [land_log, port_log]]
self._set_focus()
def toggle_help(self):
self._is_help_visible = not self._is_help_visible
def toggle_log(self):
self._is_log_visible = not self._is_log_visible
def toggle_window_orientation(self):
self._is_listing_portrait = not self._is_listing_portrait
def _move_listing(self, vector):
dx, dy = vector
selected_widget = self._summary.get_selection()
x, y = selected_widget.scroll_position
widget_width, widget_height = fill3.appearance_dimensions(
selected_widget.result.appearance_min())
listing_width, listing_height = (self._listing.widget.
last_dimensions)
listing_width -= 1 # scrollbars
listing_height -= 1
x = min(x + dx, max(widget_width - listing_width, 0))
y = min(y + dy, max(widget_height - listing_height, 0))
x = max(0, x)
y = max(0, y)
selected_widget.scroll_position = x, y
def cursor_up(self):
(self._summary.cursor_up() if self._is_summary_focused
else self._move_listing(_UP))
def cursor_down(self):
(self._summary.cursor_down() if self._is_summary_focused
else self._move_listing(_DOWN))
def cursor_right(self):
(self._summary.cursor_right() if self._is_summary_focused
else self._move_listing(_RIGHT))
def cursor_left(self):
(self._summary.cursor_left() if self._is_summary_focused
else self._move_listing(_LEFT))
def cursor_page_up(self):
(self._summary.cursor_page_up() if self._is_summary_focused
else self.listing_page_up())
def cursor_page_down(self):
(self._summary.cursor_page_down() if self._is_summary_focused
else self.listing_page_down())
def cursor_end(self):
(self._summary.cursor_end() if self._is_summary_focused
else self._page_listing(_RIGHT))
def cursor_home(self):
(self._summary.cursor_home() if self._is_summary_focused
else self._page_listing(_LEFT))
def _page_listing(self, vector):
dx, dy = vector
listing_width, listing_height = self._listing.widget.last_dimensions
self._move_listing((dx * (listing_width // 2),
dy * (listing_height // 2)))
def listing_page_up(self):
self._page_listing(_UP)
def listing_page_down(self):
self._page_listing(_DOWN)
def move_to_next_issue(self):
self._summary.move_to_next_issue()
def move_to_next_issue_of_tool(self):
self._summary.move_to_next_issue_of_tool()
def edit_file(self):
if self.editor_command is None:
self._log.log_message("An editor has not been defined. "
"See option -e.")
else:
path = self._summary.get_selection().path
path_colored = tools.path_colored(path)
line_num = (self._summary.get_selection().entry[0].
scroll_position[1] + 1)
self._log.log_message([in_green("Editing "), path_colored,
in_green(f" at line {line_num}…")])
subprocess.Popen(f"{self.editor_command} +{line_num} {path}",
shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
def toggle_status_style(self):
self._summary.toggle_status_style(self._log)
def toggle_order(self):
self._summary.is_directory_sort = not self._summary.is_directory_sort
sort_order = ("directory then type" if self._summary.is_directory_sort
else "type then directory")
self._log.log_command(f"Sorting files by {sort_order}.")
with self._summary.keep_selection():
self._summary.sort_entries()
def quit_(self):
os.kill(os.getpid(), signal.SIGINT)
def refresh(self):
selection = self._summary.get_selection()
tool_name = tools.tool_name_colored(selection.tool, selection.path)
path_colored = tools.path_colored(selection.path)
self._log.log_message([in_green("Refreshing "), tool_name,
in_green(" result of "), path_colored,
in_green("…")])
self._summary.refresh_result(selection)
def refresh_tool(self):
selection = self._summary.get_selection()
tool_name = tools.tool_name_colored(selection.tool, selection.path)
self._log.log_message([in_green("Refreshing all results of "),
tool_name, in_green("…")])
self._summary.refresh_tool(selection.tool)
_DIMMED_BORDER = [termstr.TermStr(part).fg_color(termstr.Color.grey_100)
for part in fill3.Border.THIN]
def _set_focus(self):
focused, unfocused = fill3.Border.THICK, Screen._DIMMED_BORDER
self._summary_border.set_style(focused if self._is_summary_focused
else unfocused)
self._listing.set_style(unfocused if self._is_summary_focused
else focused)
def toggle_focus(self):
self._is_summary_focused = not self._is_summary_focused
self._set_focus()
def toggle_fullscreen(self):
self._is_fullscreen = not self._is_fullscreen
def xdg_open(self):
path = self._summary.get_selection().path
path_colored = tools.path_colored(path)
self._log.log_message([in_green("Opening "), path_colored,
in_green("…")])
subprocess.Popen(["xdg-open", path], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
def save(self):
worker.Worker.unsaved_jobs_total = 0
pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
open_compressed = functools.partial(gzip.open, compresslevel=1)
tools.dump_pickle_safe(self, pickle_path, open=open_compressed)
def _select_entry_at_position(self, x, y, view_width, view_height):
border_width = 1
if x < border_width or y < border_width or x > view_width or \
y > view_height:
return
view_x, view_y = self._summary._view_widget.portal.position
column_index = x - border_width + view_x
row_index = y - border_width + view_y
if row_index >= len(self._summary._entries):
return
row = self._summary._entries[row_index]
if column_index < 0 or column_index >= len(row):
return
self._summary._cursor_position = column_index, row_index
def _is_switching_focus(self, x, y, view_width, view_height):
return (not self._is_fullscreen and
(self._is_listing_portrait and
(x > view_width and
self._is_summary_focused or x <= view_width and
not self._is_summary_focused) or
not self._is_listing_portrait and
(y > view_height and
self._is_summary_focused or y <= view_height and
not self._is_summary_focused)))
def on_mouse_input(self, term_code):
if self._is_help_visible:
self._help_widget.on_mouse_input(
term_code, self._appearance_changed_event)
return
event = terminal.decode_mouse_input(term_code)
if event[0] not in [terminal.PRESS_MOUSE, terminal.DRAG_MOUSE]:
return
x, y = event[2:4]
if event[0] == terminal.DRAG_MOUSE:
last_x, last_y = self._last_mouse_position
dx, dy = x - last_x, y - last_y
if self._is_summary_focused:
self._summary.scroll(dx, dy)
else:
self._move_listing((-dx, -dy))
else: # Mouse press
if event[1] == terminal.WHEEL_UP_MOUSE:
self.listing_page_up()
elif event[1] == terminal.WHEEL_DOWN_MOUSE:
self.listing_page_down()
else:
view_width, view_height = \
self._summary._view_widget.portal.last_dimensions
if self._is_switching_focus(x, y, view_width, view_height):
self.toggle_focus()
else:
self._select_entry_at_position(
x, y, view_width, view_height)
self._last_mouse_position = x, y
self._appearance_changed_event.set()
def on_keyboard_input(self, term_code):
if self._is_help_visible:
self._help_widget.on_keyboard_input(
term_code, self._appearance_changed_event)
return
action = (Screen._KEY_MAP.get(term_code) or
Screen._KEY_MAP.get(term_code.lower()))
if action is not None:
action(self)
self._appearance_changed_event.set()
def _fix_listing(self):
widget = self._summary.get_selection()
view = self._listing.widget.view
view.position = widget.scroll_position
x, y = view.position
view.widget = widget.result
tool_name = tools.tool_name_colored(widget.tool, widget.path)
divider = " " + self._listing.top * 2 + " "
self._listing.title = (
tools.path_colored(widget.path) + divider + tool_name + " " +
tools.STATUS_TO_TERMSTR[widget.status] + divider +
"line " + str(y+1))
_STATUS_BAR = highlight_chars(
" *help *quit *t*a*b:focus *orient *log *edit *next *sort"
" *refresh *fullscreen *xdg-open", Log._GREEN_STYLE)
@functools.lru_cache()
def _get_partial_bar_chars(self, bar_transparency):
bar_color = blend_color(termstr.Color.black, termstr.Color.white,
bar_transparency)
return [termstr.TermStr(char).fg_color(bar_color).
bg_color(termstr.Color.black)
for char in fill3.ScrollBar._PARTIAL_CHARS[1]]
@functools.lru_cache(maxsize=2)
def _get_status_bar_appearance(self, width, progress_bar_size):
bar_transparency = 0.7
bar = self._STATUS_BAR.center(width)[:width]
fraction, whole = math.modf(progress_bar_size)
whole = int(whole)
if whole < len(bar) and bar[whole].data == " ":
left_part = bar[:whole]
right_part = (self._get_partial_bar_chars(bar_transparency)
[int(fraction * 8)] + bar[whole+1:])
else:
progress_bar_size = round(progress_bar_size)
left_part = bar[:progress_bar_size]
right_part = bar[progress_bar_size:]
return [highlight_str(left_part, termstr.Color.white, bar_transparency)
+ right_part]
def _get_status_bar(self, width):
incomplete = self._summary.result_total - self._summary.completed_total
progress_bar_size = width if self._summary.result_total == 0 else \
max(0, width * incomplete / self._summary.result_total)
return self._get_status_bar_appearance(width, progress_bar_size)
def appearance(self, dimensions):
if len(self._summary._entries) > 0:
self._fix_listing()
if self._is_help_visible:
body = self._help_widget
elif self._is_fullscreen:
body = (self._summary_border if self._is_summary_focused
else self._listing)
else:
body = (self._layouts[self._is_log_visible]
[self._is_listing_portrait])
width, height = max(dimensions[0], 10), max(dimensions[1], 20)
result = (body.appearance((width, height-1)) +
self._get_status_bar(width))
return (result if (width, height) == dimensions
else fill3.appearance_resize(result, dimensions))
_KEY_MAP = {
"o": toggle_window_orientation, "l": toggle_log, "h": toggle_help,
terminal.UP_KEY: cursor_up, terminal.DOWN_KEY: cursor_down,
terminal.LEFT_KEY: cursor_left, terminal.RIGHT_KEY: cursor_right,
terminal.PAGE_DOWN_KEY: cursor_page_down,
terminal.PAGE_UP_KEY: cursor_page_up, "s": toggle_order,
terminal.HOME_KEY: cursor_home, terminal.END_KEY: cursor_end,
"n": move_to_next_issue, "N": move_to_next_issue_of_tool,
"e": edit_file, "q": quit_, terminal.ESC: quit_, "r": refresh,
"R": refresh_tool, "\t": toggle_focus, "f": toggle_fullscreen,
"x": xdg_open}
def setup_inotify(root_path, loop, on_filesystem_event, exclude_filter):
watch_manager = pyinotify.WatchManager()
event_mask = (pyinotify.IN_CREATE | pyinotify.IN_DELETE |
pyinotify.IN_CLOSE_WRITE | pyinotify.IN_ATTRIB |
pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO)
watch_manager.add_watch(root_path, event_mask, rec=True, auto_add=True,
proc_fun=on_filesystem_event,
exclude_filter=exclude_filter, quiet=False)
return pyinotify.AsyncioNotifier(watch_manager, loop,
callback=lambda notifier: None)
def load_state(pickle_path, jobs_added_event, appearance_changed_event,
root_path, loop):
is_first_run = True
try:
with gzip.open(pickle_path, "rb") as file_:
screen = pickle.load(file_)
except (FileNotFoundError, AttributeError):
summary = Summary(root_path, jobs_added_event)
log = Log(appearance_changed_event)
screen = Screen(summary, log, appearance_changed_event, loop)
else:
is_first_run = False
screen._appearance_changed_event = appearance_changed_event
screen._main_loop = loop
summary = screen._summary
summary._jobs_added_event = jobs_added_event
summary._root_path = root_path
summary.clear_running()
log = screen._log
log._appearance_changed_event = appearance_changed_event
return summary, screen, log, is_first_run
def on_filesystem_event(event, summary, root_path, appearance_changed_event):
path = list(fix_paths(root_path, [event.pathname]))[0]
if is_path_excluded(path[2:]):
return
inotify_actions = {pyinotify.IN_CREATE: summary.on_file_added,
pyinotify.IN_MOVED_TO: summary.on_file_added,
pyinotify.IN_DELETE: summary.on_file_deleted,
pyinotify.IN_MOVED_FROM: summary.on_file_deleted,
pyinotify.IN_ATTRIB: summary.on_file_modified,
pyinotify.IN_CLOSE_WRITE: summary.on_file_modified}
if event.mask not in inotify_actions:
return
try:
inotify_actions[event.mask](path)
except Exception:
tools.log_error()
raise KeyboardInterrupt
appearance_changed_event.set()
def main(root_path, loop, worker_count=None, editor_command=None, theme=None,
compression=None, is_being_tested=False):
if worker_count is None:
worker_count = max(multiprocessing.cpu_count() - 1, 1)
if theme is None:
theme = "native"
if compression is None:
compression = "gzip"
os.environ["PYGMENT_STYLE"] = theme
pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
jobs_added_event = asyncio.Event()
appearance_changed_event = asyncio.Event()
summary, screen, log, is_first_run = load_state(
pickle_path, jobs_added_event, appearance_changed_event, root_path,
loop)
screen.editor_command = editor_command
log.log_message("Program started.")
jobs_added_event.set()
callback = lambda event: on_filesystem_event(event, summary, root_path,
appearance_changed_event)
notifier = setup_inotify(root_path, loop, callback, is_path_excluded)
try:
log.log_message(f"Starting workers ({worker_count}) …")
screen.make_workers(worker_count, is_being_tested, compression)
def exit_loop():
log.log_command("Exiting…")
time.sleep(0.05)
screen.stop_workers()
loop.stop()
loop.create_task(summary.sync_with_filesystem(
appearance_changed_event, log))
        for worker_ in screen.workers:
            loop.create_task(worker_.future)
if sys.stdout.isatty():
with fill3.context(loop, appearance_changed_event, screen,
exit_loop=exit_loop):
loop.run_forever()
log.log_message("Program stopped.")
else:
try:
loop.run_forever()
except KeyboardInterrupt:
screen.stop_workers()
loop.stop()
finally:
notifier.stop()
if summary.is_loaded:
screen.save()
@contextlib.contextmanager
def chdir(path):
old_cwd = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(old_cwd)
def manage_cache(root_path):
cache_path = os.path.join(root_path, tools.CACHE_PATH)
timestamp_path = os.path.join(cache_path, "creation_time")
if os.path.exists(cache_path):
timestamp = os.stat(timestamp_path).st_mtime
for resource_path in ["__main__.py", "tools.py", "tools.toml"]:
with importlib.resources.path(eris, resource_path) as resource:
if resource.stat().st_mtime > timestamp:
print("Eris has been updated, so clearing the cache and"
" recalculating all results…")
shutil.rmtree(cache_path)
break
if not os.path.exists(cache_path):
os.mkdir(cache_path)
open(timestamp_path, "w").close()
def print_tool_info():
extensions_for_tool = {}
for extensions, tools_ in tools.TOOLS_FOR_EXTENSIONS:
for extension in extensions:
for tool in tools_:
extensions_for_tool.setdefault(
tool, {extension}).add(extension)
for tool in sorted(tools.tools_all(), key=lambda t: t.__name__):
print(termstr.TermStr(tool.__name__).bold()
if tools.is_tool_available(tool)
else termstr.TermStr(tool.__name__).fg_color(termstr.Color.red)
+ " (not available) ")
print("url:", tool.url)
extensions = list(extensions_for_tool.get(tool, {"*"}))
print("extensions:", ", ".join(extensions))
if hasattr(tool, "command"):
print(f"command: {tool.command} foo.{extensions[0]}")
else:
print("function:", "eris.tools." + tool.__name__)
print()
def check_arguments():
cmdline_help = __doc__ + USAGE.replace("*", "")
arguments = docopt.docopt(cmdline_help, help=False)
if arguments["--help"]:
print(cmdline_help)
sys.exit(0)
if arguments["--info"]:
print_tool_info()
sys.exit(0)
worker_count = None
try:
if arguments["--workers"] is not None:
worker_count = int(arguments["--workers"])
if worker_count == 0:
print("There must be at least one worker.")
sys.exit(1)
except ValueError:
print("--workers requires a number.")
sys.exit(1)
root_path = os.path.abspath(arguments["<directory>"])
if not os.path.exists(root_path):
print("File does not exist:", root_path)
sys.exit(1)
if not os.path.isdir(root_path):
print("File is not a directory:", root_path)
sys.exit(1)
if arguments["--theme"] is not None:
themes = list(pygments.styles.get_all_styles())
if arguments["--theme"] not in themes:
print("--theme must be one of:", " ".join(themes))
sys.exit(1)
if arguments["--compression"] is not None:
compressions = ["gzip", "lzma", "bz2", "none"]
if arguments["--compression"] not in compressions:
print("--compression must be one of:", " ".join(compressions))
sys.exit(1)
editor_command = arguments["--editor"] or os.environ.get("EDITOR", None)\
or os.environ.get("VISUAL", None)
return root_path, worker_count, editor_command, arguments["--theme"], \
arguments["--compression"]
def inotify_watches_exceeded():
print("Error: This codebase has too many directories to be monitored.")
print(" Fix by increasing the kernel parameter user.max_inotify_watches "
"to exceed the number of directories.")
print(" e.g. 'sudo sysctl user.max_inotify_watches=200000'")
def entry_point():
root_path, worker_count, editor_command, theme, compression = \
check_arguments()
manage_cache(root_path)
with terminal.terminal_title("eris: " + os.path.basename(root_path)):
with chdir(root_path): # FIX: Don't change directory if possible.
loop = asyncio.get_event_loop()
try:
main(root_path, loop, worker_count, editor_command, theme,
compression)
except pyinotify.WatchManagerError:
inotify_watches_exceeded()
if __name__ == "__main__":
entry_point()
| ahamilton/vigil | eris/__main__.py | Python | artistic-2.0 | 48,571 |
from hamcrest import assert_that, contains, has_properties
from mammoth.styles.parser.tokeniser import tokenise
def test_unknown_tokens_are_tokenised():
assert_tokens("~", is_token("unknown", "~"))
def test_empty_string_is_tokenised_to_end_of_file_token():
assert_tokens("")
def test_whitespace_is_tokenised():
assert_tokens(" \t\t ", is_token("whitespace", " \t\t "))
def test_identifiers_are_tokenised():
assert_tokens("Overture", is_token("identifier", "Overture"))
def test_escape_sequences_in_identifiers_are_tokenised():
assert_tokens(r"\:", is_token("identifier", r"\:"))
def test_integers_are_tokenised():
assert_tokens("123", is_token("integer", "123"))
def test_strings_are_tokenised():
assert_tokens("'Tristan'", is_token("string", "'Tristan'"))
def test_escape_sequences_in_strings_are_tokenised():
assert_tokens(r"'Tristan\''", is_token("string", r"'Tristan\''"))
def test_unterminated_strings_are_tokenised():
assert_tokens("'Tristan", is_token("unterminated string", "'Tristan"))
def test_arrows_are_tokenised():
assert_tokens("=>=>", is_token("symbol", "=>"), is_token("symbol", "=>"))
def test_dots_are_tokenised():
assert_tokens(".", is_token("symbol", "."))
def test_colons_are_tokenised():
assert_tokens("::", is_token("symbol", ":"), is_token("symbol", ":"))
def test_greater_thans_are_tokenised():
assert_tokens(">>", is_token("symbol", ">"), is_token("symbol", ">"))
def test_equals_are_tokenised():
assert_tokens("==", is_token("symbol", "="), is_token("symbol", "="))
def test_open_parens_are_tokenised():
assert_tokens("((", is_token("symbol", "("), is_token("symbol", "("))
def test_close_parens_are_tokenised():
assert_tokens("))", is_token("symbol", ")"), is_token("symbol", ")"))
def test_open_square_brackets_are_tokenised():
assert_tokens("[[", is_token("symbol", "["), is_token("symbol", "["))
def test_close_square_brackets_are_tokenised():
assert_tokens("]]", is_token("symbol", "]"), is_token("symbol", "]"))
def test_choices_are_tokenised():
assert_tokens("||", is_token("symbol", "|"), is_token("symbol", "|"))
def test_bangs_are_tokenised():
assert_tokens("!!", is_token("symbol", "!"), is_token("symbol", "!"))
def test_can_tokenise_multiple_tokens():
assert_tokens("The Magic Position",
is_token("identifier", "The"),
is_token("whitespace", " "),
is_token("identifier", "Magic"),
is_token("whitespace", " "),
is_token("identifier", "Position"),
)
def assert_tokens(string, *expected):
expected = list(expected)
expected.append(is_token("end", ""))
assert_that(
tokenise(string),
contains(*expected),
)
def is_token(token_type, value):
return has_properties(
type=token_type,
value=value,
)
| mwilliamson/python-mammoth | tests/styles/parser/tokeniser_tests.py | Python | bsd-2-clause | 2,864 |
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# VORONOI.ACCRETION
# Laura L Watkins [[email protected]]
# - converted from IDL code by Michele Cappellari (bin2d_accretion)
# -----------------------------------------------------------------------------
from numpy import *
from .bin_roundness import bin_roundness
def accretion(x, y, signal, noise, targetsn, pixelsize=False, quiet=False):
"""
Initial binning -- steps i-v of eq 5.1 of Cappellari & Copin (2003)
INPUTS:
x : x coordinates of pixels to bin
y : y coordinates of pixels to bin
signal : signal associated with each pixel
noise : noise (1-sigma error) associated with each pixel
      targetsn : desired signal-to-noise ratio in final 2d-binned data
OPTIONS:
pixelsize : pixel scale of the input data
quiet : if set, suppress printed outputs
"""
n = x.size
clas = zeros(x.size, dtype="<i8") # bin number of each pixel
good = zeros(x.size, dtype="<i8") # =1 if bin accepted as good
# for each point, find distance to all other points and select minimum
# (robust but slow way of determining the pixel size of unbinned data)
if not pixelsize:
dx = 1.e30
for j in range(x.size-1):
d = (x[j] - x[j+1:])**2 + (y[j] - y[j+1:])**2
dx = min(d.min(), dx)
pixelsize = sqrt(dx)
# start from the pixel with highest S/N
sn = (signal/noise).max()
currentbin = (signal/noise).argmax()
# rough estimate of the expected final bin number
# This value is only used to have a feeling of the expected
# remaining computation time when binning very big dataset.
wh = where(signal/noise<targetsn)
npass = size(where(signal/noise >= targetsn))
maxnum = int(round( (signal[wh]**2/noise[wh]**2).sum()/targetsn**2 ))+npass
# first bin assigned CLAS = 1 -- with N pixels, get at most N bins
for ind in range(1, n+1):
if not quiet:
print(" bin: {:} / {:}".format(ind, maxnum))
# to start the current bin is only one pixel
clas[currentbin] = ind
# centroid of bin
xbar = x[currentbin]
ybar = y[currentbin]
while True:
# stop if all pixels are binned
unbinned = where(clas == 0)[0]
if unbinned.size == 0: break
# find unbinned pixel closest to centroid of current bin
dist = (x[unbinned]-xbar)**2 + (y[unbinned]-ybar)**2
mindist = dist.min()
k = dist.argmin()
# find the distance from the closest pixel to the current bin
mindist = ((x[currentbin]-x[unbinned[k]])**2 \
+ (y[currentbin]-y[unbinned[k]])**2).min()
# estimate roundness of bin with candidate pixel added
nextbin = append(currentbin, unbinned[k])
roundness = bin_roundness(x[nextbin], y[nextbin], pixelsize)
# compute sn of bin with candidate pixel added
snold = sn
sn = signal[nextbin].sum()/sqrt((noise[nextbin]**2).sum())
# Test whether the CANDIDATE pixel is connected to the
# current bin, whether the POSSIBLE new bin is round enough
# and whether the resulting S/N would get closer to targetsn
if sqrt(mindist) > 1.2*pixelsize or roundness > 0.3 \
or abs(sn-targetsn) > abs(snold-targetsn):
if (snold > 0.8*targetsn):
good[currentbin] = 1
break
# if all the above tests are negative then accept the CANDIDATE
# pixel, add it to the current bin, and continue accreting pixels
clas[unbinned[k]] = ind
currentbin = nextbin
# update the centroid of the current bin
xbar = x[currentbin].mean()
ybar = y[currentbin].mean()
# get the centroid of all the binned pixels
binned = where(clas != 0)[0]
unbinned = where(clas == 0)[0]
# stop if all pixels are binned
if unbinned.size == 0: break
xbar = x[binned].mean()
ybar = y[binned].mean()
# find the closest unbinned pixel to the centroid of all
# the binned pixels, and start a new bin from that pixel
k = ((x[unbinned]-xbar)**2 + (y[unbinned]-ybar)**2).argmin()
currentbin = unbinned[k] # the bin is initially made of one pixel
sn = signal[currentbin] / noise[currentbin]
# set to zero all bins that did not reach the target S/N
clas = clas*good
return clas
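# Illustrative self-test (not part of the original module): bins a synthetic
# 20x20 pixel grid with uniform noise; the values and the S/N target are
# arbitrary demonstration choices.
if __name__ == "__main__":
    xx, yy = meshgrid(arange(20.), arange(20.))
    signal = 1.0 + random.rand(400)
    noise = full(400, 0.5)
    clas = accretion(xx.ravel(), yy.ravel(), signal, noise, targetsn=5.0,
                     quiet=True)
    print("pixels in accepted bins: {:} / {:}".format(
        int((clas > 0).sum()), clas.size))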
| lauralwatkins/voronoi | voronoi/accretion.py | Python | bsd-2-clause | 4,862 |
"""
Tests for priors.
"""
# pylint: disable=missing-docstring
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import numpy.testing as nt
import scipy.optimize as spop
import reggie.core.priors as priors
### BASE TEST CLASS ###########################################################
class PriorTest(object):
def __init__(self, prior):
self.prior = prior
def test_repr(self):
_ = repr(self.prior)
def test_bounds(self):
bshape = np.shape(self.prior.bounds)
assert bshape == (2,) or bshape == (self.prior.ndim, 2)
def test_sample(self):
assert np.shape(self.prior.sample()) == (self.prior.ndim,)
assert np.shape(self.prior.sample(5)) == (5, self.prior.ndim)
def test_logprior(self):
for theta in self.prior.sample(5, 0):
g1 = spop.approx_fprime(theta, self.prior.get_logprior, 1e-8)
_, g2 = self.prior.get_logprior(theta, True)
nt.assert_allclose(g1, g2, rtol=1e-6)
### PER-INSTANCE TESTS ########################################################
class TestUniform(PriorTest):
def __init__(self):
PriorTest.__init__(self, priors.Uniform([0, 0], [1, 1]))
class TestNormal(PriorTest):
def __init__(self):
PriorTest.__init__(self, priors.Normal([0, 0], [1, 1]))
class TestLogNormal(PriorTest):
def __init__(self):
PriorTest.__init__(self, priors.LogNormal([0, 0], [1, 1]))
def test_uniform():
nt.assert_raises(ValueError, priors.Uniform, 0, -1)
| mwhoffman/reggie | tests/test_core_priors.py | Python | bsd-2-clause | 1,591 |
#!/usr/bin/python
"""nrvr.xml.etree - Utilities for xml.etree.ElementTree
The main class provided by this module is ElementTreeUtil.
To be expanded as needed.
Idea and first implementation - Leo Baschy <srguiwiz12 AT nrvr DOT com>
Public repository - https://github.com/srguiwiz/nrvr-commander
Copyright (c) Nirvana Research 2006-2015.
Simplified BSD License"""
import copy
import xml.etree.ElementTree
class ElementTreeUtil(object):
"""Utilities for xml.etree.ElementTree.
Written for Python 2.6."""
@classmethod
def indent(cls, element, indent=" ", level=0):
"""Set whitespace for indentation.
element
an xml.etree.ElementTree.Element instance.
indent
the additional indent for each level down.
level
increases on recursive calls.
Need not be set on regular use."""
levelIndent = "\n" + level * indent
if len(element):
# element has child element
if not element.text or not element.text.strip():
# element has no text or text is only whitespace
element.text = levelIndent + indent
for child in element:
# child indented one level more
cls.indent(child, indent=indent, level=level + 1)
if not child.tail or not child.tail.strip():
# last child has no tail or tail is only whitespace
child.tail = levelIndent
if level > 0:
# any level except top level
if not element.tail or not element.tail.strip():
# element has no tail or tail is only whitespace
element.tail = levelIndent
else:
# top level
element.tail = ""
@classmethod
def unindent(cls, element):
"""Remove whitespace from indentation.
element
an xml.etree.ElementTree.Element instance."""
if len(element):
# element has child element
if not element.text or not element.text.strip():
# element has no text or text is only whitespace
element.text = ""
for child in element:
# child indented one level more
cls.unindent(child)
if not element.tail or not element.tail.strip():
# element has no tail or tail is only whitespace
element.tail = ""
@classmethod
def tostring(cls, element, indent=" ", xml_declaration=True, encoding="utf-8"):
"""Generate a string representation.
element
an xml.etree.ElementTree.Element instance.
Tolerates xml.etree.ElementTree.ElementTree.
indent
the additional indent for each level down.
If None then unindented.
xml_declaration
whether with XML declaration <?xml version="1.0" encoding="utf-8"?>."""
# tolerate tree instead of element
if isinstance(element, xml.etree.ElementTree.ElementTree):
# if given a tree
element = element.getroot()
element = copy.deepcopy(element)
if indent is not None:
cls.indent(element, indent)
else:
cls.unindent(element)
string = xml.etree.ElementTree.tostring(element, encoding=encoding)
if xml_declaration:
string = '<?xml version="1.0" encoding="{0}"?>\n'.format(encoding) + string
return string
@classmethod
def simpledict(cls, element):
"""Generate a dictionary from child element tags and text.
element
an xml.etree.ElementTree.Element instance."""
children = element.findall('*')
dictionary = {}
for child in children:
dictionary[child.tag] = child.text
return dictionary
if __name__ == "__main__":
import sys
tree = xml.etree.ElementTree.ElementTree(xml.etree.ElementTree.XML \
("""<e1 a1="A1">
<e2 a2="A2">E2</e2>
<e3 a3="A3">E3</e3>
<e4><e5/></e4>
<e6/></e1>"""))
tree.write(sys.stdout)
print # a newline after the write of unindented XML
ElementTreeUtil.indent(tree.getroot())
tree.write(sys.stdout)
    print # a newline after the write of indented XML
print xml.etree.ElementTree.tostring(tree.getroot())
ElementTreeUtil.unindent(tree.getroot())
tree.write(sys.stdout)
print # a newline after the write of unindented XML
print ElementTreeUtil.tostring(tree)
print ElementTreeUtil.tostring(tree.getroot())
print ElementTreeUtil.tostring(tree, indent=None)
| srguiwiz/nrvr-commander | src/nrvr/xml/etree.py | Python | bsd-2-clause | 4,914 |
# -*- coding: utf-8 -*-
from amnesia.exc import AmnesiaError
class PasteError(AmnesiaError):
def __init__(self, container):
        super().__init__()
self.container = container
def __str__(self):
return 'Paste into container {} failed'.format(self.container.id)
| silenius/amnesia | amnesia/modules/folder/exc.py | Python | bsd-2-clause | 280 |
from __future__ import unicode_literals
import os
import sys
import threading
from contextlib import contextmanager
from django.contrib.sites.models import Site
from mezzanine.conf import settings
from mezzanine.core.request import current_request
from mezzanine.utils.conf import middlewares_or_subclasses_installed
SITE_PERMISSION_MIDDLEWARE = \
"mezzanine.core.middleware.SitePermissionMiddleware"
def current_site_id():
"""
Responsible for determining the current ``Site`` instance to use
when retrieving data for any ``SiteRelated`` models. If we're inside an
override_current_site_id context manager, return the overriding site ID.
Otherwise, try to determine the site using the following methods in order:
- ``site_id`` in session. Used in the admin so that admin users
can switch sites and stay on the same domain for the admin.
- The id of the Site object corresponding to the hostname in the current
request. This result is cached.
- ``MEZZANINE_SITE_ID`` environment variable, so management
commands or anything else outside of a request can specify a
site.
- ``SITE_ID`` setting.
If a current request exists and the current site is not overridden, the
site ID is stored on the request object to speed up subsequent calls.
"""
if hasattr(override_current_site_id.thread_local, "site_id"):
return override_current_site_id.thread_local.site_id
from mezzanine.utils.cache import cache_installed, cache_get, cache_set
request = current_request()
site_id = getattr(request, "site_id", None)
if request and not site_id:
site_id = request.session.get("site_id", None)
if not site_id:
domain = request.get_host().lower()
if cache_installed():
# Don't use Mezzanine's cache_key_prefix here, since it
# uses this very function we're in right now to create a
# per-site cache key.
bits = (settings.CACHE_MIDDLEWARE_KEY_PREFIX, domain)
cache_key = "%s.site_id.%s" % bits
site_id = cache_get(cache_key)
if not site_id:
try:
site = Site.objects.get(domain__iexact=domain)
except Site.DoesNotExist:
pass
else:
site_id = site.id
if cache_installed():
cache_set(cache_key, site_id)
if not site_id:
site_id = os.environ.get("MEZZANINE_SITE_ID", settings.SITE_ID)
if request and site_id and not getattr(settings, "TESTING", False):
request.site_id = site_id
return site_id
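# Illustrative note (not in the original module): outside a request, e.g. in
# a management command, the site can be pinned through the environment
# variable that current_site_id() falls back to:
#   os.environ["MEZZANINE_SITE_ID"] = "2"
#   site_id = current_site_id()  # "2", absent a request/session override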
@contextmanager
def override_current_site_id(site_id):
"""
Context manager that overrides the current site id for code executed
within it. Used to access SiteRelated objects outside the current site.
"""
override_current_site_id.thread_local.site_id = site_id
yield
del override_current_site_id.thread_local.site_id
override_current_site_id.thread_local = threading.local()
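# Usage sketch (illustrative; ``Page`` and the site id are assumptions):
#   with override_current_site_id(2):
#       titles = [p.title for p in Page.objects.all()]  # queried as site 2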
def has_site_permission(user):
"""
Checks if a staff user has staff-level access for the current site.
The actual permission lookup occurs in ``SitePermissionMiddleware``
which then marks the request with the ``has_site_permission`` flag,
so that we only query the db once per request, so this function
serves as the entry point for everything else to check access. We
also fall back to an ``is_staff`` check if the middleware is not
installed, to ease migration.
"""
if not middlewares_or_subclasses_installed([SITE_PERMISSION_MIDDLEWARE]):
return user.is_staff and user.is_active
return getattr(user, "has_site_permission", False)
def host_theme_path():
"""
Returns the directory of the theme associated with the given host.
"""
# Set domain to None, which we'll then query for in the first
# iteration of HOST_THEMES. We use the current site_id rather
# than a request object here, as it may differ for admin users.
domain = None
for (host, theme) in settings.HOST_THEMES:
if domain is None:
domain = Site.objects.get(id=current_site_id()).domain
if host.lower() == domain.lower():
try:
__import__(theme)
module = sys.modules[theme]
except ImportError:
pass
else:
return os.path.dirname(os.path.abspath(module.__file__))
return ""
| wbtuomela/mezzanine | mezzanine/utils/sites.py | Python | bsd-2-clause | 4,584 |
import itertools
import unittest
from parameterized import parameterized
import torch
import torch.nn as nn
from nsoltChannelConcatenation2dLayer import NsoltChannelConcatenation2dLayer
nchs = [ [3, 3], [4, 4] ]
datatype = [ torch.float, torch.double ]
nrows = [ 4, 8, 16 ]
ncols = [ 4, 8, 16 ]
class NsoltChannelConcatenation2dLayerTestCase(unittest.TestCase):
"""
NSOLTCHANNELCONCATENATION2DLAYERTESTCASE
    Two-component input (only nComponents=2 is supported):
       nSamples x nRows x nCols x (nChsTotal-1)
       nSamples x nRows x nCols
    One-component output (only nComponents=1 is supported):
       nSamples x nRows x nCols x nChsTotal
Requirements: Python 3.7.x, PyTorch 1.7.x
Copyright (c) 2020-2021, Shogo MURAMATSU
All rights reserved.
Contact address: Shogo MURAMATSU,
Faculty of Engineering, Niigata University,
8050 2-no-cho Ikarashi, Nishi-ku,
Niigata, 950-2181, JAPAN
http://msiplab.eng.niigata-u.ac.jp/
"""
def testConstructor(self):
# Expected values
expctdName = 'Cn'
expctdDescription = "Channel concatenation"
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name=expctdName
)
# Actual values
actualName = layer.name
actualDescription = layer.description
# Evaluation
self.assertTrue(isinstance(layer, nn.Module))
self.assertEqual(actualName,expctdName)
self.assertEqual(actualDescription,expctdDescription)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testPredict(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols
Xdc = torch.randn(nSamples,nrows,ncols,dtype=datatype,device=device,requires_grad=True)
# Expected values
# nSamples x nRows x nCols x nChsTotal
expctdZ = torch.cat((Xdc.unsqueeze(dim=3),Xac),dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
with torch.no_grad():
actualZ = layer.forward(Xac=Xac,Xdc=Xdc)
# Evaluation
self.assertEqual(actualZ.dtype,datatype)
self.assertTrue(torch.allclose(actualZ,expctdZ,rtol=rtol,atol=atol))
self.assertFalse(actualZ.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testPredictUnsqueezedXdc(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x 1
Xdc = torch.randn(nSamples,nrows,ncols,1,dtype=datatype,device=device,requires_grad=True)
# Expected values
# nSamples x nRows x nCols x nChsTotal
expctdZ = torch.cat((Xdc,Xac),dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
with torch.no_grad():
actualZ = layer.forward(Xac=Xac,Xdc=Xdc)
# Evaluation
self.assertEqual(actualZ.dtype,datatype)
self.assertTrue(torch.allclose(actualZ,expctdZ,rtol=rtol,atol=atol))
self.assertFalse(actualZ.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testBackward(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols
Xdc = torch.randn(nSamples,nrows,ncols,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x nChsTotal
dLdZ = torch.randn(nSamples,nrows,ncols,nChsTotal,dtype=datatype)
dLdZ = dLdZ.to(device)
# Expected values
# nSamples x nRows x nCols x (nChsTotal-1)
expctddLdXac = dLdZ[:,:,:,1:]
# nSamples x nRows x nCols x 1
expctddLdXdc = dLdZ[:,:,:,0]
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
Z = layer.forward(Xac=Xac,Xdc=Xdc)
Z.backward(dLdZ)
actualdLdXac = Xac.grad
actualdLdXdc = Xdc.grad
# Evaluation
self.assertEqual(actualdLdXdc.dtype,datatype)
self.assertEqual(actualdLdXac.dtype,datatype)
self.assertTrue(torch.allclose(actualdLdXdc,expctddLdXdc,rtol=rtol,atol=atol))
self.assertTrue(torch.allclose(actualdLdXac,expctddLdXac,rtol=rtol,atol=atol))
self.assertTrue(Z.requires_grad)
@parameterized.expand(
list(itertools.product(nchs,nrows,ncols,datatype))
)
def testBackwardUnsqueezedXdc(self,
nchs,nrows,ncols,datatype):
rtol,atol=1e-5,1e-8
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Parameters
nSamples = 8
nChsTotal = sum(nchs)
# nSamples x nRows x nCols x (nChsTotal-1)
Xac = torch.randn(nSamples,nrows,ncols,nChsTotal-1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x 1
Xdc = torch.randn(nSamples,nrows,ncols,1,dtype=datatype,device=device,requires_grad=True)
# nSamples x nRows x nCols x nChsTotal
dLdZ = torch.randn(nSamples,nrows,ncols,nChsTotal,dtype=datatype)
dLdZ = dLdZ.to(device)
# Expected values
# nSamples x nRows x nCols x (nChsTotal-1)
expctddLdXac = dLdZ[:,:,:,1:]
# nSamples x nRows x nCols x 1
expctddLdXdc = dLdZ[:,:,:,0].unsqueeze(dim=3)
# Instantiation of target class
layer = NsoltChannelConcatenation2dLayer(
name='Cn'
)
# Actual values
Z = layer.forward(Xac=Xac,Xdc=Xdc)
Z.backward(dLdZ)
actualdLdXac = Xac.grad
actualdLdXdc = Xdc.grad
# Evaluation
self.assertEqual(actualdLdXdc.dtype,datatype)
self.assertEqual(actualdLdXac.dtype,datatype)
self.assertTrue(torch.allclose(actualdLdXdc,expctddLdXdc,rtol=rtol,atol=atol))
self.assertTrue(torch.allclose(actualdLdXac,expctddLdXac,rtol=rtol,atol=atol))
self.assertTrue(Z.requires_grad)
if __name__ == '__main__':
unittest.main()
| shodimaggio/SaivDr | appendix/torch_nsolt/test_nsoltChannelConcatenation2dLayer.py | Python | bsd-2-clause | 7,477 |
import random
class Character:
    def __init__(self):
        self.name = ""
        self.life = 20
        self.health = random.randint(1, self.life)
    def do_damage(self, enemy):
        # Attacker and defender each roll against their health; the
        # difference (if any) is dealt as damage.
        self.hit = max(0, random.randint(0, self.health) -
                       random.randint(0, enemy.health))
        enemy.health -= self.hit
        if self.hit == 0:
            print ("..like a nimble sloth, %s evades %s's attack."
                   % (enemy.name, self.name))
        else:
            print ("%s inflicts debilitating damage on %s!!"
                   % (self.name, enemy.name))
        return enemy.health <= 0
class Zombie(Character):
    def __init__(self, player):
        Character.__init__(self)
        ran_adj = random.choice(['wretched', 'filthy', 'disgusting', 'oozing'])
        self.name = "a " + ran_adj + " zombie"
        self.health = random.randint(1, player.health)
class Player(Character):
    def __init__(self):
        Character.__init__(self)
        self.state = 'normal'
        self.health = 10
        self.health_max = 10
        self.zombie = None
    def quit(self):
        print ("The zombie virus has infected %s. "
               "You are now undead and crave brains." % self.name)
        self.health = 0
    def help(self):
        print (Commands.keys())
    def status(self):
        print ("%s's health: %d/%d" % (self.name, self.health,
                                       self.health_max))
    def weak(self):
        print ("%s is cold, hungry and tired." % self.name)
        self.health = max(1, self.health - 1)
    def rest(self):
        ran_adj = random.choice(['under a desk', 'in a locker',
                                 'in a closet'])
        if self.state != 'normal':
            print ("keep moving %s, zombies coming in hot!" % self.name)
            self.zombie_attacks()
        else:
            print ("%s hides %s and takes a breather." % (self.name, ran_adj))
            if random.randint(0, 1):
                self.zombie = Zombie(self)
                print ("%s is surprised by %s!" % (self.name,
                                                   self.zombie.name))
                self.state = 'fight'
                self.zombie_attacks()
            elif self.health < self.health_max:
                self.health += 1
            else:
                print ("%s has hidden too long." % self.name)
                self.health -= 1
    def look_around(self):
        if self.state != 'normal':
            print ("%s runs into %s" % (self.name, self.zombie.name))
            self.zombie_attacks()
        else:
            look = random.choice(["gymnasium", "library", "metal shop",
                                  "cafeteria"])
            print ("%s runs into the %s" % (self.name, look))
            if random.randint(0, 1):
                self.zombie = Zombie(self)
                print ("%s encounters %s!" % (self.name, self.zombie.name))
                self.state = 'fight'
            elif random.randint(0, 1):
                self.weak()
    def flee(self):
        if self.state != 'fight':
            print ("%s runs down a corridor" % self.name)
            self.weak()
        elif random.randint(1, self.health + 5) > \
                random.randint(1, self.zombie.health):
            print ("%s flees from %s." % (self.name, self.zombie.name))
            self.zombie = None
            self.state = 'normal'
        else:
            print ("%s couldn't escape from %s!" % (self.name,
                                                    self.zombie.name))
            self.zombie_attacks()
    def attack(self):
        if self.state != 'fight':
            print ("%s flails in the air like a twit." % self.name)
            self.weak()
        elif self.do_damage(self.zombie):
            print ("%s decapitates %s!" % (self.name, self.zombie.name))
            self.zombie = None
            self.state = 'normal'
            if random.randint(0, self.health) < 10:
                self.health += 1
                self.health_max += 1
                print ("%s is rejuvenated" % self.name)
        else:
            self.zombie_attacks()
    def zombie_attacks(self):
        if self.zombie.do_damage(self):
            print ("%s's brains were devoured by %s!!!\n"
                   "you are undead and crave BRAINS!!\n"
                   "unless you're a vegetarian, then seek GRAINS!!"
                   % (self.name, self.zombie.name))
# Map of player commands; module-level so Player.help can list them.
Commands = {
    'quit': Player.quit,
    'help': Player.help,
    'status': Player.status,
    'rest': Player.rest,
    'look around': Player.look_around,
    'flee': Player.flee,
    'attack': Player.attack,
}
def menu():
    hero = Player()
    hero.name = raw_input("What is your character's name? ")
    print ("(type help to get a list of actions)\n")
    print ("""When %s leaves homeroom, they notice
a strange stench in the air
maybe we are dissecting a frog in biology today...""" % hero.name)
    while hero.health > 0:
        line = raw_input("> ")
        args = line.split()
        if len(args) > 0:
            commandFound = False
            for c in Commands.keys():
                if args[0] == c[:len(args[0])]:
                    Commands[c](hero)
                    commandFound = True
                    break
            if not commandFound:
                print ("%s is confused, enter a command" % hero.name)
"""
living on the outskirts of a government national lab
has its pros and cons. when the kids in school
say that a rogue virus has started to infect people
and turn them into zombies, you laugh it off.
"""
if __name__ == "__main__":
    menu()
| bobbybabra/codeGuild | zombieHS.py | Python | bsd-2-clause | 5,598 |
# flake8: noqa
from .elementwise import (
Unary,
Binary,
ColsBinary,
Reduce,
func2class_name,
unary_module,
make_unary,
binary_module,
make_binary,
reduce_module,
make_reduce,
binary_dict_int_tst,
unary_dict_gen_tst,
binary_dict_gen_tst,
)
from .linear_map import LinearMap
from .nexpr import NumExprABC
from .mixufunc import make_local, make_local_dict, get_ufunc_args, MixUfuncABC
from ._elementwise import (
BitwiseNot,
Absolute,
Arccos,
Arccosh,
Arcsin,
Arcsinh,
Arctan,
Arctanh,
Cbrt,
Ceil,
Conj,
Conjugate,
Cos,
Cosh,
Deg2rad,
Degrees,
Exp,
Exp2,
Expm1,
Fabs,
Floor,
Frexp,
Invert,
Isfinite,
Isinf,
Isnan,
Isnat,
Log,
Log10,
Log1p,
Log2,
LogicalNot,
Modf,
Negative,
Positive,
Rad2deg,
Radians,
Reciprocal,
Rint,
Sign,
Signbit,
Sin,
Sinh,
Spacing,
Sqrt,
Square,
Tan,
Tanh,
Trunc,
Abs,
Add,
Arctan2,
BitwiseAnd,
BitwiseOr,
BitwiseXor,
Copysign,
Divide,
Divmod,
Equal,
FloorDivide,
FloatPower,
Fmax,
Fmin,
Fmod,
Gcd,
Greater,
GreaterEqual,
Heaviside,
Hypot,
Lcm,
Ldexp,
LeftShift,
Less,
LessEqual,
Logaddexp,
Logaddexp2,
LogicalAnd,
LogicalOr,
LogicalXor,
Maximum,
Minimum,
Mod,
Multiply,
Nextafter,
NotEqual,
Power,
Remainder,
RightShift,
Subtract,
TrueDivide,
ColsAdd,
ColsArctan2,
ColsBitwiseAnd,
ColsBitwiseOr,
ColsBitwiseXor,
ColsCopysign,
ColsDivide,
ColsDivmod,
ColsEqual,
ColsFloorDivide,
ColsFloatPower,
ColsFmax,
ColsFmin,
ColsFmod,
ColsGcd,
ColsGreater,
ColsGreaterEqual,
ColsHeaviside,
ColsHypot,
ColsLcm,
ColsLdexp,
ColsLeftShift,
ColsLess,
ColsLessEqual,
ColsLogaddexp,
ColsLogaddexp2,
ColsLogicalAnd,
ColsLogicalOr,
ColsLogicalXor,
ColsMaximum,
ColsMinimum,
ColsMod,
ColsMultiply,
ColsNextafter,
ColsNotEqual,
ColsPower,
ColsRemainder,
ColsRightShift,
ColsSubtract,
ColsTrueDivide,
AddReduce,
Arctan2Reduce,
BitwiseAndReduce,
BitwiseOrReduce,
BitwiseXorReduce,
CopysignReduce,
DivideReduce,
DivmodReduce,
EqualReduce,
FloorDivideReduce,
FloatPowerReduce,
FmaxReduce,
FminReduce,
FmodReduce,
GcdReduce,
GreaterReduce,
GreaterEqualReduce,
HeavisideReduce,
HypotReduce,
LcmReduce,
LdexpReduce,
LeftShiftReduce,
LessReduce,
LessEqualReduce,
LogaddexpReduce,
Logaddexp2Reduce,
LogicalAndReduce,
LogicalOrReduce,
LogicalXorReduce,
MaximumReduce,
MinimumReduce,
ModReduce,
MultiplyReduce,
NextafterReduce,
NotEqualReduce,
PowerReduce,
RemainderReduce,
RightShiftReduce,
SubtractReduce,
TrueDivideReduce,
)
__all__ = [
"Unary",
"Binary",
"ColsBinary",
"Reduce",
"func2class_name",
"unary_module",
"make_unary",
"binary_module",
"make_binary",
"reduce_module",
"make_reduce",
"binary_dict_int_tst",
"unary_dict_gen_tst",
"binary_dict_gen_tst",
"LinearMap",
"NumExprABC",
"make_local",
"make_local_dict",
"get_ufunc_args",
"MixUfuncABC",
"BitwiseNot",
"Absolute",
"Arccos",
"Arccosh",
"Arcsin",
"Arcsinh",
"Arctan",
"Arctanh",
"Cbrt",
"Ceil",
"Conj",
"Conjugate",
"Cos",
"Cosh",
"Deg2rad",
"Degrees",
"Exp",
"Exp2",
"Expm1",
"Fabs",
"Floor",
"Frexp",
"Invert",
"Isfinite",
"Isinf",
"Isnan",
"Isnat",
"Log",
"Log10",
"Log1p",
"Log2",
"LogicalNot",
"Modf",
"Negative",
"Positive",
"Rad2deg",
"Radians",
"Reciprocal",
"Rint",
"Sign",
"Signbit",
"Sin",
"Sinh",
"Spacing",
"Sqrt",
"Square",
"Tan",
"Tanh",
"Trunc",
"Abs",
"Add",
"Arctan2",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"Copysign",
"Divide",
"Divmod",
"Equal",
"FloorDivide",
"FloatPower",
"Fmax",
"Fmin",
"Fmod",
"Gcd",
"Greater",
"GreaterEqual",
"Heaviside",
"Hypot",
"Lcm",
"Ldexp",
"LeftShift",
"Less",
"LessEqual",
"Logaddexp",
"Logaddexp2",
"LogicalAnd",
"LogicalOr",
"LogicalXor",
"Maximum",
"Minimum",
"Mod",
"Multiply",
"Nextafter",
"NotEqual",
"Power",
"Remainder",
"RightShift",
"Subtract",
"TrueDivide",
"ColsAdd",
"ColsArctan2",
"ColsBitwiseAnd",
"ColsBitwiseOr",
"ColsBitwiseXor",
"ColsCopysign",
"ColsDivide",
"ColsDivmod",
"ColsEqual",
"ColsFloorDivide",
"ColsFloatPower",
"ColsFmax",
"ColsFmin",
"ColsFmod",
"ColsGcd",
"ColsGreater",
"ColsGreaterEqual",
"ColsHeaviside",
"ColsHypot",
"ColsLcm",
"ColsLdexp",
"ColsLeftShift",
"ColsLess",
"ColsLessEqual",
"ColsLogaddexp",
"ColsLogaddexp2",
"ColsLogicalAnd",
"ColsLogicalOr",
"ColsLogicalXor",
"ColsMaximum",
"ColsMinimum",
"ColsMod",
"ColsMultiply",
"ColsNextafter",
"ColsNotEqual",
"ColsPower",
"ColsRemainder",
"ColsRightShift",
"ColsSubtract",
"ColsTrueDivide",
"AddReduce",
"Arctan2Reduce",
"BitwiseAndReduce",
"BitwiseOrReduce",
"BitwiseXorReduce",
"CopysignReduce",
"DivideReduce",
"DivmodReduce",
"EqualReduce",
"FloorDivideReduce",
"FloatPowerReduce",
"FmaxReduce",
"FminReduce",
"FmodReduce",
"GcdReduce",
"GreaterReduce",
"GreaterEqualReduce",
"HeavisideReduce",
"HypotReduce",
"LcmReduce",
"LdexpReduce",
"LeftShiftReduce",
"LessReduce",
"LessEqualReduce",
"LogaddexpReduce",
"Logaddexp2Reduce",
"LogicalAndReduce",
"LogicalOrReduce",
"LogicalXorReduce",
"MaximumReduce",
"MinimumReduce",
"ModReduce",
"MultiplyReduce",
"NextafterReduce",
"NotEqualReduce",
"PowerReduce",
"RemainderReduce",
"RightShiftReduce",
"SubtractReduce",
"TrueDivideReduce",
]
| jdfekete/progressivis | progressivis/linalg/__init__.py | Python | bsd-2-clause | 6,390 |
from indra.statements import *
from rdflib import URIRef, Namespace
from rdflib.namespace import RDF
import collections
import urllib
import re
import keyword
import warnings
BEL = Namespace("http://www.openbel.org/")
prefixes = """
PREFIX belvoc: <http://www.openbel.org/vocabulary/>
PREFIX belsc: <http://www.openbel.org/bel/>
PREFIX belns: <http://www.openbel.org/bel/namespace/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>"""
phospho_mods = [
'PhosphorylationSerine',
'PhosphorylationThreonine',
'PhosphorylationTyrosine',
'Phosphorylation',
]
class InvalidNameError(ValueError):
def __init__(self, name):
ValueError.__init__(self, "Not a valid name: %s" % name)
def name_from_uri(uri):
"""Make the URI term usable as a valid Python identifier, if possible.
    First strips off the extra URI information by calling term_from_uri,
then checks to make sure the name is a valid Python identifier.
Currently fixes identifiers starting with numbers by prepending with
the letter 'p'. For other cases it raises an exception.
This function should be called when the string that is returned is to be
used as a PySB component name, which are required to be valid Python
identifiers.
"""
name = term_from_uri(uri)
# Handle the case where the string starts with a number
if name[0].isdigit():
name = 'p' + name
if re.match("[_A-Za-z][_a-zA-Z0-9]*$", name) \
and not keyword.iskeyword(name):
pass
else:
raise InvalidNameError(name)
return name
def gene_name_from_uri(uri):
return name_from_uri(uri).upper()
def term_from_uri(uri):
"""Basic conversion of RDF URIs to more friendly strings.
Removes prepended URI information, and replaces spaces and hyphens with
underscores.
"""
if uri is None:
return None
# Strip gene name off from URI
term = uri.rsplit('/')[-1]
# Decode URL to handle spaces, special characters
term = urllib.unquote(term)
# Replace any spaces, hyphens, or periods with underscores
term = term.replace(' ', '_')
term = term.replace('-', '_')
term = term.replace('.', '_')
term = term.encode('ascii', 'ignore')
return term
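# Illustrative examples of the URI helpers (hypothetical URIs, prefixes
# elided):
#   term_from_uri("http://.../namespace/Protein%20Abundance")
#       -> "Protein_Abundance"
#   name_from_uri("http://.../namespace/14-3-3") -> "p14_3_3"
#   gene_name_from_uri("http://.../namespace/Braf") -> "BRAF"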
def strip_statement(uri):
uri = uri.replace(r'http://www.openbel.org/bel/', '')
uri = uri.replace(r'http://www.openbel.org/vocabulary/', '')
return uri
class BelProcessor(object):
def __init__(self, g):
self.g = g
self.statements = []
self.all_stmts = []
self.converted_stmts = []
self.degenerate_stmts = []
self.indirect_stmts = []
def get_evidence(self, statement):
evidence = None
citation = None
annotations = []
# Query for evidence text and citation
q_evidence = prefixes + """
SELECT ?evidenceText ?citation
WHERE {
<%s> belvoc:hasEvidence ?evidence .
?evidence belvoc:hasEvidenceText ?evidenceText .
?evidence belvoc:hasCitation ?citation .
}
""" % statement.format()
res_evidence = self.g.query(q_evidence)
for stmt in res_evidence:
try:
evidence = stmt[0].format()
citation = stmt[1].format()
except KeyError:
warnings.warn('Problem converting evidence/citation string')
# Query for all annotations of the statement
q_annotations = prefixes + """
SELECT ?annotation
WHERE {
<%s> belvoc:hasEvidence ?evidence .
?evidence belvoc:hasAnnotation ?annotation .
}
""" % statement.format()
res_annotations = self.g.query(q_annotations)
for stmt in res_annotations:
annotations.append(stmt[0].format())
return (citation, evidence, annotations)
def get_modifications(self):
q_phospho = prefixes + """
SELECT ?enzName ?actType ?substrateName ?mod ?pos
?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship belvoc:DirectlyIncreases .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?subject a belvoc:AbundanceActivity .
?subject belvoc:hasActivityType ?actType .
?subject belvoc:hasChild ?enzyme .
?enzyme a belvoc:ProteinAbundance .
?enzyme belvoc:hasConcept ?enzName .
?object a belvoc:ModifiedProteinAbundance .
?object belvoc:hasModificationType ?mod .
?object belvoc:hasChild ?substrate .
?substrate belvoc:hasConcept ?substrateName .
OPTIONAL { ?object belvoc:hasModificationPosition ?pos . }
}
"""
# Now make the PySB for the phosphorylation
res_phospho = self.g.query(q_phospho)
for stmt in res_phospho:
(citation, evidence, annotations) = self.get_evidence(stmt[5])
# Parse out the elements of the query
enz_name = gene_name_from_uri(stmt[0])
enz = Agent(enz_name)
act_type = name_from_uri(stmt[1])
sub_name = gene_name_from_uri(stmt[2])
sub = Agent(sub_name)
mod = term_from_uri(stmt[3])
mod_pos = term_from_uri(stmt[4])
stmt_str = strip_statement(stmt[5])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
if act_type == 'Kinase' and mod in phospho_mods:
self.statements.append(
Phosphorylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif act_type == 'Catalytic':
if mod == 'Hydroxylation':
self.statements.append(
Hydroxylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Sumoylation':
self.statements.append(
Sumoylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Acetylation':
self.statements.append(
Acetylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Ubiquitination':
self.statements.append(
Ubiquitination(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
else:
print "Warning: Unknown modification type!"
print("Activity: %s, Mod: %s, Mod_Pos: %s" %
(act_type, mod, mod_pos))
else:
print "Warning: Unknown modification type!"
print("Activity: %s, Mod: %s, Mod_Pos: %s" %
(act_type, mod, mod_pos))
def get_dephosphorylations(self):
q_phospho = prefixes + """
SELECT ?phosName ?substrateName ?mod ?pos ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship belvoc:DirectlyDecreases .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?subject belvoc:hasActivityType belvoc:Phosphatase .
?subject belvoc:hasChild ?phosphatase .
?phosphatase a belvoc:ProteinAbundance .
?phosphatase belvoc:hasConcept ?phosName .
?object a belvoc:ModifiedProteinAbundance .
?object belvoc:hasModificationType ?mod .
?object belvoc:hasChild ?substrate .
?substrate belvoc:hasConcept ?substrateName .
OPTIONAL { ?object belvoc:hasModificationPosition ?pos . }
}
"""
# Now make the PySB for the phosphorylation
res_phospho = self.g.query(q_phospho)
for stmt in res_phospho:
(citation, evidence, annotations) = self.get_evidence(stmt[4])
# Parse out the elements of the query
phos_name = gene_name_from_uri(stmt[0])
phos = Agent(phos_name)
sub_name = gene_name_from_uri(stmt[1])
sub = Agent(sub_name)
mod = term_from_uri(stmt[2])
mod_pos = term_from_uri(stmt[3])
stmt_str = strip_statement(stmt[4])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
self.statements.append(
Dephosphorylation(phos, sub, mod, mod_pos,
stmt_str, citation,
evidence, annotations))
def get_composite_activating_mods(self):
# To eliminate multiple matches, we use pos1 < pos2 but this will
# only work if the pos is given, otherwise multiple matches of
# the same mod combination may appear in the result
q_mods = prefixes + """
SELECT ?speciesName ?actType ?mod1 ?pos1 ?mod2 ?pos2 ?rel ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship ?rel .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?object belvoc:hasActivityType ?actType .
?object belvoc:hasChild ?species .
?species a belvoc:ProteinAbundance .
?species belvoc:hasConcept ?speciesName .
?subject a belvoc:CompositeAbundance .
?subject belvoc:hasChild ?subject1 .
?subject1 a belvoc:ModifiedProteinAbundance .
?subject1 belvoc:hasModificationType ?mod1 .
?subject1 belvoc:hasChild ?species .
?subject belvoc:hasChild ?subject2 .
?subject2 a belvoc:ModifiedProteinAbundance .
?subject2 belvoc:hasModificationType ?mod2 .
?subject2 belvoc:hasChild ?species .
OPTIONAL { ?subject1 belvoc:hasModificationPosition ?pos1 . }
OPTIONAL { ?subject2 belvoc:hasModificationPosition ?pos2 . }
FILTER ((?rel = belvoc:DirectlyIncreases ||
?rel = belvoc:DirectlyDecreases) &&
?pos1 < ?pos2)
}
"""
# Now make the PySB for the phosphorylation
res_mods = self.g.query(q_mods)
for stmt in res_mods:
(citation, evidence, annotations) = self.get_evidence(stmt[7])
# Parse out the elements of the query
species_name = gene_name_from_uri(stmt[0])
species = Agent(species_name)
act_type = term_from_uri(stmt[1])
mod1 = term_from_uri(stmt[2])
mod_pos1 = term_from_uri(stmt[3])
mod2 = term_from_uri(stmt[4])
mod_pos2 = term_from_uri(stmt[5])
rel = term_from_uri(stmt[6])
if rel == 'DirectlyDecreases':
rel = 'decreases'
else:
rel = 'increases'
stmt_str = strip_statement(stmt[7])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
self.statements.append(
ActivityModification(species, (mod1, mod2),
(mod_pos1, mod_pos2),
rel, act_type, stmt_str,
citation, evidence, annotations))
def get_activating_mods(self):
q_mods = prefixes + """
SELECT ?speciesName ?actType ?mod ?pos ?rel ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship ?rel .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?object belvoc:hasActivityType ?actType .
?object belvoc:hasChild ?species .
?species a belvoc:ProteinAbundance .
?species belvoc:hasConcept ?speciesName .
?subject a belvoc:ModifiedProteinAbundance .
?subject belvoc:hasModificationType ?mod .
?subject belvoc:hasChild ?species .
OPTIONAL { ?subject belvoc:hasModificationPosition ?pos . }
FILTER (?rel = belvoc:DirectlyIncreases ||
?rel = belvoc:DirectlyDecreases)
}
"""
# Now make the PySB for the phosphorylation
res_mods = self.g.query(q_mods)
for stmt in res_mods:
(citation, evidence, annotations) = self.get_evidence(stmt[5])
# Parse out the elements of the query
species_name = gene_name_from_uri(stmt[0])
species = Agent(species_name)
act_type = term_from_uri(stmt[1])
mod = term_from_uri(stmt[2])
mod_pos = term_from_uri(stmt[3])
rel = term_from_uri(stmt[4])
if rel == 'DirectlyDecreases':
rel = 'decreases'
else:
rel = 'increases'
stmt_str = strip_statement(stmt[5])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
self.statements.append(
ActivityModification(species, (mod,), (mod_pos,), rel,
act_type, stmt_str,
citation, evidence, annotations))
def get_complexes(self):
# Find all complexes described in the corpus
q_cmplx = prefixes + """
SELECT ?complexTerm ?childName
WHERE {
?complexTerm a belvoc:Term .
?complexTerm a belvoc:ComplexAbundance .
?complexTerm belvoc:hasChild ?child .
?child belvoc:hasConcept ?childName .
}
"""
# Run the query
res_cmplx = self.g.query(q_cmplx)
# Store the members of each complex in a dict of lists, keyed by the
# term for the complex
cmplx_dict = collections.defaultdict(list)
for stmt in res_cmplx:
cmplx_name = term_from_uri(stmt[0])
child_name = gene_name_from_uri(stmt[1])
child = Agent(child_name)
cmplx_dict[cmplx_name].append(child)
# Now iterate over the stored complex information and create binding
# statements
for cmplx_name, cmplx_list in cmplx_dict.iteritems():
if len(cmplx_list) < 2:
msg = 'Complex %s has less than 2 members! Skipping.' % \
cmplx_name
warnings.warn(msg)
else:
self.statements.append(Complex(cmplx_list))
def get_activating_subs(self):
"""
p_HGNC_NRAS_sub_Q_61_K_DirectlyIncreases_gtp_p_HGNC_NRAS
p_HGNC_KRAS_sub_G_12_R_DirectlyIncreases_gtp_p_PFH_RAS_Family
p_HGNC_BRAF_sub_V_600_E_DirectlyIncreases_kin_p_HGNC_BRAF
"""
q_mods = prefixes + """
SELECT ?enzyme_name ?sub_label ?act_type ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship belvoc:DirectlyIncreases .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?subject a belvoc:ProteinAbundance .
?subject belvoc:hasConcept ?enzyme_name .
?subject belvoc:hasChild ?sub_expr .
?sub_expr rdfs:label ?sub_label .
?object a belvoc:AbundanceActivity .
?object belvoc:hasActivityType ?act_type .
?object belvoc:hasChild ?enzyme .
?enzyme a belvoc:ProteinAbundance .
?enzyme belvoc:hasConcept ?enzyme_name .
}
"""
# Now make the PySB for the phosphorylation
res_mods = self.g.query(q_mods)
for stmt in res_mods:
(citation, evidence, annotations) = self.get_evidence(stmt[3])
# Parse out the elements of the query
enz_name = gene_name_from_uri(stmt[0])
enz = Agent(enz_name)
sub_expr = term_from_uri(stmt[1])
act_type = term_from_uri(stmt[2])
# Parse the WT and substituted residues from the node label.
# Strangely, the RDF for substituted residue doesn't break the
# terms of the BEL expression down into their meaning, as happens
# for modified protein abundances. Instead, the substitution
# just comes back as a string, e.g., "sub(V,600,E)". This code
# parses the arguments back out using a regular expression.
match = re.match('sub\(([A-Z]),([0-9]*),([A-Z])\)', sub_expr)
if match:
matches = match.groups()
wt_residue = matches[0]
position = matches[1]
sub_residue = matches[2]
else:
print("Warning: Could not parse substitution expression %s" %
sub_expr)
continue
stmt_str = strip_statement(stmt[3])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
self.statements.append(
ActivatingSubstitution(enz, wt_residue, position,
sub_residue, act_type,
stmt_str,
citation, evidence, annotations))
def get_activity_activity(self):
# Query for all statements where the activity of one protein
# directlyIncreases the activity of another protein, without reference
# to a modification.
q_stmts = prefixes + """
SELECT ?subjName ?subjActType ?rel ?objName ?objActType
?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasSubject ?subj .
?stmt belvoc:hasObject ?obj .
?stmt belvoc:hasRelationship ?rel .
?subj belvoc:hasActivityType ?subjActType .
?subj belvoc:hasChild ?subjProt .
?subjProt belvoc:hasConcept ?subjName .
?obj belvoc:hasActivityType ?objActType .
?obj belvoc:hasChild ?objProt .
?objProt belvoc:hasConcept ?objName .
FILTER (?rel = belvoc:DirectlyIncreases ||
?rel = belvoc:DirectlyDecreases)
}
"""
res_stmts = self.g.query(q_stmts)
for stmt in res_stmts:
(citation, evidence, annotations) = self.get_evidence(stmt[5])
subj_name = gene_name_from_uri(stmt[0])
subj = Agent(subj_name)
subj_activity = name_from_uri(stmt[1])
rel = term_from_uri(stmt[2])
if rel == 'DirectlyDecreases':
rel = 'decreases'
else:
rel = 'increases'
obj_name = gene_name_from_uri(stmt[3])
obj = Agent(obj_name)
obj_activity = name_from_uri(stmt[4])
stmt_str = strip_statement(stmt[5])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
# Distinguish the case when the activator is a RasGTPase
# (since this may involve unique and stereotyped mechanisms)
if subj_activity == 'GtpBound':
self.statements.append(
RasGtpActivityActivity(subj, subj_activity,
rel, obj, obj_activity,
stmt_str,
citation, evidence, annotations))
# If the object is a Ras-like GTPase, and the subject *increases*
# its GtpBound activity, then the subject is a RasGEF
elif obj_activity == 'GtpBound' and \
rel == 'DirectlyIncreases':
self.statements.append(
RasGef(subj, subj_activity, obj,
stmt_str, citation, evidence, annotations))
# If the object is a Ras-like GTPase, and the subject *decreases*
# its GtpBound activity, then the subject is a RasGAP
elif obj_activity == 'GtpBound' and \
rel == 'DirectlyDecreases':
self.statements.append(
RasGap(subj, subj_activity, obj,
stmt_str, citation, evidence, annotations))
# Otherwise, create a generic Activity->Activity statement
else:
self.statements.append(
ActivityActivity(subj, subj_activity,
rel, obj, obj_activity,
stmt_str,
citation, evidence, annotations))
"""
#print "--------------------------------"
print stmt_str
print("This statement says that:")
print("%s activity increases activity of %s" %
(subj_name, obj_name))
print "It doesn't specify the site."
act_mods = []
for bps in self.statements:
if type(bps) == ActivatingModification and \
bps.monomer_name == obj_name:
act_mods.append(bps)
# If we know about an activation modification...
if act_mods:
print "However, I happen to know about the following"
print "activating modifications for %s:" % obj_name
for act_mod in act_mods:
print " %s at %s" % (act_mod.mod, act_mod.mod_pos)
"""
def get_all_direct_statements(self):
"""Get all directlyIncreases/Decreases statements in the corpus.
Stores the results of the query in self.all_stmts.
"""
print "Getting all direct statements...\n"
q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasSubject ?subj .
?stmt belvoc:hasObject ?obj .
{
{ ?subj a belvoc:AbundanceActivity . }
UNION
{ ?subj a belvoc:ComplexAbundance . }
UNION
{ ?subj a belvoc:ProteinAbundance . }
UNION
{ ?subj a belvoc:ModifiedProteinAbundance . }
}
{
{ ?obj a belvoc:AbundanceActivity . }
UNION
{ ?obj a belvoc:ComplexAbundance . }
UNION
{ ?obj a belvoc:ProteinAbundance . }
UNION
{ ?obj a belvoc:ModifiedProteinAbundance . }
}
{
{ ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
}
}
"""
        # The broader query below overwrites the more restrictive one above.
        q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
{
{ ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
}
}
"""
res_stmts = self.g.query(q_stmts)
self.all_stmts = [strip_statement(stmt[0]) for stmt in res_stmts]
def get_indirect_statements(self):
q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
{
{ ?stmt belvoc:hasRelationship belvoc:Increases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:Decreases . }
}
}
"""
res_stmts = self.g.query(q_stmts)
self.indirect_stmts = [strip_statement(stmt[0]) for stmt in res_stmts]
def get_degenerate_statements(self):
print "Checking for 'degenerate' statements...\n"
# Get rules of type protein X -> activity Y
q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasSubject ?subj .
?stmt belvoc:hasObject ?obj .
{
{ ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
}
{
{ ?subj a belvoc:ProteinAbundance . }
UNION
{ ?subj a belvoc:ModifiedProteinAbundance . }
}
?subj belvoc:hasConcept ?xName .
{
{
?obj a belvoc:ProteinAbundance .
?obj belvoc:hasConcept ?yName .
}
UNION
{
?obj a belvoc:ModifiedProteinAbundance .
?obj belvoc:hasChild ?proteinY .
?proteinY belvoc:hasConcept ?yName .
}
UNION
{
?obj a belvoc:AbundanceActivity .
?obj belvoc:hasChild ?objChild .
?objChild a belvoc:ProteinAbundance .
?objChild belvoc:hasConcept ?yName .
}
}
FILTER (?xName != ?yName)
}
"""
res_stmts = self.g.query(q_stmts)
print "Protein -> Protein/Activity statements:"
print "---------------------------------------"
for stmt in res_stmts:
stmt_str = strip_statement(stmt[0])
print stmt_str
self.degenerate_stmts.append(stmt_str)
def print_statement_coverage(self):
"""Display how many of the direct statements have been converted,
and how many are considered 'degenerate' and not converted."""
if not self.all_stmts:
self.get_all_direct_statements()
if not self.degenerate_stmts:
self.get_degenerate_statements()
if not self.indirect_stmts:
self.get_indirect_statements()
print
print("Total indirect statements: %d" % len(self.indirect_stmts))
print("Total direct statements: %d" % len(self.all_stmts))
print("Converted statements: %d" % len(self.converted_stmts))
print("Degenerate statements: %d" % len(self.degenerate_stmts))
print(">> Total unhandled statements: %d" %
(len(self.all_stmts) - len(self.converted_stmts) -
len(self.degenerate_stmts)))
print
print "--- Unhandled statements ---------"
for stmt in self.all_stmts:
if not (stmt in self.converted_stmts or
stmt in self.degenerate_stmts):
print stmt
def print_statements(self):
for i, stmt in enumerate(self.statements):
print "%s: %s" % (i, stmt)
| decarlin/indra | indra/bel/processor.py | Python | bsd-2-clause | 27,816 |
#!/usr/bin/python2.7 -tt
"""
Copyright (c) 2013, Adel Qodmani
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import tarfile # For the compression
import os # For everything related to path
import logging
import sys # For the argv and exit
import datetime
def main():
""" zipper source-dir-full-path dest-dir-full-path
Tars and zips the source-dir and put it in the dest-dir with the name:
source-dir-name_date_time.tar.gz
"""
check_args()
source_path = sys.argv[1]
source_path = source_path.rstrip('/')
logging.debug("source_path: %s" % source_path)
dest_path = sys.argv[2]
dest_path = dest_path.rstrip('/')
logging.debug("dest_path: %s" % dest_path)
# source name is the name of the dir to be archived
source_name = source_path.split("/")[-1]
logging.debug("source_name: %s" % source_name)
# tar_path
tar_path = create_tar_path(source_name, dest_path)
logging.debug("tar_path: %s" % tar_path)
create_tar_file(tar_path, source_path)
def check_args():
""" Checks if the args supplied to the script are what it expects """
if len(sys.argv) > 1 and sys.argv[1] == "--help":
help_text = ("zipper creates a zipped tar-ball of the <source> directory"
+ "and puts it in \nthe <destination> directory ")
usage = "e.g: zipper /tmp/ /home/sally/Desktop/"
result = "will create a file called tmp_date_time.tar.gz in "
result += "/home/sally/Desktop/ which has all the contents of /tmp/"
print(help_text)
print(usage)
print(result)
sys.exit(0)
elif len(sys.argv) < 3:
print("Missing arguments!")
print("Usage:")
print("\tzipper source destination")
print("You can get the help by: zipper --help")
logging.error("Missing arguments!")
logging.error("Shutting down!")
sys.exit(1)
elif not os.path.isabs(sys.argv[1]):
print("Source directory is not an absolute path")
print("You can get the help by: zipper --help")
logging.error("Source is not absolute")
logging.error("Shutting down")
sys.exit(2)
elif not os.path.isabs(sys.argv[2]):
print("Destination directory is not an absolute path")
print("You can get the help by: zipper --help")
logging.error("Destination is not absolute")
logging.error("Shutting down")
sys.exit(3)
elif not os.path.isdir(sys.argv[1]):
print("Path given as a source directory is not a directory")
print("You can get the help by: zipper --help")
logging.error("Source is not a directory")
logging.error("Shutting down")
sys.exit(4)
elif not os.path.isdir(sys.argv[2]):
print("Path given as destination directory is not a directory")
print("You can get the help by: zipper --help")
logging.error("Destination is not a directory")
logging.error("Shutting down")
sys.exit(5)
def create_tar_path(source_name, dest_path):
""" Creates a path for a backup that will be in the desktop of the user
and the file name will be the /path/to/desktktop/source_name_date.tar.gz
"""
# Get the path to the desktop ready
path = os.path.expanduser('~') # changes ~ to home dir path
logging.debug(path)
path = os.path.join(path, dest_path+"/")
logging.debug(path)
# string from time(strftime): %Year %month %day %Hour %Minute $Second
now = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
logging.debug(now)
# The dest path is the path + source_name + date + extension
path = os.path.join(path, source_name)
logging.debug(path)
path += '_' + now + ".tar.gz"
logging.debug(path)
return path
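# e.g. create_tar_path("tmp", "/home/sally/Desktop") ->
#   "/home/sally/Desktop/tmp_2013-09-01_12-30-45.tar.gz" (timestamp varies)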
def create_tar_file(tar_path, source_path):
# "w:gz" is open for writing a gz tarball
try:
tar = tarfile.open(tar_path, "w:gz")
tar.add(source_path)
tar.close()
logging.debug("Tar ball [%s] created for directory [%s]" % (tar_path,
source_path))
    except IOError:
        logging.critical("IOError exception! Aborting ..")
        sys.exit(6)
    except tarfile.TarError:
        logging.critical("TarError exception! Aborting ...")
        sys.exit(7)
if __name__ == "__main__":
# Set up the logging env
# Format: (asctime) (filename) (funcname) (linenumber) (level) (msg)
# The time can be formated with the datefmt parameter
FORMAT = "%(asctime)s %(filename)s::%(funcName)s::%(lineno)d"
FORMAT += " [%(levelname)s]: %(msg)s"
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
    LOG_PATH = "/home/aral/learn/zipper/log"
    try:
        STREAM = open(LOG_PATH, "a+")
    except IOError:
        print("Can't create a log file in [%s]" % LOG_PATH)
        sys.exit(8)
# Setting the log stream to go to stderr and print all log info from debug
# and higher levels (debug, info, warning, error, critical)
logging.basicConfig(stream=STREAM, level=logging.DEBUG, format=FORMAT,
datefmt=DATE_FORMAT)
main()
| adel-qod/zipper | zipper.py | Python | bsd-2-clause | 6,014 |
# Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from barf.core.reil import ReilParser
from barf.core.smt.smtsymbol import BitVec
from barf.core.smt.smtsymbol import Bool
from barf.core.smt.smtsolver import Z3Solver as SmtSolver
# from barf.core.smt.smtsolver import CVC4Solver as SmtSolver
class SmtSolverBitVecTests(unittest.TestCase):
def setUp(self):
self._address_size = 32
self._parser = ReilParser()
self._solver = SmtSolver()
# Arithmetic operations.
def test_add(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x + y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val + y_val == z_val)
def test_sub(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x - y == z)
        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)
        self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue((x_val - y_val) & 0xffffffff == z_val)
def test_mul(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x * y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue((x_val * y_val) & 0xffffffff == z_val)
def test_div(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x / y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val / y_val == z_val)
def test_mod(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x % y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val % y_val == z_val)
def test_neg(self):
x = BitVec(32, "x")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("z", z)
self._solver.add(-x == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
z_val = self._solver.get_value(z)
self.assertTrue(-x_val & 0xffffffff == z_val)
# Bitwise operations.
def test_and(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x & y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val & y_val == z_val)
def test_xor(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x ^ y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val ^ y_val == z_val)
def test_or(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x | y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val | y_val == z_val)
def test_lshift(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x << y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue((x_val << y_val) & 0xffffffff == z_val)
def test_rshift(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(x >> y == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self._solver.add(x != y)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(x_val >> y_val == z_val)
def test_invert(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
z = BitVec(32, "z")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.declare_fun("z", z)
self._solver.add(~x == z)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
z_val = self._solver.get_value(z)
self.assertTrue(~x_val & 0xffffffff == z_val)
# Comparison operators (signed)
def test_lt(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x < y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val < y_val)
def test_le(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x <= y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val <= y_val)
def test_eq(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x == y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val == y_val)
def test_neq(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x != y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val != y_val)
def test_gt(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x > y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val > y_val)
def test_ge(self):
x = BitVec(32, "x")
y = BitVec(32, "y")
self._solver.declare_fun("x", x)
self._solver.declare_fun("y", y)
self._solver.add(x >= y)
# Add constraints to avoid trivial solutions.
self._solver.add(x > 1)
self._solver.add(y > 1)
self.assertEqual(self._solver.check(), "sat")
x_val = self._solver.get_value(x)
y_val = self._solver.get_value(y)
self.assertTrue(x_val >= y_val)
# Comparison operators (unsigned)
def test_ult(self):
# TODO Implement.
pass
def test_ule(self):
# TODO Implement.
pass
def test_ugt(self):
# TODO Implement.
pass
def test_uge(self):
# TODO Implement.
pass
# Arithmetic operators (unsigned)
def test_udiv(self):
# TODO Implement.
pass
def test_urem(self):
# TODO Implement.
pass
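# ---------------------------------------------------------------------------
# Added illustration (not part of the original suite): the `& 0xffffffff`
# masks in the assertions above emulate 32-bit two's-complement wrap-around,
# since Python integers are unbounded while the solver's BitVec values wrap.
def _wrap32(value):
    """Reduce an unbounded Python int to its 32-bit two's-complement value."""
    return value & 0xffffffff
assert _wrap32(0xffffffff + 1) == 0        # overflow wraps to zero
assert _wrap32(-1) == 0xffffffff           # -1 is all ones in 32 bits
# ---------------------------------------------------------------------------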
def main():
unittest.main()
if __name__ == '__main__':
main()
| cnheitman/barf-project | tests/core/smt/test_smtsolver.py | Python | bsd-2-clause | 13,055 |
from __future__ import absolute_import, division, print_function
from dynd._pydynd import w_type, \
make_var_dim, make_strided_dim, make_fixed_dim, make_cfixed_dim
__all__ = ['var', 'strided', 'fixed', 'cfixed']
class _Dim(object):
__slots__ = []
def __mul__(self, rhs):
if isinstance(rhs, w_type):
            # Apply all the dimensions to produce a type
for dim in reversed(self.dims):
rhs = dim.create(rhs)
return rhs
elif isinstance(rhs, (str, type)):
# Allow:
# ndt.strided * 'int32'
# ndt.strided * int
rhs = w_type(rhs)
for dim in reversed(self.dims):
rhs = dim.create(rhs)
return rhs
elif isinstance(rhs, _Dim):
# Combine the dimension fragments
return _DimFragment(self.dims + rhs.dims)
else:
raise TypeError('Expected a dynd dimension or type, not %r' % rhs)
def __pow__(self, count):
return _DimFragment(self.dims * count)
class _DimFragment(_Dim):
__slots__ = ['dims']
def __init__(self, dims):
self.dims = dims
def __repr__(self):
return ' * '.join(repr(dim) for dim in self.dims)
class _Var(_Dim):
"""
Creates a var dimension when combined with other types.
Examples
--------
>>> ndt.var * ndt.int32
ndt.type('var * int32')
>>> ndt.fixed[5] * ndt.var * ndt.float64
ndt.type('5 * var * float64')
"""
__slots__ = []
@property
def dims(self):
return (self,)
def create(self, eltype):
return make_var_dim(eltype)
def __repr__(self):
return 'ndt.var'
class _Strided(_Dim):
"""
Creates a strided dimension when combined with other types.
Examples
--------
>>> ndt.strided * ndt.int32
ndt.type('strided * int32')
>>> ndt.fixed[5] * ndt.strided * ndt.float64
ndt.type('5 * strided * float64')
"""
__slots__ = []
@property
def dims(self):
return (self,)
def create(self, eltype):
return make_strided_dim(eltype)
def __repr__(self):
return 'ndt.strided'
class _Fixed(_Dim):
"""
Creates a fixed dimension when combined with other types.
Examples
--------
>>> ndt.fixed[3] * ndt.int32
ndt.type('3 * int32')
>>> ndt.fixed[5] * ndt.var * ndt.float64
ndt.type('5 * var * float64')
"""
__slots__ = ['dim_size']
def __init__(self, dim_size = None):
self.dim_size = dim_size
@property
def dims(self):
if self.dim_size is not None:
return (self,)
else:
raise TypeError('Need to specify ndt.fixed[dim_size],' +
' not just ndt.fixed')
def create(self, eltype):
return make_fixed_dim(self.dim_size, eltype)
def __getitem__(self, dim_size):
return _Fixed(dim_size)
def __repr__(self):
if self.dim_size is not None:
return 'ndt.fixed[%d]' % self.dim_size
else:
return 'ndt.fixed'
class _CFixed(_Dim):
"""
Creates a cfixed dimension when combined with other types.
Examples
--------
>>> ndt.cfixed[3] * ndt.int32
ndt.type('cfixed[3] * int32')
>>> ndt.fixed[5] * ndt.cfixed[2] * ndt.float64
ndt.type('5 * cfixed[2] * float64')
"""
__slots__ = ['dim_size']
def __init__(self, dim_size = None):
self.dim_size = dim_size
@property
def dims(self):
if self.dim_size is not None:
return (self,)
else:
raise TypeError('Need to specify ndt.cfixed[dim_size],' +
' not just ndt.cfixed')
def create(self, eltype):
return make_cfixed_dim(self.dim_size, eltype)
def __getitem__(self, dim_size):
return _CFixed(dim_size)
def __repr__(self):
if self.dim_size is not None:
return 'ndt.cfixed[%d]' % self.dim_size
else:
return 'ndt.cfixed'
var = _Var()
strided = _Strided()
fixed = _Fixed()
cfixed = _CFixed()
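if __name__ == '__main__':
    # Minimal demo (added for illustration; requires the dynd extension to
    # be installed): compose the helpers defined above into concrete types.
    print(fixed[5] * var * 'float64')   # -> ndt.type('5 * var * float64')
    print((fixed[2] ** 3) * 'int32')    # -> ndt.type('2 * 2 * 2 * int32')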
| aterrel/dynd-python | dynd/ndt/dim_helpers.py | Python | bsd-2-clause | 4,145 |
# --------------------------------------------------------
# Theano @ Dragon
# Copyright(c) 2017 SeetaTech
# Written by Ting Pan
# --------------------------------------------------------
import numpy as np
import dragon.core.workspace as ws
from dragon.core.tensor import Tensor, GetTensorName
def shared(value, name=None, **kwargs):
"""Construct a Tensor initialized with ``value``.
Parameters
----------
value : basic type, list or numpy.ndarray
The numerical values.
name : str
        The name of the tensor.
Returns
-------
Tensor
The initialized tensor.
"""
if not isinstance(value, (int, float, list, np.ndarray)):
raise TypeError("Unsupported type of value: {}".format(type(value)))
if name is None: name = GetTensorName()
tensor = Tensor(name).Variable()
ws.FeedTensor(tensor, value)
return tensor | neopenx/Dragon | Dragon/python/dragon/vm/theano/compile/sharedvalue.py | Python | bsd-2-clause | 890 |
from ..tools import add_bias, confirm
from ..activation_functions import softmax_function
from ..cost_functions import softmax_neg_loss
import numpy as np
def resilient_backpropagation(network, trainingset, testset, cost_function, ERROR_LIMIT=1e-3, max_iterations = (), weight_step_max = 50., weight_step_min = 0., start_step = 0.5, learn_max = 1.2, learn_min = 0.5, print_rate = 1000, save_trained_network = False ):
# Implemented according to iRprop+
# http://sci2s.ugr.es/keel/pdf/algorithm/articulo/2003-Neuro-Igel-IRprop+.pdf
assert softmax_function != network.layers[-1][1] or cost_function == softmax_neg_loss,\
"When using the `softmax` activation function, the cost function MUST be `softmax_neg_loss`."
assert cost_function != softmax_neg_loss or softmax_function == network.layers[-1][1],\
"When using the `softmax_neg_loss` cost function, the activation function in the final layer MUST be `softmax`."
assert trainingset[0].features.shape[0] == network.n_inputs, \
"ERROR: input size varies from the defined input setting"
assert trainingset[0].targets.shape[0] == network.layers[-1][0], \
"ERROR: output size varies from the defined output setting"
training_data = np.array( [instance.features for instance in trainingset ] )
training_targets = np.array( [instance.targets for instance in trainingset ] )
test_data = np.array( [instance.features for instance in testset ] )
test_targets = np.array( [instance.targets for instance in testset ] )
# Storing the current / previous weight step size
weight_step = [ np.full( weight_layer.shape, start_step ) for weight_layer in network.weights ]
# Storing the current / previous weight update
dW = [ np.ones(shape=weight_layer.shape) for weight_layer in network.weights ]
# Storing the previous derivative
previous_dEdW = [ 1 ] * len( network.weights )
# Storing the previous error measurement
prev_error = ( ) # inf
input_signals, derivatives = network.update( training_data, trace=True )
out = input_signals[-1]
cost_derivative = cost_function(out, training_targets, derivative=True).T
delta = cost_derivative * derivatives[-1]
error = cost_function(network.update( test_data ), test_targets )
n_samples = float(training_data.shape[0])
layer_indexes = range( len(network.layers) )[::-1] # reversed
epoch = 0
while error > ERROR_LIMIT and epoch < max_iterations:
epoch += 1
for i in layer_indexes:
# Loop over the weight layers in reversed order to calculate the deltas
# Calculate the delta with respect to the weights
dEdW = (np.dot( delta, add_bias(input_signals[i]) )/n_samples).T
if i != 0:
"""Do not calculate the delta unnecessarily."""
# Skip the bias weight
weight_delta = np.dot( network.weights[ i ][1:,:], delta )
# Calculate the delta for the subsequent layer
delta = weight_delta * derivatives[i-1]
# Calculate sign changes and note where they have changed
diffs = np.multiply( dEdW, previous_dEdW[i] )
pos_indexes = np.where( diffs > 0 )
neg_indexes = np.where( diffs < 0 )
zero_indexes = np.where( diffs == 0 )
# positive
if np.any(pos_indexes):
# Calculate the weight step size
weight_step[i][pos_indexes] = np.minimum( weight_step[i][pos_indexes] * learn_max, weight_step_max )
# Calculate the weight step direction
dW[i][pos_indexes] = np.multiply( -np.sign( dEdW[pos_indexes] ), weight_step[i][pos_indexes] )
# Apply the weight deltas
network.weights[i][ pos_indexes ] += dW[i][pos_indexes]
# negative
if np.any(neg_indexes):
weight_step[i][neg_indexes] = np.maximum( weight_step[i][neg_indexes] * learn_min, weight_step_min )
if error > prev_error:
# iRprop+ version of resilient backpropagation
network.weights[i][ neg_indexes ] -= dW[i][neg_indexes] # backtrack
dEdW[ neg_indexes ] = 0
# zeros
if np.any(zero_indexes):
dW[i][zero_indexes] = np.multiply( -np.sign( dEdW[zero_indexes] ), weight_step[i][zero_indexes] )
network.weights[i][ zero_indexes ] += dW[i][zero_indexes]
# Store the previous weight step
previous_dEdW[i] = dEdW
#end weight adjustment loop
prev_error = error
input_signals, derivatives = network.update( training_data, trace=True )
out = input_signals[-1]
cost_derivative = cost_function(out, training_targets, derivative=True).T
delta = cost_derivative * derivatives[-1]
error = cost_function(network.update( test_data ), test_targets )
if epoch%print_rate==0:
# Show the current training status
print "[training] Current error:", error, "\tEpoch:", epoch
print "[training] Finished:"
print "[training] Converged to error bound (%.4g) with error %.4g." % ( ERROR_LIMIT, error )
print "[training] Measured quality: %.4g" % network.measure_quality( training_data, training_targets, cost_function )
print "[training] Trained for %d epochs." % epoch
if save_trained_network and confirm( promt = "Do you wish to store the trained network?" ):
network.save_network_to_file()
# end backprop | jorgenkg/python-neural-network | nimblenet/learning_algorithms/resilient_backpropagation.py | Python | bsd-2-clause | 6,265 |
# Copyright (c) 2014-2021, Freja Nordsiek
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import tempfile
import pkg_resources
import pytest
import hdf5storage
import hdf5storage.plugins
# Check if the example package is installed because some tests will
# depend on it.
try:
import example_hdf5storage_marshaller_plugin
has_example_hdf5storage_marshaller_plugin = True
except ImportError:
has_example_hdf5storage_marshaller_plugin = False
def test_marshaller_api_versions():
assert ('1.0', ) == \
hdf5storage.plugins.supported_marshaller_api_versions()
def test_find_thirdparty_marshaller_plugins():
found_example = False
apivs = hdf5storage.plugins.supported_marshaller_api_versions()
plugins = hdf5storage.plugins.find_thirdparty_marshaller_plugins()
assert isinstance(plugins, dict)
assert set(apivs) == set(plugins)
for k, v in plugins.items():
assert isinstance(k, str)
assert isinstance(v, dict)
for k2, v2 in v.items():
assert isinstance(k2, str)
assert isinstance(v2, pkg_resources.EntryPoint)
if k2 == 'example_hdf5storage_marshaller_plugin':
found_example = True
assert has_example_hdf5storage_marshaller_plugin == found_example
@pytest.mark.skipif(has_example_hdf5storage_marshaller_plugin,
reason='requires example_hdf5storage_marshaller_'
'plugin')
def test_plugin_marshaller_SubList():
mc = hdf5storage.MarshallerCollection(load_plugins=True,
lazy_loading=True)
options = hdf5storage.Options(store_python_metadata=True,
matlab_compatible=False,
marshaller_collection=mc)
ell = [1, 2, 'b1', b'3991', True, None]
data = example_hdf5storage_marshaller_plugin.SubList(ell)
name = '/a'
with tempfile.TemporaryDirectory() as folder:
filename = os.path.join(folder, 'data.h5')
hdf5storage.write(data, path=name, filename=filename,
options=options)
out = hdf5storage.read(path=name, filename=filename,
options=options)
assert ell == list(out)
assert type(out) == example_hdf5storage_marshaller_plugin.SubList
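# ---------------------------------------------------------------------------
# Added illustration (hypothetical packaging sketch, not part of the test
# suite): third-party marshaller plugins like the example package above are
# discovered through setuptools entry points, one group per marshaller API
# version. A plugin's setup.py would declare something along these lines
# (the exact group name is defined by hdf5storage.plugins; it is shown here
# only for illustration):
#
#     setup(
#         name='example_hdf5storage_marshaller_plugin',
#         ...
#         entry_points={
#             'hdf5storage.marshallers.plugins': [
#                 '1.0 = example_hdf5storage_marshaller_plugin:get_marshallers',
#             ],
#         },
#     )
#
# find_thirdparty_marshaller_plugins() then returns a dict mapping each
# supported API version ('1.0' here) to the EntryPoint objects it found.
# ---------------------------------------------------------------------------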
| frejanordsiek/hdf5storage | tests/test_marshaller_plugins.py | Python | bsd-2-clause | 3,575 |
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
from .GXDB import GXDB
from .GXVA import GXVA
from .GXVV import GXVV
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXDBWRITE(gxapi_cy.WrapDBWRITE):
"""
GXDBWRITE class.
The `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` class is used to open and write to databases. Large blocks of data
are split into blocks and served up sequentially to prevent the over-use of virtual memory when VVs or VAs are being written to channels.
Individual data blocks are limited by default to 1 MB (which is user-alterable). Data less than the block size
are served up whole, one block per line.
"""
def __init__(self, handle=0):
super(GXDBWRITE, self).__init__(GXContext._get_tls_geo(), handle)
@classmethod
def null(cls):
"""
A null (undefined) instance of `GXDBWRITE <geosoft.gxapi.GXDBWRITE>`
:returns: A null `GXDBWRITE <geosoft.gxapi.GXDBWRITE>`
:rtype: GXDBWRITE
"""
return GXDBWRITE()
def is_null(self):
"""
Check if this is a null (undefined) instance
:returns: True if this is a null (undefined) instance, False otherwise.
:rtype: bool
"""
return self._internal_handle() == 0
# Create Methods
@classmethod
def create(cls, db):
"""
Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object
        Add channels using the `add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method.
:param db: Database input
:type db: GXDB
:returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object
:rtype: GXDBWRITE
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapDBWRITE._create(GXContext._get_tls_geo(), db)
return GXDBWRITE(ret_val)
@classmethod
def create_xy(cls, db):
"""
        Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object for XY-located data. Add channels using the
`add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method.
:param db: Database input
:type db: GXDB
:returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object
:rtype: GXDBWRITE
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapDBWRITE._create_xy(GXContext._get_tls_geo(), db)
return GXDBWRITE(ret_val)
@classmethod
def create_xyz(cls, db):
"""
        Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object for XYZ-located data.
        Add channels using the `add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method.
:param db: Database input
:type db: GXDB
:returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object
:rtype: GXDBWRITE
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapDBWRITE._create_xyz(GXContext._get_tls_geo(), db)
return GXDBWRITE(ret_val)
def add_channel(self, chan):
"""
Add a data channel to the `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object.
:param chan: Channel handle (does not need to be locked, but can be.)
:type chan: int
:returns: Channel index. Use for getting the correct `GXVV <geosoft.gxapi.GXVV>` or `GXVA <geosoft.gxapi.GXVA>` object.
:rtype: int
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = self._add_channel(chan)
return ret_val
# Data Access Methods
def get_db(self):
"""
Get the output `GXDB <geosoft.gxapi.GXDB>` handle from the `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object.
:returns: `GXDB <geosoft.gxapi.GXDB>` handle
:rtype: GXDB
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = self._get_db()
return GXDB(ret_val)
def get_vv(self, chan):
"""
Get the `GXVV <geosoft.gxapi.GXVV>` handle for a channel.
:param chan: Index of channel to access.
:type chan: int
:returns: `GXVV <geosoft.gxapi.GXVV>` handle
:rtype: GXVV
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Call only for single-column (regular) channels. You can call the `get_chan_array_size <geosoft.gxapi.GXDBWRITE.get_chan_array_size>`
        function to find the number of columns in a given channel. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up.
"""
ret_val = self._get_vv(chan)
return GXVV(ret_val)
def get_va(self, chan):
"""
Get the `GXVA <geosoft.gxapi.GXVA>` handle for an array channel.
:param chan: Index of channel to access.
:type chan: int
:returns: `GXVA <geosoft.gxapi.GXVA>` handle
:rtype: GXVA
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Call only for array (multi-column) channels. You can call the `get_chan_array_size <geosoft.gxapi.GXDBWRITE.get_chan_array_size>`
        function to find the number of columns in a given channel, or you can call `GXVA.col <geosoft.gxapi.GXVA.col>` on the returned `GXVA <geosoft.gxapi.GXVA>` handle.
The `GXVA <geosoft.gxapi.GXVA>` is filled anew for each block served up.
"""
ret_val = self._get_va(chan)
return GXVA(ret_val)
def get_v_vx(self):
"""
Get the X channel `GXVV <geosoft.gxapi.GXVV>` handle.
:returns: `GXVV <geosoft.gxapi.GXVV>` handle
:rtype: GXVV
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Only available for the CreateXY or CreateXYZ methods.
The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up.
"""
ret_val = self._get_v_vx()
return GXVV(ret_val)
def get_v_vy(self):
"""
Get the Y channel `GXVV <geosoft.gxapi.GXVV>` handle.
:returns: `GXVV <geosoft.gxapi.GXVV>` handle
:rtype: GXVV
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Only available for the CreateXY or CreateXYZ methods.
The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up.
"""
ret_val = self._get_v_vy()
return GXVV(ret_val)
def get_v_vz(self):
"""
Get the Z channel `GXVV <geosoft.gxapi.GXVV>` handle.
:returns: `GXVV <geosoft.gxapi.GXVV>` handle
:rtype: GXVV
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Only available for the CreateXY or CreateXYZ methods.
The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up.
If the Z channel is an array channel, the returned `GXVV <geosoft.gxapi.GXVV>` is the "base" `GXVV <geosoft.gxapi.GXVV>` of the `GXVA <geosoft.gxapi.GXVA>` and contains all items sequentially.
"""
ret_val = self._get_v_vz()
return GXVV(ret_val)
def get_chan_array_size(self, chan):
"""
Get the number of columns of data in a channel.
:param chan: Index of channel to access.
:type chan: int
:returns: The number of columns (array size) for a channel
:rtype: int
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Regular channels have one column of data. Array channels have more than one column of data.
This function should be called to determine whether to use `get_vv <geosoft.gxapi.GXDBWRITE.get_vv>` or `get_va <geosoft.gxapi.GXDBWRITE.get_va>` to access data
for a channel.
"""
ret_val = self._get_chan_array_size(chan)
return ret_val
# Processing
def add_block(self, line):
"""
Add the current block of data.
:param line: Line
:type line: int
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** First, set up the data for each channel by copying values into the individual channel VVs and VAs.
"""
self._add_block(line)
def commit(self):
"""
Commit remaining data to the database.
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
self._commit()
def test_func(self, ra):
"""
Temporary test function.
:param ra: `GXRA <geosoft.gxapi.GXRA>` handle to text file to import.
:type ra: GXRA
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Designed to import the "Massive.xyz" file, which has data in the format "X Y Z Data".
"""
self._test_func(ra)
### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
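# Illustrative usage sketch (added commentary; it follows the method
# docstrings above, and the object names are hypothetical):
#
#     dbwrite = GXDBWRITE.create(db)          # db is an open GXDB
#     idx = dbwrite.add_channel(chan)         # returns the channel index
#     if dbwrite.get_chan_array_size(idx) == 1:
#         vv = dbwrite.get_vv(idx)            # single-column channel
#     else:
#         va = dbwrite.get_va(idx)            # array (multi-column) channel
#     # ...fill vv/va with one block of data, then for each line:
#     dbwrite.add_block(line)
#     dbwrite.commit()                        # flush remaining data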
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer | GeosoftInc/gxpy | geosoft/gxapi/GXDBWRITE.py | Python | bsd-2-clause | 11,130 |
# -*- coding: UTF-8 -*-
# Copyright 2019-2020 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from django.db import models
from lino_xl.lib.ledger.choicelists import VoucherStates
from lino.api import dd, _
class OrderStates(VoucherStates):
pass
add = OrderStates.add_item
add('10', _("Waiting"), 'draft', is_editable=True)
add('20', _("Active"), 'active', is_editable=True)
add('30', _("Urgent"), 'urgent', is_editable=True)
add('40', _("Done"), 'registered')
add('50', _("Cancelled"), 'cancelled')
OrderStates.draft.add_transition(required_states="active urgent registered cancelled")
OrderStates.active.add_transition(required_states="draft urgent registered cancelled")
OrderStates.urgent.add_transition(required_states="draft active registered cancelled")
OrderStates.registered.add_transition(required_states="draft active urgent cancelled")
OrderStates.cancelled.add_transition(required_states="draft active urgent registered")
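# Added note: the five add_transition() calls above make every state
# reachable from every other state, i.e. the workflow graph over
# {draft, active, urgent, registered, cancelled} is fully connected.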
| lino-framework/xl | lino_xl/lib/orders/choicelists.py | Python | bsd-2-clause | 992 |
import unittest
import numpy as np
import socket
import Pyro4
from nested_sampling import NestedSampling, MonteCarloWalker, Harmonic, Replica
class TestNS(unittest.TestCase):
"""to test distributed computing must start a dispatcher with --server-name test and --port 9090
"""
def setUp(self):
self.setUp1()
def setUp1(self, nproc=1, multiproc=True):
self.ndim = 3
self.harmonic = Harmonic(self.ndim)
self.nreplicas = 10
self.stepsize = 0.1
self.nproc = nproc
self.mc_runner = MonteCarloWalker(self.harmonic, mciter=40)
if multiproc == False:
hostname=socket.gethostname()
host = Pyro4.socketutil.getIpAddress(hostname, workaround127=True)
self.dispatcher_URI = "PYRO:"+"test@"+host+":9090"
else:
self.dispatcher_URI = None
replicas = []
for i in xrange(self.nreplicas):
x = self.harmonic.get_random_configuration()
replicas.append(Replica(x, self.harmonic.get_energy(x)))
self.ns = NestedSampling(replicas, self.mc_runner,
stepsize=0.1, nproc=nproc, verbose=False, dispatcher_URI=self.dispatcher_URI)
self.Emax0 = self.ns.replicas[-1].energy
self.niter = 100
for i in xrange(self.niter):
self.ns.one_iteration()
self.Emax = self.ns.replicas[-1].energy
self.Emin = self.ns.replicas[0].energy
def test1(self):
print "running TestNS"
self.assert_(len(self.ns.replicas) == self.nreplicas)
self.assert_(self.Emax < self.Emax0)
self.assert_(self.Emin < self.Emax)
self.assert_(self.Emin >= 0)
self.assert_(self.ns.stepsize != self.stepsize)
self.assertEqual(len(self.ns.max_energies), self.niter * self.nproc)
class testNSParMultiproc(TestNS):
def setUp(self):
self.setUp1(nproc=3)
class testNSParPyro(TestNS):
def setUp(self):
self.setUp1(nproc=3,multiproc=False)
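# Added note: the Pyro variant above expects a dispatcher reachable at
# PYRO:test@<local-ip>:9090, matching the TestNS docstring ("start a
# dispatcher with --server-name test and --port 9090"); without a running
# dispatcher the connection made by NestedSampling will fail.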
if __name__ == "__main__":
unittest.main() | js850/nested_sampling | nested_sampling/tests/_test_ns.py | Python | bsd-2-clause | 2,120 |
__all__ = [
"CorruptedMessage",
"Stats",
"UnexpectedCommand",
"UnexpectedEOF",
"UnknownCommand",
"log",
]
import logging
import struct
log = logging.getLogger("offhand")
class UnexpectedEOF(Exception):
def __init__(self):
Exception.__init__(self, "Connection closed unexpectedly")
class UnknownCommand(Exception):
def __init__(self, command):
Exception.__init__(self, "Unknown command: %r" % command)
class UnexpectedCommand(Exception):
def __init__(self, command):
Exception.__init__(self, "Unexpected command: %r" % command)
class CorruptedMessage(Exception):
def __init__(self):
Exception.__init__(self, "Corrupted message")
class Stats(object):
__slots__ = [
"connecting",
"connected",
"idle",
"busy",
"total_engaged",
"total_canceled",
"total_rolledback",
"total_timeouts",
"total_disconnects",
"total_errors",
]
def __init__(self, copy=None):
for key in self.__slots__:
setattr(self, key, getattr(copy, key) if copy else 0)
def __nonzero__(self):
return any(getattr(self, key) for key in self.__slots__)
def __str__(self):
return " ".join("%s=%s" % (key, getattr(self, key)) for key in self.__slots__)
def parse_message(data):
message = []
offset = 0
while True:
remain = len(data) - offset
if remain == 0:
break
if remain < 4:
raise CorruptedMessage()
part_size, = struct.unpack("<I", data[offset: offset + 4])
offset += 4
if remain < 4 + part_size:
raise CorruptedMessage()
message.append(data[offset: offset + part_size])
offset += part_size
return message
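# ---------------------------------------------------------------------------
# Added illustration (helper not part of the original module): building a
# message in the length-prefixed wire format that parse_message() expects —
# each part is a little-endian uint32 size followed by that many payload
# bytes, concatenated.
def _build_message(parts):
    return b"".join(struct.pack("<I", len(p)) + p for p in parts)

# Round trip: parse_message(_build_message([b"foo", b"quux"])) -> [b"foo", b"quux"]
# ---------------------------------------------------------------------------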
| ninchat/offhand | python/offhand/__init__.py | Python | bsd-2-clause | 1,817 |
# -*- coding:utf-8 -*-
from django import forms
try:
from django.utils.encoding import smart_unicode as smart_text
except ImportError:
from django.utils.encoding import smart_text
from cached_modelforms.tests.utils import SettingsTestCase
from cached_modelforms.tests.models import SimpleModel
from cached_modelforms import (
CachedModelChoiceField, CachedModelMultipleChoiceField)
class TestFields(SettingsTestCase):
def setUp(self):
self.settings_manager.set(INSTALLED_APPS=('cached_modelforms.tests',))
self.obj1 = SimpleModel.objects.create(name='name1')
self.obj2 = SimpleModel.objects.create(name='name2')
self.obj3 = SimpleModel.objects.create(name='name3')
self.cached_list = [self.obj1, self.obj2, self.obj3]
class FormSingle(forms.Form):
obj = CachedModelChoiceField(
objects=lambda:self.cached_list,
required=False
)
class FormMultiple(forms.Form):
obj = CachedModelMultipleChoiceField(
objects=lambda:self.cached_list,
required=False
)
self.FormSingle = FormSingle
self.FormMultiple = FormMultiple
def test_modelchoicefield_objects_arg(self):
'''
Test, how the field accepts different types of ``objects`` argument.
'''
as_list = CachedModelChoiceField(objects=lambda:self.cached_list)
as_iterable = CachedModelChoiceField(
objects=lambda:iter(self.cached_list)
)
list_of_tuples = [(x.pk, x) for x in self.cached_list]
as_list_of_tuples = CachedModelChoiceField(
objects=lambda:list_of_tuples
)
as_dict = CachedModelChoiceField(objects=lambda:dict(list_of_tuples))
choices_without_empty_label = as_list.choices[:]
if as_list.empty_label is not None:
choices_without_empty_label.pop(0)
# make sure all of the ``choices`` attrs are the same
self.assertTrue(
as_list.choices ==
as_iterable.choices ==
as_list_of_tuples.choices ==
as_dict.choices
)
# same for ``objects``
self.assertTrue(
as_list.objects ==
as_iterable.objects ==
as_list_of_tuples.objects ==
as_dict.objects
)
# ``objects`` should be a dict as ``{smart_text(pk1): obj1, ...}``
self.assertEqual(
set(as_list.objects.keys()),
set(smart_text(x.pk) for x in self.cached_list)
)
self.assertEqual(set(as_list.objects.values()), set(self.cached_list))
# ``choices`` should be a list as ``[(smart_text(pk1), smart_text(obj1)), ...]``
self.assertEqual(
choices_without_empty_label,
[(smart_text(x.pk), smart_text(x)) for x in self.cached_list]
)
def test_modelmultiplechoicefield_objects_arg(self):
'''
Test, how the field accepts different types of ``objects`` argument.
'''
as_list = CachedModelMultipleChoiceField(
objects=lambda:self.cached_list
)
as_iterable = CachedModelMultipleChoiceField(
objects=lambda:iter(self.cached_list)
)
list_of_tuples = [(x.pk, x) for x in self.cached_list]
as_list_of_tuples = CachedModelMultipleChoiceField(
objects=lambda:list_of_tuples
)
as_dict = CachedModelMultipleChoiceField(objects=dict(list_of_tuples))
# make sure all of the ``choices`` attrs are the same
self.assertTrue(
as_list.choices ==
as_iterable.choices ==
as_list_of_tuples.choices ==
as_dict.choices)
# same for ``objects``
self.assertTrue(
as_list.objects ==
as_iterable.objects ==
as_list_of_tuples.objects ==
as_dict.objects)
# ``objects`` should be a dict as ``{smart_text(pk1): obj1, ...}``
self.assertEqual(
set(as_list.objects.keys()),
set(smart_text(x.pk) for x in self.cached_list)
)
self.assertEqual(set(as_list.objects.values()), set(self.cached_list))
# ``choices`` should be a list as ``[(smart_text(pk1), smart_text(obj1)), ...]``
self.assertEqual(
as_list.choices,
[(smart_text(x.pk), smart_text(x)) for x in self.cached_list]
)
def test_modelchoicefield_behavior(self):
'''
Test, how the field handles data in form.
'''
# some value
form = self.FormSingle({'obj': smart_text(self.obj1.pk)})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['obj'], self.obj1)
# no value
form = self.FormSingle({})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['obj'], None)
# invalid value
form = self.FormSingle({'obj': '-1'})
self.assertFalse(form.is_valid())
self.assertTrue(form._errors['obj'])
def test_modelmultiplechoicefield_behavior(self):
'''
Test, how the field handles data in form.
'''
# some value
form = self.FormMultiple({'obj': [smart_text(self.obj1.pk), smart_text(self.obj2.pk)]})
self.assertTrue(form.is_valid())
self.assertEqual(set(form.cleaned_data['obj']), set([self.obj1, self.obj2]))
# no value
form = self.FormMultiple({})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['obj'], [])
# invalid value
form = self.FormMultiple({'obj': [smart_text(self.obj1.pk), '-1']})
self.assertFalse(form.is_valid())
self.assertTrue(form._errors['obj'])
# invalid list
form = self.FormMultiple({'obj': '-1'})
self.assertFalse(form.is_valid())
self.assertTrue(form._errors['obj'])
def test_modelchoicefield_objects_assignment(self):
field = CachedModelChoiceField(objects=self.cached_list)
field2 = CachedModelChoiceField(objects=self.cached_list[:2])
field.objects = self.cached_list[:2]
self.assertEqual(field.objects, field2.objects)
self.assertEqual(field.choices, field2.choices)
def test_modelmultiplechoicefield_objects_assignment(self):
field = CachedModelMultipleChoiceField(objects=self.cached_list)
field2 = CachedModelMultipleChoiceField(objects=self.cached_list[:2])
field.objects = self.cached_list[:2]
self.assertEqual(field.objects, field2.objects)
self.assertEqual(field.choices, field2.choices)
| drtyrsa/django-cached-modelforms | cached_modelforms/tests/test_fields.py | Python | bsd-2-clause | 6,699 |
#! /usr/bin/env python
# $Id: makedep.py 1054 2008-12-03 00:38:43Z kena $
import sys
import notes
thenote = sys.argv[1]
outfile = sys.argv[2]
notes.init_repo(sys.argv[3:])
note = notes.repo.get(thenote)
deps = note.get_deps()
print "%s.txt(note) -> %s(referenced keys)" % (thenote, outfile)
f = file(outfile, 'w')
for d in deps:
print >>f, d
f.close()
| knz/restcrumbs | compiler/tools/makedep.py | Python | bsd-2-clause | 368 |
#!/usr/bin/env python
"""Interpret a compact grid specification using regex."""
import re
# use a compact regular expression with nested OR expressions,
# and hence many groups, but name the outer (main) groups:
real_short1 = \
r'\s*(?P<lower>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
# regex for real interval [a,b] :
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
# regex for integer interval [a:b] :
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
# test:
examples = ('domain=[0,10] indices=[0:11]',
'domain=[0.1,1.1]x[0,2E+00] indices=[1:21]x[1:101]',
'[0,1]x[0,2]x[-1,1.5] [1:21]x[1:11]x[-10:15]')
for ex in examples:
print re.findall(indices, ex)
# a nested list is returned; requires nested group counting
print re.findall(domain, ex)
print
# work with compiled expressions and the groupindex dictionary to
# extract the named groups easily from the nested list that is
# returned from re.findall:
print 'work with groupindex:'
for ex in examples:
print re.findall(indices, ex)
c = re.compile(domain)
groups = c.findall(ex)
intervals = []
for i in range(len(groups)):
intervals.append(
(groups[i][c.groupindex['lower']-1],
groups[i][c.groupindex['upper']-1]))
print intervals
print
# work with non-capturing parentheses of the form (?:pattern)
real_short1 = \
r'\s*(?P<lower>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
# regex for real interval [a,b] :
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
print 'non-capturing groups:'
for ex in examples:
print re.findall(domain, ex)
print
# avoid parentheses, i.e., nested OR expressions:
real_sn = r'-?\d\.?\d*[Ee][+\-][0-9]+'
real_dn = r'-?\d*\.\d*'
real_in = r'-?\d+'
real1 = \
r'\s*(?P<lower>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
real2 = \
r'\s*(?P<upper>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
# regex for real interval [a,b] :
domain = r'\[' + real1 + ',' + real2 + r'\]'
# regex for integer interval [a:b] :
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
print '\navoid so many parentheses (just two groups now for each interval):'
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
# much simpler _working_ versions:
domain = r'\[([^,]*),([^\]]*)\]'
indices = r'\[([^:,]*):([^\]]*)\]'
print '\nsimpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
# these give wrong results
domain = r'\[(.*?),(.*?)\]'
indices = r'\[(.*?):(.*?)\]'
print '\nalternative; simpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
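# Added illustration: post-processing the matches of the simpler regex into
# numbers (pure Python; same example strings as above).
c = re.compile(r'\[([^,]*),([^\]]*)\]')
for lower, upper in c.findall('domain=[0.1,1.1]x[0,2E+00]'):
    print float(lower), float(upper)   # -> 0.1 1.1 and 0.0 2.0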
| sniemi/SamPy | sandbox/src1/TCSE3-3rd-examples/src/py/regex/fdmgrid.py | Python | bsd-2-clause | 2,941 |
import pandas as pd
import numpy as np
import cobra
from pyefm.ElementaryFluxModes import EFMToolWrapper
from tqdm import tqdm
class EFVWrapper(EFMToolWrapper):
def create_matrices(self, extra_g=None, extra_h=None):
""" Initialize the augmented stoichiometric matrix.
extra_g: (n x nr) array
            Extra entries in the constraint matrix: positive values for lower
bounds, negative values for upper bounds
extra_h: (n) array
Corresponding bounds for the extra entries matrix
"""
# Create stoichiometric matrix, get key dimensions
N = cobra.util.create_stoichiometric_matrix(self.model)
nm, nr = N.shape
self.nm = nm
self.nr = nr
# Construct full G and h matrices, then drop homogeneous (or near
# homogeneous) entries
g_full = np.vstack([np.eye(nr), -np.eye(nr)])
h_full = np.array([(r.lower_bound, -r.upper_bound)
for r in self.model.reactions]).T.flatten()
inhomogeneous = ~((h_full <= -1000) | np.isclose(h_full, 0))
h_full = h_full[inhomogeneous]
g_full = g_full[inhomogeneous]
if extra_g is not None:
assert extra_g.shape[1] == nr
assert extra_g.shape[0] == len(extra_h)
g_full = np.vstack([g_full, extra_g])
h_full = np.hstack([h_full, extra_h])
G = g_full
h = h_full
self.nt = nt = len(h)
self.D = np.vstack([
np.hstack([N, np.zeros((nm, nt)), np.zeros((nm, 1))]),
np.hstack([G, -np.eye(nt), np.atleast_2d(-h).T])
])
def create_model_files(self, temp_dir):
# Stoichiometric Matrix
np.savetxt(temp_dir + '/stoich.txt', self.D, delimiter='\t')
# Reaction reversibilities
np.savetxt(
temp_dir + '/revs.txt', np.hstack([
np.array([r.lower_bound < 0 for r in self.model.reactions]),
np.zeros((self.nt + 1))]),
delimiter='\t', fmt='%d', newline='\t')
# Reaction Names
r_names = np.hstack([
np.array([r.id for r in self.model.reactions]),
np.array(['s{}'.format(i) for i in range(self.nt)]),
np.array(['lambda'])
])
with open(temp_dir + '/rnames.txt', 'w') as f:
f.write('\t'.join(('"{}"'.format(name) for name in r_names)))
# Metabolite Names
m_names = np.hstack([
np.array([m.id for m in self.model.metabolites]),
np.array(['s{}'.format(i) for i in range(self.nt)]),
])
with open(temp_dir + '/mnames.txt', 'w') as f:
f.write('\t'.join(('"{}"'.format(name) for name in m_names)))
pass
def read_double_out(self, out_file):
with open(out_file, 'rb') as f:
out_arr = np.fromstring(f.read()[13:], dtype='>d').reshape(
(-1, self.nt + self.nr + 1)).T
out_arr = np.asarray(out_arr, dtype=np.float64).T
# Sort by the absolute value of the stoichiometry
        sort_inds = np.abs(out_arr[:, :self.nr]).sum(1).argsort()
out_arr = out_arr[sort_inds]
unbounded = out_arr[np.isclose(out_arr[:,-1], 0.)]
bounded = out_arr[~np.isclose(out_arr[:,-1], 0.)]
        if bounded.size:  # true only if there are bounded EFVs
bounded /= np.atleast_2d(bounded[:,-1]).T
unbounded_df = pd.DataFrame(
unbounded[:, :self.nr],
columns=[r.id for r in self.model.reactions],
index=['UEV{}'.format(i)
for i in range(1, unbounded.shape[0] + 1)])
bounded_df = pd.DataFrame(
bounded[:, :self.nr],
columns=[r.id for r in self.model.reactions],
index=('BEV{}'.format(i)
for i in range(1, bounded.shape[0] + 1)))
return unbounded_df.append(bounded_df)
def calculate_elementary_vectors(cobra_model, opts=None, verbose=True,
java_args=None, extra_g=None, extra_h=None):
"""Calculate elementary flux vectors, which capture arbitrary linear
constraints. Approach as detailed in S. Klamt et al., PLoS Comput Biol. 13,
e1005409–22 (2017).
Augmented constraints as a hacky workaround for implementing more
complicated constraints without using optlang.
java_args: string
Extra command-line options to pass to the java virtual machine.
Eg. '-Xmx1g' will set the heap space to 1 GB.
extra_g: (n x nr) array
Extra entries in the constraint matrix. postive values for lower
bounds, negative values for upper bounds
extra_h: (n) array
Corresponding bounds for the extra entries matrix
"""
efv_wrap = EFVWrapper(cobra_model, opts, verbose, java_args=java_args)
efv_wrap.create_matrices(extra_g=extra_g, extra_h=extra_h)
return efv_wrap()
def get_support_minimal(efvs):
"""Return only those elementary flux vectors whose support is not a proper
superset of another EFV"""
bool_df = pd.DataFrame(np.isclose(efvs, 0),
columns=efvs.columns, index=efvs.index)
set_df = bool_df.apply(lambda x: set(x.index[~x]), 1)
set_df = set_df[set_df != set()] # Drop the empty set EFV
set_dict = set_df.to_dict()
is_support_minimal = _get_support_minimal_list(set_dict)
return efvs.loc[is_support_minimal]
def _get_support_minimal_list(set_dict):
all_keys = set(set_dict.keys())
is_support_minimal = []
for this_key, val in tqdm(set_dict.items()):
for key in all_keys.difference(set([this_key])):
if val.issuperset(set_dict[key]):
break
else:
is_support_minimal.append(this_key)
return is_support_minimal
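# Illustrative sketch (added; the reaction id 'biomass' is hypothetical):
# encoding the extra linear constraint v_biomass >= 0.1 for
# calculate_elementary_vectors. Following the sign convention in the
# docstring above, a lower bound gets a positive entry in its constraint row:
#
#   nr = len(cobra_model.reactions)
#   extra_g = np.zeros((1, nr))
#   extra_g[0, cobra_model.reactions.index('biomass')] = 1.0  # + => lower bound
#   extra_h = np.array([0.1])
#   efvs = calculate_elementary_vectors(cobra_model,
#                                       extra_g=extra_g, extra_h=extra_h)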
| pstjohn/pyefm | pyefm/ElementaryFluxVectors.py | Python | bsd-2-clause | 5,857 |
"""
"""
from django.core.urlresolvers import reverse
from django.test import TestCase
from wagtail.tests.utils import WagtailTestUtils
class BaseTestIndexView(TestCase, WagtailTestUtils):
"""
Base test case for CRUD index view.
"""
url_namespace = None
template_dir = None
def _create_sequential_instance(self, index):
"""
Stub method for extending class to create sequential
model instances.
:param index: the sequential index to use.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
self.login()
def get(self, params=None):
if not params:
params = {}
return self.client.get(
reverse('{0}:index'.format(self.url_namespace)), params)
def populate(self):
"""
Populates several model class instance.
"""
for i in range(50):
self._create_sequential_instance(i)
def test_get(self):
# Generate the response.
response = self.get()
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/index.html'.format(self.template_dir)
)
def test_search(self):
# Generate the response.
response = self.get({'q': 'keyword'})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], 'keyword')
def test_pagination(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 2})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/index.html'.format(self.template_dir)
)
self.assertEqual(response.context['page_obj'].number, 2)
def test_pagination_invalid(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 'fake'})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/index.html'.format(self.template_dir)
)
self.assertEqual(response.context['page_obj'].number, 1)
def test_pagination_out_of_range(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 99999})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/index.html'.format(self.template_dir)
)
self.assertEqual(
response.context['page_obj'].number,
response.context['paginator'].num_pages
)
def test_ordering(self):
orderings = ['title', '-created_at']
for ordering in orderings:
response = self.get({'ordering': ordering})
self.assertEqual(response.status_code, 200)
class BaseTestCreateView(TestCase, WagtailTestUtils):
"""
Base test case for CRUD add view.
"""
url_namespace = None
template_dir = None
model_class = None
def _get_post_data(self):
"""
Stub method for extending class to return data dictionary
to create a new model instance on POST.
:rtype: dict.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
self.login()
def test_get(self):
# Generate the response.
response = self.client.get(
reverse('{0}:add'.format(self.url_namespace))
)
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/add.html'.format(self.template_dir)
)
def test_post(self):
# Get POST data.
data = self._get_post_data()
# Generate the response.
response = self.client.post(
reverse('{0}:add'.format(self.url_namespace)),
data
)
# Check assertions.
self.assertRedirects(
response,
reverse('{0}:index'.format(self.url_namespace))
)
self.assertTrue(
self.model_class.objects.filter(**data).exists()
)
class BaseTestUpdateView(TestCase, WagtailTestUtils):
"""
Base test case for CRUD edit view.
"""
url_namespace = None
template_dir = None
model_class = None
def _get_instance(self):
"""
Stub method for extending class to return saved model class
instance.
:rtype: django.db.models.Model.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def _get_post_data(self):
"""
Stub method for extending class to return data dictionary
to create a new model instance on POST.
:rtype: dict.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
# Create the instance and login.
self.instance = self._get_instance()
self.login()
def test_get(self):
# Generate the response.
response = self.client.get(
reverse(
'{0}:edit'.format(self.url_namespace),
args=(self.instance.pk,)
)
)
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/edit.html'.format(self.template_dir)
)
def test_post(self):
# Get POST data.
data = self._get_post_data()
# Generate the response.
response = self.client.post(
reverse(
'{0}:edit'.format(self.url_namespace),
args=(self.instance.pk,)
),
data
)
# Check assertions.
self.assertRedirects(
response,
reverse('{0}:index'.format(self.url_namespace)))
self.assertTrue(
self.model_class.objects.filter(**data).exists()
)
class BaseTestDeleteView(TestCase, WagtailTestUtils):
"""
Base test case for CRUD delete view.
"""
url_namespace = None
template_dir = None
model_class = None
def _get_instance(self):
"""
Stub method for extending class to return saved model class
instance.
:rtype: django.db.models.Model.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
# Create the instance and login.
self.instance = self._get_instance()
self.login()
def test_get(self):
# Generate the response.
response = self.client.get(
reverse(
'{0}:delete'.format(self.url_namespace),
args=(self.instance.pk,)
)
)
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/confirm_delete.html'.format(self.template_dir)
)
def test_delete(self):
# Generate the response.
response = self.client.post(
reverse(
'{0}:delete'.format(self.url_namespace),
args=(self.instance.pk,)
),
{'foo': 'bar'}
)
# Check assertions.
self.assertRedirects(
response,
reverse('{0}:index'.format(self.url_namespace))
)
self.assertFalse(
self.model_class.objects.filter(pk=self.instance.pk).exists()
)
class BaseTestChooserView(TestCase, WagtailTestUtils):
"""
Base test for chooser view.
"""
url_namespace = None
template_dir = None
model_class = None
def _create_sequential_instance(self, index):
"""
Stub method for extending class to create sequential
model instances.
:param index: the sequential index to use.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
self.login()
def get(self, params=None):
if not params:
params = {}
return self.client.get(
reverse('{0}:choose'.format(self.url_namespace)),
params
)
def populate(self):
"""
Populates several model class instance.
"""
for i in range(50):
self._create_sequential_instance(i)
def test_get(self):
# Generate the response.
response = self.get()
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/chooser.html'.format(self.template_dir)
)
self.assertTemplateUsed(
response,
'{0}/results.html'.format(self.template_dir)
)
self.assertTemplateUsed(
response,
'{0}/chooser.js'.format(self.template_dir)
)
def test_search(self):
# Generate the response.
response = self.get({'q': 'keyword'})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], 'keyword')
def test_pagination(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 2})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/results.html'.format(self.template_dir)
)
self.assertEqual(response.context['page_obj'].number, 2)
def test_pagination_invalid(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 'fake'})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/results.html'.format(self.template_dir)
)
self.assertEqual(response.context['page_obj'].number, 1)
def test_pagination_out_of_range(self):
# Create model class instances.
self.populate()
# Generate the response.
response = self.get({'p': 99999})
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/results.html'.format(self.template_dir)
)
self.assertEqual(
response.context['page_obj'].number,
response.context['paginator'].num_pages
)
class BaseTestChosenView(TestCase, WagtailTestUtils):
url_namespace = None
template_dir = None
model_class = None
def _get_instance(self):
"""
Stub method for extending class to return saved model class
instance.
:rtype: django.db.models.Model.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
# Create the instance and login.
self.instance = self._get_instance()
self.login()
def test_get(self):
# Generate the response.
response = self.client.get(
reverse(
'{0}:chosen'.format(self.url_namespace),
args=(self.instance.id,)
)
)
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/chosen.js'.format(self.template_dir)
)
class BaseTestChooserCreateView(TestCase, WagtailTestUtils):
"""
Base test case for CRUD add view.
"""
url_namespace = None
template_dir = None
model_class = None
def _get_post_data(self):
"""
Stub method for extending class to return data dictionary
to create a new model instance on POST.
:rtype: dict.
"""
        raise NotImplementedError(
'This method must be implemented by {0}'.format(
self.__class__.__name__
)
)
def setUp(self):
self.login()
def test_get(self):
# Generate the response.
response = self.client.get(
reverse('{0}:choose'.format(self.url_namespace))
)
# Check assertions.
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(
response,
'{0}/chooser.html'.format(self.template_dir)
)
self.assertTemplateUsed(
response,
'{0}/results.html'.format(self.template_dir)
)
self.assertTemplateUsed(
response,
'{0}/chooser.js'.format(self.template_dir)
)
def test_post(self):
# Get POST data.
data = self._get_post_data()
# Generate the response.
response = self.client.post(
reverse('{0}:choose'.format(self.url_namespace)),
data
)
# Check assertions.
self.assertTemplateUsed(
response,
'{0}/chosen.js'.format(self.template_dir)
)
self.assertContains(
response,
'modal.respond'
)
self.assertTrue(
self.model_class.objects.filter(**data).exists()
)
| rfosterslo/wagtailplus | wagtailplus/tests/views.py | Python | bsd-2-clause | 14,293 |
from __future__ import unicode_literals
from future.builtins import str
from future.utils import with_metaclass
from json import loads
try:
from urllib.request import urlopen
from urllib.parse import urlencode
except ImportError:
from urllib import urlopen, urlencode
from django.contrib.contenttypes.generic import GenericForeignKey
from django.db import models
from django.db.models.base import ModelBase
from django.db.models.signals import post_save
from django.template.defaultfilters import truncatewords_html
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.timesince import timesince
from django.utils.timezone import now
from django.utils.translation import ugettext, ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.fields import RichTextField
from mezzanine.core.managers import DisplayableManager, CurrentSiteManager
from mezzanine.generic.fields import KeywordsField
from mezzanine.utils.html import TagCloser
from mezzanine.utils.models import base_concrete_model, get_user_model_name
from mezzanine.utils.sites import current_site_id
from mezzanine.utils.urls import admin_url, slugify, unique_slug
user_model_name = get_user_model_name()
class SiteRelated(models.Model):
"""
Abstract model for all things site-related. Adds a foreignkey to
Django's ``Site`` model, and filters by site with all querysets.
See ``mezzanine.utils.sites.current_site_id`` for implementation
details.
"""
objects = CurrentSiteManager()
class Meta:
abstract = True
site = models.ForeignKey("sites.Site", editable=False)
def save(self, update_site=False, *args, **kwargs):
"""
Set the site to the current site when the record is first
created, or the ``update_site`` argument is explicitly set
to ``True``.
"""
if update_site or not self.id:
self.site_id = current_site_id()
super(SiteRelated, self).save(*args, **kwargs)
@python_2_unicode_compatible
class Slugged(SiteRelated):
"""
Abstract model that handles auto-generating slugs. Each slugged
object is also affiliated with a specific site object.
"""
title = models.CharField(_("Title"), max_length=500)
slug = models.CharField(_("URL"), max_length=2000, blank=True, null=True,
help_text=_("Leave blank to have the URL auto-generated from "
"the title."))
class Meta:
abstract = True
def __str__(self):
return self.title
def save(self, *args, **kwargs):
"""
If no slug is provided, generates one before saving.
"""
if not self.slug:
self.slug = self.generate_unique_slug()
super(Slugged, self).save(*args, **kwargs)
def generate_unique_slug(self):
"""
Create a unique slug by passing the result of get_slug() to
utils.urls.unique_slug, which appends an index if necessary.
"""
# For custom content types, use the ``Page`` instance for
# slug lookup.
concrete_model = base_concrete_model(Slugged, self)
slug_qs = concrete_model.objects.exclude(id=self.id)
return unique_slug(slug_qs, "slug", self.get_slug())
def get_slug(self):
"""
Allows subclasses to implement their own slug creation logic.
"""
attr = "title"
if settings.USE_MODELTRANSLATION:
from modeltranslation.utils import build_localized_fieldname
attr = build_localized_fieldname(attr, settings.LANGUAGE_CODE)
# Get self.title_xx where xx is the default language, if any.
# Get self.title otherwise.
return slugify(getattr(self, attr, None) or self.title)
def admin_link(self):
return "<a href='%s'>%s</a>" % (self.get_absolute_url(),
ugettext("View on site"))
admin_link.allow_tags = True
admin_link.short_description = ""
class MetaData(models.Model):
"""
Abstract model that provides meta data for content.
"""
_meta_title = models.CharField(_("Title"), null=True, blank=True,
max_length=500,
help_text=_("Optional title to be used in the HTML title tag. "
"If left blank, the main title field will be used."))
description = models.TextField(_("Description"), blank=True)
gen_description = models.BooleanField(_("Generate description"),
help_text=_("If checked, the description will be automatically "
"generated from content. Uncheck if you want to manually "
"set a custom description."), default=True)
keywords = KeywordsField(verbose_name=_("Keywords"))
class Meta:
abstract = True
def save(self, *args, **kwargs):
"""
Set the description field on save.
"""
if self.gen_description:
self.description = strip_tags(self.description_from_content())
super(MetaData, self).save(*args, **kwargs)
def meta_title(self):
"""
Accessor for the optional ``_meta_title`` field, which returns
the string version of the instance if not provided.
"""
return self._meta_title or str(self)
def description_from_content(self):
"""
Returns the first block or sentence of the first content-like
field.
"""
description = ""
# Use the first RichTextField, or TextField if none found.
for field_type in (RichTextField, models.TextField):
if not description:
for field in self._meta.fields:
if isinstance(field, field_type) and \
field.name != "description":
description = getattr(self, field.name)
if description:
from mezzanine.core.templatetags.mezzanine_tags \
import richtext_filters
description = richtext_filters(description)
break
# Fall back to the title if description couldn't be determined.
if not description:
description = str(self)
# Strip everything after the first block or sentence.
ends = ("</p>", "<br />", "<br/>", "<br>", "</ul>",
"\n", ". ", "! ", "? ")
for end in ends:
pos = description.lower().find(end)
if pos > -1:
description = TagCloser(description[:pos]).html
break
else:
description = truncatewords_html(description, 100)
return description
class TimeStamped(models.Model):
"""
Provides created and updated timestamps on models.
"""
class Meta:
abstract = True
created = models.DateTimeField(null=True, editable=False)
updated = models.DateTimeField(null=True, editable=False)
def save(self, *args, **kwargs):
_now = now()
self.updated = _now
if not self.id:
self.created = _now
super(TimeStamped, self).save(*args, **kwargs)
CONTENT_STATUS_DRAFT = 1
CONTENT_STATUS_PUBLISHED = 2
CONTENT_STATUS_CHOICES = (
(CONTENT_STATUS_DRAFT, _("Draft")),
(CONTENT_STATUS_PUBLISHED, _("Published")),
)
class Displayable(Slugged, MetaData, TimeStamped):
"""
Abstract model that provides features of a visible page on the
website such as publishing fields. Basis of Mezzanine pages,
blog posts, and Cartridge products.
"""
status = models.IntegerField(_("Status"),
choices=CONTENT_STATUS_CHOICES, default=CONTENT_STATUS_PUBLISHED,
help_text=_("With Draft chosen, will only be shown for admin users "
"on the site."))
publish_date = models.DateTimeField(_("Published from"),
help_text=_("With Published chosen, won't be shown until this time"),
blank=True, null=True)
expiry_date = models.DateTimeField(_("Expires on"),
help_text=_("With Published chosen, won't be shown after this time"),
blank=True, null=True)
short_url = models.URLField(blank=True, null=True)
in_sitemap = models.BooleanField(_("Show in sitemap"), default=True)
objects = DisplayableManager()
search_fields = {"keywords": 10, "title": 5}
class Meta:
abstract = True
def save(self, *args, **kwargs):
"""
Set default for ``publish_date``. We can't use ``auto_now_add`` on
the field as it will be blank when a blog post is created from
the quick blog form in the admin dashboard.
"""
if self.publish_date is None:
self.publish_date = now()
super(Displayable, self).save(*args, **kwargs)
def get_admin_url(self):
return admin_url(self, "change", self.id)
def publish_date_since(self):
"""
Returns the time since ``publish_date``.
"""
return timesince(self.publish_date)
publish_date_since.short_description = _("Published from")
def get_absolute_url(self):
"""
Raise an error if called on a subclass without
``get_absolute_url`` defined, to ensure all search results
        contain a URL.
"""
name = self.__class__.__name__
raise NotImplementedError("The model %s does not have "
"get_absolute_url defined" % name)
def set_short_url(self):
"""
Sets the ``short_url`` attribute using the bit.ly credentials
if they have been specified, and saves it. Used by the
``set_short_url_for`` template tag, and ``TweetableAdmin``.
"""
if not self.short_url:
from mezzanine.conf import settings
settings.use_editable()
parts = (self.site.domain, self.get_absolute_url())
self.short_url = "http://%s%s" % parts
if settings.BITLY_ACCESS_TOKEN:
url = "https://api-ssl.bit.ly/v3/shorten?%s" % urlencode({
"access_token": settings.BITLY_ACCESS_TOKEN,
"uri": self.short_url,
})
response = loads(urlopen(url).read().decode("utf-8"))
if response["status_code"] == 200:
self.short_url = response["data"]["url"]
self.save()
return ""
def _get_next_or_previous_by_publish_date(self, is_next, **kwargs):
"""
Retrieves next or previous object by publish date. We implement
our own version instead of Django's so we can hook into the
published manager and concrete subclasses.
"""
arg = "publish_date__gt" if is_next else "publish_date__lt"
order = "publish_date" if is_next else "-publish_date"
lookup = {arg: self.publish_date}
concrete_model = base_concrete_model(Displayable, self)
try:
queryset = concrete_model.objects.published
except AttributeError:
queryset = concrete_model.objects.all
try:
return queryset(**kwargs).filter(**lookup).order_by(order)[0]
except IndexError:
pass
def get_next_by_publish_date(self, **kwargs):
"""
Retrieves next object by publish date.
"""
return self._get_next_or_previous_by_publish_date(True, **kwargs)
def get_previous_by_publish_date(self, **kwargs):
"""
Retrieves previous object by publish date.
"""
return self._get_next_or_previous_by_publish_date(False, **kwargs)
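# A brief illustration (hypothetical, assuming a concrete ``Displayable``
# subclass such as a blog post model) of the publish-date navigation
# helpers defined above:
#
#     post = BlogPost.objects.published().first()
#     newer = post.get_next_by_publish_date()      # or None at the boundary
#     older = post.get_previous_by_publish_date()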
class RichText(models.Model):
"""
Provides a Rich Text field for managing general content and making
it searchable.
"""
content = RichTextField(_("Content"))
search_fields = ("content",)
class Meta:
abstract = True
class OrderableBase(ModelBase):
"""
Checks for ``order_with_respect_to`` on the model's inner ``Meta``
class and if found, copies it to a custom attribute and deletes it
since it will cause errors when used with ``ForeignKey("self")``.
Also creates the ``ordering`` attribute on the ``Meta`` class if
not yet provided.
"""
def __new__(cls, name, bases, attrs):
if "Meta" not in attrs:
class Meta:
pass
attrs["Meta"] = Meta
if hasattr(attrs["Meta"], "order_with_respect_to"):
order_field = attrs["Meta"].order_with_respect_to
attrs["order_with_respect_to"] = order_field
del attrs["Meta"].order_with_respect_to
if not hasattr(attrs["Meta"], "ordering"):
setattr(attrs["Meta"], "ordering", ("_order",))
return super(OrderableBase, cls).__new__(cls, name, bases, attrs)
class Orderable(with_metaclass(OrderableBase, models.Model)):
"""
Abstract model that provides a custom ordering integer field
similar to using Meta's ``order_with_respect_to``, since to
date (Django 1.2) this doesn't work with ``ForeignKey("self")``,
or with Generic Relations. We may also want this feature for
models that aren't ordered with respect to a particular field.
"""
_order = models.IntegerField(_("Order"), null=True)
class Meta:
abstract = True
def with_respect_to(self):
"""
Returns a dict to use as a filter for ordering operations
containing the original ``Meta.order_with_respect_to`` value
if provided. If the field is a Generic Relation, the dict
returned contains names and values for looking up the
relation's ``ct_field`` and ``fk_field`` attributes.
"""
try:
name = self.order_with_respect_to
value = getattr(self, name)
except AttributeError:
# No ``order_with_respect_to`` specified on the model.
return {}
# Support for generic relations.
field = getattr(self.__class__, name)
if isinstance(field, GenericForeignKey):
names = (field.ct_field, field.fk_field)
return dict([(n, getattr(self, n)) for n in names])
return {name: value}
def save(self, *args, **kwargs):
"""
Set the initial ordering value.
"""
if self._order is None:
lookup = self.with_respect_to()
lookup["_order__isnull"] = False
concrete_model = base_concrete_model(Orderable, self)
self._order = concrete_model.objects.filter(**lookup).count()
super(Orderable, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
"""
Update the ordering values for siblings.
"""
lookup = self.with_respect_to()
lookup["_order__gte"] = self._order
concrete_model = base_concrete_model(Orderable, self)
after = concrete_model.objects.filter(**lookup)
after.update(_order=models.F("_order") - 1)
super(Orderable, self).delete(*args, **kwargs)
def _get_next_or_previous_by_order(self, is_next, **kwargs):
"""
Retrieves next or previous object by order. We implement our
own version instead of Django's so we can hook into the
published manager, concrete subclasses and our custom
``with_respect_to`` method.
"""
lookup = self.with_respect_to()
lookup["_order"] = self._order + (1 if is_next else -1)
concrete_model = base_concrete_model(Orderable, self)
try:
queryset = concrete_model.objects.published
except AttributeError:
queryset = concrete_model.objects.filter
try:
return queryset(**kwargs).get(**lookup)
except concrete_model.DoesNotExist:
pass
def get_next_by_order(self, **kwargs):
"""
Retrieves next object by order.
"""
return self._get_next_or_previous_by_order(True, **kwargs)
def get_previous_by_order(self, **kwargs):
"""
Retrieves previous object by order.
"""
return self._get_next_or_previous_by_order(False, **kwargs)
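# A short sketch (hypothetical model) of how ``Orderable`` maintains the
# ``_order`` field:
#
#     class GalleryImage(Orderable):
#         gallery = models.ForeignKey("Gallery")
#         class Meta:
#             order_with_respect_to = "gallery"
#
# A new instance gets ``_order`` equal to the count of its existing
# siblings, and deleting an instance shifts later siblings down by one
# (see ``save`` and ``delete`` above).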
class Ownable(models.Model):
"""
Abstract model that provides ownership of an object for a user.
"""
user = models.ForeignKey(user_model_name, verbose_name=_("Author"),
related_name="%(class)ss")
class Meta:
abstract = True
def is_editable(self, request):
"""
        Restrict in-line editing to the object's owner and superusers.
"""
return request.user.is_superuser or request.user.id == self.user_id
class SitePermission(models.Model):
"""
Permission relationship between a user and a site that's
used instead of ``User.is_staff``, for admin and inline-editing
access.
"""
user = models.ForeignKey(user_model_name, verbose_name=_("Author"),
related_name="%(class)ss")
sites = models.ManyToManyField("sites.Site", blank=True,
verbose_name=_("Sites"))
class Meta:
verbose_name = _("Site permission")
verbose_name_plural = _("Site permissions")
def create_site_permission(sender, **kw):
sender_name = "%s.%s" % (sender._meta.app_label, sender._meta.object_name)
if sender_name.lower() != user_model_name.lower():
return
user = kw["instance"]
if user.is_staff and not user.is_superuser:
perm, created = SitePermission.objects.get_or_create(user=user)
if created or perm.sites.count() < 1:
perm.sites.add(current_site_id())
# We don't specify the user model here, because with 1.5's custom
# user models, everything explodes. So we check the name of it in
# the signal.
post_save.connect(create_site_permission)
| cccs-web/mezzanine | mezzanine/core/models.py | Python | bsd-2-clause | 17,835 |
from PyObjCTools.TestSupport import *
from Quartz.QuartzCore import *
from Quartz import *
class TestCIPluginInterfaceHelper (NSObject):
def load_(self, h): return 1
class TestCIPlugInInterface (TestCase):
def testMethods(self):
self.assertResultIsBOOL(TestCIPluginInterfaceHelper.load_)
def no_testProtocol(self):
p = objc.protocolNamed('CIPlugInRegistration')
        self.assertIsInstance(p, objc.formal_protocol)
if __name__ == "__main__":
main()
| albertz/music-player | mac/pyobjc-framework-Quartz/PyObjCTest/test_ciplugininterface.py | Python | bsd-2-clause | 488 |
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
'''
Created on Mar 7, 2012
@author: clarkmatthew
Place holder class to provide convenience for testing, modifying, and
retrieving Eucalyptus cloud property information
Intention is to reduce the time in looking up property names, and values
outside of the eutester test lib, etc
Note: Debug output for the tester.sys command is controlled by the
eutester/eucaops object
Sample:
cat my_cloud.conf
> 192.168.1.76 CENTOS 6.3 64 REPO [CLC WS]
> 192.168.1.77 CENTOS 6.3 64 REPO [SC00 CC00]
> 192.168.1.78 CENTOS 6.3 64 REPO [NC00]
from eucaops import Eucaops
from eutester import euproperties
    tester = Eucaops(config_file='my_cloud.conf', password='mypassword')
ep_mgr = euproperties.Euproperty_Manager(tester,
verbose=True,
debugmethod=tester.debug)
#get some storage service properties, and some property values...
#Get/Set value from dynamic method created in Euproperty_Manager...
san_host_prop_value = ep_mgr.get_storage_sanhost_value()
ep_mgr.set_storage_sanhost_value('192.168.1.200')
#Get/set value from euproperty directly...
san_host_prop = ep_mgr.get_property('san_host', 'storage', 'PARTI00')
san_host_prop_value = san_host_prop.get()
    san_host_prop.set('192.168.1.200')
#Get multiple properties at once based on certain filters...
storage_properties = ep_mgr.get_properties(service_type='storage')
partition1_properties = ep_mgr.get_properties(partition='partition1')
'''
import types
import re
import copy
class Euproperty_Type():
authentication = 'authentication'
autoscaling = 'autoscaling'
bootstrap = 'bootstrap'
cloud = 'cloud'
cloudwatch = 'cloudwatch'
cluster = 'cluster'
dns = 'dns'
imaging = 'imaging'
loadbalancing = 'loadbalancing'
objectstorage = 'objectstorage'
reporting = 'reporting'
storage = 'storage'
system = 'system'
tagging = 'tagging'
tokens = 'tokens'
vmwarebroker = 'vmwarebroker'
walrus = 'walrus'
www = 'www'
@classmethod
def get_type_by_string(cls, typestring):
try:
if hasattr(cls, str(typestring)):
return getattr(cls, str(typestring))
except AttributeError, ae:
            print ('Property type:' + str(typestring) +
                   " not defined, new property type?")
raise ae
class Euproperty():
def __init__(self, prop_mgr, property_string, service_type, partition,
name, value, mandatory=False, description=""):
self.prop_mgr = prop_mgr
self.service_type = Euproperty_Type.get_type_by_string(service_type)
self.partition = partition
self.name = name
self.value = value
self.property_string = property_string
self.prop_mgr = prop_mgr
self.lastvalue = value
self.mandatory = mandatory
self.description = description
def update(self):
newprop = self.prop_mgr.update_property_list(
property_name=self.property_string)[0]
        self.__dict__.update(newprop.__dict__)
def get(self):
return self.value
def set(self, value):
return self.prop_mgr.set_property(self, value)
def reset_to_default(self):
return self.prop_mgr.reset_property_to_default(self)
def print_self(self, include_header=True, show_description=True,
print_method=None, printout=True):
if printout and not print_method:
print_method = self.prop_mgr.debug
name_len = 50
service_len = 20
part_len = 20
value_len = 30
line_len = 120
ret = ""
header = str('NAME').ljust(name_len)
header += "|" + str('SERVICE TYPE').center(service_len)
header += "|" + str('PARTITION').center(part_len)
header += "|" + str('VALUE').center(value_len)
header += "\n"
out = str(self.name).ljust(name_len)
out += "|" + str(self.service_type).center(service_len)
out += "|" + str(self.partition).center(part_len)
out += "|" + str(self.value).center(value_len)
out += "\n"
line = "-"
for x in xrange(0, line_len):
line += "-"
line += "\n"
if include_header:
ret = "\n" + line + header + line
ret += out
if show_description:
ret += "DESCRIPTION: " + self.description + "\n"
ret += line
if print_method:
print_method(ret)
return ret
class Property_Map():
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class Euproperty_Manager():
tester = None
verbose = False
debugmethod = None
def __init__(self, tester, verbose=False, machine=None,
service_url=None, debugmethod=None):
self.tester = tester
self.debugmethod = debugmethod or tester.debug
self.verbose = verbose
        self.work_machine = machine or self.get_clc()
        self.access_key = self.tester.aws_access_key_id
        self.secret_key = self.tester.aws_secret_access_key
        self.service_url = service_url or str(
            'http://' + str(self.get_clc().hostname) +
            ':8773/services/Eucalyptus')
        self.cmdpath = self.tester.eucapath + '/usr/sbin/'
        self.properties = []
        self.property_map = Property_Map()
        self.update_property_list()
        #self.tester.property_manager = self
        #self.zones = self.tester.ec2.get_zones()
    def get_clc(self):
        return self.tester.service_manager.get_enabled_clc().machine
def debug(self, msg):
'''
simple method for printing debug.
msg - mandatory - string to be printed
method - optional - callback to over ride default printing method
'''
if (self.debugmethod is None):
print (str(msg))
else:
self.debugmethod(msg)
def show_all_authentication_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.authentication,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_bootstrap_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.bootstrap,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_cloud_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.cloud,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_cluster_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.cluster,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_reporting_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.reporting,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_storage_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.storage,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_system_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.system,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_vmwarebroker_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.vmwarebroker,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_walrus_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.walrus,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_objectstorage_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.objectstorage,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_www_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.www,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_autoscaling_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.autoscaling,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_loadbalancing_properties(self,
partition=None,
debug_method=None):
return self.show_all_properties(
service_type=Euproperty_Type.loadbalancing,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_tagging_properties(self, partition=None, debug_method=None):
return self.show_all_properties(service_type=Euproperty_Type.tagging,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_imaging_properties(self, partition=None, debug_method=None):
return self.show_all_properties(service_type=Euproperty_Type.imaging,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_properties(self,
partition=None,
service_type=None,
value=None,
search_string=None,
list=None,
debug_method=None,
descriptions=True):
debug_method = debug_method or self.debug
list = list or self.get_properties(partition=partition,
service_type=service_type,
value=value,
search_string=search_string)
first = list.pop(0)
buf = first.print_self(include_header=True,
show_description=descriptions,
printout=False)
count = 1
last_service_type = first.service_type
for prop in list:
count += 1
if prop.service_type != last_service_type:
last_service_type = prop.service_type
print_header = True
else:
print_header = False
buf += prop.print_self(include_header=print_header,
show_description=descriptions,
printout=False)
debug_method(buf)
def get_properties(self,
partition=None,
service_type=None,
value=None,
search_string=None,
force_update=False):
self.debug('get_properties: partition:' +
str(partition) + ", service_type:" + str(service_type) +
", value:" + str(value) + ", force_update:" +
str(force_update))
ret_props = []
if not self.properties or force_update:
self.update_property_list()
properties = copy.copy(self.properties)
if partition and properties:
properties = self.get_all_properties_for_partition(partition,
list=properties)
if service_type and properties:
properties = self.get_all_properties_for_service(service_type,
list=properties)
if search_string and properties:
properties = self.get_all_properties_by_search_string(
search_string, list=properties)
if properties:
if value:
for prop in properties:
if prop.value == value:
ret_props.append(prop)
else:
ret_props.extend(properties)
return ret_props
def get_property(self, name, service_type, partition, force_update=False):
self.debug('Get Property:' + str(name))
ret_prop = None
list = self.get_properties(partition=partition,
service_type=service_type,
force_update=force_update)
if list:
ret_prop = self.get_euproperty_by_name(name, list=list)
return ret_prop
def update_property_list(self, property_name=''):
newlist = []
newprop = None
self.debug("updating property list...")
self.zones = self.tester.ec2.get_zones()
cmdout = self.work_machine.sys(
self.cmdpath+'euca-describe-properties -v -U ' +
str(self.service_url) + ' -I ' + str(self.access_key) +
' -S ' + str(self.secret_key) + ' ' + property_name,
code=0, verbose=self.verbose)
for propstring in cmdout:
try:
if re.search("^PROPERTY", propstring):
newprop = self.parse_euproperty_from_string(propstring)
elif newprop:
if (re.search("^DESCRIPTION", propstring) and
re.search(newprop.name, propstring)):
newprop.description = \
self.parse_euproperty_description(propstring)
else:
newprop.value = str(newprop.value) + str(propstring)
except Exception, e:
self.debug('Error processing property line: ' + propstring)
raise e
            if newprop not in newlist:
newlist.append(newprop)
if property_name:
for newprop in newlist:
for oldprop in self.properties:
if oldprop.property_string == newprop.property_string:
oldprop = newprop
self.create_dynamic_property_map_from_property(newprop)
else:
self.properties = newlist
self.property_map = Property_Map()
for prop in self.properties:
self.create_dynamic_property_map_from_property(prop)
return newlist
def parse_euproperty_description(self, propstring):
'''
Example string to parse:
"DESCRIPTION www.http_port Listen to HTTP on this port."
'''
split = str(propstring).replace('DESCRIPTION', '').split()
description = " ".join(str(x) for x in split[1:])
return str(description)
def parse_property_value_from_string(self, propstring):
split = str(propstring).replace('PROPERTY', '').split()
prop_value = " ".join(str(x) for x in split[1:])
return str(prop_value)
def parse_euproperty_from_string(self, propstring):
'''
        Intended to convert a line of output from euca-describe-properties into
a euproperty.
:param str: line of output, example:
"PROPERTY walrus.storagemaxbucketsizeinmb 5120"
:returns euproperty
'''
propstring = str(propstring).replace('PROPERTY', '').strip()
ret_service_type = None
ret_partition = None
splitstring = propstring.split()
#get the property string, example: "walrus.storagemaxbucketsizeinmb"
property_string = splitstring.pop(0)
ret_value = " ".join(splitstring)
for prop in self.properties:
#if this property is in our list, update the value and return
if prop.property_string == property_string:
prop.lastvalue = prop.value
prop.value = ret_value
return prop
ret_name = property_string
#...otherwise this property is not in our list yet,
# create a new property
#parse property string into values...
propattrs = property_string.split('.')
#See if the first element is a zone-partition
#First store and remove the zone-partition if it's in the list
for zone in self.zones:
if zone == propattrs[0]:
#Assume this is the zone-partition id/name,
# remove it from the propattrs list
ret_partition = propattrs.pop(0)
break
#Move along items in list until we reach a service type
for index in xrange(0, len(propattrs)):
try:
ret_service_type = Euproperty_Type.get_type_by_string(
propattrs[index])
propattrs.remove(propattrs[index])
break
except AttributeError:
pass
except IndexError:
self.debug("\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
"!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
"Need to add new service? " +
"No service type found for: " +
str(property_string) +
"\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
"!!!!!!!!!!!!!!!!!!!!!!!!!\n")
ret_service_type = propattrs.pop(0)
#self.debug("ret_service_type: "+str(ret_service_type))
#Store the name of the property
ret_name = ".".join(propattrs)
newprop = Euproperty(self, property_string, ret_service_type,
ret_partition, ret_name, ret_value)
return newprop
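    # Illustrative examples of the parsing above (property names are
    # hypothetical): "PROPERTY PARTI00.storage.sanhost 192.168.1.200"
    # yields partition='PARTI00', service_type='storage', name='sanhost',
    # value='192.168.1.200', while
    # "PROPERTY walrus.storagemaxbucketsizeinmb 5120" has no zone prefix,
    # so its partition stays None.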
def create_dynamic_property_map_from_property(self, euproperty):
context = self.property_map
if not hasattr(context, 'all'):
setattr(context, 'all', Property_Map())
all_map = getattr(context, 'all')
if euproperty.partition:
if not hasattr(context, str(euproperty.partition)):
setattr(context, str(euproperty.partition), Property_Map())
context = getattr(context, str(euproperty.partition))
if euproperty.service_type:
if not hasattr(context, str(euproperty.service_type)):
setattr(context, str(euproperty.service_type), Property_Map())
context = getattr(context, str(euproperty.service_type))
object_name = str(euproperty.name).replace('.', '_')
if not hasattr(context, object_name):
setattr(context, object_name, euproperty)
if not hasattr(all_map, object_name):
setattr(all_map, object_name, euproperty)
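    # Example (hypothetical property names) of the dynamic map built above:
    #     ep_mgr.property_map.all.http_port            -> Euproperty
    #     ep_mgr.property_map.PARTI00.storage.sanhost  -> Euproperty
    # so values can be read or changed through the Euproperty helpers:
    #     ep_mgr.property_map.all.http_port.get()
    #     ep_mgr.property_map.all.http_port.set(8773)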
def get_euproperty_by_name(self, name, list=None):
        list = list or self.properties
for property in list:
if property.name == name:
return property
raise EupropertyNotFoundException('Property not found by name:' +
str(name))
def get_all_properties_for_partition(self,
partition,
list=None,
verbose=False):
self.debug('Get all properties for partition:' + str(partition))
props = []
list = list or self.properties
for property in list:
if property.partition == partition:
if verbose:
self.debug('property:' + str(property.name) +
", prop.partition:" + str(property.partition) +
",partition:" + str(partition))
props.append(property)
self.debug('Returning list of len:' + str(len(props)))
return props
def get_all_properties_for_service(self, service, list=None):
props = []
list = list or self.properties
for property in list:
if property.service_type == service:
props.append(property)
return props
def get_all_properties_by_search_string(self, search_string, list=None):
props = []
list = list or self.properties
for property in list:
if re.search(search_string, property.property_string):
props.append(property)
return props
    def set_property(self, property, value, reset_to_default=False):
'''
Sets the property 'prop' at eucaops/eutester object 'tester' to 'value'
Returns new value
prop - mandatory - str representing the property to set
value - mandatory - str representing the value to set the property to
eucaops - optional - the eucaops/eutester object to set the property at
'''
value = str(value)
if not isinstance(property, Euproperty):
try:
property = self.get_all_properties_by_search_string(property)
if len(property) > 1:
raise Exception('More than one euproperty found for '
'property string:' + str(property))
else:
property = property[0]
except Exception, e:
raise Exception('Could not fetch property to set. '
'Using string:' + str(property))
property.lastvalue = property.value
self.debug('Setting property(' + property.property_string +
') to value:' + str(value))
if reset_to_default:
ret_string = self.work_machine.sys(
self.cmdpath + 'euca-modify-property -U ' +
str(self.service_url) + ' -I ' + str(self.access_key) +
' -S ' + str(self.secret_key) + ' -r ' +
str(property.property_string), code=0)[0]
else:
ret_string = self.work_machine.sys(
self.cmdpath + 'euca-modify-property -U ' +
str(self.service_url) + ' -I '+str(self.access_key) + ' -S ' +
str(self.secret_key) + ' -p ' +
str(property.property_string) + '=' + str(value),
code=0)[0]
if ret_string:
ret_value = str(ret_string).split()[2]
else:
raise EupropertiesException("set_property output from modify "
"was None")
#Confirm property value was set
if not reset_to_default and (ret_value != value) and\
not (not value and ret_value == '{}'):
ret_string = "\n".join(str(x) for x in ret_string)
raise EupropertiesException(
"set property(" + property.property_string + ") to value(" +
str(value) + ") failed.Ret Value (" + str(ret_value) +
")\nRet String\n" + ret_string)
property.value = ret_value
return ret_value
def get_property_by_string(self, property_string):
property = None
for prop in self.properties:
if prop.property_string == property_string:
property = prop
break
return property
def set_property_value_by_string(self, property_string, value):
property = self.get_property_by_string(property_string)
if not property:
raise Exception('Property not found for:' + str(property_string))
property.set(value)
def get_property_value_by_string(self, property_string):
property = self.get_property_by_string(property_string)
if not property:
raise Exception('Property not found for:' + str(property_string))
return property.value
def reset_property_to_default(self, prop):
'''
Sets a property 'prop' at eucaops/eutester object 'eucaops' to it's
default value
Returns new value
prop - mandatory - string representing the property to set
        eucaops - optional - the eucaops/eutester object to set the property at
'''
if not isinstance(prop, Euproperty):
prop = self.get_all_properties_by_search_string(prop)[0]
return self.set_property(prop, None, reset_to_default=True)
def get_property_default_value(self, prop, ireadthewarning=False):
'''
Note: This hack method is intrusive! It will briefly reset the property
This is a temporary method to get a properties default method
prop - mandatory - string, eucalyptus property
ireadthewarning - mandatory - boolean, to warn user this method
is intrusive
'''
if (ireadthewarning is False):
raise EupropertiesException("ireadthewarning is set to false in "
"get_property_default_value")
original = prop.get()
default = self.reset_property_to_default(prop)
prop.set(original)
return default
class EupropertiesException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class EupropertyNotFoundException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
| nephomaniac/eutester | eutester/euca/euproperties.py | Python | bsd-2-clause | 29,433 |
"""Unittests that do not require the server to be running an common tests of responses.
The TestCase here just calls the functions that provide the logic to the ws views with DummyRequest
objects to mock a real request.
The functions starting with `check_...` are called with a unittest.TestCase instance as the first
arg and the response. These functions are used within the unit tests in this file, but also
in the `ws-tests` calls that perform the tests through http.
"""
import os
import unittest
from pyramid import testing
from phylesystem_api.utility import fill_app_settings, umbrella_from_request
from phylesystem_api.views import import_nexson_from_crossref_metadata
def get_app_settings_for_testing(settings):
"""Fills the settings of a DummyRequest, with info from the development.ini
This allows the dummy requests to mock a real request wrt configuration-dependent settings."""
from peyotl.utility.imports import SafeConfigParser
cfg = SafeConfigParser()
devini_path = os.path.abspath(os.path.join('..', 'development.ini'))
if not os.path.isfile(devini_path):
raise RuntimeError('Expecting a INI file at "{}" to run tests'.format(devini_path))
cfg.read(devini_path)
settings['repo_parent'] = cfg.get('app:main', 'repo_parent')
fill_app_settings(settings=settings)
def gen_versioned_dummy_request():
"""Adds a version number (3) to the request to mimic the matching based on URL in the real app.
"""
req = testing.DummyRequest()
get_app_settings_for_testing(req.registry.settings)
req.matchdict['api_version'] = 'v3'
return req
def check_index_response(test_case, response):
"""Verifies the existene of expected keys in the response to an index call.
'documentation_url', 'description', and 'source_url' keys must be in the response.
"""
for k in ['documentation_url', 'description', 'source_url']:
test_case.assertIn(k, response)
def check_render_markdown_response(test_case, response):
"""Check of `response` to a `render_markdown` call."""
expected = '<p>hi from <a href="http://phylo.bio.ku.edu" target="_blank">' \
'http://phylo.bio.ku.edu</a> and ' \
'<a href="https://github.com/orgs/OpenTreeOfLife/dashboard" target="_blank">' \
'https://github.com/orgs/OpenTreeOfLife/dashboard</a></p>'
test_case.assertEquals(response.body, expected)
def check_study_list_and_config_response(test_case,
sl_response,
config_response,
from_generic_config):
"""Checks of responses from study_list, config, and the generic config calls."""
nsis = sum([i['number of documents'] for i in config_response['shards']])
test_case.assertEquals(nsis, len(sl_response))
test_case.assertEquals(from_generic_config, config_response)
def check_unmerged_response(test_case, ub):
"""Check of `ub` response from an `unmerged_branches` call"""
test_case.assertTrue('master' not in ub)
def check_config_response(test_case, cfg):
"""Check of `cfg` response from a `config` call"""
test_case.assertSetEqual(set(cfg.keys()), {"initialization", "shards", "number_of_shards"})
def check_external_url_response(test_case, doc_id, resp):
"""Simple check of an `external_url` `resp` response for `doc_id`.
`doc_id` and `url` fields of the response are checked."""
test_case.assertEquals(resp.get('doc_id'), doc_id)
test_case.assertTrue(resp.get('url', '').endswith('{}.json'.format(doc_id)))
def check_push_failure_response(test_case, resp):
"""Check of the `resp` response of a `push_failure` method call to verify it has the right keys.
"""
test_case.assertSetEqual(set(resp.keys()), {"doc_type", "errors", "pushes_succeeding"})
test_case.assertTrue(resp["pushes_succeeding"])
render_test_input = 'hi from <a href="http://phylo.bio.ku.edu" target="new">' \
'http://phylo.bio.ku.edu</a> and ' \
'https://github.com/orgs/OpenTreeOfLife/dashboard'
class ViewTests(unittest.TestCase):
"""UnitTest of the functions that underlie the ws views."""
def setUp(self):
"""Calls pyramid testing.setUp"""
self.config = testing.setUp()
def tearDown(self):
"""Calls pyramid testing.tearDown"""
testing.tearDown()
def test_index(self):
"""Test of index view"""
request = gen_versioned_dummy_request()
from phylesystem_api.views import index
check_index_response(self, index(request))
def test_render_markdown(self):
"""Test of render_markdown view"""
request = testing.DummyRequest(post={'src': render_test_input})
from phylesystem_api.views import render_markdown
check_render_markdown_response(self, render_markdown(request))
def test_study_list_and_config(self):
"""Test of study_list and phylesystem_config views"""
request = gen_versioned_dummy_request()
from phylesystem_api.views import study_list
sl = study_list(request)
request = gen_versioned_dummy_request()
from phylesystem_api.views import phylesystem_config
x = phylesystem_config(request)
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'study'
from phylesystem_api.views import generic_config
y = generic_config(request)
check_study_list_and_config_response(self, sl, x, y)
if not sl:
return
from phylesystem_api.views import external_url
doc_id = sl[0]
request.matchdict['doc_id'] = doc_id
e = external_url(request)
check_external_url_response(self, doc_id, e)
def test_unmerged(self):
"""Test of unmerged_branches view"""
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'study'
from phylesystem_api.views import unmerged_branches
check_unmerged_response(self, unmerged_branches(request))
def test_config(self):
"""Test of generic_config view"""
request = gen_versioned_dummy_request()
from phylesystem_api.views import phylesystem_config, generic_config
r2 = phylesystem_config(request)
check_config_response(self, r2)
request.matchdict['resource_type'] = 'study'
r = generic_config(request)
check_config_response(self, r)
self.assertDictEqual(r, r2)
request.matchdict['resource_type'] = 'amendment'
ra = generic_config(request)
check_config_response(self, ra)
self.assertNotEqual(ra, r)
def test_push_failure_state(self):
"""Test of push_failure view"""
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'collection'
from phylesystem_api.views import push_failure
pf = push_failure(request)
check_push_failure_response(self, pf)
def test_doi_import(self):
"""Make sure that fetching from DOI generates a valid study shell."""
doi = "10.3732/ajb.0800060"
document = import_nexson_from_crossref_metadata(doi=doi,
ref_string=None,
include_cc0=None)
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'study'
umbrella = umbrella_from_request(request)
errors = umbrella.validate_and_convert_doc(document, {})[1]
self.assertEquals(len(errors), 0)
if __name__ == '__main__':
unittest.main()
| mtholder/pyraphyletic | phylesystem_api/tests.py | Python | bsd-2-clause | 7,707 |
# Importing standard libraries
import sys
'''
Returns the index of the element in the grid. Element passed in
must have a unique position. If not present returns [-1, -1]. If
multiple occurences present, returns the first one
'''
def findIndex(grid,charElem):
for i in range(len(grid)):
for j in range(len(grid[i])):
if(grid[i][j] == charElem):
return [i,j]
return [-1,-1]
'''
Function that generates the valid surrounding indices for a
particular index in a grid. The surroundings are just 4 as of now,
but this function can easily be modified by changing the
surrIndices array.
Returns a list of tuples that are indicative of valid indices
'''
def genSurr(grid,i,j):
validIndices = []
surrIndices = [ (1,0) , (-1,0) , (0,1) , (0,-1) ]
if(len(grid) == 0): return -1
else:
# Number of rows and columns in grid
ROWS = len(grid)
COLS = len(grid[0])
for (a,b) in surrIndices:
xIndex = i + a
yIndex = j + b
if(xIndex >= ROWS or xIndex < 0):
continue
if(yIndex >= COLS or yIndex < 0):
continue
validIndices.append((xIndex,yIndex))
return validIndices
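# Example: for any grid with at least 2 rows and 2 columns,
# genSurr(grid, 0, 0) returns [(1, 0), (0, 1)]; corner and edge cells
# simply get fewer candidates than interior cells.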
'''
Returns a list of tuples that belong to the validChars set and have
not yet been visited (not contained in visitedSet)
'''
def genValidSurr(grid,surr,validChars,visitedSet):
validSet = []
for point in surr:
indexI = point[0]
indexJ = point[1]
gridPoint = grid[indexI][indexJ]
if((gridPoint in validChars) and not(point in visitedSet)):
validSet.append(point)
return validSet
'''
DFS on a matrix graph/grid which computes one of the Paths from
start to the goal passed in as parameters. Returns the path as an
array of indices from start to goal
Slight Modification for problem [wandUse variable]
wandUse is used each time we encounter a point from which there are
variable routes and we know that there exists a path from this point
till the end
'''
def dfsPathSearch(grid,
startIndex,
goalIndex,
pathSoFar,
visitedNodes):
# Marking the current node as explored
visitedNodes.add(startIndex)
# Base case of recursion in case we want to stop
# after certain condition
if(startIndex == goalIndex):
return True
else: # Recursive steps
# Generate all valid surrounding points
s = genSurr(grid,startIndex[0],startIndex[1])
validChars = set()
validChars.add('.')
validChars.add('*')
sValid = genValidSurr(grid,s,validChars,visitedNodes)
# Return False in case no valid surrounding pt found
if(len(sValid) == 0): return False
# Iterate through all valid surrouding points
for point in sValid:
pathExists = dfsPathSearch(grid,
point,
goalIndex,
pathSoFar,
visitedNodes)
if(pathExists):
                # If there was more than one choice here, increment
# wand use by one
pathSoFar.append(point)
return True
# Return false if no point in valid surroundings
# can generate a path to goal
return False
'''
Parses a grid from the passed in stream. Can be used to parse the
grid from standard input (by passing in sys.stdin) as well as from
a text file (by passing in f, where f = open('somename.txt'))
'''
def parseGrid(stream,r,c):
grid = [[] for x in range(r)]
for i in range(r):
grid[i] = list(stream.readline().rstrip())
return grid
'''
Main Function to run the program. We first find a path using DFS and
later compute the number of turns that are necessary (wand usage)
'''
if __name__ == "__main__":
# No of test cases
t = int(sys.stdin.readline().rstrip())
for i in range(t): # For each test case
# Parsing the input for the test case
[r,c] = [int(x) for x in sys.stdin.readline().rstrip().split()]
grid = parseGrid(sys.stdin,r,c)
k = int(sys.stdin.readline().rstrip())
# Exploring and computing the path from start to goal using DFS
# Path is an array of indices
startIndex = tuple(findIndex(grid,'M'))
goalIndex = tuple(findIndex(grid,'*'))
visitedNodes = set()
path = []
dfsPathSearch(grid,
startIndex,
goalIndex,
path,
visitedNodes)
path.append(startIndex)
path.reverse()
# Prints the path in order from start to goal
print path
| tejasnikumbh/Algorithms | genericCode/matrixAndGraphUtility/matrixDFS.py | Python | bsd-2-clause | 5,021 |
import unittest
from mock import Mock
from nosealert.plugin import AlertPlugin
from nosealert.notifications import Notification
class TestAlertPlugin(unittest.TestCase):
def setUp(self):
self.plugin = AlertPlugin()
def test_get_notification_success(self):
result = Mock(
failures=[],
errors=[],
testsRun=3,
)
self.assertEqual(self.plugin.get_notification(result), Notification(
total=3,
))
def test_get_notification_with_fails(self):
result = Mock(
failures=[1, 2],
errors=[3],
testsRun=5,
)
self.assertEqual(self.plugin.get_notification(result), Notification(
fails=2,
errors=1,
total=5,
))
def test_finalize_sends_notification(self):
notification = Mock()
result = Mock()
self.plugin.get_notification = Mock(return_value=notification)
self.plugin.finalize(result)
notification.send.assert_called_once_with()
| lukaszb/nose-alert | nosealert/tests/test_plugin.py | Python | bsd-2-clause | 1,067 |
# -*- coding: utf-8 -*-
"""
hydrogen
~~~~~~~~
Hydrogen is an extremely lightweight workflow enhancement tool for Python
web applications, providing bower/npm-like functionality for both pip and
bower packages.
:author: David Gidwani <[email protected]>
:license: BSD, see LICENSE for details
"""
import atexit
from collections import defaultdict
from functools import update_wrapper
import json
import os
import re
import shutil
import sys
import tempfile
import yaml
import zipfile
import click
import envoy
from pathlib import Path, PurePath
from pathspec import GitIgnorePattern, PathSpec
from pip._vendor import pkg_resources
import requests
import rfc6266
import semver
__version__ = "0.0.1-alpha"
prog_name = "hydrogen"
app_dir = click.get_app_dir(prog_name)
github_api_uri = "https://api.github.com"
debug = True
# borrowed from werkzeug._compat
PY2 = sys.version_info[0] == 2
if PY2:
from urlparse import urlparse
text_type = unicode # noqa: Undefined in py3
else:
from urllib.parse import urlparse
text_type = str
class InvalidRequirementSpecError(Exception):
pass
class InvalidPackageError(Exception):
pass
class PackageNotFoundError(Exception):
pass
class VersionNotFoundError(Exception):
pass
def get_installed_pypackages():
return {p.project_name.lower(): p for p in pkg_resources.working_set}
def success(message, **kwargs):
kwargs["fg"] = kwargs.get("fg", "green")
click.secho(message, **kwargs)
def warning(message, **kwargs):
kwargs["fg"] = kwargs.get("fg", "red")
click.secho(u"warning: {}".format(message), **kwargs)
def error(message, level="error", exit_code=1, **kwargs):
kwargs["fg"] = kwargs.get("fg", "red")
click.secho(u"error: {}".format(message), **kwargs)
sys.exit(exit_code)
def fatal(message, **kwargs):
error(message, level="fatal", **kwargs)
def secure_filename(filename):
r"""Borrowed from :mod:`werkzeug.utils`, under the BSD 3-clause license.
Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability.
On windows systems the function also makes sure that the file is not
named after one of the special device files.
>>> secure_filename("My cool movie.mov")
'My_cool_movie.mov'
>>> secure_filename("../../../etc/passwd")
'etc_passwd'
>>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
'i_contain_cool_umlauts.txt'
The function might return an empty filename. It's your responsibility
to ensure that the filename is unique and that you generate random
filename if the function returned an empty one.
:param filename: the filename to secure
"""
_filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]')
_windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4',
'LPT1', 'LPT2', 'LPT3', 'PRN', 'NUL')
if isinstance(filename, text_type):
from unicodedata import normalize
filename = normalize('NFKD', filename).encode('ascii', 'ignore')
if not PY2:
filename = filename.decode('ascii')
for sep in os.path.sep, os.path.altsep:
if sep:
filename = filename.replace(sep, ' ')
filename = str(_filename_ascii_strip_re.sub('', '_'.join(
filename.split()))).strip('._')
# on nt a couple of special files are present in each folder. We
# have to ensure that the target file is not such a filename. In
# this case we prepend an underline
if os.name == 'nt' and filename and \
filename.split('.')[0].upper() in _windows_device_files:
filename = '_' + filename
return filename
def get(url, session=None, silent=not debug, **kwargs):
"""Retrieve a given URL and log response.
:param session: a :class:`requests.Session` object.
:param silent: if **True**, response status and URL will not be printed.
"""
session = session or requests
kwargs["verify"] = kwargs.get("verify", True)
r = session.get(url, **kwargs)
if not silent:
status_code = click.style(
str(r.status_code),
fg="green" if r.status_code in (200, 304) else "red")
click.echo(status_code + " " + url)
if r.status_code == 404:
raise PackageNotFoundError
return r
def download_file(url, dest=None, chunk_size=1024, replace="ask",
label="Downloading {dest_basename} ({size:.2f}MB)",
expected_extension=None):
"""Download a file from a given URL and display progress.
:param dest: If the destination exists and is a directory, the filename
will be guessed from the Content-Disposition header. If the destination
is an existing file, the user will either be prompted to overwrite, or
the file will be replaced (depending on the value of **replace**). If
the destination does not exist, it will be used as the filename.
:param int chunk_size: bytes read in at a time.
:param replace: If `False`, an existing destination file will not be
overwritten.
:param label: a string which is formatted and displayed as the progress bar
label. Variables provided include *dest_basename*, *dest*, and *size*.
:param expected_extension: if set, the filename will be sanitized to ensure
it has the given extension. The extension should not start with a dot
(`.`).
"""
dest = Path(dest or url.split("/")[-1])
response = get(url, stream=True)
if (dest.exists()
and dest.is_dir()
and "Content-Disposition" in response.headers):
content_disposition = rfc6266.parse_requests_response(response)
        if expected_extension is not None:
            filename = content_disposition.filename_sanitized(
                expected_extension)
        else:
            filename = content_disposition.filename_unsafe
        filename = secure_filename(filename)
        dest = dest / filename
if dest.exists() and not dest.is_dir():
if (replace is False
or replace == "ask"
and not click.confirm("Replace {}?".format(dest))):
return str(dest)
size = int(response.headers.get("content-length", 0))
label = label.format(dest=dest, dest_basename=dest.name,
size=size/1024.0/1024)
with click.open_file(str(dest), "wb") as f:
content_iter = response.iter_content(chunk_size=chunk_size)
with click.progressbar(content_iter, length=size/1024,
label=label) as bar:
for chunk in bar:
if chunk:
f.write(chunk)
f.flush()
return str(dest)
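# A minimal usage sketch (hypothetical URL and path; 'downloads' must be
# an existing directory for the filename to be guessed from the
# Content-Disposition header):
#
#     path = download_file("https://example.com/pkg.zip",
#                          dest="downloads",
#                          replace=False,
#                          expected_extension="zip")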
def get_dir_from_zipfile(zip_file, fallback=None):
"""Return the name of the root folder in a zip file.
:param zip_file: a :class:`zipfile.ZipFile` instance.
:param fallback: if `None`, the name of the zip file is used. This is
returned if the zip file contains more than one top-level directory,
or none at all.
"""
fallback = fallback or zip_file.filename
directories = [name for name in zip_file.namelist() if name.endswith("/")
and len(PurePath(name).parts) == 1]
return fallback if len(directories) > 1 else directories[0]
def mkdtemp(suffix="", prefix=__name__ + "_", dir=None, cleanup=True,
on_cleanup_error=None):
"""Create a temporary directory and register a handler to cleanup on exit.
:param suffix: suffix of the temporary directory, defaults to empty.
:param prefix: prefix of the temporary directory, defaults to `__name__`
and an underscore.
:param dir: if provided, the directory will be created in `dir` rather than
the system default temp directory.
:param cleanup: if `True`, an atexit handler will be registered to remove
the temp directory on exit.
:param on_cleanup_error: a callback which is called if the atexit handler
encounters an exception. It is passed three parameters: *function*,
*path*, and *excinfo*. For more information, see the :mod:`atexit`
documentation.
"""
path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
if cleanup:
if on_cleanup_error is None:
def on_cleanup_error(function, path, excinfo):
click.secho("warning: failed to remove file or directory: {}\n"
"please delete it manually.".format(path),
fg="red")
atexit.register(shutil.rmtree, path=path, onerror=on_cleanup_error)
return path
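# Usage sketch: the returned path behaves like any other directory, but
# is removed automatically at interpreter exit unless cleanup=False:
#
#     workdir = mkdtemp(prefix="hydrogen_build_")
#     # ... unpack archives into workdir ...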
class Requirement(object):
"""Represents a single package requirement.
.. note::
This class overrides `__hash__` in order to ensure that package
names remain unique when in a set.
.. todo::
Extend :class:`pkg_resources.Requirement` for Python requirements.
"""
# TODO: support multiple version specs (e.g. >=1.0,<=2.0)
spec_regex = r"(.+?)\s*(?:([<>~=]?=)\s*(.+?))?$"
def __init__(self, package, version):
"""Construct a new requirement.
:param package: the package name.
:param version: a semver compatible version specification.
"""
self.package = package
self.version = version
if self.version and not re.match(r"[<=>~]", version[:2]):
self.version = "=={}".format(self.version)
@classmethod
def coerce(cls, string):
"""Create a :class:`Requirement` object from a given package spec."""
match = re.match(cls.spec_regex, string)
if not match:
raise InvalidRequirementSpecError("could not parse requirement")
package = match.group(1)
if all(match.group(2, 3)):
version = "".join(match.group(2, 3))
else:
version = None
return cls(package, version)
def load_installed_version(self):
installed_packages = get_installed_pypackages()
if self.package in installed_packages:
self.version = "=={}".format(
installed_packages[self.package].version)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
other.package == self.package)
def __hash__(self):
return hash(self.package)
def __str__(self):
return "".join([self.package, self.version or ""])
def __repr__(self):
return "<Requirement(package={package}, version='{version}')>".format(
package=self.package, version=self.version)
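# Example sketch: coercing package specs into Requirement objects.
#
#     Requirement.coerce("flask>=0.10")  # version spec preserved as ">=0.10"
#     Requirement.coerce("requests")     # .version is None until pinned
#     str(Requirement("jinja2", "2.7"))  # -> "jinja2==2.7" (implicit "==")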
class Requirements(set):
"""Represents a set of requirements."""
def __init__(self, filename=None):
self.filename = None
if filename:
self.load(filename)
def add(self, elem, replace=False):
"""Add a requirement.
:param elem: a string or :class:`Requirement` instance.
:param replace: if `True`, packages in the set with the same name will
be removed first.
"""
if isinstance(elem, text_type):
elem = Requirement.coerce(elem)
if replace and elem in self:
self.remove(elem)
super(Requirements, self).add(elem)
def load(self, requirements_file=None):
"""Load or reload requirements from a requirements.txt file.
:param requirements_file: if not given, the filename used from
initialization will be read again.
"""
if requirements_file is None:
requirements_file = self.filename
if requirements_file is None:
raise ValueError("no filename provided")
elif isinstance(requirements_file, text_type):
requirements_file = Path(requirements_file)
self.clear()
with requirements_file.open() as f:
self.loads(f.read())
if isinstance(requirements_file, (text_type, Path)):
self.filename = requirements_file
def loads(self, requirements_text):
lines = re.findall(Requirement.spec_regex,
requirements_text,
re.MULTILINE)
for line in lines:
self.add(Requirement(line[0], "".join(line[1:])))
def remove(self, elem):
"""Remove a requirement.
:param elem: a string or :class:`Requirement` instance.
"""
if isinstance(elem, text_type):
for requirement in self:
if requirement.package == elem:
return super(Requirements, self).remove(requirement)
return super(Requirements, self).remove(elem)
def __str__(self):
return "\n".join([str(x) for x in self])
def __repr__(self):
return "<Requirements({})>".format(self.filename.name or "")
class NamedRequirements(Requirements):
def __init__(self, name, filename=None):
self.name = name
super(NamedRequirements, self).__init__(filename=filename)
def __repr__(self):
return "<NamedRequirements({}{})>".format(
self.name,
", filename='{}'".format(self.filename.name) if self.filename
else "")
class GroupedRequirements(defaultdict):
default_groups = ["all", "dev", "bower", "bower-dev"]
default_pip_files = {
"all": "requirements.txt",
"dev": "dev-requirements.txt"
}
def __init__(self, groups=None):
super(GroupedRequirements, self).__init__(NamedRequirements)
self.groups = groups or self.default_groups
self.filename = None
self.create_default_groups()
def clear(self):
super(GroupedRequirements, self).clear()
self.create_default_groups()
def create_default_groups(self):
for group in self.groups:
group = group.replace(" ", "_").lower()
self[group] = NamedRequirements(group)
def load_pip_requirements(self, files_map=None, freeze=True):
if files_map is None:
files_map = self.default_pip_files
for group, requirements_txt in files_map.items():
path = Path(requirements_txt)
if not path.exists() and group.lower() == "all" and freeze:
cmd = envoy.run("pip freeze")
self[group].loads(cmd.std_out)
elif path.exists():
self[group].load(path)
def load(self, filename, create_if_missing=True):
filename = Path(filename)
if not filename.exists() and create_if_missing:
self.load_pip_requirements()
with filename.open("w") as f:
f.write(yaml.dump(self.serialized, default_flow_style=False,
encoding=None))
self.filename = filename
return self.save(filename)
with filename.open() as f:
for group, requirements in yaml.load(f.read()).items():
for requirement in requirements:
self[group].add(Requirement.coerce(requirement))
self.filename = filename
def save(self, filename=None):
filename = Path(filename) if filename is not None else self.filename
with filename.open("w") as f:
f.write(self.yaml)
@property
def serialized(self):
to_ret = {}
for group, requirements in self.items():
to_ret[group] = [str(requirement) for requirement in requirements]
return to_ret
@property
def yaml(self):
return yaml.dump(self.serialized, default_flow_style=False,
encoding=None)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
else:
ret = self[key] = self.default_factory(name=key)
return ret
class Bower(object):
bower_base_uri = "https://bower.herokuapp.com"
@classmethod
def get_package_url(cls, package, session=None, silent=False):
response = get("{}/packages/{}".format(cls.bower_base_uri, package))
return response.json().get("url", None)
@classmethod
def clean_semver(cls, version_spec):
return re.sub(r"([<>=~])\s+?v?", "\\1", version_spec, re.IGNORECASE)
class Hydrogen(object):
def __init__(self, assets_dir=None, requirements_file="requirements.yml"):
self.assets_dir = assets_dir or Path(".") / "assets"
self.requirements = GroupedRequirements()
self.requirements.load(requirements_file)
self.temp_dir = mkdtemp()
def extract_bower_zipfile(self, zip_file, dest, expected_version=None):
bower_json = None
root = None
deps_installed = []
for info in zip_file.infolist():
if PurePath(info.filename).name == "bower.json":
with zip_file.open(info) as f:
bower_json = json.load(f)
root = str(PurePath(info.filename).parent)
break
version = bower_json["version"]
if expected_version is not None:
expected_version = Bower.clean_semver(expected_version)
if not semver.match(version, expected_version):
click.secho("error: versions do not match ({} =/= {})".format(
version, expected_version))
raise InvalidPackageError
if "dependencies" in bower_json:
for package, version in bower_json["dependencies"].items():
url = Bower.get_package_url(package)
deps_installed.extend(self.get_bower_package(
url, dest=dest, version=version))
        ignore_patterns = [GitIgnorePattern(ig)
                           for ig in bower_json.get("ignore", [])]
path_spec = PathSpec(ignore_patterns)
namelist = [path for path in zip_file.namelist()
if PurePath(path).parts[0] == root]
ignored = list(path_spec.match_files(namelist))
for path in namelist:
dest_path = PurePath(
bower_json["name"],
*PurePath(path).parts[1:])
if path in ignored:
continue
            # Skip entries nested inside a directory that is already ignored.
            if any(str(parent) + "/" in ignored
                   for parent in PurePath(path).parents):
                continue
            if path.endswith("/"):
                if list(path_spec.match_files([str(dest_path)])):
                    ignored.append(path)
elif not (dest / dest_path).is_dir():
(dest / dest_path).mkdir(parents=True)
else:
target_path = dest / dest_path.parent / dest_path.name
source = zip_file.open(path)
target = target_path.open("wb")
with source, target:
shutil.copyfileobj(source, target)
deps_installed.append((bower_json["name"], bower_json["version"]))
return deps_installed
def get_bower_package(self, url, dest=None, version=None,
process_deps=True):
dest = dest or Path(".") / "assets"
parsed_url = urlparse(url)
if parsed_url.scheme == "git" or parsed_url.path.endswith(".git"):
if parsed_url.netloc == "github.com":
user, repo = parsed_url.path[1:-4].split("/")
response = get(github_api_uri +
"/repos/{}/{}/tags".format(user, repo))
tags = response.json()
target = None
if not len(tags):
click.secho("fatal: no tags exist for {}/{}".format(
user, repo), fg="red")
raise InvalidPackageError
if version is None:
target = tags[0]
else:
for tag in tags:
if semver.match(tag["name"],
Bower.clean_semver(version)):
target = tag
break
                if not target:
                    click.secho(
                        "fatal: failed to find matching tag for "
                        "{user}/{repo} {version}".format(
                            user=user, repo=repo, version=version),
                        fg="red")
raise VersionNotFoundError
click.secho("installing {}/{}#{}".format(
user, repo, tags[0]["name"]), fg="green")
return self.get_bower_package(
url=target["zipball_url"],
dest=dest,
version=version)
raise NotImplementedError
click.echo("git clone {url}".format(url=url))
cmd = envoy.run('git clone {url} "{dest}"'.format(
url=url, dest=dest))
elif parsed_url.scheme in ("http", "https"):
zip_dest = download_file(url, dest=self.temp_dir,
label="{dest_basename}",
expected_extension="zip")
with zipfile.ZipFile(zip_dest, "r") as pkg:
return self.extract_bower_zipfile(pkg, dest,
expected_version=version)
# pkg.extractall(str(dest))
else:
click.secho("protocol currently unsupported :(")
sys.exit(1)
def install_bower(self, package, save=True, save_dev=False):
"""Installs a bower package.
:param save: if `True`, pins the package to the Hydrogen requirements
YAML file.
:param save_dev: if `True`, pins the package as a development
dependency to the Hydrogen requirements YAML file.
:param return: a list of tuples, containing all installed package names
and versions, including any dependencies.
"""
requirement = Requirement.coerce(package)
url = Bower.get_package_url(requirement.package)
installed = []
for name, _ in self.get_bower_package(url):
installed.append(Requirement(name, requirement.version))
for requirement in installed:
if save:
self.requirements["bower"].add(requirement, replace=True)
if save_dev:
self.requirements["bower-dev"].add(requirement, replace=True)
success("installed {}".format(str(requirement)))
if save or save_dev:
self.requirements.save()
return installed
def install_pip(self, package, save=True, save_dev=False):
"""Installs a pip package.
:param save: if `True`, pins the package to the Hydrogen requirements
YAML file.
:param save_dev: if `True`, pins the package as a development
dependency to the Hydrogen requirements YAML file.
:param return: a **single** :class:`Requirement` object, representing
the installed version of the given package.
"""
requirement = Requirement.coerce(package)
click.echo("pip install " + requirement.package)
cmd = envoy.run("pip install {}".format(str(requirement)))
if cmd.status_code == 0:
installed_packages = get_installed_pypackages()
package = installed_packages[requirement.package]
requirement.version = "=={}".format(package.version)
if save:
self.requirements["all"].add(requirement)
if save_dev:
self.requirements["dev"].add(requirement)
if save or save_dev:
self.requirements.save()
return requirement
else:
fatal(cmd.std_err)
def groups_option(f):
new_func = click.option("-g", "--groups",
help="Comma-separated list of requirement groups "
"to include.")(f)
return update_wrapper(new_func, f)
@click.group()
@click.version_option(prog_name=prog_name)
@click.pass_context
def main(ctx):
which = "where" if sys.platform == "win32" else "which"
if envoy.run(which + " git").status_code != 0:
click.secho("fatal: git not found in PATH", fg="red")
sys.exit(1)
ctx.obj = Hydrogen()
@main.command()
@click.pass_obj
@click.option("output_yaml", "--yaml", "-y", is_flag=True,
help="Show requirements in YAML format.")
@click.option("--resolve", "-r", is_flag=True,
help="Resolve version numbers for ambiguous packages.")
@groups_option
def freeze(h, output_yaml, resolve, groups):
"""Output installed packages."""
if not groups:
groups = filter(lambda group: not group.lower().startswith("bower"),
h.requirements.keys())
else:
groups = [text_type.strip(group) for group in groups.split(",")]
if output_yaml:
for requirements in h.requirements.values():
for requirement in requirements:
if resolve and not requirement.version:
requirement.load_installed_version()
click.echo(h.requirements.yaml)
else:
for group in groups:
if not h.requirements[group]:
continue
click.echo("# {}".format(group))
for requirement in h.requirements[group]:
if resolve and not requirement.version:
requirement.load_installed_version()
click.echo(str(requirement))
@main.command()
@click.pass_obj
@click.option("--pip/--bower", default=True)
@groups_option
@click.option("--save", is_flag=True)
@click.option("--save-dev", is_flag=True)
@click.argument("packages", nargs=-1)
def install(h, pip, groups, save, save_dev, packages):
"""Install a pip or bower package."""
if groups:
groups = [text_type.strip(group) for group in groups.split(",")]
else:
groups = h.requirements.keys()
if not packages:
for group in groups:
if group not in h.requirements:
warning("{} not in requirements".format(group))
continue
install = (h.install_bower if group.startswith("bower")
else h.install_pip)
for requirement in h.requirements[group]:
install(str(requirement), save=False, save_dev=False)
if pip:
for package in packages:
h.install_pip(package, save=save, save_dev=save_dev)
else:
for package in packages:
h.install_bower(package, save=save, save_dev=save_dev)
if __name__ == "__main__":
main()
| darvid/hydrogen | hydrogen.py | Python | bsd-2-clause | 26,677 |
from __future__ import unicode_literals
import os.path
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-aloha-edit',
version='0.4.0',
description='Django Aloha Edit',
author='Nathaniel Tucker',
author_email='[email protected]',
url='https://github.com/ntucker/django-aloha-edit',
packages=find_packages(),
include_package_data=True,
install_requires=['django>=1.6', 'bleach>=1.4', 'lxml>=2.3', 'Pillow>=2.9.0', 'tinycss>=0.3'],
long_description=read('README.rst'),
license="BSD",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
],
)
| ntucker/django-aloha-edit | setup.py | Python | bsd-2-clause | 1,190 |
from __future__ import unicode_literals
from future.builtins import str
from datetime import datetime
import re
try:
from urllib.parse import quote
except ImportError:
# Python 2
from urllib import quote
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import urlize
from django.utils.timezone import make_aware, utc
from django.utils.translation import ugettext_lazy as _
from requests_oauthlib import OAuth1
import requests
from mezzanine.conf import settings
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \
QUERY_TYPE_LIST, QUERY_TYPE_SEARCH
from mezzanine.twitter import get_auth_settings
from mezzanine.twitter.managers import TweetManager
re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE)
re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE)
replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>"
replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>"
class TwitterQueryException(Exception):
pass
@python_2_unicode_compatible
class Query(models.Model):
type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES,
max_length=10)
value = models.CharField(_("Value"), max_length=140)
interested = models.BooleanField("Interested", default=True)
class Meta:
verbose_name = _("Twitter query")
verbose_name_plural = _("Twitter queries")
ordering = ("-id",)
def __str__(self):
return "%s: %s" % (self.get_type_display(), self.value)
def run(self):
"""
Request new tweets from the Twitter API.
"""
try:
value = quote(self.value)
except KeyError:
value = self.value
urls = {
QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/"
"user_timeline.json?screen_name=%s"
"&include_rts=true" % value.lstrip("@")),
QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json"
"?list_id=%s&include_rts=true" % value),
QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json"
"?q=%s" % value,
}
try:
url = urls[self.type]
except KeyError:
raise TwitterQueryException("Invalid query type: %s" % self.type)
settings.use_editable()
auth_settings = get_auth_settings()
if not auth_settings:
from mezzanine.conf import registry
if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]:
# These are some read-only keys and secrets we use
# for the default query (eg nothing has been configured)
auth_settings = (
"KxZTRD3OBft4PP0iQW0aNQ",
"sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ",
"1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI",
"r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R",
)
else:
raise TwitterQueryException("Twitter OAuth settings missing")
try:
tweets = requests.get(url, auth=OAuth1(*auth_settings)).json()
except Exception as e:
raise TwitterQueryException("Error retrieving: %s" % e)
try:
raise TwitterQueryException(tweets["errors"][0]["message"])
except (IndexError, KeyError, TypeError):
pass
if self.type == "search":
tweets = tweets["statuses"]
for tweet_json in tweets:
remote_id = str(tweet_json["id"])
tweet, created = self.tweets.get_or_create(remote_id=remote_id)
if not created:
continue
if "retweeted_status" in tweet_json:
user = tweet_json['user']
tweet.retweeter_user_name = user["screen_name"]
tweet.retweeter_full_name = user["name"]
tweet.retweeter_profile_image_url = user["profile_image_url"]
tweet_json = tweet_json["retweeted_status"]
if self.type == QUERY_TYPE_SEARCH:
tweet.user_name = tweet_json['user']['screen_name']
tweet.full_name = tweet_json['user']['name']
tweet.profile_image_url = \
tweet_json['user']["profile_image_url"]
date_format = "%a %b %d %H:%M:%S +0000 %Y"
else:
user = tweet_json["user"]
tweet.user_name = user["screen_name"]
tweet.full_name = user["name"]
tweet.profile_image_url = user["profile_image_url"]
date_format = "%a %b %d %H:%M:%S +0000 %Y"
tweet.text = urlize(tweet_json["text"])
tweet.text = re_usernames.sub(replace_usernames, tweet.text)
tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)
if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False):
chars = [ch for ch in tweet.text if ord(ch) < 0x800]
tweet.text = ''.join(chars)
d = datetime.strptime(tweet_json["created_at"], date_format)
tweet.created_at = make_aware(d, utc)
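            # The backend may raise a Warning (e.g. MySQL, for characters it
            # cannot store); swallow it and save again below.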
try:
tweet.save()
except Warning:
pass
tweet.save()
self.interested = False
self.save()
class Tweet(models.Model):
remote_id = models.CharField(_("Twitter ID"), max_length=50)
created_at = models.DateTimeField(_("Date/time"), null=True)
text = models.TextField(_("Message"), null=True)
profile_image_url = models.URLField(_("Profile image URL"), null=True)
user_name = models.CharField(_("User name"), max_length=100, null=True)
full_name = models.CharField(_("Full name"), max_length=100, null=True)
retweeter_profile_image_url = models.URLField(
_("Profile image URL (Retweeted by)"), null=True)
retweeter_user_name = models.CharField(
_("User name (Retweeted by)"), max_length=100, null=True)
retweeter_full_name = models.CharField(
_("Full name (Retweeted by)"), max_length=100, null=True)
query = models.ForeignKey("Query", related_name="tweets")
objects = TweetManager()
class Meta:
verbose_name = _("Tweet")
verbose_name_plural = _("Tweets")
ordering = ("-created_at",)
def __str__(self):
return "%s: %s" % (self.user_name, self.text)
def is_retweet(self):
return self.retweeter_user_name is not None
| Kniyl/mezzanine | mezzanine/twitter/models.py | Python | bsd-2-clause | 6,647 |
fig, ax = plt.subplots()
data['2012':].mean().plot(kind='bar', ax=ax, rot=0, color='C0')
ax.set_ylabel("NO$_2$ concentration (µg/m³)")
ax.axhline(y=40., color='darkorange')
ax.text(0.01, 0.48, 'Yearly limit is 40 µg/m³',
horizontalalignment='left', fontsize=13,
transform=ax.transAxes, color='darkorange'); | jorisvandenbossche/DS-python-data-analysis | notebooks/_solutions/case4_air_quality_analysis9.py | Python | bsd-3-clause | 327 |
"""
OOB configuration.
This module should be included in (or replace) the
default module set in settings.OOB_PLUGIN_MODULES
All functions defined in this module are made available
to be called by the OOB handler.
See src/server/oob_msdp.py for more information.
function execution - the oob protocol can execute a function directly on
the server. The available functions must be defined
as global functions via settings.OOB_PLUGIN_MODULES.
repeat func execution - the oob protocol can request a given function be
executed repeatedly at a regular interval. This
uses an internal script pool.
tracking - the oob protocol can request Evennia to track changes to
fields on objects, as well as changes in Attributes. This is
done by dynamically adding tracker-objects on entities. The
behaviour of those objects can be customized via
settings.OOB_PLUGIN_MODULES.
oob functions have the following call signature:
function(caller, session, *args, **kwargs)
oob trackers should inherit from the OOBTracker class in src/server.oob_msdp.py
and implement a minimum of the same functionality.
a global function oob_error will be used as optional error management.
"""
# import the contents of the default msdp module
from src.server.oob_cmds import *
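# A minimal sketch of a custom OOB function following the call signature
# documented above (the name and behaviour are illustrative only, not part
# of the default set):
#
# def echo(caller, session, *args, **kwargs):
#     """Echo the given arguments back to the session."""
#     session.msg(oob=("echo", args, kwargs))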
| Pathel/deuterium | game/gamesrc/conf/examples/oobfuncs.py | Python | bsd-3-clause | 1,435 |
import errno
import os.path
import os
import random
def rename(src, dst):
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
def tempname(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
return fn
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
| abadger/Bento | bento/compat/rename.py | Python | bsd-3-clause | 1,100 |
from dateutil.relativedelta import relativedelta
from script.models import Script, ScriptProgress
from rapidsms.models import Connection
import datetime
from rapidsms.models import Contact
from rapidsms.contrib.locations.models import Location
from poll.models import Poll
from script.models import ScriptStep
from django.db.models import Count
from django.conf import settings
from education.scheduling import schedule_at, at
def is_holiday(date1, holidays = getattr(settings, 'SCHOOL_HOLIDAYS', [])):
for date_start, date_end in holidays:
if isinstance(date_end, str):
if date1.date() == date_start.date():
return True
elif date1.date() >= date_start.date() and date1.date() <= date_end.date():
return True
return False
def is_empty(arg):
"""
Generalizes 'empty' checks on Strings, sequences, and dicts.
Returns 'True' for None, empty strings, strings with just white-space,
and sequences with len == 0
"""
if arg is None:
return True
if isinstance(arg, basestring):
arg = arg.strip()
try:
if not len(arg):
return True
except TypeError:
# wasn't a sequence
pass
return False
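# Example sketch of the generalized emptiness check:
#
#     is_empty(None)    # -> True
#     is_empty("   ")   # -> True (whitespace-only string)
#     is_empty([])      # -> True
#     is_empty(0)       # -> False (numbers have no len(), so not "empty")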
def previous_calendar_week(t=None):
"""
To education monitoring, a week runs between Thursdays,
Thursday marks the beginning of a new week of data submission
Data for a new week is accepted until Wednesday evening of the following week
"""
d = t or datetime.datetime.now()
if not d.weekday() == 3:
# last Thursday == next Thursday minus 7 days.
last_thursday = d + (datetime.timedelta((3-d.weekday())%7) - (datetime.timedelta(days=7)))
else:
last_thursday = d
end_date = last_thursday + datetime.timedelta(days=6)
return (last_thursday.date(), end_date)
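# Example sketch: any date inside the week maps to the same Thursday-to-
# Wednesday window, e.g. Friday 2013-05-10 yields
# (Thursday 2013-05-09, Wednesday 2013-05-15):
#
#     start, end = previous_calendar_week(datetime.datetime(2013, 5, 10))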
def _this_thursday(sp=None, get_time=datetime.datetime.now, time_set=None, holidays=getattr(settings, 'SCHOOL_HOLIDAYS', [])):
"""
This Thursday of the week which is not a school holiday.
"""
schedule = time_set or get_time()
d = sp.time if sp else schedule
d = d + datetime.timedelta((3 - d.weekday()) % 7)
while(is_holiday(d, holidays)):
d = d + datetime.timedelta(1) # try next day
return at(d.date(), 10)
def get_polls(**kwargs):
script_polls = ScriptStep.objects.values_list('poll', flat=True).exclude(poll=None)
return Poll.objects.exclude(pk__in=script_polls).annotate(Count('responses'))
def compute_average_percentage(list_of_percentages):
    """
    Average percentage.
    -> this is also a handy tool to compute averages generally, sanitizing
    out non-numeric values first.
    """
sanitize = []
try:
for i in list_of_percentages:
if isinstance(float(i), float):
sanitize.append(float(i))
else:
pass
except ValueError:
print "non-numeric characters used"
pass
if len(sanitize) <= 0:
return 0
return sum(sanitize) / float(len(sanitize))
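# Example sketch:
#
#     compute_average_percentage(["50", "100"])  # -> 75.0
#     compute_average_percentage([])             # -> 0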
def list_poll_responses(poll, **kwargs):
"""
pass a poll queryset and you get yourself a dict with locations vs responses (quite handy for the charts)
dependecies: Contact and Location must be in your module; this lists all Poll responses by district
"""
#forceful import
from poll.models import Poll
to_ret = {}
"""
narrowed down to 3 districts (and up to 14 districts)
"""
DISTRICT = ['Kaabong', 'Kabarole', 'Kyegegwa', 'Kotido']
if not kwargs:
# if no other arguments are provided
for location in Location.objects.filter(name__in=DISTRICT):
to_ret[location.__unicode__()] = compute_average_percentage([msg.message.text for msg in poll.responses.filter(contact__in=Contact.objects.filter(reporting_location=location))])
return to_ret
else:
# filter by number of weeks
#TODO more elegant solution to coincide with actual school term weeks
date_filter = kwargs['weeks'] #give the date in weeks
date_now = datetime.datetime.now()
date_diff = date_now - datetime.timedelta(weeks=date_filter)
for location in Location.objects.filter(name__in=DISTRICT):
to_ret[location.__unicode__()] = compute_average_percentage([msg.message.text for msg in poll.responses.filter(date__gte=date_diff, contact__in=Contact.objects.filter(reporting_location=location))])
return to_ret
themes = {
1.1 : "Name and location of our Sub-county/Division",
1.2 : 'Physical features of our Sub-County/Division',
1.3 : 'People in our Sub-county/Division',
2.1 : 'Occupations of people in our Sub-county/Division and their importance',
2.2 : 'Social Services and their importance',
2.3 : 'Challenges in social services and their possible solutions',
3.1 : 'Soil',
3.2 : 'Natural causes of changes in the environment',
3.3 : 'Changes in the environment through human activities',
4.1 : 'Air and the Sun',
4.2 : 'Water',
4.3 : 'Managing Water',
5.1 : 'Living things',
5.2 : 'Birds and Insects',
5.3 : 'Care for insects, birds and animals',
6.1 : 'Plants and their habitat',
6.2 : 'Parts of a flowering plant and their uses',
6.3 : 'Crop-growing practices',
7.1 : 'Saving resources',
7.2 : 'Spending resources',
7.3 : 'Projects',
8.1 : 'Living in peace with others',
8.2 : 'Child rights, needs and their importance',
8.3 : 'Child responsibility',
9.1 : 'Customs in our sub-county/division',
9.2 : 'Gender',
9.3 : 'Ways of promoting and preserving culture',
10.1: 'Disease vectors',
10.2: 'Diseases spread by vectors',
10.3: 'HIV/AIDS',
11.1: 'Concept of technology',
11.2: 'Processing and making things from natural materials',
11.3: 'Making things from artificial materials',
12.1: 'Sources of energy',
12.2: 'Ways of saving energy',
12.3: 'Dangers of energy and ways of avoiding them'
}
## {{{ http://code.activestate.com/recipes/409413/ (r2)
"""
Descriptive statistical analysis tool.
"""
class StatisticsException(Exception):
"""Statistics Exception class."""
pass
class Statistics(object):
"""Class for descriptive statistical analysis.
Behavior:
Computes numerical statistics for a given data set.
Available public methods:
None
Available instance attributes:
N: total number of elements in the data set
sum: sum of all values (n) in the data set
min: smallest value of the data set
max: largest value of the data set
mode: value(s) that appear(s) most often in the data set
mean: arithmetic average of the data set
range: difference between the largest and smallest value in the data set
median: value which is in the exact middle of the data set
variance: measure of the spread of the data set about the mean
stddev: standard deviation - measure of the dispersion of the data set
based on variance
identification: Instance ID
Raised Exceptions:
StatisticsException
Bases Classes:
object (builtin)
Example Usage:
x = [ -1, 0, 1 ]
try:
stats = Statistics(x)
except StatisticsException, mesg:
<handle exception>
print "N: %s" % stats.N
print "SUM: %s" % stats.sum
print "MIN: %s" % stats.min
print "MAX: %s" % stats.max
print "MODE: %s" % stats.mode
print "MEAN: %0.2f" % stats.mean
print "RANGE: %s" % stats.range
print "MEDIAN: %0.2f" % stats.median
print "VARIANCE: %0.5f" % stats.variance
print "STDDEV: %0.5f" % stats.stddev
print "DATA LIST: %s" % stats.sample
"""
def __init__(self, sample=[], population=False):
"""Statistics class initializer method."""
# Raise an exception if the data set is empty.
if (not sample):
raise StatisticsException, "Empty data set!: %s" % sample
# The data set (a list).
self.sample = sample
# Sample/Population variance determination flag.
self.population = population
self.N = len(self.sample)
self.sum = float(sum(self.sample))
self.min = min(self.sample)
self.max = max(self.sample)
self.range = self.max - self.min
self.mean = self.sum/self.N
# Inplace sort (list is now in ascending order).
self.sample.sort()
        self.__getMode()
        self.__getMedian()
        self.__getVariance()
        self.__getStandardDeviation()
# Instance identification attribute.
self.identification = id(self)
def __getMode(self):
"""Determine the most repeated value(s) in the data set."""
# Initialize a dictionary to store frequency data.
frequency = {}
# Build dictionary: key - data set values; item - data frequency.
for x in self.sample:
if (x in frequency):
frequency[x] += 1
else:
frequency[x] = 1
# Create a new list containing the values of the frequency dict. Convert
# the list, which may have duplicate elements, into a set. This will
# remove duplicate elements. Convert the set back into a sorted list
# (in descending order). The first element of the new list now contains
# the frequency of the most repeated values(s) in the data set.
# mode = sorted(list(set(frequency.values())), reverse=True)[0]
# Or use the builtin - max(), which returns the largest item of a
# non-empty sequence.
mode = max(frequency.values())
# If the value of mode is 1, there is no mode for the given data set.
if (mode == 1):
self.mode = []
return
# Step through the frequency dictionary, looking for values equaling
# the current value of mode. If found, append the value and its
# associated key to the self.mode list.
self.mode = [(x, mode) for x in frequency if (mode == frequency[x])]
    def __getMedian(self):
        """Determine the value in the exact middle of the sorted data set."""
        midpoint = self.N // 2
        if self.N % 2:
            # Odd number of items: the single middle element.
            self.median = self.sample[midpoint]
        else:
            # Even number of items: mean of the two middle elements.
            self.median = (self.sample[midpoint - 1] +
                           self.sample[midpoint]) / 2.0
    def __getVariance(self):
"""Determine the measure of the spread of the data set about the mean.
Sample variance is determined by default; population variance can be
determined by setting population attribute to True.
"""
x = 0 # Summation variable.
# Subtract the mean from each data item and square the difference.
# Sum all the squared deviations.
for item in self.sample:
x += (item - self.mean)**2.0
try:
if (not self.population):
# Divide sum of squares by N-1 (sample variance).
self.variance = x/(self.N-1)
else:
# Divide sum of squares by N (population variance).
self.variance = x/self.N
except:
self.variance = 0
def __getStandardDeviation(self):
"""Determine the measure of the dispersion of the data set based on the
variance.
"""
from math import sqrt # Mathematical functions.
# Take the square root of the variance.
self.stddev = sqrt(self.variance)
def extract_key_count(categories, key=None):
    """
    A utility function that returns the count stored against `key` in, for
    example, a categorized poll (a list of dicts holding `category__name`
    and `value` entries).
    """
    if categories and key:
        # go through a list of dictionaries
        for category in categories:
            if category.get('category__name') == key:
                return category.get('value')
    return 0
def get_week_count(reference_date, d):
week_count = 0
while(reference_date.date() <= d.date()):
d = d - datetime.timedelta(days=7)
week_count = week_count + 1
return week_count
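# Example sketch: step back from ``d`` one week at a time, counting steps
# until ``reference_date`` is passed:
#
#     get_week_count(datetime.datetime(2013, 1, 1),
#                    datetime.datetime(2013, 1, 15))  # -> 3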
def get_months(start_date,end_date):
to_ret = []
first_day = start_date
while start_date < end_date:
last_day = start_date + relativedelta(day=1, months=+1, days=-1,hour=23,minute=59)
start_date += relativedelta(months=1)
to_ret.append([
datetime.datetime(first_day.year, first_day.month, first_day.day,first_day.hour,first_day.minute),
datetime.datetime(last_day.year, last_day.month, last_day.day,last_day.hour,last_day.minute)])
first_day = start_date + relativedelta(day=1,hour=00,minute=00)
to_ret.append([
datetime.datetime(first_day.year, first_day.month, first_day.day,first_day.hour,first_day.minute),
datetime.datetime(end_date.year, end_date.month, end_date.day,end_date.hour,end_date.minute)])
return to_ret
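# Example sketch: month windows covering a date range; the first window is
# clipped to the start date and the last to the end date:
#
#     get_months(datetime.datetime(2013, 1, 15), datetime.datetime(2013, 3, 10))
#     # -> [[2013-01-15 00:00, 2013-01-31 23:59],
#     #     [2013-02-01 00:00, 2013-02-28 23:59],
#     #     [2013-03-01 00:00, 2013-03-10 00:00]]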
| unicefuganda/edtrac | edtrac_project/rapidsms_edtrac/education/utils.py | Python | bsd-3-clause | 12,792 |
import os
from example_builder import ExampleBuilder
RST_TEMPLATE = """
.. _%(sphinx_tag)s:
%(docstring)s
%(image_list)s
.. raw:: html
<div class="toggle_trigger"><a href="#">
**Code output:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(stdout)s
.. raw:: html
</div>
<div class="toggle_trigger" id="start_open"><a href="#">
**Python source code:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(fname)s
:lines: %(end_line)s-
.. raw:: html
</div>
<div align="right">
:download:`[download source: %(fname)s] <%(fname)s>`
.. raw:: html
</div>
"""
def main(app):
target_dir = os.path.join(app.builder.srcdir, 'book_figures')
source_dir = os.path.abspath(app.builder.srcdir + '/../' + 'examples')
try:
plot_gallery = eval(app.builder.config.plot_gallery)
except TypeError:
plot_gallery = bool(app.builder.config.plot_gallery)
if not os.path.exists(source_dir):
os.makedirs(source_dir)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
EB = ExampleBuilder(source_dir, target_dir,
execute_files=plot_gallery,
contents_file='contents.txt',
dir_info_file='README.rst',
dir_footer_file='FOOTER.rst',
sphinx_tag_base='book_fig',
template_example=RST_TEMPLATE)
EB.run()
def setup(app):
app.connect('builder-inited', main)
#app.add_config_value('plot_gallery', True, 'html')
| cigroup-ol/metaopt | docs/_extensions/gen_figure_rst.py | Python | bsd-3-clause | 1,626 |
from importlib import import_module
from django.core.urlresolvers import (RegexURLPattern,
RegexURLResolver, LocaleRegexURLResolver)
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
__all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'patterns', 'url']
handler400 = 'django.views.defaults.bad_request'
handler403 = 'django.views.defaults.permission_denied'
handler404 = 'django.views.defaults.page_not_found'
handler500 = 'django.views.defaults.server_error'
def include(arg, namespace=None, app_name=None):
if isinstance(arg, tuple):
# callable returning a namespace hint
if namespace:
raise ImproperlyConfigured('Cannot override the namespace for a dynamic module that provides a namespace')
urlconf_module, app_name, namespace = arg
else:
# No namespace hint - use manually provided namespace
urlconf_module = arg
if isinstance(urlconf_module, six.string_types):
urlconf_module = import_module(urlconf_module)
patterns = getattr(urlconf_module, 'urlpatterns', urlconf_module)
# Make sure we can iterate through the patterns (without this, some
# testcases will break).
if isinstance(patterns, (list, tuple)):
for url_pattern in patterns:
# Test if the LocaleRegexURLResolver is used within the include;
# this should throw an error since this is not allowed!
if isinstance(url_pattern, LocaleRegexURLResolver):
raise ImproperlyConfigured(
'Using i18n_patterns in an included URLconf is not allowed.')
return (urlconf_module, app_name, namespace)
def patterns(prefix, *args):
pattern_list = []
for t in args:
if isinstance(t, (list, tuple)):
t = url(prefix=prefix, *t)
elif isinstance(t, RegexURLPattern):
t.add_prefix(prefix)
pattern_list.append(t)
return pattern_list
def url(regex, view, kwargs=None, name=None, prefix=''):
if isinstance(view, (list, tuple)):
# For include(...) processing.
urlconf_module, app_name, namespace = view
return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace)
else:
if isinstance(view, six.string_types):
if not view:
raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex)
if prefix:
view = prefix + '.' + view
return RegexURLPattern(regex, view, kwargs, name)
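# A typical URLconf usage sketch for the helpers above (the view paths are
# illustrative only):
#
#     urlpatterns = patterns('',
#         url(r'^articles/(?P<year>\d{4})/$', 'news.views.year_archive',
#             name='news-year-archive'),
#         url(r'^comments/', include('django_comments.urls')),
#     )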
| ericholscher/django | django/conf/urls/__init__.py | Python | bsd-3-clause | 2,589 |
from __future__ import absolute_import
import datetime
import jwt
import re
import logging
from six.moves.urllib.parse import parse_qs, urlparse, urlsplit
from sentry.integrations.atlassian_connect import get_query_hash
from sentry.shared_integrations.exceptions import ApiError
from sentry.integrations.client import ApiClient
from sentry.utils.http import absolute_uri
logger = logging.getLogger("sentry.integrations.jira")
JIRA_KEY = "%s.jira" % (urlparse(absolute_uri()).hostname,)
ISSUE_KEY_RE = re.compile(r"^[A-Za-z][A-Za-z0-9]*-\d+$")
class JiraCloud(object):
"""
Contains the jira-cloud specifics that a JiraClient needs
in order to communicate with jira
"""
def __init__(self, shared_secret):
self.shared_secret = shared_secret
@property
def cache_prefix(self):
return "sentry-jira-2:"
def request_hook(self, method, path, data, params, **kwargs):
"""
Used by Jira Client to apply the jira-cloud authentication
"""
# handle params that are already part of the path
url_params = dict(parse_qs(urlsplit(path).query))
url_params.update(params or {})
path = path.split("?")[0]
jwt_payload = {
"iss": JIRA_KEY,
"iat": datetime.datetime.utcnow(),
"exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=5 * 60),
"qsh": get_query_hash(path, method.upper(), url_params),
}
encoded_jwt = jwt.encode(jwt_payload, self.shared_secret)
params = dict(jwt=encoded_jwt, **(url_params or {}))
request_spec = kwargs.copy()
request_spec.update(dict(method=method, path=path, data=data, params=params))
return request_spec
def user_id_field(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use accountId
"""
return "accountId"
def user_query_param(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use query
"""
return "query"
def user_id_get_param(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use accountId
"""
return "accountId"
class JiraApiClient(ApiClient):
# TODO: Update to v3 endpoints
COMMENTS_URL = "/rest/api/2/issue/%s/comment"
COMMENT_URL = "/rest/api/2/issue/%s/comment/%s"
STATUS_URL = "/rest/api/2/status"
CREATE_URL = "/rest/api/2/issue"
ISSUE_URL = "/rest/api/2/issue/%s"
META_URL = "/rest/api/2/issue/createmeta"
PRIORITIES_URL = "/rest/api/2/priority"
PROJECT_URL = "/rest/api/2/project"
SEARCH_URL = "/rest/api/2/search/"
VERSIONS_URL = "/rest/api/2/project/%s/versions"
USERS_URL = "/rest/api/2/user/assignable/search"
USER_URL = "/rest/api/2/user"
SERVER_INFO_URL = "/rest/api/2/serverInfo"
ASSIGN_URL = "/rest/api/2/issue/%s/assignee"
TRANSITION_URL = "/rest/api/2/issue/%s/transitions"
EMAIL_URL = "/rest/api/3/user/email"
integration_name = "jira"
# This timeout is completely arbitrary. Jira doesn't give us any
# caching headers to work with. Ideally we want a duration that
# lets the user make their second jira issue with cached data.
cache_time = 240
def __init__(self, base_url, jira_style, verify_ssl, logging_context=None):
self.base_url = base_url
# `jira_style` encapsulates differences between jira server & jira cloud.
# We only support one API version for Jira, but server/cloud require different
# authentication mechanisms and caching.
self.jira_style = jira_style
super(JiraApiClient, self).__init__(verify_ssl, logging_context)
def get_cache_prefix(self):
return self.jira_style.cache_prefix
def request(self, method, path, data=None, params=None, **kwargs):
"""
Use the request_hook method for our specific style of Jira to
add authentication data and transform parameters.
"""
request_spec = self.jira_style.request_hook(method, path, data, params, **kwargs)
if "headers" not in request_spec:
request_spec["headers"] = {}
# Force adherence to the GDPR compliant API conventions.
# See
# https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide
request_spec["headers"]["x-atlassian-force-account-id"] = "true"
return self._request(**request_spec)
def user_id_get_param(self):
return self.jira_style.user_id_get_param()
def user_id_field(self):
return self.jira_style.user_id_field()
def user_query_param(self):
return self.jira_style.user_query_param()
def get_issue(self, issue_id):
return self.get(self.ISSUE_URL % (issue_id,))
def search_issues(self, query):
# check if it looks like an issue id
if ISSUE_KEY_RE.match(query):
jql = 'id="%s"' % query.replace('"', '\\"')
else:
jql = 'text ~ "%s"' % query.replace('"', '\\"')
return self.get(self.SEARCH_URL, params={"jql": jql})
def create_comment(self, issue_key, comment):
return self.post(self.COMMENTS_URL % issue_key, data={"body": comment})
def update_comment(self, issue_key, comment_id, comment):
return self.put(self.COMMENT_URL % (issue_key, comment_id), data={"body": comment})
def get_projects_list(self):
return self.get_cached(self.PROJECT_URL)
def get_project_key_for_id(self, project_id):
if not project_id:
return ""
projects = self.get_projects_list()
for project in projects:
if project["id"] == project_id:
return project["key"].encode("utf-8")
return ""
def get_create_meta_for_project(self, project):
params = {"expand": "projects.issuetypes.fields", "projectIds": project}
metas = self.get_cached(self.META_URL, params=params)
# We saw an empty JSON response come back from the API :(
if not metas:
logger.info(
"jira.get-create-meta.empty-response",
extra={"base_url": self.base_url, "project": project},
)
return None
# XXX(dcramer): document how this is possible, if it even is
if len(metas["projects"]) > 1:
raise ApiError(u"More than one project found matching {}.".format(project))
try:
return metas["projects"][0]
except IndexError:
logger.info(
"jira.get-create-meta.key-error",
extra={"base_url": self.base_url, "project": project},
)
return None
def get_versions(self, project):
return self.get_cached(self.VERSIONS_URL % project)
def get_priorities(self):
return self.get_cached(self.PRIORITIES_URL)
def get_users_for_project(self, project):
# Jira Server wants a project key, while cloud is indifferent.
project_key = self.get_project_key_for_id(project)
return self.get_cached(self.USERS_URL, params={"project": project_key})
def search_users_for_project(self, project, username):
# Jira Server wants a project key, while cloud is indifferent.
project_key = self.get_project_key_for_id(project)
return self.get_cached(
self.USERS_URL, params={"project": project_key, self.user_query_param(): username}
)
def search_users_for_issue(self, issue_key, email):
return self.get_cached(
self.USERS_URL, params={"issueKey": issue_key, self.user_query_param(): email}
)
def get_user(self, user_id):
user_id_get_param = self.user_id_get_param()
return self.get_cached(self.USER_URL, params={user_id_get_param: user_id})
def create_issue(self, raw_form_data):
data = {"fields": raw_form_data}
return self.post(self.CREATE_URL, data=data)
def get_server_info(self):
return self.get(self.SERVER_INFO_URL)
def get_valid_statuses(self):
return self.get_cached(self.STATUS_URL)
def get_transitions(self, issue_key):
return self.get_cached(self.TRANSITION_URL % issue_key)["transitions"]
def transition_issue(self, issue_key, transition_id):
return self.post(self.TRANSITION_URL % issue_key, {"transition": {"id": transition_id}})
def assign_issue(self, key, name_or_account_id):
user_id_field = self.user_id_field()
return self.put(self.ASSIGN_URL % key, data={user_id_field: name_or_account_id})
def get_email(self, account_id):
user = self.get_cached(self.EMAIL_URL, params={"accountId": account_id})
return user.get("email")
| beeftornado/sentry | src/sentry/integrations/jira/client.py | Python | bsd-3-clause | 8,782 |
import fbchat
from fbchat import PageData
def test_page_from_graphql(session):
data = {
"id": "123456",
"name": "Some school",
"profile_picture": {"uri": "https://scontent-arn2-1.xx.fbcdn.net/v/..."},
"url": "https://www.facebook.com/some-school/",
"category_type": "SCHOOL",
"city": None,
}
assert PageData(
session=session,
id="123456",
photo=fbchat.Image(url="https://scontent-arn2-1.xx.fbcdn.net/v/..."),
name="Some school",
url="https://www.facebook.com/some-school/",
city=None,
category="SCHOOL",
) == PageData._from_graphql(session, data)
| carpedm20/fbchat | tests/threads/test_page.py | Python | bsd-3-clause | 669 |
import tempfile
import shutil
from voltgrid import GitManager
def git_checkout(git_url, git_branch=None, git_tag=None, git_hash=None):
git_dst = tempfile.mkdtemp()
g = GitManager(url=git_url, git_dst=git_dst, git_branch=git_branch, git_tag=git_tag, git_hash=git_hash)
g.run()
shutil.rmtree(git_dst)
def test_git_tag():
""" Test checkout w/ Tag """
git_checkout(git_url='https://github.com/voltgrid/voltgrid-pie.git', git_branch=None, git_tag='v0.1.0')
def test_git_branch():
""" Test checkout w/ Branch """
git_checkout(git_url='https://github.com/voltgrid/voltgrid-pie.git', git_branch='master', git_tag=None)
def test_git_hash():
""" Test checkout w/ Commit Hash """
git_checkout(git_url='https://github.com/voltgrid/voltgrid-pie.git', git_hash='ab052369c675057dccc90a75fb92317e9b689a56')
| voltgrid/voltgrid-pie | tests/test_git.py | Python | bsd-3-clause | 840 |
"""
===========================================
Main Components (:mod:`artview.components`)
===========================================
.. currentmodule:: artview.components
ARTview offers some basic Components for visualization
of weather radar data using Py-ART and
ARTview functions.
.. autosummary::
:toctree: generated/
RadarDisplay
GridDisplay
Menu
LevelButtonWindow
FieldButtonWindow
LinkPlugins
SelectRegion
PlotDisplay
"""
import pyart
from pkg_resources import parse_version
from .plot_radar import RadarDisplay
if parse_version(pyart.__version__) >= parse_version('1.6.0'):
from .plot_grid import GridDisplay
else:
from .plot_grid_legacy import GridDisplay
from .plot_points import PointsDisplay
from .menu import Menu
from .level import LevelButtonWindow
from .field import FieldButtonWindow
from .component_control import LinkPlugins
from .select_region import SelectRegion as SelectRegion_dev
from .select_region_old import SelectRegion
from .plot_simple import PlotDisplay
del pyart
del parse_version | jjhelmus/artview | artview/components/__init__.py | Python | bsd-3-clause | 1,065 |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class TiebaItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
pass
class ThreadItem(scrapy.Item):
url = scrapy.Field()
title = scrapy.Field()
preview = scrapy.Field()
author = scrapy.Field()
tieba = scrapy.Field()
date = scrapy.Field()
keywords = scrapy.Field()
class NoneItem(scrapy.Item):
url = scrapy.Field()
title = scrapy.Field()
preview = scrapy.Field()
author = scrapy.Field()
tieba = scrapy.Field()
date = scrapy.Field()
keywords = scrapy.Field()
| fozzysec/tieba-keyword-spider | tieba/items.py | Python | bsd-3-clause | 730 |
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
__version__=''' $Id$ '''
___doc__=''
#odyssey.py
#
#Demo/benchmark of PDFgen rendering Homer's Odyssey.
#results on my humble P266 with 64MB:
# Without page compression:
# 239 pages in 3.76 seconds = 77 pages per second
# With textOut rather than textLine, i.e. computing width
# of every word as we would for wrapping:
# 239 pages in 10.83 seconds = 22 pages per second
# With page compression and textLine():
# 239 pages in 39.39 seconds = 6 pages per second
from reportlab.pdfgen import canvas
import time, os, sys
#find out what platform we are on and whether accelerator is
#present, in order to print this as part of benchmark info.
try:
import _rl_accel
ACCEL = 1
except ImportError:
ACCEL = 0
from reportlab.lib.units import inch, cm
from reportlab.lib.pagesizes import A4
#precalculate some basics
top_margin = A4[1] - inch
bottom_margin = inch
left_margin = inch
right_margin = A4[0] - inch
frame_width = right_margin - left_margin
def drawPageFrame(canv):
canv.line(left_margin, top_margin, right_margin, top_margin)
canv.setFont('Times-Italic',12)
canv.drawString(left_margin, top_margin + 2, "Homer's Odyssey")
canv.line(left_margin, top_margin, right_margin, top_margin)
canv.line(left_margin, bottom_margin, right_margin, bottom_margin)
canv.drawCentredString(0.5*A4[0], 0.5 * inch,
"Page %d" % canv.getPageNumber())
def run(verbose=1):
if sys.platform[0:4] == 'java':
impl = 'Jython'
else:
impl = 'Python'
verStr = '%d.%d' % (sys.version_info[0:2])
if ACCEL:
accelStr = 'with _rl_accel'
else:
accelStr = 'without _rl_accel'
print 'Benchmark of %s %s %s' % (impl, verStr, accelStr)
started = time.time()
canv = canvas.Canvas('odyssey.pdf', invariant=1)
canv.setPageCompression(1)
drawPageFrame(canv)
#do some title page stuff
canv.setFont("Times-Bold", 36)
canv.drawCentredString(0.5 * A4[0], 7 * inch, "Homer's Odyssey")
canv.setFont("Times-Bold", 18)
    canv.drawCentredString(0.5 * A4[0], 5 * inch, "Translated by Samuel Butler")
canv.setFont("Times-Bold", 12)
tx = canv.beginText(left_margin, 3 * inch)
tx.textLine("This is a demo-cum-benchmark for PDFgen. It renders the complete text of Homer's Odyssey")
tx.textLine("from a text file. On my humble P266, it does 77 pages per secondwhile creating a 238 page")
tx.textLine("document. If it is asked to computer text metrics, measuring the width of each word as ")
tx.textLine("one would for paragraph wrapping, it still manages 22 pages per second.")
tx.textLine("")
tx.textLine("Andy Robinson, Robinson Analytics Ltd.")
canv.drawText(tx)
canv.showPage()
#on with the text...
drawPageFrame(canv)
canv.setFont('Times-Roman', 12)
tx = canv.beginText(left_margin, top_margin - 0.5*inch)
for fn in ('odyssey.full.txt','odyssey.txt'):
if os.path.isfile(fn):
break
data = open(fn,'r').readlines()
for line in data:
#this just does it the fast way...
tx.textLine(line.rstrip())
#page breaking
y = tx.getY() #get y coordinate
if y < bottom_margin + 0.5*inch:
canv.drawText(tx)
canv.showPage()
drawPageFrame(canv)
canv.setFont('Times-Roman', 12)
tx = canv.beginText(left_margin, top_margin - 0.5*inch)
#page
pg = canv.getPageNumber()
if verbose and pg % 10 == 0:
print 'formatted page %d' % canv.getPageNumber()
if tx:
canv.drawText(tx)
canv.showPage()
drawPageFrame(canv)
if verbose:
print 'about to write to disk...'
canv.save()
finished = time.time()
elapsed = finished - started
pages = canv.getPageNumber()-1
speed = pages / elapsed
fileSize = os.stat('odyssey.pdf')[6] / 1024
print '%d pages in %0.2f seconds = %0.2f pages per second, file size %d kb' % (
pages, elapsed, speed, fileSize)
import md5
print 'file digest: %s' % md5.md5(open('odyssey.pdf','rb').read()).hexdigest()
if __name__=='__main__':
quiet = ('-q' in sys.argv)
run(verbose = not quiet)
| MatthewWilkes/reportlab | demos/odyssey/odyssey.py | Python | bsd-3-clause | 4,323 |
import os
import sys
from distutils.core import setup
from distutils.sysconfig import get_python_lib
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join)
in a platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, package_data = [], {}
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('bibletext'):
# Ignore PEP 3147 cache dirs and those whose names start with '.'
dirnames[:] = [d for d in dirnames if not d.startswith('.') and d != '__pycache__']
parts = fullsplit(dirpath)
package_name = '.'.join(parts)
if '__init__.py' in filenames:
packages.append(package_name)
elif filenames:
relative_path = []
while '.'.join(parts) not in packages:
relative_path.append(parts.pop())
relative_path.reverse()
path = os.path.join(*relative_path)
package_files = package_data.setdefault('.'.join(parts), [])
package_files.extend([os.path.join(path, f) for f in filenames])
version = '0.1.0'
setup(
name='Django-Bibletext',
version=version,
url='https://github.com/richardbolt/django-bibletext',
author='Richard Bolt',
author_email='[email protected]',
description=('A Django app that has a full Bible viewer, '
'including the KJV text by default.'),
license='BSD',
packages=packages,
package_data=package_data,
classifiers=[
        'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
) | richardbolt/django-bibletext | setup.py | Python | bsd-3-clause | 2,316 |
"""
CharNullField from
http://stackoverflow.com/questions/454436/unique-fields-that-allow-nulls-in-django/1934764#1934764
"""
from django.db import models
class CharNullField(models.CharField):
description = "CharField that stores NULL but returns ''"
    def to_python(self, value):
        """Return a Django-friendly '' if the DB holds NULL."""
        if isinstance(value, models.CharField):
            return value
        if value is None:
            return ""
        return value
    def get_db_prep_value(self, value):
        """Store NULL in the DB if the field is empty."""
        if value == "":
            return None
        return value  # otherwise, just pass the value through
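# Example sketch: a unique-but-optional column, where empty values are stored
# as NULL so they do not collide with each other (model name is illustrative):
#
#     class Profile(models.Model):
#         nickname = CharNullField(max_length=40, blank=True, null=True,
#                                  unique=True)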
| fwenzel/millimeter | apps/shortener/fields.py | Python | bsd-3-clause | 709 |
"""Base material for signature backends."""
from django.urls import reverse
class SignatureBackend(object):
"""Encapsulate signature workflow and integration with vendor backend.
Here is a typical workflow:
* :class:`~django_anysign.models.SignatureType` instance is created. It
encapsulates the backend type and its configuration.
* A :class:`~django_anysign.models.Signature` instance is created.
The signature instance has a signature type attribute, hence a backend.
* Signers are notified, by email, text or whatever. They get an hyperlink
to the "signer view". The URL may vary depending on the signature
backend.
* A signer goes to the backend's "signer view" entry point: typically a
view that integrates backend specific form to sign a document.
* Most backends have a "notification view", for the third-party service to
signal updates.
* Most backends have a "signer return view", where the signer is redirected
when he ends the signature process (whatever signature status).
* The backend's specific workflow can be made of several views. At the
beginning, there is a Signature instance which carries data (typically a
document). At the end, Signature is done.
"""
def __init__(self, name, code, url_namespace='anysign', **kwargs):
"""Configure backend."""
#: Human-readable name.
self.name = name
#: Machine-readable name. Should be lowercase alphanumeric only, i.e.
#: PEP-8 compliant.
self.code = code
#: Namespace for URL resolution.
self.url_namespace = url_namespace
def send_signature(self, signature):
"""Initiate the signature process.
At this state, the signature object has been configured.
Typical implementation consists in sending signer URL to first signer.
Raise ``NotImplementedError`` if the backend does not support such a
feature.
"""
raise NotImplementedError()
def get_signer_url(self, signer):
"""Return URL where signer signs document.
Raise ``NotImplementedError`` in case the backend does not support
"signer view" feature.
Default implementation reverses :meth:`get_signer_url_name` with
``signer.pk`` as argument.
"""
return reverse(self.get_signer_url_name(), args=[signer.pk])
def get_signer_url_name(self):
"""Return URL name where signer signs document.
Raise ``NotImplementedError`` in case the backend does not support
"signer view" feature.
Default implementation returns ``anysign:signer``.
"""
return '{ns}:signer'.format(ns=self.url_namespace)
def get_signer_return_url(self, signer):
"""Return absolute URL where signer is redirected after signing.
The URL must be **absolute** because it is typically used by external
signature service: the signer uses external web UI to sign the
document(s) and then the signature service redirects the signer to
(this) `Django` website.
Raise ``NotImplementedError`` in case the backend does not support
"signer return view" feature.
Default implementation reverses :meth:`get_signer_return_url_name`
with ``signer.pk`` as argument.
"""
return reverse(
self.get_signer_return_url_name(),
args=[signer.pk])
def get_signer_return_url_name(self):
"""Return URL name where signer is redirected once document has been
signed.
Raise ``NotImplementedError`` in case the backend does not support
"signer return view" feature.
Default implementation returns ``anysign:signer_return``.
"""
return '{ns}:signer_return'.format(ns=self.url_namespace)
def get_signature_callback_url(self, signature):
"""Return URL where backend can post signature notifications.
Raise ``NotImplementedError`` in case the backend does not support
"signature callback url" feature.
Default implementation reverses :meth:`get_signature_callback_url_name`
with ``signature.pk`` as argument.
"""
return reverse(
self.get_signature_callback_url_name(),
args=[signature.pk])
def get_signature_callback_url_name(self):
"""Return URL name where backend can post signature notifications.
Raise ``NotImplementedError`` in case the backend does not support
"signer return view" feature.
Default implementation returns ``anysign:signature_callback``.
"""
return '{ns}:signature_callback'.format(ns=self.url_namespace)
def create_signature(self, signature):
"""Register ``signature`` in backend, return updated object.
This method is typically called by views which create
:class:`~django_anysign.models.Signature` instances.
        If the backend stores a signature object, then the implementation
        should update
        :attr:`~django_anysign.models.Signature.signature_backend_id`.
Base implementation does nothing: override this method in backends.
"""
return signature
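# Illustrative sketch only (not shipped with django-anysign): a minimal
# hypothetical backend subclass, showing which hooks a real backend would
# override. A real implementation would talk to a third-party signature
# service instead of doing nothing.
class DummyBackend(SignatureBackend):
    def __init__(self, **kwargs):
        kwargs.setdefault('name', 'Dummy')
        kwargs.setdefault('code', 'dummy')
        super(DummyBackend, self).__init__(**kwargs)
    def send_signature(self, signature):
        # A real backend would deliver ``self.get_signer_url(signer)`` to the
        # first signer (by email, text message...). This sketch just returns
        # the signature unchanged.
        return signature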
| novafloss/django-anysign | django_anysign/backend.py | Python | bsd-3-clause | 5,255 |
from wrapper import get, run
@get('/')
def f(*args, **kwargs):
return '<html><head></head><body><h1>Hello!</h1></body></html>'
@get('/test', ['php'])
def test_f(*args, **kwargs):
arguments = kwargs['arguments']
php = arguments['php'][0]
self = args[0]
self.write("Head")
return 'Test{}'.format(php)
def test():
run(8888)
def main():
pass
if __name__ == '__main__':
    test()
| eeue56/just-columns | just-columns/test.py | Python | bsd-3-clause | 447 |
def extract17LiterarycornerWordpressCom(item):
'''
Parser for '17literarycorner.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('King Of Hell\'s Genius Pampered Wife', 'King Of Hell\'s Genius Pampered Wife', 'translated'),
('KOH', 'King Of Hell\'s Genius Pampered Wife', 'translated'),
('Addicted to Boundlessly Pampering You', 'Addicted to Boundlessly Pampering You', 'translated'),
('ATBPY', 'Addicted to Boundlessly Pampering You', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
if item['tags'] == ['Uncategorized']:
titlemap = [
('KOH Chapter ', 'King Of Hell\'s Genius Pampered Wife', 'translated'),
('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
('Master of Dungeon', 'Master of Dungeon', 'oel'),
]
for titlecomponent, name, tl_type in titlemap:
if titlecomponent.lower() in item['title'].lower():
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
	return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extract17LiterarycornerWordpressCom.py | Python | bsd-3-clause | 1613 |
class SqliteMixin:
config_params = {'DATASTORE': 'sqlite://'}
class OdmUtils:
config_file = 'tests.odm'
async def _create_task(self, token, subject='This is a task', person=None,
**data):
data['subject'] = subject
if person:
data['assigned'] = person['id']
request = await self.client.post(self.api_url('tasks'),
json=data,
token=token)
data = self.json(request.response, 201)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
self.assertEqual(data['subject'], subject)
self.assertTrue('created' in data)
self.assertEqual(len(request.cache.new_items), 1)
self.assertEqual(request.cache.new_items[0]['id'], data['id'])
self.assertFalse(request.cache.new_items_before_commit)
return data
async def _get_task(self, token, id):
request = await self.client.get(
'/tasks/{}'.format(id),
token=token)
response = request.response
self.assertEqual(response.status_code, 200)
data = self.json(response)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
return data
async def _delete_task(self, token, id):
request = await self.client.delete(
'/tasks/{}'.format(id),
token=token)
response = request.response
self.assertEqual(response.status_code, 204)
async def _create_person(self, token, username, name=None):
name = name or username
request = await self.client.post(
'/people',
json={'username': username, 'name': name},
token=token)
data = self.json(request.response, 201)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
self.assertEqual(data['name'], name)
return data
async def _update_person(self, token, id, username=None, name=None):
request = await self.client.patch(
self.api_url('people/%s' % id),
json={'username': username, 'name': name},
token=token
)
data = self.json(request.response, 200)
self.assertIsInstance(data, dict)
self.assertTrue('id' in data)
if name:
self.assertEqual(data['name'], name)
return data
| quantmind/lux | tests/odm/utils.py | Python | bsd-3-clause | 2,440 |
from django.conf.urls import patterns, include, url
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.views.generic.base import TemplateView
from django.views.generic.list import ListView
from django.views.generic.base import RedirectView
from preferences import preferences
from jmbo.urls import v1_api
from foundry.models import Page
from foundry import views, forms
from foundry.api import ListingResource, LinkResource, NavbarResource, \
MenuResource, PageResource, BlogPostResource
admin.autodiscover()
try:
import object_tools
object_tools.autodiscover()
except ImportError:
pass
v1_api.register(ListingResource())
v1_api.register(LinkResource())
v1_api.register(NavbarResource())
v1_api.register(MenuResource())
v1_api.register(PageResource())
v1_api.register(BlogPostResource())
urlpatterns = patterns('',
# Pre-empt url call for comment post
url(
r'^comments/post/$',
'foundry.views.post_comment',
{},
name='comments-post-comment'
),
(r'^favicon\.ico$', RedirectView.as_view(url='/static/images/favicon.ico', permanent=False)),
(r'^googlesearch/', include('googlesearch.urls')),
(r'^jmbo/', include('jmbo.urls')),
(r'^comments/', include('django.contrib.comments.urls')),
(r'^likes/', include('likes.urls')),
(r'^object-tools/', include(object_tools.tools.urls)),
(r'^ckeditor/', include('ckeditor.urls')),
(r'^contact/', include('contact.urls')),
(r'^post/', include('post.urls')),
(r'^simple-autocomplete/', include('simple_autocomplete.urls')),
(r'^jmbo-analytics/', include('jmbo_analytics.urls')),
url(r'social-auth', include('social_auth.urls')),
(r'^admin/', include(admin.site.urls)),
url(
r'^$',
TemplateView.as_view(template_name='base.html'),
name='home'
),
url(
r'^logo/$',
TemplateView.as_view(template_name='foundry/logo.html'),
name='logo'
),
url(
r'^header/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/header.html'),
name='header'
),
url(
r'^footer/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/footer.html'),
name='footer'
),
# Join, login, password reset
url(
r'^join/$',
'foundry.views.join',
{},
name='join',
),
url(
r'^join-finish/$',
'foundry.views.join_finish',
{},
name='join-finish',
),
(r'^auth/', include('django.contrib.auth.urls')),
url(
r'^login/$',
'django.contrib.auth.views.login',
{'authentication_form': forms.LoginForm},
name='login',
),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page':'/'},
name='logout',
),
# Password reset with custom form
url(
r'^password_reset/$',
'django.contrib.auth.views.password_reset',
{
'password_reset_form': forms.PasswordResetForm,
},
name='password_reset',
),
# Pages defined in preferences
url(
r'^about-us/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.about_us,
title=_("About us")
),
name='about-us'
),
url(
r'^terms-and-conditions/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.terms_and_conditions,
title=_("Terms and conditions")
),
name='terms-and-conditions'
),
url(
r'^privacy-policy/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.privacy_policy,
title=_("Privacy policy")
),
name='privacy-policy'
),
# Age gateway
url(
r'^age-gateway/$',
'foundry.views.age_gateway',
{},
name='age-gateway',
),
# Listing
url(
r'^listing/(?P<slug>[\w-]+)/$',
'foundry.views.listing_detail',
{},
name='listing-detail'
),
# Listing feed
url(
r'^listing/(?P<slug>[\w-]+)/feed/$',
'foundry.feeds.listing_feed',
{},
name='listing-feed'
),
# Edit profile
url(r'^edit-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/edit_profile.html'
)
),
name='edit-profile'
),
# Complete profile
url(r'^complete-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/complete_profile.html'
)
),
name='complete-profile'
),
# Page detail
url(
r'^page/(?P<slug>[\w-]+)/$',
'foundry.views.page_detail',
{},
name='page-detail'
),
# Lorem ipsum
url(
r'^lorem-ipsum/$',
TemplateView.as_view(template_name='foundry/lorem_ipsum.html'),
name='lorem-ipsum'
),
# Search
url(
r'^search/$',
'foundry.views.search',
{},
name='search'
),
# Search results
url(
r'^search-results/$',
'foundry.views.search_results',
{},
name='search-results'
),
# Comment reply form in case of no javascript
url(
r'^comment-reply-form/$',
'foundry.views.comment_reply_form',
{},
name='comment-reply-form'
),
# Report comment
url(
r'^report-comment/(?P<comment_id>\d+)/$',
'foundry.views.report_comment',
{},
name='report-comment'
),
# Chatroom detail
url(
r'^chatroom/(?P<slug>[\w-]+)/$',
'foundry.views.chatroom_detail',
{},
name='chatroom-detail'
),
# Create blogpost
url(
r'^create-blogpost/$',
'foundry.views.create_blogpost',
{},
name='create-blogpost',
),
# Blogpost list
url(
r'^blogposts/$',
views.BlogPostObjectList.as_view(),
{'limit': 300},
name='blogpost_object_list'
),
# Blogpost detail
url(
r'^blogpost/(?P<slug>[\w-]+)/$',
views.BlogPostObjectDetail.as_view(),
{},
name='blogpost_object_detail'
),
# Member notifications
url(
r'^member-notifications/$',
login_required(views.member_notifications),
{},
name='member-notifications'
),
# User detail page
url(
r'^users/(?P<username>[=@\.\w-]+)/$',
'foundry.views.user_detail',
{},
name='user-detail'
),
# Coming soon
url(
r'^coming-soon/$',
TemplateView.as_view(template_name='foundry/coming_soon.html'),
name='coming-soon'
),
# Load new comments
url(
r'^fetch-new-comments-ajax/(?P<content_type_id>\d+)/(?P<oid>\d+)/(?P<last_comment_id>\d+)/$',
'foundry.views.fetch_new_comments_ajax',
{},
name='fetch-new-comments-ajax'
),
# Test views
url(
r'^test-plain-response/$',
'foundry.views.test_plain_response',
{},
name='test-plain-response'
),
url(
r'^test-redirect/$',
'foundry.views.test_redirect',
{},
name='test-redirect'
),
    url(
        r'^pages/$',
        ListView.as_view(queryset=Page.permitted.all().order_by('title')),
        name='page-list'
    ),
# Member detail page
url(
r'^members/(?P<username>[\w-]+)/$',
'foundry.views.member_detail',
{},
name='member-detail'
),
# Admin
url(
r'^admin-row-create-ajax/$',
'foundry.admin_views.row_create_ajax',
{},
name='admin-row-create-ajax',
),
url(
r'^admin-column-create-ajax/$',
'foundry.admin_views.column_create_ajax',
{},
name='admin-column-create-ajax',
),
url(
r'^admin-tile-create-ajax/$',
'foundry.admin_views.tile_create_ajax',
{},
name='admin-tile-create-ajax',
),
url(
r'^admin-row-edit-ajax/$',
'foundry.admin_views.row_edit_ajax',
{},
name='admin-row-edit-ajax',
),
url(
r'^admin-column-edit-ajax/$',
'foundry.admin_views.column_edit_ajax',
{},
name='admin-column-edit-ajax',
),
url(
r'^admin-tile-edit-ajax/$',
'foundry.admin_views.tile_edit_ajax',
{},
name='admin-tile-edit-ajax',
),
url(
r'^admin-row-delete-ajax/$',
'foundry.admin_views.row_delete_ajax',
{},
name='admin-row-delete-ajax',
),
url(
r'^admin-column-delete-ajax/$',
'foundry.admin_views.column_delete_ajax',
{},
name='admin-column-delete-ajax',
),
url(
r'^admin-tile-delete-ajax/$',
'foundry.admin_views.tile_delete_ajax',
{},
name='admin-tile-delete-ajax',
),
url(
r'^admin-persist-sort-ajax/$',
'foundry.admin_views.persist_sort_ajax',
{},
name='admin-persist-sort-ajax',
),
url(
r'^admin-remove-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.remove_comment',
{},
name='admin-remove-comment'
),
url(
r'^admin-allow-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.allow_comment',
{},
name='admin-allow-comment'
),
)
# Praekelt-maintained Jmbo packages which are optional
if "banner" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^banner/', include('banner.urls')))
if "chart" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^chart/', include('chart.urls')))
if "competition" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^competition/', include('competition.urls')))
if "downloads" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^downloads/', include('downloads.urls')))
if "friends" in settings.INSTALLED_APPS:
# Friends has a fancy member detail page and needs to resolve first
urlpatterns.insert(1, url(r'^friends/', include('friends.urls')))
if "gallery" in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^gallery/', include('gallery.urls')),
(r'^admin/', include('gallery.admin_urls')),
)
if "jmbo_calendar" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^calendar/', include('jmbo_calendar.urls')))
if "jmbo_twitter" in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^jmbo_twitter', include('jmbo_twitter.urls')),
(r'^admin/', include('jmbo_twitter.admin_urls')),
)
if "music" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^music/', include('music.urls')))
if "poll" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^poll/', include('poll.urls')))
if "show" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^show/', include('show.urls')))
if "video" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^video/', include('video.urls')))
if "jmbo_sitemap" in settings.INSTALLED_APPS:
from jmbo_sitemap import sitemaps
from jmbo_sitemap.views import sitemap, SitemapHTMLView
urlpatterns += patterns(
'',
        # An unidentified issue with the Jmbo URLPatternItem class means that
        # (r'^', include('jmbo_sitemap.urls')) causes an error. Use a workaround.
url(
r'^sitemap\.xml$',
sitemap,
{'sitemaps': sitemaps},
name='sitemap'
),
url(
r'^sitemap/$',
SitemapHTMLView.as_view(),
name='html-sitemap'
),
)
# Add api last because all resources are registered at this point
urlpatterns += patterns('', (r'^api/', include(v1_api.urls)))
urlpatterns += staticfiles_urlpatterns()
# Flatpages must be last
urlpatterns += patterns('', (r'^', include('django.contrib.flatpages.urls')))
handler500 = 'foundry.views.server_error'
if settings.DEBUG:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
)
| praekelt/jmbo-foundry | foundry/urls.py | Python | bsd-3-clause | 12,605 |
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An implementation of the ReplicationConfig proto interface."""
from __future__ import print_function
import json
import os
import shutil
import sys
from chromite.api.gen.config import replication_config_pb2
from chromite.lib import constants
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.utils import field_mask_util
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
def _ValidateFileReplicationRule(rule):
"""Raises an error if a FileReplicationRule is invalid.
  For example, checks that destination_fields is specified when the rule uses
  REPLICATION_TYPE_FILTER.
Args:
rule: (FileReplicationRule) The rule to validate.
"""
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_FILTER:
raise ValueError(
'Rule for JSON source %s must use REPLICATION_TYPE_FILTER.' %
rule.source_path)
elif rule.file_type == replication_config_pb2.FILE_TYPE_OTHER:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_COPY:
raise ValueError('Rule for source %s must use REPLICATION_TYPE_COPY.' %
rule.source_path)
else:
raise NotImplementedError('Replicate not implemented for file type %s' %
rule.file_type)
if rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY:
if rule.destination_fields.paths:
raise ValueError(
'Rule with REPLICATION_TYPE_COPY cannot use destination_fields.')
elif rule.replication_type == replication_config_pb2.REPLICATION_TYPE_FILTER:
if not rule.destination_fields.paths:
raise ValueError(
'Rule with REPLICATION_TYPE_FILTER must use destination_fields.')
else:
raise NotImplementedError(
'Replicate not implemented for replication type %s' %
rule.replication_type)
if os.path.isabs(rule.source_path) or os.path.isabs(rule.destination_path):
raise ValueError(
'Only paths relative to the source root are allowed. In rule: %s' %
rule)
def _ApplyStringReplacementRules(destination_path, rules):
"""Read the file at destination path, apply rules, and write a new file.
Args:
destination_path: (str) Path to the destination file to read. The new file
will also be written at this path.
rules: (list[StringReplacementRule]) Rules to apply. Must not be empty.
"""
assert rules
with open(destination_path, 'r') as f:
dst_data = f.read()
for string_replacement_rule in rules:
dst_data = dst_data.replace(string_replacement_rule.before,
string_replacement_rule.after)
with open(destination_path, 'w') as f:
f.write(dst_data)
def Replicate(replication_config):
"""Run the replication described in replication_config.
Args:
replication_config: (ReplicationConfig) Describes the replication to run.
"""
# Validate all rules before any of them are run, to decrease chance of ending
# with a partial replication.
for rule in replication_config.file_replication_rules:
_ValidateFileReplicationRule(rule)
for rule in replication_config.file_replication_rules:
logging.info('Processing FileReplicationRule: %s', rule)
src = os.path.join(constants.SOURCE_ROOT, rule.source_path)
dst = os.path.join(constants.SOURCE_ROOT, rule.destination_path)
osutils.SafeMakedirs(os.path.dirname(dst))
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
assert (rule.replication_type ==
replication_config_pb2.REPLICATION_TYPE_FILTER)
assert rule.destination_fields.paths
with open(src, 'r') as f:
source_json = json.load(f)
try:
source_device_configs = source_json['chromeos']['configs']
except KeyError:
raise NotImplementedError(
('Currently only ChromeOS Configs are supported (expected file %s '
'to have a list at "$.chromeos.configs")') % src)
destination_device_configs = []
for source_device_config in source_device_configs:
destination_device_configs.append(
field_mask_util.CreateFilteredDict(rule.destination_fields,
source_device_config))
destination_json = {'chromeos': {'configs': destination_device_configs}}
logging.info('Writing filtered JSON source to %s', dst)
with open(dst, 'w') as f:
# Use the print function, so the file ends in a newline.
print(
json.dumps(
destination_json,
sort_keys=True,
indent=2,
separators=(',', ': ')),
file=f)
else:
assert rule.file_type == replication_config_pb2.FILE_TYPE_OTHER
assert (
rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY)
assert not rule.destination_fields.paths
logging.info('Copying full file from %s to %s', src, dst)
shutil.copy2(src, dst)
if rule.string_replacement_rules:
_ApplyStringReplacementRules(dst, rule.string_replacement_rules)
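# Illustrative sketch only (never invoked by chromite itself): building a
# ReplicationConfig in code and running it. Message and field names follow
# the docstrings and usages above; the paths are made up for the example.
def _ExampleReplication():
  rule = replication_config_pb2.FileReplicationRule(
      source_path='overlays/project_config.json',
      destination_path='private-overlays/project_config.json',
      file_type=replication_config_pb2.FILE_TYPE_OTHER,
      replication_type=replication_config_pb2.REPLICATION_TYPE_COPY,
  )
  # Optionally rewrite strings in the copied file.
  replacement = rule.string_replacement_rules.add()
  replacement.before = 'public'
  replacement.after = 'private'
  Replicate(
      replication_config_pb2.ReplicationConfig(file_replication_rules=[rule]))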
| endlessm/chromium-browser | third_party/chromite/lib/replication_lib.py | Python | bsd-3-clause | 5,370 |
import math
from sympy.mpmath import *
def test_bessel():
mp.dps = 15
assert j0(1).ae(0.765197686557966551)
assert j0(pi).ae(-0.304242177644093864)
assert j0(1000).ae(0.0247866861524201746)
assert j0(-25).ae(0.0962667832759581162)
assert j1(1).ae(0.440050585744933516)
assert j1(pi).ae(0.284615343179752757)
assert j1(1000).ae(0.00472831190708952392)
assert j1(-25).ae(0.125350249580289905)
assert besselj(5,1).ae(0.000249757730211234431)
assert besselj(5,pi).ae(0.0521411843671184747)
assert besselj(5,1000).ae(0.00502540694523318607)
assert besselj(5,-25).ae(0.0660079953984229934)
assert besselj(-3,2).ae(-0.128943249474402051)
assert besselj(-4,2).ae(0.0339957198075684341)
assert besselj(3,3+2j).ae(0.424718794929639595942 + 0.625665327745785804812j)
assert besselj(0.25,4).ae(-0.374760630804249715)
assert besselj(1+2j,3+4j).ae(0.319247428741872131 - 0.669557748880365678j)
assert (besselj(3, 10**10) * 10**5).ae(0.76765081748139204023)
assert bessely(-0.5, 0) == 0
assert bessely(0.5, 0) == -inf
assert bessely(1.5, 0) == -inf
assert bessely(0,0) == -inf
assert bessely(-0.4, 0) == -inf
assert bessely(-0.6, 0) == inf
assert bessely(-1, 0) == inf
assert bessely(-1.4, 0) == inf
assert bessely(-1.6, 0) == -inf
assert bessely(-1, 0) == inf
assert bessely(-2, 0) == -inf
assert bessely(-3, 0) == inf
assert bessely(0.5, 0) == -inf
assert bessely(1, 0) == -inf
assert bessely(1.5, 0) == -inf
assert bessely(2, 0) == -inf
assert bessely(2.5, 0) == -inf
assert bessely(3, 0) == -inf
assert bessely(0,0.5).ae(-0.44451873350670655715)
assert bessely(1,0.5).ae(-1.4714723926702430692)
assert bessely(-1,0.5).ae(1.4714723926702430692)
assert bessely(3.5,0.5).ae(-138.86400867242488443)
assert bessely(0,3+4j).ae(4.6047596915010138655-8.8110771408232264208j)
assert bessely(0,j).ae(-0.26803248203398854876+1.26606587775200833560j)
assert (bessely(3, 10**10) * 10**5).ae(0.21755917537013204058)
assert besseli(0,0) == 1
assert besseli(1,0) == 0
assert besseli(2,0) == 0
assert besseli(-1,0) == 0
assert besseli(-2,0) == 0
assert besseli(0,0.5).ae(1.0634833707413235193)
assert besseli(1,0.5).ae(0.25789430539089631636)
assert besseli(-1,0.5).ae(0.25789430539089631636)
assert besseli(3.5,0.5).ae(0.00068103597085793815863)
assert besseli(0,3+4j).ae(-3.3924877882755196097-1.3239458916287264815j)
assert besseli(0,j).ae(besselj(0,1))
assert (besseli(3, 10**10) * mpf(10)**(-4342944813)).ae(4.2996028505491271875)
assert besselk(0,0) == inf
assert besselk(1,0) == inf
assert besselk(2,0) == inf
assert besselk(-1,0) == inf
assert besselk(-2,0) == inf
assert besselk(0,0.5).ae(0.92441907122766586178)
assert besselk(1,0.5).ae(1.6564411200033008937)
assert besselk(-1,0.5).ae(1.6564411200033008937)
assert besselk(3.5,0.5).ae(207.48418747548460607)
assert besselk(0,3+4j).ae(-0.007239051213570155013+0.026510418350267677215j)
assert besselk(0,j).ae(-0.13863371520405399968-1.20196971531720649914j)
assert (besselk(3, 10**10) * mpf(10)**4342944824).ae(1.1628981033356187851)
def test_hankel():
mp.dps = 15
assert hankel1(0,0.5).ae(0.93846980724081290423-0.44451873350670655715j)
assert hankel1(1,0.5).ae(0.2422684576748738864-1.4714723926702430692j)
assert hankel1(-1,0.5).ae(-0.2422684576748738864+1.4714723926702430692j)
assert hankel1(1.5,0.5).ae(0.0917016996256513026-2.5214655504213378514j)
assert hankel1(1.5,3+4j).ae(0.0066806866476728165382-0.0036684231610839127106j)
assert hankel2(0,0.5).ae(0.93846980724081290423+0.44451873350670655715j)
assert hankel2(1,0.5).ae(0.2422684576748738864+1.4714723926702430692j)
assert hankel2(-1,0.5).ae(-0.2422684576748738864-1.4714723926702430692j)
assert hankel2(1.5,0.5).ae(0.0917016996256513026+2.5214655504213378514j)
assert hankel2(1.5,3+4j).ae(14.783528526098567526-7.397390270853446512j)
def test_struve():
mp.dps = 15
assert struveh(2,3).ae(0.74238666967748318564)
assert struveh(-2.5,3).ae(0.41271003220971599344)
assert struvel(2,3).ae(1.7476573277362782744)
assert struvel(-2.5,3).ae(1.5153394466819651377)
def test_whittaker():
mp.dps = 15
assert whitm(2,3,4).ae(49.753745589025246591)
assert whitw(2,3,4).ae(14.111656223052932215)
def test_kelvin():
mp.dps = 15
assert ber(2,3).ae(0.80836846563726819091)
assert ber(3,4).ae(-0.28262680167242600233)
assert ber(-3,2).ae(-0.085611448496796363669)
assert bei(2,3).ae(-0.89102236377977331571)
assert bei(-3,2).ae(-0.14420994155731828415)
assert ker(2,3).ae(0.12839126695733458928)
assert ker(-3,2).ae(-0.29802153400559142783)
assert ker(0.5,3).ae(-0.085662378535217097524)
assert kei(2,3).ae(0.036804426134164634000)
assert kei(-3,2).ae(0.88682069845786731114)
assert kei(0.5,3).ae(0.013633041571314302948)
def test_hyper_misc():
mp.dps = 15
assert hyp0f1(1,0) == 1
assert hyp1f1(1,2,0) == 1
assert hyp1f2(1,2,3,0) == 1
assert hyp2f1(1,2,3,0) == 1
assert hyp2f2(1,2,3,4,0) == 1
assert hyp2f3(1,2,3,4,5,0) == 1
# Degenerate case: 0F0
assert hyper([],[],0) == 1
assert hyper([],[],-2).ae(exp(-2))
# Degenerate case: 1F0
assert hyper([2],[],1.5) == 4
#
assert hyp2f1((1,3),(2,3),(5,6),mpf(27)/32).ae(1.6)
assert hyp2f1((1,4),(1,2),(3,4),mpf(80)/81).ae(1.8)
assert hyp2f1((2,3),(1,1),(3,2),(2+j)/3).ae(1.327531603558679093+0.439585080092769253j)
mp.dps = 25
v = mpc('1.2282306665029814734863026', '-0.1225033830118305184672133')
assert hyper([(3,4),2+j,1],[1,5,j/3],mpf(1)/5+j/8).ae(v)
mp.dps = 15
def test_elliptic_integrals():
mp.dps = 15
assert ellipk(0).ae(pi/2)
assert ellipk(0.5).ae(gamma(0.25)**2/(4*sqrt(pi)))
assert ellipk(1) == inf
assert ellipk(1+0j) == inf
assert ellipk(-1).ae('1.3110287771460599052')
assert ellipk(-2).ae('1.1714200841467698589')
assert isinstance(ellipk(-2), mpf)
assert isinstance(ellipe(-2), mpf)
assert ellipk(-50).ae('0.47103424540873331679')
mp.dps = 30
n1 = +fraction(99999,100000)
n2 = +fraction(100001,100000)
mp.dps = 15
assert ellipk(n1).ae('7.1427724505817781901')
assert ellipk(n2).ae(mpc('7.1427417367963090109', '-1.5707923998261688019'))
assert ellipe(n1).ae('1.0000332138990829170')
v = ellipe(n2)
assert v.real.ae('0.999966786328145474069137')
assert (v.imag*10**6).ae('7.853952181727432')
assert ellipk(2).ae(mpc('1.3110287771460599052', '-1.3110287771460599052'))
assert ellipk(50).ae(mpc('0.22326753950210985451', '-0.47434723226254522087'))
assert ellipk(3+4j).ae(mpc('0.91119556380496500866', '0.63133428324134524388'))
assert ellipk(3-4j).ae(mpc('0.91119556380496500866', '-0.63133428324134524388'))
assert ellipk(-3+4j).ae(mpc('0.95357894880405122483', '0.23093044503746114444'))
assert ellipk(-3-4j).ae(mpc('0.95357894880405122483', '-0.23093044503746114444'))
assert isnan(ellipk(nan))
assert isnan(ellipe(nan))
assert ellipk(inf) == 0
assert isinstance(ellipk(inf), mpc)
assert ellipk(-inf) == 0
assert ellipk(1+0j) == inf
assert ellipe(0).ae(pi/2)
assert ellipe(0.5).ae(pi**(mpf(3)/2)/gamma(0.25)**2 +gamma(0.25)**2/(8*sqrt(pi)))
assert ellipe(1) == 1
assert ellipe(1+0j) == 1
assert ellipe(inf) == mpc(0,inf)
assert ellipe(-inf) == inf
assert ellipe(3+4j).ae(1.4995535209333469543-1.5778790079127582745j)
assert ellipe(3-4j).ae(1.4995535209333469543+1.5778790079127582745j)
assert ellipe(-3+4j).ae(2.5804237855343377803-0.8306096791000413778j)
assert ellipe(-3-4j).ae(2.5804237855343377803+0.8306096791000413778j)
assert ellipe(2).ae(0.59907011736779610372+0.59907011736779610372j)
assert ellipe('1e-1000000000').ae(pi/2)
assert ellipk('1e-1000000000').ae(pi/2)
assert ellipe(-pi).ae(2.4535865983838923)
mp.dps = 50
assert ellipk(1/pi).ae('1.724756270009501831744438120951614673874904182624739673')
assert ellipe(1/pi).ae('1.437129808135123030101542922290970050337425479058225712')
assert ellipk(-10*pi).ae('0.5519067523886233967683646782286965823151896970015484512')
assert ellipe(-10*pi).ae('5.926192483740483797854383268707108012328213431657645509')
v = ellipk(pi)
assert v.real.ae('0.973089521698042334840454592642137667227167622330325225')
assert v.imag.ae('-1.156151296372835303836814390793087600271609993858798016')
v = ellipe(pi)
assert v.real.ae('0.4632848917264710404078033487934663562998345622611263332')
assert v.imag.ae('1.0637961621753130852473300451583414489944099504180510966')
mp.dps = 15
def test_exp_integrals():
mp.dps = 15
x = +e
z = e + sqrt(3)*j
assert ei(x).ae(8.21168165538361560)
assert li(x).ae(1.89511781635593676)
assert si(x).ae(1.82104026914756705)
assert ci(x).ae(0.213958001340379779)
assert shi(x).ae(4.11520706247846193)
assert chi(x).ae(4.09647459290515367)
assert fresnels(x).ae(0.437189718149787643)
assert fresnelc(x).ae(0.401777759590243012)
assert airyai(x).ae(0.0108502401568586681)
assert airybi(x).ae(8.98245748585468627)
assert ei(z).ae(3.72597969491314951 + 7.34213212314224421j)
assert li(z).ae(2.28662658112562502 + 1.50427225297269364j)
assert si(z).ae(2.48122029237669054 + 0.12684703275254834j)
assert ci(z).ae(0.169255590269456633 - 0.892020751420780353j)
assert shi(z).ae(1.85810366559344468 + 3.66435842914920263j)
assert chi(z).ae(1.86787602931970484 + 3.67777369399304159j)
assert fresnels(z/3).ae(0.034534397197008182 + 0.754859844188218737j)
assert fresnelc(z/3).ae(1.261581645990027372 + 0.417949198775061893j)
assert airyai(z).ae(-0.0162552579839056062 - 0.0018045715700210556j)
assert airybi(z).ae(-4.98856113282883371 + 2.08558537872180623j)
assert li(0) == 0.0
assert li(1) == -inf
assert li(inf) == inf
assert isinstance(li(0.7), mpf)
assert si(inf).ae(pi/2)
assert si(-inf).ae(-pi/2)
assert ci(inf) == 0
assert ci(0) == -inf
assert isinstance(ei(-0.7), mpf)
assert airyai(inf) == 0
assert airybi(inf) == inf
assert airyai(-inf) == 0
assert airybi(-inf) == 0
assert fresnels(inf) == 0.5
assert fresnelc(inf) == 0.5
assert fresnels(-inf) == -0.5
assert fresnelc(-inf) == -0.5
assert shi(0) == 0
assert shi(inf) == inf
assert shi(-inf) == -inf
assert chi(0) == -inf
assert chi(inf) == inf
def test_ei():
mp.dps = 15
assert ei(0) == -inf
assert ei(inf) == inf
assert ei(-inf) == -0.0
assert ei(20+70j).ae(6.1041351911152984397e6 - 2.7324109310519928872e6j)
# tests for the asymptotic expansion
# values checked with Mathematica ExpIntegralEi
mp.dps = 50
r = ei(20000)
s = '3.8781962825045010930273870085501819470698476975019e+8681'
assert str(r) == s
r = ei(-200)
s = '-6.8852261063076355977108174824557929738368086933303e-90'
assert str(r) == s
    r = ei(20000 + 10*j)
sre = '-3.255138234032069402493850638874410725961401274106e+8681'
sim = '-2.1081929993474403520785942429469187647767369645423e+8681'
assert str(r.real) == sre and str(r.imag) == sim
mp.dps = 15
# More asymptotic expansions
assert chi(-10**6+100j).ae('1.3077239389562548386e+434288 + 7.6808956999707408158e+434287j')
assert shi(-10**6+100j).ae('-1.3077239389562548386e+434288 - 7.6808956999707408158e+434287j')
mp.dps = 15
assert ei(10j).ae(-0.0454564330044553726+3.2291439210137706686j)
assert ei(100j).ae(-0.0051488251426104921+3.1330217936839529126j)
u = ei(fmul(10**20, j, exact=True))
assert u.real.ae(-6.4525128526578084421345e-21, abs_eps=0, rel_eps=8*eps)
assert u.imag.ae(pi)
assert ei(-10j).ae(-0.0454564330044553726-3.2291439210137706686j)
assert ei(-100j).ae(-0.0051488251426104921-3.1330217936839529126j)
u = ei(fmul(-10**20, j, exact=True))
assert u.real.ae(-6.4525128526578084421345e-21, abs_eps=0, rel_eps=8*eps)
assert u.imag.ae(-pi)
assert ei(10+10j).ae(-1576.1504265768517448+436.9192317011328140j)
u = ei(-10+10j)
assert u.real.ae(7.6698978415553488362543e-7, abs_eps=0, rel_eps=8*eps)
assert u.imag.ae(3.141595611735621062025)
def test_e1():
mp.dps = 15
assert e1(0) == inf
assert e1(inf) == 0
assert e1(-inf) == mpc(-inf, -pi)
assert e1(10j).ae(0.045456433004455372635 + 0.087551267423977430100j)
assert e1(100j).ae(0.0051488251426104921444 - 0.0085708599058403258790j)
assert e1(fmul(10**20, j, exact=True)).ae(6.4525128526578084421e-21 - 7.6397040444172830039e-21j, abs_eps=0, rel_eps=8*eps)
assert e1(-10j).ae(0.045456433004455372635 - 0.087551267423977430100j)
assert e1(-100j).ae(0.0051488251426104921444 + 0.0085708599058403258790j)
assert e1(fmul(-10**20, j, exact=True)).ae(6.4525128526578084421e-21 + 7.6397040444172830039e-21j, abs_eps=0, rel_eps=8*eps)
def test_expint():
mp.dps = 15
assert expint(0,0) == inf
assert expint(0,1).ae(1/e)
assert expint(0,1.5).ae(2/exp(1.5)/3)
assert expint(1,1).ae(-ei(-1))
assert expint(2,0).ae(1)
assert expint(3,0).ae(1/2.)
assert expint(4,0).ae(1/3.)
assert expint(-2, 0.5).ae(26/sqrt(e))
assert expint(-1,-1) == 0
assert expint(-2,-1).ae(-e)
assert expint(5.5, 0).ae(2/9.)
assert expint(2.00000001,0).ae(100000000./100000001)
assert expint(2+3j,4-j).ae(0.0023461179581675065414+0.0020395540604713669262j)
assert expint('1.01', '1e-1000').ae(99.9999999899412802)
assert expint('1.000000000001', 3.5).ae(0.00697013985754701819446)
assert expint(2,3).ae(3*ei(-3)+exp(-3))
assert (expint(10,20)*10**10).ae(0.694439055541231353)
assert expint(3,inf) == 0
assert expint(3.2,inf) == 0
assert expint(3.2+2j,inf) == 0
assert expint(1,3j).ae(-0.11962978600800032763 + 0.27785620120457163717j)
assert expint(1,3).ae(0.013048381094197037413)
assert expint(1,-3).ae(-ei(3)-pi*j)
#assert expint(3) == expint(1,3)
assert expint(1,-20).ae(-25615652.66405658882 - 3.1415926535897932385j)
assert expint(1000000,0).ae(1./999999)
assert expint(0,2+3j).ae(-0.025019798357114678171 + 0.027980439405104419040j)
assert expint(-1,2+3j).ae(-0.022411973626262070419 + 0.038058922011377716932j)
assert expint(-1.5,0) == inf
def test_trig_integrals():
mp.dps = 30
assert si(mpf(1)/1000000).ae('0.000000999999999999944444444444446111')
assert ci(mpf(1)/1000000).ae('-13.2382948930629912435014366276')
assert si(10**10).ae('1.5707963267075846569685111517747537')
assert ci(10**10).ae('-4.87506025174822653785729773959e-11')
assert si(10**100).ae(pi/2)
assert (ci(10**100)*10**100).ae('-0.372376123661276688262086695553')
assert si(-3) == -si(3)
assert ci(-3).ae(ci(3) + pi*j)
# Test complex structure
mp.dps = 15
assert mp.ci(50).ae(-0.0056283863241163054402)
assert mp.ci(50+2j).ae(-0.018378282946133067149+0.070352808023688336193j)
assert mp.ci(20j).ae(1.28078263320282943611e7+1.5707963267949j)
assert mp.ci(-2+20j).ae(-4.050116856873293505e6+1.207476188206989909e7j)
assert mp.ci(-50+2j).ae(-0.0183782829461330671+3.0712398455661049023j)
assert mp.ci(-50).ae(-0.0056283863241163054+3.1415926535897932385j)
assert mp.ci(-50-2j).ae(-0.0183782829461330671-3.0712398455661049023j)
assert mp.ci(-2-20j).ae(-4.050116856873293505e6-1.207476188206989909e7j)
assert mp.ci(-20j).ae(1.28078263320282943611e7-1.5707963267949j)
assert mp.ci(50-2j).ae(-0.018378282946133067149-0.070352808023688336193j)
assert mp.si(50).ae(1.5516170724859358947)
assert mp.si(50+2j).ae(1.497884414277228461-0.017515007378437448j)
assert mp.si(20j).ae(1.2807826332028294459e7j)
assert mp.si(-2+20j).ae(-1.20747603112735722103e7-4.050116856873293554e6j)
assert mp.si(-50+2j).ae(-1.497884414277228461-0.017515007378437448j)
assert mp.si(-50).ae(-1.5516170724859358947)
assert mp.si(-50-2j).ae(-1.497884414277228461+0.017515007378437448j)
assert mp.si(-2-20j).ae(-1.20747603112735722103e7+4.050116856873293554e6j)
assert mp.si(-20j).ae(-1.2807826332028294459e7j)
assert mp.si(50-2j).ae(1.497884414277228461+0.017515007378437448j)
assert mp.chi(50j).ae(-0.0056283863241163054+1.5707963267948966192j)
assert mp.chi(-2+50j).ae(-0.0183782829461330671+1.6411491348185849554j)
assert mp.chi(-20).ae(1.28078263320282943611e7+3.1415926535898j)
assert mp.chi(-20-2j).ae(-4.050116856873293505e6+1.20747571696809187053e7j)
assert mp.chi(-2-50j).ae(-0.0183782829461330671-1.6411491348185849554j)
assert mp.chi(-50j).ae(-0.0056283863241163054-1.5707963267948966192j)
assert mp.chi(2-50j).ae(-0.0183782829461330671-1.500443518771208283j)
assert mp.chi(20-2j).ae(-4.050116856873293505e6-1.20747603112735722951e7j)
assert mp.chi(20).ae(1.2807826332028294361e7)
assert mp.chi(2+50j).ae(-0.0183782829461330671+1.500443518771208283j)
assert mp.shi(50j).ae(1.5516170724859358947j)
assert mp.shi(-2+50j).ae(0.017515007378437448+1.497884414277228461j)
assert mp.shi(-20).ae(-1.2807826332028294459e7)
assert mp.shi(-20-2j).ae(4.050116856873293554e6-1.20747603112735722103e7j)
assert mp.shi(-2-50j).ae(0.017515007378437448-1.497884414277228461j)
assert mp.shi(-50j).ae(-1.5516170724859358947j)
assert mp.shi(2-50j).ae(-0.017515007378437448-1.497884414277228461j)
assert mp.shi(20-2j).ae(-4.050116856873293554e6-1.20747603112735722103e7j)
assert mp.shi(20).ae(1.2807826332028294459e7)
assert mp.shi(2+50j).ae(-0.017515007378437448+1.497884414277228461j)
def ae(x,y,tol=1e-12):
return abs(x-y) <= abs(y)*tol
assert fp.ci(fp.inf) == 0
assert ae(fp.ci(fp.ninf), fp.pi*1j)
assert ae(fp.si(fp.inf), fp.pi/2)
assert ae(fp.si(fp.ninf), -fp.pi/2)
assert fp.si(0) == 0
assert ae(fp.ci(50), -0.0056283863241163054402)
assert ae(fp.ci(50+2j), -0.018378282946133067149+0.070352808023688336193j)
assert ae(fp.ci(20j), 1.28078263320282943611e7+1.5707963267949j)
assert ae(fp.ci(-2+20j), -4.050116856873293505e6+1.207476188206989909e7j)
assert ae(fp.ci(-50+2j), -0.0183782829461330671+3.0712398455661049023j)
assert ae(fp.ci(-50), -0.0056283863241163054+3.1415926535897932385j)
assert ae(fp.ci(-50-2j), -0.0183782829461330671-3.0712398455661049023j)
assert ae(fp.ci(-2-20j), -4.050116856873293505e6-1.207476188206989909e7j)
assert ae(fp.ci(-20j), 1.28078263320282943611e7-1.5707963267949j)
assert ae(fp.ci(50-2j), -0.018378282946133067149-0.070352808023688336193j)
assert ae(fp.si(50), 1.5516170724859358947)
assert ae(fp.si(50+2j), 1.497884414277228461-0.017515007378437448j)
assert ae(fp.si(20j), 1.2807826332028294459e7j)
assert ae(fp.si(-2+20j), -1.20747603112735722103e7-4.050116856873293554e6j)
assert ae(fp.si(-50+2j), -1.497884414277228461-0.017515007378437448j)
assert ae(fp.si(-50), -1.5516170724859358947)
assert ae(fp.si(-50-2j), -1.497884414277228461+0.017515007378437448j)
assert ae(fp.si(-2-20j), -1.20747603112735722103e7+4.050116856873293554e6j)
assert ae(fp.si(-20j), -1.2807826332028294459e7j)
assert ae(fp.si(50-2j), 1.497884414277228461+0.017515007378437448j)
assert ae(fp.chi(50j), -0.0056283863241163054+1.5707963267948966192j)
assert ae(fp.chi(-2+50j), -0.0183782829461330671+1.6411491348185849554j)
assert ae(fp.chi(-20), 1.28078263320282943611e7+3.1415926535898j)
assert ae(fp.chi(-20-2j), -4.050116856873293505e6+1.20747571696809187053e7j)
assert ae(fp.chi(-2-50j), -0.0183782829461330671-1.6411491348185849554j)
assert ae(fp.chi(-50j), -0.0056283863241163054-1.5707963267948966192j)
assert ae(fp.chi(2-50j), -0.0183782829461330671-1.500443518771208283j)
assert ae(fp.chi(20-2j), -4.050116856873293505e6-1.20747603112735722951e7j)
assert ae(fp.chi(20), 1.2807826332028294361e7)
assert ae(fp.chi(2+50j), -0.0183782829461330671+1.500443518771208283j)
assert ae(fp.shi(50j), 1.5516170724859358947j)
assert ae(fp.shi(-2+50j), 0.017515007378437448+1.497884414277228461j)
assert ae(fp.shi(-20), -1.2807826332028294459e7)
assert ae(fp.shi(-20-2j), 4.050116856873293554e6-1.20747603112735722103e7j)
assert ae(fp.shi(-2-50j), 0.017515007378437448-1.497884414277228461j)
assert ae(fp.shi(-50j), -1.5516170724859358947j)
assert ae(fp.shi(2-50j), -0.017515007378437448-1.497884414277228461j)
assert ae(fp.shi(20-2j), -4.050116856873293554e6-1.20747603112735722103e7j)
assert ae(fp.shi(20), 1.2807826332028294459e7)
assert ae(fp.shi(2+50j), -0.017515007378437448+1.497884414277228461j)
def test_airy():
mp.dps = 15
assert (airyai(10)*10**10).ae(1.1047532552898687)
assert (airybi(10)/10**9).ae(0.45564115354822515)
assert (airyai(1000)*10**9158).ae(9.306933063179556004)
assert (airybi(1000)/10**9154).ae(5.4077118391949465477)
assert airyai(-1000).ae(0.055971895773019918842)
assert airybi(-1000).ae(-0.083264574117080633012)
assert (airyai(100+100j)*10**188).ae(2.9099582462207032076 + 2.353013591706178756j)
assert (airybi(100+100j)/10**185).ae(1.7086751714463652039 - 3.1416590020830804578j)
def test_hyper_0f1():
mp.dps = 15
v = 8.63911136507950465
assert hyper([],[(1,3)],1.5).ae(v)
assert hyper([],[1/3.],1.5).ae(v)
assert hyp0f1(1/3.,1.5).ae(v)
assert hyp0f1((1,3),1.5).ae(v)
# Asymptotic expansion
assert hyp0f1(3,1e9).ae('4.9679055380347771271e+27455')
assert hyp0f1(3,1e9j).ae('-2.1222788784457702157e+19410 + 5.0840597555401854116e+19410j')
def test_hyper_1f1():
mp.dps = 15
v = 1.2917526488617656673
assert hyper([(1,2)],[(3,2)],0.7).ae(v)
assert hyper([(1,2)],[(3,2)],0.7+0j).ae(v)
assert hyper([0.5],[(3,2)],0.7).ae(v)
assert hyper([0.5],[1.5],0.7).ae(v)
assert hyper([0.5],[(3,2)],0.7+0j).ae(v)
assert hyper([0.5],[1.5],0.7+0j).ae(v)
assert hyper([(1,2)],[1.5+0j],0.7).ae(v)
assert hyper([0.5+0j],[1.5],0.7).ae(v)
assert hyper([0.5+0j],[1.5+0j],0.7+0j).ae(v)
assert hyp1f1(0.5,1.5,0.7).ae(v)
assert hyp1f1((1,2),1.5,0.7).ae(v)
# Asymptotic expansion
assert hyp1f1(2,3,1e10).ae('2.1555012157015796988e+4342944809')
assert (hyp1f1(2,3,1e10j)*10**10).ae(-0.97501205020039745852 - 1.7462392454512132074j)
# Shouldn't use asymptotic expansion
assert hyp1f1(-2, 1, 10000).ae(49980001)
# Bug
assert hyp1f1(1j,fraction(1,3),0.415-69.739j).ae(25.857588206024346592 + 15.738060264515292063j)
def test_hyper_2f1():
mp.dps = 15
v = 1.0652207633823291032
assert hyper([(1,2), (3,4)], [2], 0.3).ae(v)
assert hyper([(1,2), 0.75], [2], 0.3).ae(v)
assert hyper([0.5, 0.75], [2.0], 0.3).ae(v)
assert hyper([0.5, 0.75], [2.0], 0.3+0j).ae(v)
assert hyper([0.5+0j, (3,4)], [2.0], 0.3+0j).ae(v)
assert hyper([0.5+0j, (3,4)], [2.0], 0.3).ae(v)
assert hyper([0.5, (3,4)], [2.0+0j], 0.3).ae(v)
assert hyper([0.5+0j, 0.75+0j], [2.0+0j], 0.3+0j).ae(v)
v = 1.09234681096223231717 + 0.18104859169479360380j
assert hyper([(1,2),0.75+j], [2], 0.5).ae(v)
assert hyper([0.5,0.75+j], [2.0], 0.5).ae(v)
assert hyper([0.5,0.75+j], [2.0], 0.5+0j).ae(v)
assert hyper([0.5,0.75+j], [2.0+0j], 0.5+0j).ae(v)
v = 0.9625 - 0.125j
assert hyper([(3,2),-1],[4], 0.1+j/3).ae(v)
assert hyper([1.5,-1.0],[4], 0.1+j/3).ae(v)
assert hyper([1.5,-1.0],[4+0j], 0.1+j/3).ae(v)
assert hyper([1.5+0j,-1.0+0j],[4+0j], 0.1+j/3).ae(v)
v = 1.02111069501693445001 - 0.50402252613466859521j
assert hyper([(2,10),(3,10)],[(4,10)],1.5).ae(v)
assert hyper([0.2,(3,10)],[0.4+0j],1.5).ae(v)
assert hyper([0.2,(3,10)],[0.4+0j],1.5+0j).ae(v)
v = 0.76922501362865848528 + 0.32640579593235886194j
assert hyper([(2,10),(3,10)],[(4,10)],4+2j).ae(v)
assert hyper([0.2,(3,10)],[0.4+0j],4+2j).ae(v)
assert hyper([0.2,(3,10)],[(4,10)],4+2j).ae(v)
def test_hyper_2f1_hard():
mp.dps = 15
# Singular cases
assert hyp2f1(2,-1,-1,3).ae(0.25)
assert hyp2f1(2,-2,-2,3).ae(0.25)
assert hyp2f1(2,-1,-1,3,eliminate=False) == 7
assert hyp2f1(2,-2,-2,3,eliminate=False) == 34
assert hyp2f1(2,-2,-3,3) == 14
assert hyp2f1(2,-3,-2,3) == inf
assert hyp2f1(2,-1.5,-1.5,3) == 0.25
assert hyp2f1(1,2,3,0) == 1
assert hyp2f1(0,1,0,0) == 1
assert hyp2f1(0,0,0,0) == 1
assert isnan(hyp2f1(1,1,0,0))
assert hyp2f1(2,-1,-5, 0.25+0.25j).ae(1.1+0.1j)
assert hyp2f1(2,-5,-5, 0.25+0.25j, eliminate=False).ae(163./128 + 125./128*j)
assert hyp2f1(0.7235, -1, -5, 0.3).ae(1.04341)
assert hyp2f1(0.7235, -5, -5, 0.3, eliminate=False).ae(1.2939225017815903812)
assert hyp2f1(-1,-2,4,1) == 1.5
assert hyp2f1(1,2,-3,1) == inf
assert hyp2f1(-2,-2,1,1) == 6
assert hyp2f1(1,-2,-4,1).ae(5./3)
assert hyp2f1(0,-6,-4,1) == 1
assert hyp2f1(0,-3,-4,1) == 1
assert hyp2f1(0,0,0,1) == 1
assert hyp2f1(1,0,0,1,eliminate=False) == 1
assert hyp2f1(1,1,0,1) == inf
assert hyp2f1(1,-6,-4,1) == inf
assert hyp2f1(-7.2,-0.5,-4.5,1) == 0
assert hyp2f1(-7.2,-1,-2,1).ae(-2.6)
assert hyp2f1(1,-0.5,-4.5, 1) == inf
assert hyp2f1(1,0.5,-4.5, 1) == -inf
# Check evaluation on / close to unit circle
z = exp(j*pi/3)
w = (nthroot(2,3)+1)*exp(j*pi/12)/nthroot(3,4)**3
assert hyp2f1('1/2','1/6','1/3', z).ae(w)
assert hyp2f1('1/2','1/6','1/3', z.conjugate()).ae(w.conjugate())
assert hyp2f1(0.25, (1,3), 2, '0.999').ae(1.06826449496030635)
assert hyp2f1(0.25, (1,3), 2, '1.001').ae(1.06867299254830309446-0.00001446586793975874j)
assert hyp2f1(0.25, (1,3), 2, -1).ae(0.96656584492524351673)
assert hyp2f1(0.25, (1,3), 2, j).ae(0.99041766248982072266+0.03777135604180735522j)
assert hyp2f1(2,3,5,'0.99').ae(27.699347904322690602)
assert hyp2f1((3,2),-0.5,3,'0.99').ae(0.68403036843911661388)
assert hyp2f1(2,3,5,1j).ae(0.37290667145974386127+0.59210004902748285917j)
assert fsum([hyp2f1((7,10),(2,3),(-1,2), 0.95*exp(j*k)) for k in range(1,15)]).ae(52.851400204289452922+6.244285013912953225j)
assert fsum([hyp2f1((7,10),(2,3),(-1,2), 1.05*exp(j*k)) for k in range(1,15)]).ae(54.506013786220655330-3.000118813413217097j)
assert fsum([hyp2f1((7,10),(2,3),(-1,2), exp(j*k)) for k in range(1,15)]).ae(55.792077935955314887+1.731986485778500241j)
assert hyp2f1(2,2.5,-3.25,0.999).ae(218373932801217082543180041.33)
# Branches
assert hyp2f1(1,1,2,1.01).ae(4.5595744415723676911-3.1104877758314784539j)
assert hyp2f1(1,1,2,1.01+0.1j).ae(2.4149427480552782484+1.4148224796836938829j)
assert hyp2f1(1,1,2,3+4j).ae(0.14576709331407297807+0.48379185417980360773j)
assert hyp2f1(1,1,2,4).ae(-0.27465307216702742285 - 0.78539816339744830962j)
assert hyp2f1(1,1,2,-4).ae(0.40235947810852509365)
# Other:
# Cancellation with a large parameter involved (bug reported on sage-devel)
assert hyp2f1(112, (51,10), (-9,10), -0.99999).ae(-1.6241361047970862961e-24, abs_eps=0, rel_eps=eps*16)
def test_hyper_3f2_etc():
assert hyper([1,2,3],[1.5,8],-1).ae(0.67108992351533333030)
assert hyper([1,2,3,4],[5,6,7], -1).ae(0.90232988035425506008)
assert hyper([1,2,3],[1.25,5], 1).ae(28.924181329701905701)
assert hyper([1,2,3,4],[5,6,7],5).ae(1.5192307344006649499-1.1529845225075537461j)
assert hyper([1,2,3,4,5],[6,7,8,9],-1).ae(0.96288759462882357253)
assert hyper([1,2,3,4,5],[6,7,8,9],1).ae(1.0428697385885855841)
assert hyper([1,2,3,4,5],[6,7,8,9],5).ae(1.33980653631074769423-0.07143405251029226699j)
assert hyper([1,2.79,3.08,4.37],[5.2,6.1,7.3],5).ae(1.0996321464692607231-1.7748052293979985001j)
assert hyper([1,1,1],[1,2],1) == inf
assert hyper([1,1,1],[2,(101,100)],1).ae(100.01621213528313220)
# slow -- covered by doctests
#assert hyper([1,1,1],[2,3],0.9999).ae(1.2897972005319693905)
def test_hyper_u():
mp.dps = 15
assert hyperu(2,-3,0).ae(0.05)
assert hyperu(2,-3.5,0).ae(4./99)
assert hyperu(2,0,0) == 0.5
assert hyperu(-5,1,0) == -120
assert hyperu(-5,2,0) == inf
assert hyperu(-5,-2,0) == 0
assert hyperu(7,7,3).ae(0.00014681269365593503986) #exp(3)*gammainc(-6,3)
assert hyperu(2,-3,4).ae(0.011836478100271995559)
assert hyperu(3,4,5).ae(1./125)
assert hyperu(2,3,0.0625) == 256
assert hyperu(-1,2,0.25+0.5j) == -1.75+0.5j
assert hyperu(0.5,1.5,7.25).ae(2/sqrt(29))
assert hyperu(2,6,pi).ae(0.55804439825913399130)
assert (hyperu((3,2),8,100+201j)*10**4).ae(-0.3797318333856738798 - 2.9974928453561707782j)
assert (hyperu((5,2),(-1,2),-5000)*10**10).ae(-5.6681877926881664678j)
# XXX: fails because of undetected cancellation in low level series code
# Alternatively: could use asymptotic series here, if convergence test
# tweaked back to recognize this one
#assert (hyperu((5,2),(-1,2),-500)*10**7).ae(-1.82526906001593252847j)
def test_hyper_2f0():
mp.dps = 15
assert hyper([1,2],[],3) == hyp2f0(1,2,3)
assert hyp2f0(2,3,7).ae(0.0116108068639728714668 - 0.0073727413865865802130j)
assert hyp2f0(2,3,0) == 1
assert hyp2f0(0,0,0) == 1
assert hyp2f0(-1,-1,1).ae(2)
assert hyp2f0(-4,1,1.5).ae(62.5)
assert hyp2f0(-4,1,50).ae(147029801)
assert hyp2f0(-4,1,0.0001).ae(0.99960011997600240000)
assert hyp2f0(0.5,0.25,0.001).ae(1.0001251174078538115)
assert hyp2f0(0.5,0.25,3+4j).ae(0.85548875824755163518 + 0.21636041283392292973j)
# Important: cancellation check
assert hyp2f0((1,6),(5,6),-0.02371708245126284498).ae(0.996785723120804309)
# Should be exact; polynomial case
assert hyp2f0(-2,1,0.5+0.5j) == 0
assert hyp2f0(1,-2,0.5+0.5j) == 0
# There used to be a bug in thresholds that made one of the following hang
for d in [15, 50, 80]:
mp.dps = d
assert hyp2f0(1.5, 0.5, 0.009).ae('1.006867007239309717945323585695344927904000945829843527398772456281301440034218290443367270629519483 + 1.238277162240704919639384945859073461954721356062919829456053965502443570466701567100438048602352623e-46j')
def test_hyper_1f2():
mp.dps = 15
assert hyper([1],[2,3],4) == hyp1f2(1,2,3,4)
a1,b1,b2 = (1,10),(2,3),1./16
assert hyp1f2(a1,b1,b2,10).ae(298.7482725554557568)
assert hyp1f2(a1,b1,b2,100).ae(224128961.48602947604)
assert hyp1f2(a1,b1,b2,1000).ae(1.1669528298622675109e+27)
assert hyp1f2(a1,b1,b2,10000).ae(2.4780514622487212192e+86)
assert hyp1f2(a1,b1,b2,100000).ae(1.3885391458871523997e+274)
assert hyp1f2(a1,b1,b2,1000000).ae('9.8851796978960318255e+867')
assert hyp1f2(a1,b1,b2,10**7).ae('1.1505659189516303646e+2746')
assert hyp1f2(a1,b1,b2,10**8).ae('1.4672005404314334081e+8685')
assert hyp1f2(a1,b1,b2,10**20).ae('3.6888217332150976493e+8685889636')
assert hyp1f2(a1,b1,b2,10*j).ae(-16.163252524618572878 - 44.321567896480184312j)
assert hyp1f2(a1,b1,b2,100*j).ae(61938.155294517848171 + 637349.45215942348739j)
assert hyp1f2(a1,b1,b2,1000*j).ae(8455057657257695958.7 + 6261969266997571510.6j)
assert hyp1f2(a1,b1,b2,10000*j).ae(-8.9771211184008593089e+60 + 4.6550528111731631456e+59j)
assert hyp1f2(a1,b1,b2,100000*j).ae(2.6398091437239324225e+193 + 4.1658080666870618332e+193j)
assert hyp1f2(a1,b1,b2,1000000*j).ae('3.5999042951925965458e+613 + 1.5026014707128947992e+613j')
assert hyp1f2(a1,b1,b2,10**7*j).ae('-8.3208715051623234801e+1939 - 3.6752883490851869429e+1941j')
assert hyp1f2(a1,b1,b2,10**8*j).ae('2.0724195707891484454e+6140 - 1.3276619482724266387e+6141j')
assert hyp1f2(a1,b1,b2,10**20*j).ae('-1.1734497974795488504e+6141851462 + 1.1498106965385471542e+6141851462j')
def test_hyper_2f3():
mp.dps = 15
assert hyper([1,2],[3,4,5],6) == hyp2f3(1,2,3,4,5,6)
a1,a2,b1,b2,b3 = (1,10),(2,3),(3,10), 2, 1./16
# Check asymptotic expansion
assert hyp2f3(a1,a2,b1,b2,b3,10).ae(128.98207160698659976)
assert hyp2f3(a1,a2,b1,b2,b3,1000).ae(6.6309632883131273141e25)
assert hyp2f3(a1,a2,b1,b2,b3,10000).ae(4.6863639362713340539e84)
assert hyp2f3(a1,a2,b1,b2,b3,100000).ae(8.6632451236103084119e271)
assert hyp2f3(a1,a2,b1,b2,b3,10**6).ae('2.0291718386574980641e865')
assert hyp2f3(a1,a2,b1,b2,b3,10**7).ae('7.7639836665710030977e2742')
assert hyp2f3(a1,a2,b1,b2,b3,10**8).ae('3.2537462584071268759e8681')
assert hyp2f3(a1,a2,b1,b2,b3,10**20).ae('1.2966030542911614163e+8685889627')
assert hyp2f3(a1,a2,b1,b2,b3,10*j).ae(-18.551602185587547854 - 13.348031097874113552j)
assert hyp2f3(a1,a2,b1,b2,b3,100*j).ae(78634.359124504488695 + 74459.535945281973996j)
assert hyp2f3(a1,a2,b1,b2,b3,1000*j).ae(597682550276527901.59 - 65136194809352613.078j)
assert hyp2f3(a1,a2,b1,b2,b3,10000*j).ae(-1.1779696326238582496e+59 + 1.2297607505213133872e+59j)
assert hyp2f3(a1,a2,b1,b2,b3,100000*j).ae(2.9844228969804380301e+191 + 7.5587163231490273296e+190j)
assert hyp2f3(a1,a2,b1,b2,b3,1000000*j).ae('7.4859161049322370311e+610 - 2.8467477015940090189e+610j')
assert hyp2f3(a1,a2,b1,b2,b3,10**7*j).ae('-1.7477645579418800826e+1938 - 1.7606522995808116405e+1938j')
assert hyp2f3(a1,a2,b1,b2,b3,10**8*j).ae('-1.6932731942958401784e+6137 - 2.4521909113114629368e+6137j')
assert hyp2f3(a1,a2,b1,b2,b3,10**20*j).ae('-2.0988815677627225449e+6141851451 + 5.7708223542739208681e+6141851452j')
def test_hyper_2f2():
mp.dps = 15
assert hyper([1,2],[3,4],5) == hyp2f2(1,2,3,4,5)
a1,a2,b1,b2 = (3,10),4,(1,2),1./16
assert hyp2f2(a1,a2,b1,b2,10).ae(448225936.3377556696)
assert hyp2f2(a1,a2,b1,b2,10000).ae('1.2012553712966636711e+4358')
assert hyp2f2(a1,a2,b1,b2,-20000).ae(-0.04182343755661214626)
assert hyp2f2(a1,a2,b1,b2,10**20).ae('1.1148680024303263661e+43429448190325182840')
def test_orthpoly():
mp.dps = 15
assert jacobi(-4,2,3,0.7).ae(22800./4913)
assert jacobi(3,2,4,5.5) == 4133.125
assert jacobi(1.5,5/6.,4,0).ae(-1.0851951434075508417)
assert jacobi(-2, 1, 2, 4).ae(-0.16)
assert jacobi(2, -1, 2.5, 4).ae(34.59375)
#assert jacobi(2, -1, 2, 4) == 28.5
assert legendre(5, 7) == 129367
assert legendre(0.5,0).ae(0.53935260118837935667)
assert legendre(-1,-1) == 1
assert legendre(0,-1) == 1
assert legendre(0, 1) == 1
assert legendre(1, -1) == -1
assert legendre(7, 1) == 1
assert legendre(7, -1) == -1
assert legendre(8,1.5).ae(15457523./32768)
assert legendre(j,-j).ae(2.4448182735671431011 + 0.6928881737669934843j)
assert chebyu(5,1) == 6
assert chebyt(3,2) == 26
assert legendre(3.5,-1) == inf
assert legendre(4.5,-1) == -inf
assert legendre(3.5+1j,-1) == mpc(inf,inf)
assert legendre(4.5+1j,-1) == mpc(-inf,-inf)
assert laguerre(4, -2, 3).ae(-1.125)
assert laguerre(3, 1+j, 0.5).ae(0.2291666666666666667 + 2.5416666666666666667j)
def test_hermite():
mp.dps = 15
assert hermite(-2, 0).ae(0.5)
assert hermite(-1, 0).ae(0.88622692545275801365)
assert hermite(0, 0).ae(1)
assert hermite(1, 0) == 0
assert hermite(2, 0).ae(-2)
assert hermite(0, 2).ae(1)
assert hermite(1, 2).ae(4)
assert hermite(1, -2).ae(-4)
assert hermite(2, -2).ae(14)
assert hermite(0.5, 0).ae(0.69136733903629335053)
assert hermite(9, 0) == 0
assert hermite(4,4).ae(3340)
assert hermite(3,4).ae(464)
assert hermite(-4,4).ae(0.00018623860287512396181)
assert hermite(-3,4).ae(0.0016540169879668766270)
assert hermite(9, 2.5j).ae(13638725j)
assert hermite(9, -2.5j).ae(-13638725j)
assert hermite(9, 100).ae(511078883759363024000)
assert hermite(9, -100).ae(-511078883759363024000)
assert hermite(9, 100j).ae(512922083920643024000j)
assert hermite(9, -100j).ae(-512922083920643024000j)
assert hermite(-9.5, 2.5j).ae(-2.9004951258126778174e-6 + 1.7601372934039951100e-6j)
assert hermite(-9.5, -2.5j).ae(-2.9004951258126778174e-6 - 1.7601372934039951100e-6j)
assert hermite(-9.5, 100).ae(1.3776300722767084162e-22, abs_eps=0, rel_eps=eps)
assert hermite(-9.5, -100).ae('1.3106082028470671626e4355')
assert hermite(-9.5, 100j).ae(-9.7900218581864768430e-23 - 9.7900218581864768430e-23j, abs_eps=0, rel_eps=eps)
assert hermite(-9.5, -100j).ae(-9.7900218581864768430e-23 + 9.7900218581864768430e-23j, abs_eps=0, rel_eps=eps)
assert hermite(2+3j, -1-j).ae(851.3677063883687676 - 1496.4373467871007997j)
def test_gegenbauer():
mp.dps = 15
assert gegenbauer(1,2,3).ae(12)
assert gegenbauer(2,3,4).ae(381)
assert gegenbauer(0,0,0) == 0
assert gegenbauer(2,-1,3) == 0
assert gegenbauer(-7, 0.5, 3).ae(8989)
assert gegenbauer(1, -0.5, 3).ae(-3)
assert gegenbauer(1, -1.5, 3).ae(-9)
assert gegenbauer(1, -0.5, 3).ae(-3)
assert gegenbauer(-0.5, -0.5, 3).ae(-2.6383553159023906245)
assert gegenbauer(2+3j, 1-j, 3+4j).ae(14.880536623203696780 + 20.022029711598032898j)
#assert gegenbauer(-2, -0.5, 3).ae(-12)
def test_legenp():
mp.dps = 15
assert legenp(2,0,4) == legendre(2,4)
assert legenp(-2, -1, 0.5).ae(0.43301270189221932338)
assert legenp(-2, -1, 0.5, type=3).ae(0.43301270189221932338j)
assert legenp(-2, 1, 0.5).ae(-0.86602540378443864676)
assert legenp(2+j, 3+4j, -j).ae(134742.98773236786148 + 429782.72924463851745j)
assert legenp(2+j, 3+4j, -j, type=3).ae(802.59463394152268507 - 251.62481308942906447j)
assert legenp(2,4,3).ae(0)
assert legenp(2,4,3,type=3).ae(0)
assert legenp(2,1,0.5).ae(-1.2990381056766579701)
assert legenp(2,1,0.5,type=3).ae(1.2990381056766579701j)
assert legenp(3,2,3).ae(-360)
assert legenp(3,3,3).ae(240j*2**0.5)
assert legenp(3,4,3).ae(0)
assert legenp(0,0.5,2).ae(0.52503756790433198939 - 0.52503756790433198939j)
assert legenp(-1,-0.5,2).ae(0.60626116232846498110 + 0.60626116232846498110j)
assert legenp(-2,0.5,2).ae(1.5751127037129959682 - 1.5751127037129959682j)
assert legenp(-2,0.5,-0.5).ae(-0.85738275810499171286)
def test_legenq():
mp.dps = 15
f = legenq
# Evaluation at poles
assert isnan(f(3,2,1))
assert isnan(f(3,2,-1))
assert isnan(f(3,2,1,type=3))
assert isnan(f(3,2,-1,type=3))
# Evaluation at 0
assert f(0,1,0,type=2).ae(-1)
assert f(-2,2,0,type=2,zeroprec=200).ae(0)
assert f(1.5,3,0,type=2).ae(-2.2239343475841951023)
assert f(0,1,0,type=3).ae(j)
assert f(-2,2,0,type=3,zeroprec=200).ae(0)
assert f(1.5,3,0,type=3).ae(2.2239343475841951022*(1-1j))
# Standard case, degree 0
assert f(0,0,-1.5).ae(-0.8047189562170501873 + 1.5707963267948966192j)
assert f(0,0,-0.5).ae(-0.54930614433405484570)
assert f(0,0,0,zeroprec=200).ae(0)
assert f(0,0,0.5).ae(0.54930614433405484570)
assert f(0,0,1.5).ae(0.8047189562170501873 - 1.5707963267948966192j)
assert f(0,0,-1.5,type=3).ae(-0.80471895621705018730)
assert f(0,0,-0.5,type=3).ae(-0.5493061443340548457 - 1.5707963267948966192j)
assert f(0,0,0,type=3).ae(-1.5707963267948966192j)
assert f(0,0,0.5,type=3).ae(0.5493061443340548457 - 1.5707963267948966192j)
assert f(0,0,1.5,type=3).ae(0.80471895621705018730)
# Standard case, degree 1
assert f(1,0,-1.5).ae(0.2070784343255752810 - 2.3561944901923449288j)
assert f(1,0,-0.5).ae(-0.72534692783297257715)
assert f(1,0,0).ae(-1)
assert f(1,0,0.5).ae(-0.72534692783297257715)
assert f(1,0,1.5).ae(0.2070784343255752810 - 2.3561944901923449288j)
# Standard case, degree 2
assert f(2,0,-1.5).ae(-0.0635669991240192885 + 4.5160394395353277803j)
assert f(2,0,-0.5).ae(0.81866326804175685571)
assert f(2,0,0,zeroprec=200).ae(0)
assert f(2,0,0.5).ae(-0.81866326804175685571)
assert f(2,0,1.5).ae(0.0635669991240192885 - 4.5160394395353277803j)
# Misc orders and degrees
assert f(2,3,1.5,type=2).ae(-5.7243340223994616228j)
assert f(2,3,1.5,type=3).ae(-5.7243340223994616228)
assert f(2,3,0.5,type=2).ae(-12.316805742712016310)
assert f(2,3,0.5,type=3).ae(-12.316805742712016310j)
assert f(2,3,-1.5,type=2).ae(-5.7243340223994616228j)
assert f(2,3,-1.5,type=3).ae(5.7243340223994616228)
assert f(2,3,-0.5,type=2).ae(-12.316805742712016310)
assert f(2,3,-0.5,type=3).ae(-12.316805742712016310j)
assert f(2+3j, 3+4j, 0.5, type=3).ae(0.0016119404873235186807 - 0.0005885900510718119836j)
assert f(2+3j, 3+4j, -1.5, type=3).ae(0.008451400254138808670 + 0.020645193304593235298j)
assert f(-2.5,1,-1.5).ae(3.9553395527435335749j)
assert f(-2.5,1,-0.5).ae(1.9290561746445456908)
assert f(-2.5,1,0).ae(1.2708196271909686299)
assert f(-2.5,1,0.5).ae(-0.31584812990742202869)
assert f(-2.5,1,1.5).ae(-3.9553395527435335742 + 0.2993235655044701706j)
assert f(-2.5,1,-1.5,type=3).ae(0.29932356550447017254j)
assert f(-2.5,1,-0.5,type=3).ae(-0.3158481299074220287 - 1.9290561746445456908j)
assert f(-2.5,1,0,type=3).ae(1.2708196271909686292 - 1.2708196271909686299j)
assert f(-2.5,1,0.5,type=3).ae(1.9290561746445456907 + 0.3158481299074220287j)
assert f(-2.5,1,1.5,type=3).ae(-0.29932356550447017254)
def test_agm():
mp.dps = 15
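# agm(a, b) is the arithmetic-geometric mean; agm(b) is shorthand for
# agm(1, b), and 1/agm(1, sqrt(2)) is Gauss's constant. A minimal
# reference sketch of the iteration itself (assuming only mpf/sqrt from
# this module): each step replaces (a, b) by the arithmetic and geometric
# means and converges quadratically.
a0, b0 = mpf(1), sqrt(2)
for _ in range(6):
    a0, b0 = (a0+b0)/2, sqrt(a0*b0)
assert a0.ae(agm(1, sqrt(2)))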
assert agm(0,0) == 0
assert agm(0,1) == 0
assert agm(1,1) == 1
assert agm(7,7) == 7
assert agm(j,j) == j
assert (1/agm(1,sqrt(2))).ae(0.834626841674073186)
assert agm(1,2).ae(1.4567910310469068692)
assert agm(1,3).ae(1.8636167832448965424)
assert agm(1,j).ae(0.599070117367796104+0.599070117367796104j)
assert agm(2) == agm(1,2)
assert agm(-3,4).ae(0.63468509766550907+1.3443087080896272j)
def test_gammainc():
mp.dps = 15
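# gammainc(z, a) is the upper incomplete gamma function Gamma(z, a),
# the integral of t^(z-1) exp(-t) from a to infinity; gammainc(z, a, b)
# integrates from a to b, and regularized=True divides by gamma(z).
# For example Gamma(2, x) = (x+1) exp(-x), so gammainc(2, 5) = 6 exp(-5)
# as checked below.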
assert gammainc(2,5).ae(6*exp(-5))
assert gammainc(2,0,5).ae(1-6*exp(-5))
assert gammainc(2,3,5).ae(-6*exp(-5)+4*exp(-3))
assert gammainc(-2.5,-0.5).ae(-0.9453087204829418812-5.3164237738936178621j)
assert gammainc(0,2,4).ae(0.045121158298212213088)
assert gammainc(0,3).ae(0.013048381094197037413)
assert gammainc(0,2+j,1-j).ae(0.00910653685850304839-0.22378752918074432574j)
assert gammainc(0,1-j).ae(0.00028162445198141833+0.17932453503935894015j)
assert gammainc(3,4,5,True).ae(0.11345128607046320253)
assert gammainc(3.5,0,inf).ae(gamma(3.5))
assert gammainc(-150.5,500).ae('6.9825435345798951153e-627')
assert gammainc(-150.5,800).ae('4.6885137549474089431e-788')
assert gammainc(-3.5, -20.5).ae(0.27008820585226911 - 1310.31447140574997636j)
assert gammainc(-3.5, -200.5).ae(0.27008820585226911 - 5.3264597096208368435e76j) # XXX real part
assert gammainc(0,0,2) == inf
assert gammainc(1,b=1).ae(0.6321205588285576784)
assert gammainc(3,2,2) == 0
assert gammainc(2,3+j,3-j).ae(-0.28135485191849314194j)
assert gammainc(4+0j,1).ae(5.8860710587430771455)
# Regularized upper gamma
assert isnan(gammainc(0, 0, regularized=True))
assert gammainc(-1, 0, regularized=True) == inf
assert gammainc(1, 0, regularized=True) == 1
assert gammainc(0, 5, regularized=True) == 0
assert gammainc(0, 2+3j, regularized=True) == 0
assert gammainc(0, 5000, regularized=True) == 0
assert gammainc(0, 10**30, regularized=True) == 0
assert gammainc(-1, 5, regularized=True) == 0
assert gammainc(-1, 5000, regularized=True) == 0
assert gammainc(-1, 10**30, regularized=True) == 0
assert gammainc(-1, -5, regularized=True) == 0
assert gammainc(-1, -5000, regularized=True) == 0
assert gammainc(-1, -10**30, regularized=True) == 0
assert gammainc(-1, 3+4j, regularized=True) == 0
assert gammainc(1, 5, regularized=True).ae(exp(-5))
assert gammainc(1, 5000, regularized=True).ae(exp(-5000))
assert gammainc(1, 10**30, regularized=True).ae(exp(-10**30))
assert gammainc(1, 3+4j, regularized=True).ae(exp(-3-4j))
assert gammainc(-1000000,2).ae('1.3669297209397347754e-301037', abs_eps=0, rel_eps=8*eps)
assert gammainc(-1000000,2,regularized=True) == 0
assert gammainc(-1000000,3+4j).ae('-1.322575609404222361e-698979 - 4.9274570591854533273e-698978j', abs_eps=0, rel_eps=8*eps)
assert gammainc(-1000000,3+4j,regularized=True) == 0
assert gammainc(2+3j, 4+5j, regularized=True).ae(0.085422013530993285774-0.052595379150390078503j)
assert gammainc(1000j, 1000j, regularized=True).ae(0.49702647628921131761 + 0.00297355675013575341j)
# Generalized
assert gammainc(3,4,2) == -gammainc(3,2,4)
assert gammainc(4, 2, 3).ae(1.2593494302978947396)
assert gammainc(4, 2, 3, regularized=True).ae(0.20989157171631578993)
assert gammainc(0, 2, 3).ae(0.035852129613864082155)
assert gammainc(0, 2, 3, regularized=True) == 0
assert gammainc(-1, 2, 3).ae(0.015219822548487616132)
assert gammainc(-1, 2, 3, regularized=True) == 0
assert gammainc(0, 2, 3).ae(0.035852129613864082155)
assert gammainc(0, 2, 3, regularized=True) == 0
# Should use upper gammas
assert gammainc(5, 10000, 12000).ae('1.1359381951461801687e-4327', abs_eps=0, rel_eps=8*eps)
# Should use lower gammas
assert gammainc(10000, 2, 3).ae('8.1244514125995785934e4765')
def test_gammainc_expint_n():
# These tests are intended to check all cases of the low-level code
# for the upper incomplete gamma function and expint with small integer
# index. They need to cover positive/negative arguments and small/large/
# huge magnitudes, for both positive and negative indices, as well as
# the indices 0 and 1, which may be special-cased.
mp.dps = 15
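# expint(n, z) is the generalized exponential integral E_n(z), the
# integral of exp(-z*t)/t^n for t from 1 to infinity, related to the
# upper gamma by E_n(z) = z^(n-1) * Gamma(1-n, z). For real z < 0 the
# branch cut contributes an imaginary part -pi*(-z)^(n-1)/(n-1)!,
# e.g. -pi*3.5^2/2 = -19.242... for expint(3, -3.5) below.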
assert expint(-3,3.5).ae(0.021456366563296693987)
assert expint(-2,3.5).ae(0.014966633183073309405)
assert expint(-1,3.5).ae(0.011092916359219041088)
assert expint(0,3.5).ae(0.0086278238349481430685)
assert expint(1,3.5).ae(0.0069701398575483929193)
assert expint(2,3.5).ae(0.0058018939208991255223)
assert expint(3,3.5).ae(0.0049453773495857807058)
assert expint(-3,-3.5).ae(-4.6618170604073311319)
assert expint(-2,-3.5).ae(-5.5996974157555515963)
assert expint(-1,-3.5).ae(-6.7582555017739415818)
assert expint(0,-3.5).ae(-9.4615577024835182145)
assert expint(1,-3.5).ae(-13.925353995152335292 - 3.1415926535897932385j)
assert expint(2,-3.5).ae(-15.62328702434085977 - 10.995574287564276335j)
assert expint(3,-3.5).ae(-10.783026313250347722 - 19.242255003237483586j)
assert expint(-3,350).ae(2.8614825451252838069e-155, abs_eps=0, rel_eps=8*eps)
assert expint(-2,350).ae(2.8532837224504675901e-155, abs_eps=0, rel_eps=8*eps)
assert expint(-1,350).ae(2.8451316155828634555e-155, abs_eps=0, rel_eps=8*eps)
assert expint(0,350).ae(2.8370258275042797989e-155, abs_eps=0, rel_eps=8*eps)
assert expint(1,350).ae(2.8289659656701459404e-155, abs_eps=0, rel_eps=8*eps)
assert expint(2,350).ae(2.8209516419468505006e-155, abs_eps=0, rel_eps=8*eps)
assert expint(3,350).ae(2.8129824725501272171e-155, abs_eps=0, rel_eps=8*eps)
assert expint(-3,-350).ae(-2.8528796154044839443e+149)
assert expint(-2,-350).ae(-2.8610072121701264351e+149)
assert expint(-1,-350).ae(-2.8691813842677537647e+149)
assert expint(0,-350).ae(-2.8774025343659421709e+149)
u = expint(1,-350)
assert u.ae(-2.8856710698020863568e+149)
assert u.imag.ae(-3.1415926535897932385)
u = expint(2,-350)
assert u.ae(-2.8939874026504650534e+149)
assert u.imag.ae(-1099.5574287564276335)
u = expint(3,-350)
assert u.ae(-2.9023519497915044349e+149)
assert u.imag.ae(-192422.55003237483586)
assert expint(-3,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(-2,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(-1,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(0,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(1,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(2,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(3,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert expint(-3,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
assert expint(-2,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
assert expint(-1,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
assert expint(0,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
u = expint(1,-350000000000000000000000)
assert u.ae('-3.7805306852415755699e+152003068666138139677871')
assert u.imag.ae(-3.1415926535897932385)
u = expint(2,-350000000000000000000000)
assert u.imag.ae(-1.0995574287564276335e+24)
assert u.ae('-3.7805306852415755699e+152003068666138139677871')
u = expint(3,-350000000000000000000000)
assert u.imag.ae(-1.9242255003237483586e+47)
assert u.ae('-3.7805306852415755699e+152003068666138139677871')
# Small case; no branch cut
assert gammainc(-3,3.5).ae(0.00010020262545203707109)
assert gammainc(-2,3.5).ae(0.00040370427343557393517)
assert gammainc(-1,3.5).ae(0.0016576839773997501492)
assert gammainc(0,3.5).ae(0.0069701398575483929193)
assert gammainc(1,3.5).ae(0.03019738342231850074)
assert gammainc(2,3.5).ae(0.13588822540043325333)
assert gammainc(3,3.5).ae(0.64169439772426814072)
# Small case; with branch cut
assert gammainc(-3,-3.5).ae(0.03595832954467563286 - 0.52359877559829887308j)
assert gammainc(-2,-3.5).ae(-0.88024704597962022221 - 1.5707963267948966192j)
assert gammainc(-1,-3.5).ae(4.4637962926688170771 - 3.1415926535897932385j)
assert gammainc(0,-3.5).ae(-13.925353995152335292 - 3.1415926535897932385j)
assert gammainc(1,-3.5).ae(33.115451958692313751)
assert gammainc(2,-3.5).ae(-82.788629896730784377)
assert gammainc(3,-3.5).ae(240.08702670051927469)
# Asymptotic case; no branch cut
assert gammainc(-3,350).ae(6.5424095113340358813e-163, abs_eps=0, rel_eps=8*eps)
assert gammainc(-2,350).ae(2.296312222489899769e-160, abs_eps=0, rel_eps=8*eps)
assert gammainc(-1,350).ae(8.059861834133858573e-158, abs_eps=0, rel_eps=8*eps)
assert gammainc(0,350).ae(2.8289659656701459404e-155, abs_eps=0, rel_eps=8*eps)
assert gammainc(1,350).ae(9.9295903962649792963e-153, abs_eps=0, rel_eps=8*eps)
assert gammainc(2,350).ae(3.485286229089007733e-150, abs_eps=0, rel_eps=8*eps)
assert gammainc(3,350).ae(1.2233453960006379793e-147, abs_eps=0, rel_eps=8*eps)
# Asymptotic case; branch cut
u = gammainc(-3,-350)
assert u.ae(6.7889565783842895085e+141)
assert u.imag.ae(-0.52359877559829887308)
u = gammainc(-2,-350)
assert u.ae(-2.3692668977889832121e+144)
assert u.imag.ae(-1.5707963267948966192)
u = gammainc(-1,-350)
assert u.ae(8.2685354361441858669e+146)
assert u.imag.ae(-3.1415926535897932385)
u = gammainc(0,-350)
assert u.ae(-2.8856710698020863568e+149)
assert u.imag.ae(-3.1415926535897932385)
u = gammainc(1,-350)
assert u.ae(1.0070908870280797598e+152)
assert u.imag == 0
u = gammainc(2,-350)
assert u.ae(-3.5147471957279983618e+154)
assert u.imag == 0
u = gammainc(3,-350)
assert u.ae(1.2266568422179417091e+157)
assert u.imag == 0
# Extreme asymptotic case
assert gammainc(-3,350000000000000000000000).ae('5.0362468738874738859e-152003068666138139677990', abs_eps=0, rel_eps=8*eps)
assert gammainc(-2,350000000000000000000000).ae('1.7626864058606158601e-152003068666138139677966', abs_eps=0, rel_eps=8*eps)
assert gammainc(-1,350000000000000000000000).ae('6.1694024205121555102e-152003068666138139677943', abs_eps=0, rel_eps=8*eps)
assert gammainc(0,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
assert gammainc(1,350000000000000000000000).ae('7.5575179651273905e-152003068666138139677896', abs_eps=0, rel_eps=8*eps)
assert gammainc(2,350000000000000000000000).ae('2.645131287794586675e-152003068666138139677872', abs_eps=0, rel_eps=8*eps)
assert gammainc(3,350000000000000000000000).ae('9.2579595072810533625e-152003068666138139677849', abs_eps=0, rel_eps=8*eps)
u = gammainc(-3,-350000000000000000000000)
assert u.ae('8.8175642804468234866e+152003068666138139677800')
assert u.imag.ae(-0.52359877559829887308)
u = gammainc(-2,-350000000000000000000000)
assert u.ae('-3.0861474981563882203e+152003068666138139677824')
assert u.imag.ae(-1.5707963267948966192)
u = gammainc(-1,-350000000000000000000000)
assert u.ae('1.0801516243547358771e+152003068666138139677848')
assert u.imag.ae(-3.1415926535897932385)
u = gammainc(0,-350000000000000000000000)
assert u.ae('-3.7805306852415755699e+152003068666138139677871')
assert u.imag.ae(-3.1415926535897932385)
assert gammainc(1,-350000000000000000000000).ae('1.3231857398345514495e+152003068666138139677895')
assert gammainc(2,-350000000000000000000000).ae('-4.6311500894209300731e+152003068666138139677918')
assert gammainc(3,-350000000000000000000000).ae('1.6209025312973255256e+152003068666138139677942')
def test_incomplete_beta():
mp.dps = 15
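# betainc(a, b, x1, x2) is the generalized incomplete beta function,
# the integral of t^(a-1) * (1-t)^(b-1) from x1 to x2. The last case is
# an elementary polynomial integral: int_0^6 t^3 (1-t)^4 dt
# = 3176172/35 = 90747.77142857..., matching the assertion.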
assert betainc(-2,-3,0.5,0.75).ae(63.4305673311255413583969)
assert betainc(4.5,0.5+2j,2.5,6).ae(0.2628801146130621387903065 + 0.5162565234467020592855378j)
assert betainc(4,5,0,6).ae(90747.77142857142857142857)
def test_erf():
mp.dps = 15
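# erf(z) = (2/sqrt(pi)) * integral of exp(-t^2) from 0 to z, with
# erfc = 1 - erf and the imaginary error function erfi(z) = -j*erf(j*z),
# which is why erf(j) below is purely imaginary. Real arguments should
# stay mpf while complex arguments (even 0j) promote to mpc.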
assert erf(0) == 0
assert erf(1).ae(0.84270079294971486934)
assert erf(3+4j).ae(-120.186991395079444098 - 27.750337293623902498j)
assert erf(-4-3j).ae(-0.99991066178539168236 + 0.00004972026054496604j)
assert erf(pi).ae(0.99999112385363235839)
assert erf(1j).ae(1.6504257587975428760j)
assert erf(-1j).ae(-1.6504257587975428760j)
assert isinstance(erf(1), mpf)
assert isinstance(erf(-1), mpf)
assert isinstance(erf(0), mpf)
assert isinstance(erf(0j), mpc)
assert erf(inf) == 1
assert erf(-inf) == -1
assert erfi(0) == 0
assert erfi(1/pi).ae(0.371682698493894314)
assert erfi(inf) == inf
assert erfi(-inf) == -inf
assert erf(1+0j) == erf(1)
assert erfc(1+0j) == erfc(1)
assert erf(0.2+0.5j).ae(1 - erfc(0.2+0.5j))
assert erfc(0) == 1
assert erfc(1).ae(1-erf(1))
assert erfc(-1).ae(1-erf(-1))
assert erfc(1/pi).ae(1-erf(1/pi))
assert erfc(-10) == 2
assert erfc(-1000000) == 2
assert erfc(-inf) == 2
assert erfc(inf) == 0
assert isnan(erfc(nan))
assert (erfc(10**4)*mpf(10)**43429453).ae('3.63998738656420')
assert erf(8+9j).ae(-1072004.2525062051158 + 364149.91954310255423j)
assert erfc(8+9j).ae(1072005.2525062051158 - 364149.91954310255423j)
assert erfc(-8-9j).ae(-1072003.2525062051158 + 364149.91954310255423j)
mp.dps = 50
# This one does not use the asymptotic series
assert (erfc(10)*10**45).ae('2.0884875837625447570007862949577886115608181193212')
# This one does
assert (erfc(50)*10**1088).ae('2.0709207788416560484484478751657887929322509209954')
mp.dps = 15
assert str(erfc(10**50)) == '3.66744826532555e-4342944819032518276511289189166050822943970058036665661144537831658646492088707747292249493384317534'
assert erfinv(0) == 0
assert erfinv(0.5).ae(0.47693627620446987338)
assert erfinv(-0.5).ae(-0.47693627620446987338)
assert erfinv(1) == inf
assert erfinv(-1) == -inf
assert erf(erfinv(0.95)).ae(0.95)
assert erf(erfinv(0.999999999995)).ae(0.999999999995)
assert erf(erfinv(-0.999999999995)).ae(-0.999999999995)
mp.dps = 50
assert erf(erfinv('0.99999999999999999999999999999995')).ae('0.99999999999999999999999999999995')
assert erf(erfinv('0.999999999999999999999999999999995')).ae('0.999999999999999999999999999999995')
assert erf(erfinv('-0.999999999999999999999999999999995')).ae('-0.999999999999999999999999999999995')
mp.dps = 15
# Complex asymptotic expansions
v = erfc(50j)
assert v.real == 1
assert v.imag.ae('-6.1481820666053078736e+1083')
assert erfc(-100+5j).ae(2)
assert (erfc(100+5j)*10**4335).ae(2.3973567853824133572 - 3.9339259530609420597j)
assert erfc(100+100j).ae(0.00065234366376857698698 - 0.0039357263629214118437j)
def test_pdf():
mp.dps = 15
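# npdf(x, mu, sigma) is the normal probability density and ncdf the
# corresponding cumulative distribution; the density integrates to 1
# (checked via quadts below) and ncdf(mu, mu) = 0.5 by symmetry.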
assert npdf(-inf) == 0
assert npdf(inf) == 0
assert npdf(5,0,2).ae(npdf(5+4,4,2))
assert quadts(lambda x: npdf(x,-0.5,0.8), [-inf, inf]) == 1
assert ncdf(0) == 0.5
assert ncdf(3,3) == 0.5
assert ncdf(-inf) == 0
assert ncdf(inf) == 1
assert ncdf(10) == 1
# Verify that this is computed accurately
assert (ncdf(-10)*10**24).ae(7.619853024160526)
def test_lambertw():
mp.dps = 15
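# lambertw(z, k) is the branch-k solution w of w*exp(w) = z. On the
# principal branch W(e) = 1 (since 1*e^1 = e) and W(1) is the omega
# constant 0.56714...; the branch point at z = -1/e is where the k=0
# and k=-1 branches meet with common value -1.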
assert lambertw(0) == 0
assert lambertw(0+0j) == 0
assert lambertw(inf) == inf
assert isnan(lambertw(nan))
assert lambertw(inf,1).real == inf
assert lambertw(inf,1).imag.ae(2*pi)
assert lambertw(-inf,1).real == inf
assert lambertw(-inf,1).imag.ae(3*pi)
assert lambertw(0,-1) == -inf
assert lambertw(0,1) == -inf
assert lambertw(0,3) == -inf
assert lambertw(e).ae(1)
assert lambertw(1).ae(0.567143290409783873)
assert lambertw(-pi/2).ae(j*pi/2)
assert lambertw(-log(2)/2).ae(-log(2))
assert lambertw(0.25).ae(0.203888354702240164)
assert lambertw(-0.25).ae(-0.357402956181388903)
assert lambertw(-1./10000,0).ae(-0.000100010001500266719)
assert lambertw(-0.25,-1).ae(-2.15329236411034965)
assert lambertw(0.25,-1).ae(-3.00899800997004620-4.07652978899159763j)
assert lambertw(-0.25,-1).ae(-2.15329236411034965)
assert lambertw(0.25,1).ae(-3.00899800997004620+4.07652978899159763j)
assert lambertw(-0.25,1).ae(-3.48973228422959210+7.41405453009603664j)
assert lambertw(-4).ae(0.67881197132094523+1.91195078174339937j)
assert lambertw(-4,1).ae(-0.66743107129800988+7.76827456802783084j)
assert lambertw(-4,-1).ae(0.67881197132094523-1.91195078174339937j)
assert lambertw(1000).ae(5.24960285240159623)
assert lambertw(1000,1).ae(4.91492239981054535+5.44652615979447070j)
assert lambertw(1000,-1).ae(4.91492239981054535-5.44652615979447070j)
assert lambertw(1000,5).ae(3.5010625305312892+29.9614548941181328j)
assert lambertw(3+4j).ae(1.281561806123775878+0.533095222020971071j)
assert lambertw(-0.4+0.4j).ae(-0.10396515323290657+0.61899273315171632j)
assert lambertw(3+4j,1).ae(-0.11691092896595324+5.61888039871282334j)
assert lambertw(3+4j,-1).ae(0.25856740686699742-3.85211668616143559j)
assert lambertw(-0.5,-1).ae(-0.794023632344689368-0.770111750510379110j)
assert lambertw(-1./10000,1).ae(-11.82350837248724344+6.80546081842002101j)
assert lambertw(-1./10000,-1).ae(-11.6671145325663544)
assert lambertw(-1./10000,-2).ae(-11.82350837248724344-6.80546081842002101j)
assert lambertw(-1./100000,4).ae(-14.9186890769540539+26.1856750178782046j)
assert lambertw(-1./100000,5).ae(-15.0931437726379218666+32.5525721210262290086j)
assert lambertw((2+j)/10).ae(0.173704503762911669+0.071781336752835511j)
assert lambertw((2+j)/10,1).ae(-3.21746028349820063+4.56175438896292539j)
assert lambertw((2+j)/10,-1).ae(-3.03781405002993088-3.53946629633505737j)
assert lambertw((2+j)/10,4).ae(-4.6878509692773249+23.8313630697683291j)
assert lambertw(-(2+j)/10).ae(-0.226933772515757933-0.164986470020154580j)
assert lambertw(-(2+j)/10,1).ae(-2.43569517046110001+0.76974067544756289j)
assert lambertw(-(2+j)/10,-1).ae(-3.54858738151989450-6.91627921869943589j)
assert lambertw(-(2+j)/10,4).ae(-4.5500846928118151+20.6672982215434637j)
mp.dps = 50
assert lambertw(pi).ae('1.073658194796149172092178407024821347547745350410314531')
mp.dps = 15
# Former bug in generated branch
assert lambertw(-0.5+0.002j).ae(-0.78917138132659918344 + 0.76743539379990327749j)
assert lambertw(-0.5-0.002j).ae(-0.78917138132659918344 - 0.76743539379990327749j)
assert lambertw(-0.448+0.4j).ae(-0.11855133765652382241 + 0.66570534313583423116j)
assert lambertw(-0.448-0.4j).ae(-0.11855133765652382241 - 0.66570534313583423116j)
def test_meijerg():
mp.dps = 15
assert meijerg([[2,3],[1]],[[0.5,2],[3,4]], 2.5).ae(4.2181028074787439386)
assert meijerg([[],[1+j]],[[1],[1]], 3+4j).ae(271.46290321152464592 - 703.03330399954820169j)
assert meijerg([[0.25],[1]],[[0.5],[2]],0) == 0
assert meijerg([[0],[]],[[0,0,'1/3','2/3'], []], '2/27').ae(2.2019391389653314120)
# Verify 1/z series being used
assert meijerg([[-3],[-0.5]], [[-1],[-2.5]], -0.5).ae(-1.338096165935754898687431)
assert meijerg([[1-(-1)],[1-(-2.5)]], [[1-(-3)],[1-(-0.5)]], -2.0).ae(-1.338096165935754898687431)
assert meijerg([[-3],[-0.5]], [[-1],[-2.5]], -1).ae(-(pi+4)/(4*pi))
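# The loop below checks a standard Meijer G representation of 1F1:
# hyp1f1(a, b, z) = gamma(b)/gamma(a) * G^{1,1}_{1,2}(-z) with parameter
# lists ([1-a], []) and ([0], [1-b]), together with the reflected
# parameterization in -1/z used for large |z|.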
a = 2.5
b = 1.25
for z in [mpf(0.25), mpf(2)]:
x1 = hyp1f1(a,b,z)
x2 = gamma(b)/gamma(a)*meijerg([[1-a],[]],[[0],[1-b]],-z)
x3 = gamma(b)/gamma(a)*meijerg([[1-0],[1-(1-b)]],[[1-(1-a)],[]],-1/z)
assert x1.ae(x2)
assert x1.ae(x3)
def test_appellf1():
mp.dps = 15
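# appellf1(a, b1, b2, c, x, y) is the Appell hypergeometric series of
# two variables; when b1 or b2 is a negative integer the series
# terminates in that variable, and several of the cases below reduce to
# exact rational values.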
assert appellf1(2,-2,1,1,2,3).ae(-1.75)
assert appellf1(2,1,-2,1,2,3).ae(-8)
assert appellf1(2,1,-2,1,0.5,0.25).ae(1.5)
assert appellf1(-2,1,3,2,3,3).ae(19)
assert appellf1(1,2,3,4,0.5,0.125).ae( 1.53843285792549786518)
def test_coulomb():
# Note: most tests are doctests
# Test for a bug:
mp.dps = 15
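# coulombg(l, eta, z) is the irregular Coulomb wave function G_l(eta, z);
# the regression case below passes the negative integer order as an
# exact complex value, mpc(-5, 0), which previously triggered the bug.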
assert coulombg(mpc(-5,0),2,3).ae(20.087729487721430394)
def test_hyper_param_accuracy():
mp.dps = 15
As = [n+1e-10 for n in range(-5,-1)]
Bs = [n+1e-10 for n in range(-12,-5)]
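# As and Bs sit 1e-10 away from negative integers, stressing the
# near-pole cancellation handling. In the calls below, zeroprec and
# infprec set how much precision is spent deciding whether a result is
# exactly 0 or inf, and maxterms bounds the series evaluation.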
assert hyper(As,Bs,10).ae(-381757055858.652671927)
assert legenp(0.5, 100, 0.25).ae(-2.4124576567211311755e+144)
assert (hyp1f1(1000,1,-100)*10**24).ae(5.2589445437370169113)
assert (hyp2f1(10, -900, 10.5, 0.99)*10**24).ae(1.9185370579660768203)
assert (hyp2f1(1000,1.5,-3.5,-1.5)*10**385).ae(-2.7367529051334000764)
assert hyp2f1(-5, 10, 3, 0.5, zeroprec=500) == 0
assert (hyp1f1(-10000, 1000, 100)*10**424).ae(-3.1046080515824859974)
assert (hyp2f1(1000,1.5,-3.5,-0.75,maxterms=100000)*10**231).ae(-4.0534790813913998643)
assert legenp(2, 3, 0.25) == 0
try:
hypercomb(lambda a: [([],[],[],[],[a],[-a],0.5)], [3])
assert 0
except ValueError:
pass
assert hypercomb(lambda a: [([],[],[],[],[a],[-a],0.5)], [3], infprec=200) == inf
assert meijerg([[],[]],[[0,0,0,0],[]],0.1).ae(1.5680822343832351418)
assert (besselk(400,400)*10**94).ae(1.4387057277018550583)
mp.dps = 5
assert (hyp1f1(-5000.5, 1500, 100)*10**185).ae(8.5185229673381935522)
assert (hyp1f1(-5000, 1500, 100)*10**185).ae(9.1501213424563944311)
mp.dps = 15
assert (hyp1f1(-5000.5, 1500, 100)*10**185).ae(8.5185229673381935522)
assert (hyp1f1(-5000, 1500, 100)*10**185).ae(9.1501213424563944311)
assert hyp0f1(fadd(-20,'1e-100',exact=True), 0.25).ae(1.85014429040102783e+49)
assert hyp0f1((-20*10**100+1, 10**100), 0.25).ae(1.85014429040102783e+49)
def test_hypercomb_zero_pow():
# check that 0^0 = 1
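# Each hypercomb term ([w], [c], [alpha], [beta], [a], [b], z) encodes,
# roughly, prod(w_i^c_i) * prod(gamma(alpha_i))/prod(gamma(beta_i))
# * hyper(a, b, z), so the term below reduces to 0^0 * hyper([], [], 0).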
assert hypercomb(lambda a: (([0],[a],[],[],[],[],0),), [0]) == 1
assert meijerg([[-1.5],[]],[[0],[-0.75]],0).ae(1.4464090846320771425)
def test_spherharm():
mp.dps = 15
t = 0.5; r = 0.25
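# spherharm(l, m, theta, phi) is the spherical harmonic Y_l^m. Note that
# Y_0^0 = 1/(2*sqrt(pi)) = 0.28209..., that Y_l^m vanishes for integer l
# with |m| > l, and that the +-m pairs below are related by
# Y_l^(-m) = (-1)^m * conj(Y_l^m).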
assert spherharm(0,0,t,r).ae(0.28209479177387814347)
assert spherharm(1,-1,t,r).ae(0.16048941205971996369 - 0.04097967481096344271j)
assert spherharm(1,0,t,r).ae(0.42878904414183579379)
assert spherharm(1,1,t,r).ae(-0.16048941205971996369 - 0.04097967481096344271j)
assert spherharm(2,-2,t,r).ae(0.077915886919031181734 - 0.042565643022253962264j)
assert spherharm(2,-1,t,r).ae(0.31493387233497459884 - 0.08041582001959297689j)
assert spherharm(2,0,t,r).ae(0.41330596756220761898)
assert spherharm(2,1,t,r).ae(-0.31493387233497459884 - 0.08041582001959297689j)
assert spherharm(2,2,t,r).ae(0.077915886919031181734 + 0.042565643022253962264j)
assert spherharm(3,-3,t,r).ae(0.033640236589690881646 - 0.031339125318637082197j)
assert spherharm(3,-2,t,r).ae(0.18091018743101461963 - 0.09883168583167010241j)
assert spherharm(3,-1,t,r).ae(0.42796713930907320351 - 0.10927795157064962317j)
assert spherharm(3,0,t,r).ae(0.27861659336351639787)
assert spherharm(3,1,t,r).ae(-0.42796713930907320351 - 0.10927795157064962317j)
assert spherharm(3,2,t,r).ae(0.18091018743101461963 + 0.09883168583167010241j)
assert spherharm(3,3,t,r).ae(-0.033640236589690881646 - 0.031339125318637082197j)
assert spherharm(0,-1,t,r) == 0
assert spherharm(0,-2,t,r) == 0
assert spherharm(0,1,t,r) == 0
assert spherharm(0,2,t,r) == 0
assert spherharm(1,2,t,r) == 0
assert spherharm(1,3,t,r) == 0
assert spherharm(1,-2,t,r) == 0
assert spherharm(1,-3,t,r) == 0
assert spherharm(2,3,t,r) == 0
assert spherharm(2,4,t,r) == 0
assert spherharm(2,-3,t,r) == 0
assert spherharm(2,-4,t,r) == 0
assert spherharm(3,4.5,0.5,0.25).ae(-22.831053442240790148 + 10.910526059510013757j)
assert spherharm(2+3j, 1-j, 1+j, 3+4j).ae(-2.6582752037810116935 - 1.0909214905642160211j)
assert spherharm(-6,2.5,t,r).ae(0.39383644983851448178 + 0.28414687085358299021j)
assert spherharm(-3.5, 3, 0.5, 0.25).ae(0.014516852987544698924 - 0.015582769591477628495j)
assert spherharm(-3, 3, 0.5, 0.25) == 0
assert spherharm(-6, 3, 0.5, 0.25).ae(-0.16544349818782275459 - 0.15412657723253924562j)
assert spherharm(-6, 1.5, 0.5, 0.25).ae(0.032208193499767402477 + 0.012678000924063664921j)
assert spherharm(3,0,0,1).ae(0.74635266518023078283)
assert spherharm(3,-2,0,1) == 0
assert spherharm(3,-2,1,1).ae(-0.16270707338254028971 - 0.35552144137546777097j)
def test_qfunctions():
mp.dps = 15
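# qp(a, q, n) is the q-Pochhammer symbol (a; q)_n, the product of
# (1 - a*q^k) for k = 0 to n-1. For a=2, q=3, n=100 the magnitude is
# roughly 10^(100*log10(2) + 4950*log10(3)) ~ 10^2392, consistent with
# the asserted value.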
assert qp(2,3,100).ae('2.7291482267247332183e2391')
| mattpap/sympy-polys | sympy/mpmath/tests/test_functions2.py | Python | bsd-3-clause | 66,026 |