repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
devoid/nova | nova/tests/db/test_migration_utils.py | 3 | 28910 | # Copyright (c) 2013 Boris Pavlovic ([email protected]).
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import warnings
from migrate.changeset import UniqueConstraint
from sqlalchemy.dialects import mysql
from sqlalchemy import Boolean, Index, Integer, DateTime, String
from sqlalchemy import MetaData, Table, Column, ForeignKey
from sqlalchemy.engine import reflection
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.exc import SAWarning
from sqlalchemy.sql import select
from sqlalchemy.types import UserDefinedType, NullType
from nova.db.sqlalchemy import api as db
from nova.db.sqlalchemy import utils
from nova import exception
from nova.tests.db import test_migrations
class CustomType(UserDefinedType):
"""Dummy column type for testing unsupported types."""
def get_col_spec(self):
return "CustomType"
class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
"""Class for testing utils that are used in db migrations."""
def test_delete_from_select(self):
table_name = "__test_deletefromselect_table__"
uuidstrs = []
for unused in range(10):
uuidstrs.append(uuid.uuid4().hex)
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
conn = engine.connect()
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False, autoincrement=True),
Column('uuid', String(36), nullable=False))
test_table.create()
# Add 10 rows to table
for uuidstr in uuidstrs:
ins_stmt = test_table.insert().values(uuid=uuidstr)
conn.execute(ins_stmt)
# Delete 4 rows in one chunk
column = test_table.c.id
query_delete = select([column],
test_table.c.id < 5).order_by(column)
delete_statement = utils.DeleteFromSelect(test_table,
query_delete, column)
result_delete = conn.execute(delete_statement)
# Verify we delete 4 rows
self.assertEqual(result_delete.rowcount, 4)
query_all = select([test_table]).\
where(test_table.c.uuid.in_(uuidstrs))
rows = conn.execute(query_all).fetchall()
# Verify we still have 6 rows in table
self.assertEqual(len(rows), 6)
test_table.drop()
def test_insert_from_select(self):
insert_table_name = "__test_insert_to_table__"
select_table_name = "__test_select_from_table__"
uuidstrs = []
for unused in range(10):
uuidstrs.append(uuid.uuid4().hex)
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
conn = engine.connect()
insert_table = Table(insert_table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False, autoincrement=True),
Column('uuid', String(36), nullable=False))
select_table = Table(select_table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False, autoincrement=True),
Column('uuid', String(36), nullable=False))
insert_table.create()
select_table.create()
# Add 10 rows to select_table
for uuidstr in uuidstrs:
ins_stmt = select_table.insert().values(uuid=uuidstr)
conn.execute(ins_stmt)
# Select 4 rows in one chunk from select_table
column = select_table.c.id
query_insert = select([select_table],
select_table.c.id < 5).order_by(column)
insert_statement = utils.InsertFromSelect(insert_table,
query_insert)
result_insert = conn.execute(insert_statement)
# Verify we insert 4 rows
self.assertEqual(result_insert.rowcount, 4)
query_all = select([insert_table]).\
where(insert_table.c.uuid.in_(uuidstrs))
rows = conn.execute(query_all).fetchall()
# Verify we really have 4 rows in insert_table
self.assertEqual(len(rows), 4)
insert_table.drop()
select_table.drop()
def test_utils_drop_unique_constraint(self):
table_name = "test_utils_drop_unique_constraint"
uc_name = 'uniq_foo'
values = [
{'id': 1, 'a': 3, 'foo': 10},
{'id': 2, 'a': 2, 'foo': 20},
{'id': 3, 'a': 1, 'foo': 30}
]
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('foo', Integer),
UniqueConstraint('a', name='uniq_a'),
UniqueConstraint('foo', name=uc_name))
test_table.create()
engine.execute(test_table.insert(), values)
# NOTE(boris-42): This method is generic UC dropper.
utils.drop_unique_constraint(engine, table_name, uc_name, 'foo')
s = test_table.select().order_by(test_table.c.id)
rows = engine.execute(s).fetchall()
for i in xrange(0, len(values)):
v = values[i]
self.assertEqual((v['id'], v['a'], v['foo']), rows[i])
# NOTE(boris-42): Update data about Table from DB.
meta = MetaData()
meta.bind = engine
test_table = Table(table_name, meta, autoload=True)
constraints = filter(lambda c: c.name == uc_name,
test_table.constraints)
self.assertEqual(len(constraints), 0)
self.assertEqual(len(test_table.constraints), 1)
test_table.drop()
def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self):
if 'sqlite' in self.engines:
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
table_name = ("test_util_drop_unique_constraint_with_not_supported"
"_sqlite_type")
uc_name = 'uniq_foo'
values = [
{'id': 1, 'a': 3, 'foo': 10},
{'id': 2, 'a': 2, 'foo': 20},
{'id': 3, 'a': 1, 'foo': 30}
]
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('foo', CustomType, default=0),
UniqueConstraint('a', name='uniq_a'),
UniqueConstraint('foo', name=uc_name))
test_table.create()
engine.execute(test_table.insert(), values)
warnings.simplefilter("ignore", SAWarning)
# NOTE(boris-42): Missing info about column `foo` that has
# unsupported type CustomType.
self.assertRaises(exception.NovaException,
utils.drop_unique_constraint,
engine, table_name, uc_name, 'foo')
# NOTE(boris-42): Wrong type of foo instance. it should be
# instance of sqlalchemy.Column.
self.assertRaises(exception.NovaException,
utils.drop_unique_constraint,
engine, table_name, uc_name, 'foo',
foo=Integer())
foo = Column('foo', CustomType, default=0)
utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
foo=foo)
s = test_table.select().order_by(test_table.c.id)
rows = engine.execute(s).fetchall()
for i in xrange(0, len(values)):
v = values[i]
self.assertEqual((v['id'], v['a'], v['foo']), rows[i])
# NOTE(boris-42): Update data about Table from DB.
meta = MetaData(bind=engine)
test_table = Table(table_name, meta, autoload=True)
constraints = filter(lambda c: c.name == uc_name,
test_table.constraints)
self.assertEqual(len(constraints), 0)
self.assertEqual(len(test_table.constraints), 1)
test_table.drop()
def _populate_db_for_drop_duplicate_entries(self, engine, meta,
table_name):
values = [
{'id': 11, 'a': 3, 'b': 10, 'c': 'abcdef'},
{'id': 12, 'a': 5, 'b': 10, 'c': 'abcdef'},
{'id': 13, 'a': 6, 'b': 10, 'c': 'abcdef'},
{'id': 14, 'a': 7, 'b': 10, 'c': 'abcdef'},
{'id': 21, 'a': 1, 'b': 20, 'c': 'aa'},
{'id': 31, 'a': 1, 'b': 20, 'c': 'bb'},
{'id': 41, 'a': 1, 'b': 30, 'c': 'aef'},
{'id': 42, 'a': 2, 'b': 30, 'c': 'aef'},
{'id': 43, 'a': 3, 'b': 30, 'c': 'aef'}
]
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('b', Integer),
Column('c', String(255)),
Column('deleted', Integer, default=0),
Column('deleted_at', DateTime),
Column('updated_at', DateTime))
test_table.create()
engine.execute(test_table.insert(), values)
return test_table, values
def test_drop_old_duplicate_entries_from_table(self):
table_name = "test_drop_old_duplicate_entries_from_table"
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
test_table, values = self.\
_populate_db_for_drop_duplicate_entries(engine, meta,
table_name)
utils.drop_old_duplicate_entries_from_table(engine, table_name,
False, 'b', 'c')
uniq_values = set()
expected_ids = []
for value in sorted(values, key=lambda x: x['id'], reverse=True):
uniq_value = (('b', value['b']), ('c', value['c']))
if uniq_value in uniq_values:
continue
uniq_values.add(uniq_value)
expected_ids.append(value['id'])
real_ids = [row[0] for row in
engine.execute(select([test_table.c.id])).fetchall()]
self.assertEqual(len(real_ids), len(expected_ids))
for id_ in expected_ids:
self.assertIn(id_, real_ids)
test_table.drop()
def test_drop_old_duplicate_entries_from_table_soft_delete(self):
table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table, values = self.\
_populate_db_for_drop_duplicate_entries(engine, meta,
table_name)
utils.drop_old_duplicate_entries_from_table(engine, table_name,
True, 'b', 'c')
uniq_values = set()
expected_values = []
soft_deleted_values = []
for value in sorted(values, key=lambda x: x['id'], reverse=True):
uniq_value = (('b', value['b']), ('c', value['c']))
if uniq_value in uniq_values:
soft_deleted_values.append(value)
continue
uniq_values.add(uniq_value)
expected_values.append(value)
base_select = table.select()
rows_select = base_select.\
where(table.c.deleted != table.c.id)
row_ids = [row['id'] for row in
engine.execute(rows_select).fetchall()]
self.assertEqual(len(row_ids), len(expected_values))
for value in expected_values:
self.assertIn(value['id'], row_ids)
deleted_rows_select = base_select.\
where(table.c.deleted == table.c.id)
deleted_rows_ids = [row['id'] for row in
engine.execute(deleted_rows_select).fetchall()]
self.assertEqual(len(deleted_rows_ids),
len(values) - len(row_ids))
for value in soft_deleted_values:
self.assertIn(value['id'], deleted_rows_ids)
table.drop()
def test_check_shadow_table(self):
table_name = 'test_check_shadow_table'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', String(256)))
table.create()
#check missing shadow table
self.assertRaises(NoSuchTableError,
utils.check_shadow_table, engine, table_name)
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer),
Column('a', Integer))
shadow_table.create()
# check missing column
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
# check when all is ok
c = Column('c', String(256))
shadow_table.create_column(c)
self.assertTrue(utils.check_shadow_table(engine, table_name))
# check extra column
d = Column('d', Integer)
shadow_table.create_column(d)
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
table.drop()
shadow_table.drop()
def test_check_shadow_table_different_types(self):
table_name = 'test_check_shadow_table_different_types'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', String(256)))
shadow_table.create()
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
table.drop()
shadow_table.drop()
def test_check_shadow_table_with_unsupported_type(self):
table_name = 'test_check_shadow_table_with_unsupported_type'
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', CustomType))
table.create()
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', CustomType))
shadow_table.create()
self.assertTrue(utils.check_shadow_table(engine, table_name))
shadow_table.drop()
def test_create_shadow_table_by_table_instance(self):
table_name = 'test_create_shadow_table_by_table_instance'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('b', String(256)))
table.create()
shadow_table = utils.create_shadow_table(engine, table=table)
self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_table_by_name(self):
table_name = 'test_create_shadow_table_by_name'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('b', String(256)))
table.create()
shadow_table = utils.create_shadow_table(engine,
table_name=table_name)
self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_table_not_supported_type(self):
if 'sqlite' in self.engines:
table_name = 'test_create_shadow_table_not_supported_type'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', CustomType))
table.create()
self.assertRaises(exception.NovaException,
utils.create_shadow_table,
engine, table_name=table_name)
shadow_table = utils.create_shadow_table(engine,
table_name=table_name,
a=Column('a', CustomType())
)
self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_both_table_and_table_name_are_none(self):
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
self.assertRaises(exception.NovaException,
utils.create_shadow_table, engine)
def test_create_shadow_both_table_and_table_name_are_specified(self):
table_name = ('test_create_shadow_both_table_and_table_name_are_'
'specified')
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
self.assertRaises(exception.NovaException,
utils.create_shadow_table,
engine, table=table, table_name=table_name)
table.drop()
def test_create_duplicate_shadow_table(self):
table_name = 'test_create_duplicate_shadow_table'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
shadow_table = utils.create_shadow_table(engine,
table_name=table_name)
self.assertRaises(exception.ShadowTableExists,
utils.create_shadow_table,
engine, table_name=table_name)
table.drop()
shadow_table.drop()
def test_change_deleted_column_type_doesnt_drop_index(self):
table_name = 'test_change_deleted_column_type_doesnt_drop_index'
for key, engine in self.engines.items():
meta = MetaData(bind=engine)
indexes = {
'idx_a_deleted': ['a', 'deleted'],
'idx_b_deleted': ['b', 'deleted'],
'idx_a': ['a']
}
index_instances = [Index(name, *columns)
for name, columns in indexes.iteritems()]
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', String(255)),
Column('b', String(255)),
Column('deleted', Boolean),
*index_instances)
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
utils.change_deleted_column_type_to_boolean(engine, table_name)
insp = reflection.Inspector.from_engine(engine)
real_indexes = insp.get_indexes(table_name)
self.assertEqual(len(real_indexes), 3)
for index in real_indexes:
name = index['name']
self.assertIn(name, indexes)
self.assertEqual(set(index['column_names']),
set(indexes[name]))
table.drop()
def test_change_deleted_column_type_to_id_type_integer(self):
table_name = 'test_change_deleted_column_type_to_id_type_integer'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('deleted', Boolean))
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
table = utils.get_table(engine, table_name)
self.assertIsInstance(table.c.deleted.type, Integer)
table.drop()
def test_change_deleted_column_type_to_id_type_string(self):
table_name = 'test_change_deleted_column_type_to_id_type_string'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', String(255), primary_key=True),
Column('deleted', Boolean))
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
table = utils.get_table(engine, table_name)
self.assertIsInstance(table.c.deleted.type, String)
table.drop()
def test_change_deleted_column_type_to_id_type_custom(self):
if 'sqlite' in self.engines:
table_name = 'test_change_deleted_column_type_to_id_type_custom'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('foo', CustomType),
Column('deleted', Boolean))
table.create()
self.assertRaises(exception.NovaException,
utils.change_deleted_column_type_to_id_type,
engine, table_name)
fooColumn = Column('foo', CustomType())
utils.change_deleted_column_type_to_id_type(engine, table_name,
foo=fooColumn)
table = utils.get_table(engine, table_name)
# NOTE(boris-42): There is no way to check has foo type CustomType.
# but sqlalchemy will set it to NullType.
self.assertIsInstance(table.c.foo.type, NullType)
self.assertIsInstance(table.c.deleted.type, Integer)
table.drop()
def test_change_deleted_column_type_to_boolean(self):
table_name = 'test_change_deleted_column_type_to_boolean'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('deleted', Integer))
table.create()
utils.change_deleted_column_type_to_boolean(engine, table_name)
table = utils.get_table(engine, table_name)
expected_type = Boolean if key != "mysql" else mysql.TINYINT
self.assertIsInstance(table.c.deleted.type, expected_type)
table.drop()
def test_change_deleted_column_type_to_boolean_type_custom(self):
if 'sqlite' in self.engines:
table_name = \
'test_change_deleted_column_type_to_boolean_type_custom'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('foo', CustomType),
Column('deleted', Integer))
table.create()
self.assertRaises(exception.NovaException,
utils.change_deleted_column_type_to_boolean,
engine, table_name)
fooColumn = Column('foo', CustomType())
utils.change_deleted_column_type_to_boolean(engine, table_name,
foo=fooColumn)
table = utils.get_table(engine, table_name)
# NOTE(boris-42): There is no way to check has foo type CustomType.
# but sqlalchemy will set it to NullType.
self.assertIsInstance(table.c.foo.type, NullType)
self.assertIsInstance(table.c.deleted.type, Boolean)
table.drop()
def test_drop_unique_constraint_in_sqlite_fk_recreate(self):
if 'sqlite' in self.engines:
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
parent_table_name = ('test_drop_unique_constraint_in_sqlite_fk_'
'recreate_parent_table')
parent_table = Table(parent_table_name, meta,
Column('id', Integer, primary_key=True),
Column('foo', Integer))
parent_table.create()
table_name = 'test_drop_unique_constraint_in_sqlite_fk_recreate'
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('baz', Integer),
Column('bar', Integer,
ForeignKey(parent_table_name + ".id")),
UniqueConstraint('baz', name='constr1'))
table.create()
utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
insp = reflection.Inspector.from_engine(engine)
f_keys = insp.get_foreign_keys(table_name)
self.assertEqual(len(f_keys), 1)
f_key = f_keys[0]
self.assertEqual(f_key['referred_table'], parent_table_name)
self.assertEqual(f_key['referred_columns'], ['id'])
self.assertEqual(f_key['constrained_columns'], ['bar'])
table.drop()
parent_table.drop()
| apache-2.0 | 7,695,907,303,963,113,000 | 42.539157 | 79 | 0.511138 | false |
daevaorn/sentry | tests/sentry/api/endpoints/test_user_details.py | 2 | 2045 | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import User
from sentry.testutils import APITestCase
class UserDetailsTest(APITestCase):
    def test_simple(self):
        user = self.create_user(email='[email protected]')

        self.login_as(user=user)

        url = reverse('sentry-api-0-user-details', kwargs={
            'user_id': 'me',
        })
        response = self.client.get(url, format='json')
        assert response.status_code == 200, response.content
        assert response.data['id'] == str(user.id)


class UserUpdateTest(APITestCase):
    def test_simple(self):
        user = self.create_user(email='[email protected]')

        self.login_as(user=user)

        url = reverse('sentry-api-0-user-details', kwargs={
            'user_id': 'me',
        })

        resp = self.client.put(url, data={
            'name': 'hello world',
            'username': '[email protected]',
        })
        assert resp.status_code == 200, resp.content
        assert resp.data['id'] == str(user.id)

        user = User.objects.get(id=user.id)
        assert user.name == 'hello world'
        assert user.email == '[email protected]'
        assert user.username == user.email

    def test_superuser(self):
        user = self.create_user(email='[email protected]')
        superuser = self.create_user(email='[email protected]', is_superuser=True)

        self.login_as(user=superuser)

        url = reverse('sentry-api-0-user-details', kwargs={
            'user_id': user.id,
        })

        resp = self.client.put(url, data={
            'name': 'hello world',
            'email': '[email protected]',
            'username': 'foo',
            'isActive': 'false',
        })
        assert resp.status_code == 200, resp.content
        assert resp.data['id'] == str(user.id)

        user = User.objects.get(id=user.id)
        assert user.name == 'hello world'
        assert user.email == '[email protected]'
        assert user.username == 'foo'
        assert not user.is_active
| bsd-3-clause | 8,106,798,348,610,766,000 | 28.637681 | 78 | 0.577995 | false |
airbnb/airflow | airflow/contrib/hooks/spark_jdbc_hook.py | 7 | 1170 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.apache.spark.hooks.spark_jdbc`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.apache.spark.hooks.spark_jdbc import SparkJDBCHook # noqa
warnings.warn(
    "This module is deprecated. Please use `airflow.providers.apache.spark.hooks.spark_jdbc`.",
    DeprecationWarning,
    stacklevel=2,
)
| apache-2.0 | 5,826,819,363,365,302,000 | 39.344828 | 95 | 0.768376 | false |
mediatum/mediatum | core/database/postgres/connector.py | 1 | 20887 | # -*- coding: utf-8 -*-
"""
:copyright: (c) 2014 by the mediaTUM authors
:license: GPL3, see COPYING for details
"""
import logging
import atexit
import pwd
import os.path
import time
from sqlalchemy import create_engine, event
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy_continuum.utils import version_class
from core import config
from . import db_metadata, DeclarativeBase
from core.database.postgres import MtQuery
from core.database.postgres.psycopg2_debug import make_debug_connection_factory
from core.database.init import init_database_values
from utils.utils import find_free_port
from utils.postgres import schema_exists, table_exists
import utils.process
import sys
from core.search.config import get_fulltext_autoindex_languages, get_attribute_autoindex_languages
from sqlalchemy_continuum import make_versioned
from sqlalchemy_continuum.plugins.transaction_meta import TransactionMetaPlugin
from core.transition.athana_continuum_plugin import AthanaContinuumPlugin
from core.database.postgres.continuumext import MtVersionBase
# set this to True or False to override debug config settings
DEBUG = None
DEBUG_SHOW_TRACE = None
CONNECTSTR_TEMPLATE = "postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{database}?application_name={application_name}"
CONNECTSTR_TEMPLATE_TEST_DB = "postgresql+psycopg2://{user}@:{port}/{database}?host={socketdir}"
CONNECTSTR_TEMPLATE_WITHOUT_PW = "postgresql+psycopg2://{user}:<passwd>@{host}:{port}/{database}"
logg = logging.getLogger(__name__)
def read_and_prepare_sql(sql_filepath, sql_dir=None, filter_notices=True, filter_comments=True):
"""Reads SQL code from a file, sets search path and strips comment + logging lines"""
if sql_dir is None:
sql_dir = os.path.join(os.path.dirname(__file__), "sql")
with open(os.path.join(sql_dir, sql_filepath)) as f:
sql = f.read().replace(":search_path", "mediatum").replace("%", "%%")
if filter_notices or filter_comments:
sql_lines = sql.split("\n")
return "\n".join(l for l in sql_lines
if (not filter_comments or not l.startswith("--"))
and (not filter_notices or "RAISE NOTICE" not in l))
else:
return sql
def disabled_scoped_session(*args, **kwargs):
raise Exception("Test mode, database session disabled. Use the core.test.fixtures.session fixture!")
class ConnectionException(Exception):
pass
class PostgresSQLAConnector(object):
"""Basic db object used by the application
"""
def __init__(self):
session_factory = sessionmaker(query_cls=MtQuery)
self.Session = scoped_session(session_factory)
self.metadata = db_metadata
self.meta_plugin = TransactionMetaPlugin()
self.athana_continuum_plugin = AthanaContinuumPlugin()
# XXX: maybe there is a better place for this, but we need it before some methods in this class are called.
# XXX: maybe we could make it optionsl
self.setup_versioning()
def setup_versioning(self):
make_versioned(
plugins=[self.meta_plugin, self.athana_continuum_plugin],
options={
'native_versioning': True,
'base_classes': (MtVersionBase, DeclarativeBase),
'extension_schema': config.get("database.extension_schema", "public")
}
)
def configure(self, force_test_db=False):
if DEBUG is None:
self.debug = config.get("database.debug", "").lower() == "true"
else:
self.debug = DEBUG
if force_test_db:
logg.warn("WARNING: force_test_db requested, creating / using test database server", trace=False)
test_db = True
else:
test_db = config.get("database.test_db", "false").lower() == "true"
if test_db:
logg.warn("WARNING: database.test_db enabled in config, creating / using test database server", trace=False)
self.test_db = test_db
if not test_db:
self.host = config.get("database.host", "localhost")
self.port = config.getint("database.port", "5432")
self.database = config.get("database.db", "mediatum")
self.user = config.get("database.user", "mediatum")
self.passwd = config.get("database.passwd", "mediatum")
self.pool_size = config.getint("database.pool_size", 20)
self.slow_query_seconds = config.getfloat("database.slow_query_seconds", 0.2)
self.application_name = "{}({})".format(os.path.basename(sys.argv[0]), os.getpid())
self.connectstr = CONNECTSTR_TEMPLATE.format(**self.__dict__)
logg.info("using database connection string: %s", CONNECTSTR_TEMPLATE_WITHOUT_PW.format(**self.__dict__))
# test_db is handled in create_engine / check_run_test_db_server
def check_create_test_db(self):
# check database existence
out = self.run_psql_command("SELECT 1 FROM pg_database WHERE datname='mediatum'", output=True, database="postgres")
if out.strip() != "1":
# no mediaTUM database present, use postgres as starting point to create one
self.run_psql_command("CREATE DATABASE mediatum OWNER=" + self.user, database="postgres")
self.run_psql_command("CREATE EXTENSION hstore SCHEMA public")
self.run_psql_command("ALTER ROLE {} SET search_path TO mediatum,public".format(self.user))
def check_run_test_db_server(self):
dirpath = config.check_create_test_db_dir()
# database role name must be the same as the process user
user = pwd.getpwuid(os.getuid())[0]
code = utils.process.call(["pg_ctl", "status", "-D", dirpath])
if code:
# error code > 0? database dir is not present or server not running
if code == 4:
# dirpath is not a proper database directory, try to init it
utils.process.check_call(["pg_ctl", "init", "-D", dirpath])
elif code == 3:
# database directory is ok, but no server running
logg.info("using existing database directory %s", dirpath)
else:
# should not happen with the tested postgres version...
raise ConnectionException("unexpected exit code from pg_ctl: %s. This looks like a bug.".format(code))
port = find_free_port()
socketdir = "/tmp"
logg.info("starting temporary postgresql server on port %s as user %s", port, user)
utils.process.check_call(["pg_ctl", "start", "-w", "-D", dirpath, "-o", "'-p {}'".format(port)])
# we have started the database server, it should be stopped automatically if mediaTUM exits
def stop_db():
self.Session.close_all()
self.engine.dispose()
utils.process.check_call(["pg_ctl", "stop", "-D", dirpath])
atexit.register(stop_db)
else:
# server is already running, get information from database dir
with open(os.path.join(dirpath, "postmaster.pid")) as f:
lines = f.readlines()
port = int(lines[3])
socketdir = lines[4]
# finally set the database config params for engine creation
self.port = port
self.host = "localhost"
self.passwd = ""
self.user = user
self.database = "mediatum"
self.socketdir = socketdir
self.connectstr = CONNECTSTR_TEMPLATE_TEST_DB.format(**self.__dict__)
self.pool_size = 5
self.slow_query_seconds = 0.2
logg.info("using test database connection string: %s", self.connectstr)
def _setup_slow_query_logging(self):
"""Registers cursor execute event handlers that measure query time and
log warnings when `self.slow_query_seconds` is exceeded.
"""
@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
conn.info.setdefault('query_start_time', []).append(time.time())
conn.info.setdefault('current_query', []).append(statement)
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
total = time.time() - conn.info['query_start_time'].pop(-1)
statement = conn.info['current_query'].pop(-1)
# total in seconds
if total > self.slow_query_seconds:
logg.warn("slow query %.1fms:\n%s", total * 1000, statement)
def create_engine(self):
if self.debug:
if DEBUG_SHOW_TRACE is None:
show_trace = config.get("database.debug_show_trace", "").lower() == "true"
else:
show_trace = DEBUG_SHOW_TRACE
connect_args = {"connection_factory": make_debug_connection_factory(show_trace)}
else:
connect_args = {}
if self.test_db:
self.check_run_test_db_server()
self.check_create_test_db()
engine = create_engine(self.connectstr, connect_args=connect_args, pool_size=self.pool_size)
db_connection_exception = self.check_db_connection(engine)
if db_connection_exception:
if self.test_db:
msg = "Could not connect to temporary test database, error was: " + db_connection_exception.args[0]
msg += "This looks like a bug in mediaTUM or a strange problem with your system."
else:
msg = "Could not connect to database, error was: " + db_connection_exception.args[0]
if config.is_default_config:
msg += "HINT: You are running mediaTUM without a config file. Did you forget to create one?" \
"\nTo start mediaTUM without a config file using a temporary test database server, use" \
" the --force-test-db option on the command line." \
"\nSee --help for more info."
else:
msg += "check the settings in the [database] section in your config file."
raise ConnectionException(msg)
DeclarativeBase.metadata.bind = engine
self.engine = engine
self.Session.configure(bind=engine)
self._setup_slow_query_logging()
if self.test_db:
# create schema with default data in test_db mode if not present
self.check_create_schema(set_alembic_version=False)
self.check_load_initial_database_values(default_admin_password=u"insecure")
def check_db_connection(self, engine):
try:
conn = engine.connect()
except Exception as e:
return e
res = conn.execute("SELECT version()")
version = res.fetchone()
res = conn.execute("SHOW search_path")
search_path = res.fetchone()
logg.info("db connection test succeeded, search_path is '%s', version is: '%s'", search_path[0], version[0])
conn.close()
def check_db_structure_validity(self):
"""Just a simple check if the schema and the node table exist, should be extended"""
if not schema_exists(self.session, "mediatum"):
# missing schema, user should run schema creation or import a dump with structure
raise Exception("'mediatum' database schema does not exist."
"HINT: Did you forget to run 'bin/manage.py schema create'?")
if not table_exists(self.session, "mediatum", "node"):
# missing node table, there's something really wrong here...
raise Exception("'node' table does not exist."
"HINT: You can delete and recreate the database schema with all tables with 'bin/manage.py schema recreate'")
def get_model_classes(self):
from core.database.postgres.file import File, NodeToFile
from core.database.postgres.node import NodeType, Node, NodeAlias
from core.database.postgres.user import User, UserGroup, UserToUserGroup, AuthenticatorInfo, OAuthUserCredentials
from core.database.postgres.permission import AccessRule, AccessRuleset, NodeToAccessRule, NodeToAccessRuleset, AccessRulesetToRule
from core.database.postgres.setting import Setting
return (
File,
NodeToFile,
Node,
User,
UserGroup,
UserToUserGroup,
AuthenticatorInfo,
OAuthUserCredentials,
AccessRule,
AccessRuleset,
NodeToAccessRule,
NodeToAccessRuleset,
AccessRulesetToRule,
Setting,
NodeType,
NodeAlias)
def make_session(self):
"""Create a session.
For testing purposes (used in core.test.factories, for example).
"""
return self.Session()
@property
def session(self):
return self.Session()
@property
def statement_history(self):
if not self.debug:
raise Exception("connector debugging disabled (cfg: database.debug), statement history not available")
return self.Session().connection().connection.connection.history
def query(self, *entities, **kwargs):
"""Query proxy.
:see: sqlalchemy.orm.session.Session.query
Example:
from core import db
q = db.query
q(Node).get(42)
"""
return self.Session().query(*entities, **kwargs)
def refresh(self, node):
"""Return a refreshed copy of `node`.
Workaround for Node objects which are kept between requests.
XXX: must be removed later
"""
from .node import Node
NodeVersion = version_class(Node)
if isinstance(node, NodeVersion):
return self.session.query(NodeVersion).get((node.id, node.transaction.id))
else:
return self.session.query(Node).get(node.id)
# database manipulation helpers
def drop_schema(self):
s = self.session
if schema_exists(s, "mediatum"):
s.execute("DROP SCHEMA mediatum CASCADE")
s.commit()
logg.info("dropped database structure")
else:
logg.info("schema mediatum does not exist, cannot drop it")
def create_schema(self, set_alembic_version=True):
"""Creates the 'mediatum' schema.
:param set_alembic_version: Stamp database with current alembic revision information. Defaults to True.
Can be disabled if a schema for testing is going to be created.
"""
s = self.session
logg.info("creating DB schema...")
s.execute("CREATE SCHEMA mediatum")
s.commit()
try:
self.create_all()
if set_alembic_version:
# create alembic version table and set current alembic version to head
from alembic.config import Config
from alembic import command
alembic_cfg = Config(os.path.join(config.basedir, "alembic.ini"))
alembic_cfg.attributes["running_in_mediatum"] = True
command.stamp(alembic_cfg, "head")
s.commit()
logg.info("commited database structure")
except:
# I tried to use a transaction to enclose everything, but sqlalchemy (?) fails when the schema is created within the transaction
# solution: just drop the schema it if something fails after schema creation
s.execute("DROP SCHEMA mediatum CASCADE")
raise
def check_create_schema(self, set_alembic_version=True):
if not schema_exists(self.session, "mediatum"):
self.create_schema(set_alembic_version)
def upgrade_schema(self):
from alembic.config import Config
from alembic import command
alembic_cfg = Config(os.path.join(config.basedir, "alembic.ini"))
alembic_cfg.attributes["running_in_mediatum"] = True
command.upgrade(alembic_cfg, "head")
def check_load_initial_database_values(self, default_admin_password=None):
s = self.session
stmt = "SELECT EXISTS (SELECT FROM node)"
nodes_exist = s.execute(stmt).fetchone()[0]
if not nodes_exist:
init_database_values(s, default_admin_password=default_admin_password)
s.commit()
return True
return False
def create_tables(self, conn):
self.metadata.create_all(conn)
def drop_tables(self, conn):
self.metadata.drop_all(conn)
def create_extra_indexes(self, conn):
pass
def drop_extra_indexes(self, conn):
pass
def create_functions(self, conn):
conn.execute(read_and_prepare_sql("mediatum_utils.sql"))
conn.execute(read_and_prepare_sql("node_funcs.sql"))
conn.execute(read_and_prepare_sql("noderelation_funcs.sql"))
conn.execute(read_and_prepare_sql("json.sql"))
conn.execute(read_and_prepare_sql("nodesearch.sql"))
conn.execute(read_and_prepare_sql("node_access_funcs.sql"))
conn.execute(read_and_prepare_sql("node_access_rules_and_triggers.sql"))
conn.execute(read_and_prepare_sql("noderelation_rules_and_triggers.sql"))
conn.execute(read_and_prepare_sql("speedups.sql"))
def drop_functions(self, conn):
pass
def create_all(self):
from sqlalchemy import orm
orm.configure_mappers()
with self.engine.begin() as conn:
conn.execute("SET search_path TO " + self.metadata.schema)
self.create_tables(conn)
self.create_functions(conn)
def drop_all(self):
with self.engine.begin() as conn:
conn.execute("SET search_path TO " + self.metadata.schema)
self.drop_functions(conn)
self.drop_tables(conn)
def init_fulltext_search(self):
from core.database.postgres.setting import Setting
s = self.session
fulltext_autoindex_languages = get_fulltext_autoindex_languages()
if fulltext_autoindex_languages:
fulltext_autoindex_languages_setting = Setting(key=u"search.fulltext_autoindex_languages", value=list(fulltext_autoindex_languages))
s.merge(fulltext_autoindex_languages_setting)
attribute_autoindex_languages = get_attribute_autoindex_languages()
if attribute_autoindex_languages:
attribute_autoindex_languages_setting = Setting(key=u"search.attribute_autoindex_languages", value=list(attribute_autoindex_languages))
s.merge(attribute_autoindex_languages_setting)
s.commit()
def run_psql_command(self, command, output=False, database=None):
"""Executes a single SQL command via an external psql call.
Uses the connections options that are specified for the connector.
:param output: Return output from psql invocation?
:param database: override database name specified by connector configuration
"""
return self._run_psql(database, output, "-c", command)
def run_psql_file(self, filepath, output=False, database=None):
"""Executes a list of SQL statements from a file via an external psql call.
Uses the connections options that are specified for the connector.
:param output: Return output from psql invocation?
:param database: override database name specified by connector configuration
"""
return self._run_psql(database, output, "-f", filepath)
def _run_psql(self, database, output, *additional_args):
if database is None:
database = self.database
args = ["psql", "-tA", "-h", self.host, "-p", str(self.port), "-U", self.user, database]
args.extend(additional_args)
env = dict(os.environ, PGPASSWORD=self.passwd)
if output:
return utils.process.check_output(args, env=env)
else:
utils.process.check_call(args, env=env)
# test helpers
def disable_session_for_test(self):
"""Disables db.Session and closes the current session, preventing all session operations using db.session. Used for unit tests."""
self.Session.remove()
self._Session = self.Session
self.Session = disabled_scoped_session
def enable_session_for_test(self):
"""Reenables db.Session after disabling it with disable_session_for_test(),
allowing session operations using db.session. Used for unit tests."""
self.Session = self._Session
| gpl-3.0 | 7,418,163,225,976,108,000 | 41.713701 | 147 | 0.627759 | false |
wbinventor/openmc | examples/python/pincell_multigroup/build-xml.py | 1 | 7284 | import numpy as np
import openmc
import openmc.mgxs
###############################################################################
# Simulation Input File Parameters
###############################################################################
# OpenMC simulation parameters
batches = 100
inactive = 10
particles = 1000
###############################################################################
# Exporting to OpenMC mgxs.h5 file
###############################################################################
# Instantiate the energy group data
groups = openmc.mgxs.EnergyGroups(group_edges=[
1e-5, 0.0635, 10.0, 1.0e2, 1.0e3, 0.5e6, 1.0e6, 20.0e6])
# Instantiate the 7-group (C5G7) cross section data
uo2_xsdata = openmc.XSdata('UO2', groups)
uo2_xsdata.order = 0
uo2_xsdata.set_total(
[0.1779492, 0.3298048, 0.4803882, 0.5543674, 0.3118013, 0.3951678,
0.5644058])
uo2_xsdata.set_absorption([8.0248E-03, 3.7174E-03, 2.6769E-02, 9.6236E-02,
3.0020E-02, 1.1126E-01, 2.8278E-01])
scatter_matrix = np.array(
[[[0.1275370, 0.0423780, 0.0000094, 0.0000000, 0.0000000, 0.0000000, 0.0000000],
[0.0000000, 0.3244560, 0.0016314, 0.0000000, 0.0000000, 0.0000000, 0.0000000],
[0.0000000, 0.0000000, 0.4509400, 0.0026792, 0.0000000, 0.0000000, 0.0000000],
[0.0000000, 0.0000000, 0.0000000, 0.4525650, 0.0055664, 0.0000000, 0.0000000],
[0.0000000, 0.0000000, 0.0000000, 0.0001253, 0.2714010, 0.0102550, 0.0000000],
[0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.0012968, 0.2658020, 0.0168090],
[0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.0085458, 0.2730800]]])
scatter_matrix = np.rollaxis(scatter_matrix, 0, 3)
uo2_xsdata.set_scatter_matrix(scatter_matrix)
uo2_xsdata.set_fission([7.21206E-03, 8.19301E-04, 6.45320E-03,
1.85648E-02, 1.78084E-02, 8.30348E-02,
2.16004E-01])
uo2_xsdata.set_nu_fission([2.005998E-02, 2.027303E-03, 1.570599E-02,
4.518301E-02, 4.334208E-02, 2.020901E-01,
5.257105E-01])
uo2_xsdata.set_chi([5.8791E-01, 4.1176E-01, 3.3906E-04, 1.1761E-07, 0.0000E+00,
0.0000E+00, 0.0000E+00])
h2o_xsdata = openmc.XSdata('LWTR', groups)
h2o_xsdata.order = 0
h2o_xsdata.set_total([0.15920605, 0.412969593, 0.59030986, 0.58435,
0.718, 1.2544497, 2.650379])
h2o_xsdata.set_absorption([6.0105E-04, 1.5793E-05, 3.3716E-04,
1.9406E-03, 5.7416E-03, 1.5001E-02,
3.7239E-02])
scatter_matrix = np.array(
[[[0.0444777, 0.1134000, 0.0007235, 0.0000037, 0.0000001, 0.0000000, 0.0000000],
[0.0000000, 0.2823340, 0.1299400, 0.0006234, 0.0000480, 0.0000074, 0.0000010],
[0.0000000, 0.0000000, 0.3452560, 0.2245700, 0.0169990, 0.0026443, 0.0005034],
[0.0000000, 0.0000000, 0.0000000, 0.0910284, 0.4155100, 0.0637320, 0.0121390],
[0.0000000, 0.0000000, 0.0000000, 0.0000714, 0.1391380, 0.5118200, 0.0612290],
[0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.0022157, 0.6999130, 0.5373200],
[0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.0000000, 0.1324400, 2.4807000]]])
scatter_matrix = np.rollaxis(scatter_matrix, 0, 3)
h2o_xsdata.set_scatter_matrix(scatter_matrix)
mg_cross_sections_file = openmc.MGXSLibrary(groups)
mg_cross_sections_file.add_xsdatas([uo2_xsdata, h2o_xsdata])
mg_cross_sections_file.export_to_hdf5()
###############################################################################
# Exporting to OpenMC materials.xml file
###############################################################################
# Instantiate some Macroscopic Data
uo2_data = openmc.Macroscopic('UO2')
h2o_data = openmc.Macroscopic('LWTR')
# Instantiate some Materials and register the appropriate Macroscopic objects
uo2 = openmc.Material(material_id=1, name='UO2 fuel')
uo2.set_density('macro', 1.0)
uo2.add_macroscopic(uo2_data)
water = openmc.Material(material_id=2, name='Water')
water.set_density('macro', 1.0)
water.add_macroscopic(h2o_data)
# Instantiate a Materials collection and export to XML
materials_file = openmc.Materials([uo2, water])
materials_file.cross_sections = "./mgxs.h5"
materials_file.export_to_xml()
###############################################################################
# Exporting to OpenMC geometry.xml file
###############################################################################
# Instantiate ZCylinder surfaces
fuel_or = openmc.ZCylinder(surface_id=1, x0=0, y0=0, r=0.54, name='Fuel OR')
left = openmc.XPlane(surface_id=4, x0=-0.63, name='left')
right = openmc.XPlane(surface_id=5, x0=0.63, name='right')
bottom = openmc.YPlane(surface_id=6, y0=-0.63, name='bottom')
top = openmc.YPlane(surface_id=7, y0=0.63, name='top')
left.boundary_type = 'reflective'
right.boundary_type = 'reflective'
top.boundary_type = 'reflective'
bottom.boundary_type = 'reflective'
# Instantiate Cells
fuel = openmc.Cell(cell_id=1, name='cell 1')
moderator = openmc.Cell(cell_id=2, name='cell 2')
# Use surface half-spaces to define regions
fuel.region = -fuel_or
moderator.region = +fuel_or & +left & -right & +bottom & -top
# Register Materials with Cells
fuel.fill = uo2
moderator.fill = water
# Instantiate Universe
root = openmc.Universe(universe_id=0, name='root universe')
# Register Cells with Universe
root.add_cells([fuel, moderator])
# Instantiate a Geometry, register the root Universe, and export to XML
geometry = openmc.Geometry(root)
geometry.export_to_xml()
###############################################################################
# Exporting to OpenMC settings.xml file
###############################################################################
# Instantiate a Settings object, set all runtime parameters, and export to XML
settings_file = openmc.Settings()
settings_file.energy_mode = "multi-group"
settings_file.batches = batches
settings_file.inactive = inactive
settings_file.particles = particles
# Create an initial uniform spatial source distribution over fissionable zones
bounds = [-0.63, -0.63, -1, 0.63, 0.63, 1]
uniform_dist = openmc.stats.Box(bounds[:3], bounds[3:])
settings_file.source = openmc.source.Source(space=uniform_dist)
settings_file.export_to_xml()
###############################################################################
# Exporting to OpenMC tallies.xml file
###############################################################################
# Instantiate a tally mesh
mesh = openmc.Mesh(mesh_id=1)
mesh.type = 'regular'
mesh.dimension = [100, 100, 1]
mesh.lower_left = [-0.63, -0.63, -1.e50]
mesh.upper_right = [0.63, 0.63, 1.e50]
# Instantiate some tally Filters
energy_filter = openmc.EnergyFilter([1e-5, 0.0635, 10.0, 1.0e2, 1.0e3, 0.5e6,
1.0e6, 20.0e6])
mesh_filter = openmc.MeshFilter(mesh)
# Instantiate the Tally
tally = openmc.Tally(tally_id=1, name='tally 1')
tally.filters = [energy_filter, mesh_filter]
tally.scores = ['flux', 'fission', 'nu-fission']
# Instantiate a Tallies collection, register all Tallies, and export to XML
tallies_file = openmc.Tallies([tally])
tallies_file.export_to_xml()
| mit | 5,276,218,147,830,047,000 | 40.386364 | 86 | 0.587589 | false |
orchidinfosys/odoo | addons/sale/report/sale_report.py | 2 | 6005 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import tools
from openerp.osv import fields, osv
class sale_report(osv.osv):
_name = "sale.report"
_description = "Sales Orders Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.datetime('Date Order', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'product_uom_qty': fields.float('# of Qty', readonly=True),
'qty_delivered': fields.float('Qty Delivered', readonly=True),
'qty_to_invoice': fields.float('Qty To Invoice', readonly=True),
'qty_invoiced': fields.float('Qty Invoiced', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Price', readonly=True),
'price_subtotal': fields.float('Untaxed Total Price', readonly=True),
'product_tmpl_id': fields.many2one('product.template', 'Product Template', readonly=True),
'categ_id': fields.many2one('product.category','Product Category', readonly=True),
'nbr': fields.integer('# of Lines', readonly=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'team_id': fields.many2one('crm.team', 'Sales Team', readonly=True, oldname='section_id'),
'country_id': fields.many2one('res.country', 'Partner Country', readonly=True),
'commercial_partner_id': fields.many2one('res.partner', 'Commercial Entity', readonly=True),
'state': fields.selection([
('draft', 'Draft Quotation'),
('sent', 'Quotation Sent'),
('sale', 'Sales Order'),
('done', 'Sales Done'),
('cancel', 'Cancelled'),
], string='Status', readonly=True),
'weight': fields.float('Gross Weight', readonly=True),
'volume': fields.float('Volume', readonly=True),
}
_order = 'date desc'
def _select(self):
select_str = """
WITH currency_rate as (%s)
SELECT min(l.id) as id,
l.product_id as product_id,
t.uom_id as product_uom,
sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
sum(l.qty_delivered / u.factor * u2.factor) as qty_delivered,
sum(l.qty_invoiced / u.factor * u2.factor) as qty_invoiced,
sum(l.qty_to_invoice / u.factor * u2.factor) as qty_to_invoice,
sum(l.price_total * COALESCE(cr.rate, 1.0)) as price_total,
sum(l.price_subtotal * COALESCE(cr.rate, 1.0)) as price_subtotal,
count(*) as nbr,
s.date_order as date,
s.state as state,
s.partner_id as partner_id,
s.user_id as user_id,
s.company_id as company_id,
extract(epoch from avg(date_trunc('day',s.date_order)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
t.categ_id as categ_id,
s.pricelist_id as pricelist_id,
s.project_id as analytic_account_id,
s.team_id as team_id,
p.product_tmpl_id,
partner.country_id as country_id,
partner.commercial_partner_id as commercial_partner_id,
sum(p.weight * l.product_uom_qty / u.factor * u2.factor) as weight,
sum(p.volume * l.product_uom_qty / u.factor * u2.factor) as volume
""" % self.pool['res.currency']._select_companies_rates()
return select_str
def _from(self):
from_str = """
sale_order_line l
join sale_order s on (l.order_id=s.id)
join res_partner partner on s.partner_id = partner.id
left join product_product p on (l.product_id=p.id)
left join product_template t on (p.product_tmpl_id=t.id)
left join product_uom u on (u.id=l.product_uom)
left join product_uom u2 on (u2.id=t.uom_id)
left join product_pricelist pp on (s.pricelist_id = pp.id)
left join currency_rate cr on (cr.currency_id = pp.currency_id and
cr.company_id = s.company_id and
cr.date_start <= coalesce(s.date_order, now()) and
(cr.date_end is null or cr.date_end > coalesce(s.date_order, now())))
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY l.product_id,
l.order_id,
t.uom_id,
t.categ_id,
s.date_order,
s.partner_id,
s.user_id,
s.state,
s.company_id,
s.pricelist_id,
s.project_id,
s.team_id,
p.product_tmpl_id,
partner.country_id,
partner.commercial_partner_id
"""
return group_by_str
def init(self, cr):
# self._table = sale_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM ( %s )
%s
)""" % (self._table, self._select(), self._from(), self._group_by()))
| gpl-3.0 | -4,253,150,912,885,934,000 | 48.628099 | 143 | 0.531724 | false |
magicrub/MissionPlanner | Lib/site-packages/scipy/lib/blas/__init__.py | 57 | 2028 | #
# BLAS wrappers
#
from info import __doc__
__all__ = ['fblas','cblas','get_blas_funcs']
import fblas
import cblas
_use_force_cblas = 1
if hasattr(cblas, 'empty_module'):
    cblas = fblas
    _use_force_cblas = 0
elif hasattr(fblas, 'empty_module'):
    fblas = cblas

_type_conv = {'f':'s', 'd':'d', 'F':'c', 'D':'z'}  # 'd' will be default for 'i',..
_inv_type_conv = {'s':'f', 'd':'d', 'c':'F', 'z':'D'}


def get_blas_funcs(names, arrays=(), debug=0):
    """Return available BLAS function objects with names.

    arrays are used to determine the optimal prefix of
    BLAS routines."""
    ordering = []
    for i in range(len(arrays)):
        t = arrays[i].dtype.char
        if t not in _type_conv:
            t = 'd'
        ordering.append((t, i))
    if ordering:
        ordering.sort()
        required_prefix = _type_conv[ordering[0][0]]
    else:
        required_prefix = 'd'
    dtypechar = _inv_type_conv[required_prefix]
    # Default lookup:
    if ordering and arrays[ordering[0][1]].flags['FORTRAN']:
        # prefer Fortran code for leading array with column major order
        m1, m2 = fblas, cblas
    else:
        # in all other cases, C code is preferred
        m1, m2 = cblas, fblas
    funcs = []
    for name in names:
        if name == 'ger' and dtypechar in 'FD':
            name = 'gerc'
        elif name in ('dotc', 'dotu') and dtypechar in 'fd':
            name = 'dot'
        func_name = required_prefix + name
        if name == 'nrm2' and dtypechar == 'D':
            func_name = 'dznrm2'
        elif name == 'nrm2' and dtypechar == 'F':
            func_name = 'scnrm2'
        func = getattr(m1, func_name, None)
        if func is None:
            func = getattr(m2, func_name)
            func.module_name = m2.__name__.split('.')[-1]
        else:
            func.module_name = m1.__name__.split('.')[-1]
        func.prefix = required_prefix
        func.dtypechar = dtypechar
        funcs.append(func)
    return tuple(funcs)
from numpy.testing import Tester
test = Tester().test
| gpl-3.0 | -199,373,108,604,541,630 | 28.823529 | 82 | 0.562623 | false |
papapep/python | PFE_UMichigan/4_UsingDatabases/Week5/Geocoding/geoload.py | 1 | 1806 | import urllib
import sqlite3
import json
import time
import ssl
# If you are in China use this URL:
# serviceurl = "http://maps.google.cn/maps/api/geocode/json?"
#serviceurl = "http://maps.googleapis.com/maps/api/geocode/json?"
serviceurl = 'https://maps.googleapis.com/maps/api/place/textsearch/json?'
apikey ='YOUR API KEY'
# Deal with SSL certificate anomalies Python > 2.7
# scontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
scontext = None
conn = sqlite3.connect('geodata.sqlite')
cur = conn.cursor()
cur.execute('''
CREATE TABLE IF NOT EXISTS Locations (address TEXT, geodata TEXT)''')
fh = open("where.data")
count = 0
for line in fh:
    if count > 200: break
    address = line.strip()
    print ''
    cur.execute("SELECT geodata FROM Locations WHERE address= ?", (buffer(address), ))

    try:
        data = cur.fetchone()[0]
        print "Found in database ", address
        continue
    except:
        pass

    print 'Resolving', address
    url = serviceurl + urllib.urlencode({"sensor": "false", "query": address, "key": apikey})
    print 'Retrieving', url
    uh = urllib.urlopen(url, context=scontext)
    data = uh.read()
    print 'Retrieved', len(data), 'characters', data[:20].replace('\n', ' ')
    count = count + 1

    try:
        js = json.loads(str(data))
        # print js  # We print in case unicode causes an error
    except:
        continue

    if 'status' not in js or (js['status'] != 'OK' and js['status'] != 'ZERO_RESULTS'):
        print '==== Failure To Retrieve ===='
        print data
        break

    cur.execute('''INSERT INTO Locations (address, geodata)
        VALUES ( ?, ? )''', (buffer(address), buffer(data)))
    conn.commit()
    time.sleep(1)
print "Run geodump.py to read the data from the database so you can visualize it on a map."
| gpl-3.0 | -1,836,214,071,308,913,400 | 28.606557 | 91 | 0.638981 | false |
plowman/python-mcparseface | models/syntaxnet/tensorflow/tensorflow/contrib/learn/python/learn/preprocessing/categorical.py | 7 | 4053 | """Implements preprocesing transformers for categorical variables."""
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from . import categorical_vocabulary
from ..io.data_feeder import setup_processor_data_feeder
class CategoricalProcessor(object):
"""Maps documents to sequences of word ids.
As a common convention, Nan values are handled as unknown tokens.
Both float('nan') and np.nan are accepted.
Parameters:
min_frequency: Minimum frequency of categories in the vocabulary.
share: Share vocabulary between variables.
vocabularies: list of CategoricalVocabulary objects for each variable in
the input dataset.
Attributes:
vocabularies_: list of CategoricalVocabulary objects.
"""
def __init__(self, min_frequency=0, share=False, vocabularies=None):
self.min_frequency = min_frequency
self.share = share
self.vocabularies_ = vocabularies
def freeze(self, freeze=True):
"""Freeze or unfreeze all vocabularies.
Args:
freeze: Boolean, indicate if vocabularies should be frozen.
"""
for vocab in self.vocabularies_:
vocab.freeze(freeze)
def fit(self, X, unused_y=None):
"""Learn a vocabulary dictionary of all categories in X.
Args:
X: numpy matrix or iterable of lists/numpy arrays.
unused_y: to match fit format signature of estimators.
Returns:
self
"""
X = setup_processor_data_feeder(X)
for row in X:
# Create vocabularies if not given.
if self.vocabularies_ is None:
# If not share, one per column, else one shared across.
if not self.share:
self.vocabularies_ = [
categorical_vocabulary.CategoricalVocabulary() for _ in row
]
else:
vocab = categorical_vocabulary.CategoricalVocabulary()
self.vocabularies_ = [vocab for _ in row]
for idx, value in enumerate(row):
# Nans are handled as unknowns.
if (isinstance(value, float) and math.isnan(value)) or value == np.nan:
continue
self.vocabularies_[idx].add(value)
if self.min_frequency > 0:
for vocab in self.vocabularies_:
vocab.trim(self.min_frequency)
self.freeze()
return self
def fit_transform(self, X, unused_y=None):
"""Learn the vocabulary dictionary and return indexies of categories.
Args:
X: numpy matrix or iterable of lists/numpy arrays.
unused_y: to match fit_transform signature of estimators.
Returns:
X: iterable, [n_samples]. Category-id matrix.
"""
self.fit(X)
return self.transform(X)
def transform(self, X):
"""Transform documents to category-id matrix.
Converts categories to ids give fitted vocabulary from `fit` or
one provided in the constructor.
Args:
X: numpy matrix or iterable of lists/numpy arrays.
Returns:
X: iterable, [n_samples]. Category-id matrix.
"""
self.freeze()
X = setup_processor_data_feeder(X)
for row in X:
output_row = []
for idx, value in enumerate(row):
# Return <UNK> when it's Nan.
if (isinstance(value, float) and math.isnan(value)) or value == np.nan:
output_row.append(0)
continue
output_row.append(self.vocabularies_[idx].get(value))
yield np.array(output_row, dtype=np.int64)
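# Illustrative usage sketch: the helper name and toy rows below are
# hypothetical, and the relative imports above only resolve when this module
# is imported as part of its package. fit_transform yields one np.int64 id
# array per input row, with one category id per column.
def _example_usage():
  processor = CategoricalProcessor()
  toy_rows = [['red', 'small'], ['blue', 'small'], ['red', 'large']]
  for id_row in processor.fit_transform(toy_rows):
    print(id_row)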
| apache-2.0 | -2,279,777,209,195,272,400 | 31.685484 | 79 | 0.680237 | false |
alexston/calibre-webserver | src/calibre/ebooks/metadata/book/__init__.py | 9 | 5366 | #!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
'''
All fields must have a NULL value represented as None for simple types,
an empty list/dictionary for complex types and (None, None) for cover_data
'''
SOCIAL_METADATA_FIELDS = frozenset([
'tags', # Ordered list
'rating', # A floating point number between 0 and 10
'comments', # A simple HTML enabled string
'series', # A simple string
'series_index', # A floating point number
# Of the form { scheme1:value1, scheme2:value2}
# For example: {'isbn':'123456789', 'doi':'xxxx', ... }
'identifiers',
])
'''
The list of names that convert to identifiers when in get and set.
'''
TOP_LEVEL_IDENTIFIERS = frozenset([
'isbn',
])
PUBLICATION_METADATA_FIELDS = frozenset([
'title', # title must never be None. Should be _('Unknown')
# Pseudo field that can be set, but if not set is auto generated
# from title and languages
'title_sort',
'authors', # Ordered list. Must never be None, can be [_('Unknown')]
'author_sort_map', # Map of sort strings for each author
# Pseudo field that can be set, but if not set is auto generated
# from authors and languages
'author_sort',
'book_producer',
'timestamp', # Dates and times must be timezone aware
'pubdate',
'last_modified',
'rights',
# So far only known publication type is periodical:calibre
# If None, means book
'publication_type',
'uuid', # A UUID usually of type 4
'languages', # ordered list of languages in this publication
'publisher', # Simple string, no special semantics
# Absolute path to image file encoded in filesystem_encoding
'cover',
# Of the form (format, data) where format is, for e.g. 'jpeg', 'png', 'gif'...
'cover_data',
# Either thumbnail data, or an object with the attribute
# image_path which is the path to an image file, encoded
# in filesystem_encoding
'thumbnail',
])
BOOK_STRUCTURE_FIELDS = frozenset([
# These are used by code, Null values are None.
'toc', 'spine', 'guide', 'manifest',
])
USER_METADATA_FIELDS = frozenset([
# A dict of dicts similar to field_metadata. Each field description dict
# also contains a value field with the key #value#.
'user_metadata',
])
DEVICE_METADATA_FIELDS = frozenset([
'device_collections', # Ordered list of strings
'lpath', # Unicode, / separated
'size', # In bytes
'mime', # Mimetype of the book file being represented
])
CALIBRE_METADATA_FIELDS = frozenset([
'application_id', # An application id, currently set to the db_id.
'db_id', # the calibre primary key of the item.
'formats', # list of formats (extensions) for this book
# a dict of user category names, where the value is a list of item names
# from the book that are in that category
'user_categories',
# a dict of author to an associated hyperlink
'author_link_map',
]
)
ALL_METADATA_FIELDS = SOCIAL_METADATA_FIELDS.union(
PUBLICATION_METADATA_FIELDS).union(
BOOK_STRUCTURE_FIELDS).union(
USER_METADATA_FIELDS).union(
DEVICE_METADATA_FIELDS).union(
CALIBRE_METADATA_FIELDS)
# All fields except custom fields
STANDARD_METADATA_FIELDS = SOCIAL_METADATA_FIELDS.union(
PUBLICATION_METADATA_FIELDS).union(
BOOK_STRUCTURE_FIELDS).union(
DEVICE_METADATA_FIELDS).union(
CALIBRE_METADATA_FIELDS)
# Metadata fields that smart update must do special processing to copy.
SC_FIELDS_NOT_COPIED = frozenset(['title', 'title_sort', 'authors',
'author_sort', 'author_sort_map',
'cover_data', 'tags', 'languages',
'identifiers'])
# Metadata fields that smart update should copy only if the source is not None
SC_FIELDS_COPY_NOT_NULL = frozenset(['lpath', 'size', 'comments', 'thumbnail'])
# Metadata fields that smart update should copy without special handling
SC_COPYABLE_FIELDS = SOCIAL_METADATA_FIELDS.union(
PUBLICATION_METADATA_FIELDS).union(
BOOK_STRUCTURE_FIELDS).union(
DEVICE_METADATA_FIELDS).union(
CALIBRE_METADATA_FIELDS) - \
SC_FIELDS_NOT_COPIED.union(
SC_FIELDS_COPY_NOT_NULL)
SERIALIZABLE_FIELDS = SOCIAL_METADATA_FIELDS.union(
USER_METADATA_FIELDS).union(
PUBLICATION_METADATA_FIELDS).union(
CALIBRE_METADATA_FIELDS).union(
DEVICE_METADATA_FIELDS) - \
frozenset(['device_collections', 'formats',
'cover_data'])
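# Quick sanity sketch of the set algebra above (comments only; each membership
# test follows directly from the frozensets defined in this module):
#   'title' in STANDARD_METADATA_FIELDS -> True (a publication field)
#   'user_metadata' in STANDARD_METADATA_FIELDS -> False (custom fields excluded)
#   'formats' in SERIALIZABLE_FIELDS -> False (explicitly subtracted above)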
# these are rebuilt when needed
| gpl-3.0 | -954,920,378,011,277,400 | 39.044776 | 82 | 0.581439 | false |
HybridF5/nova | nova/tests/functional/api_sample_tests/test_cloudpipe.py | 8 | 3439 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid as uuid_lib
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
from nova.tests.unit.image import fake
CONF = cfg.CONF
CONF.import_opt('vpn_image_id', 'nova.cloudpipe.pipelib')
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class CloudPipeSampleTest(api_sample_base.ApiSampleTestBaseV21):
ADMIN_API = True
extension_name = "os-cloudpipe"
def _get_flags(self):
f = super(CloudPipeSampleTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.cloudpipe.Cloudpipe')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.cloudpipe_update.Cloudpipe_update')
return f
def setUp(self):
super(CloudPipeSampleTest, self).setUp()
def get_user_data(self, project_id):
"""Stub method to generate user data for cloudpipe tests."""
return "VVNFUiBEQVRB\n"
def network_api_get(self, context, network_uuid):
"""Stub to get a valid network and its information."""
return {'vpn_public_address': '127.0.0.1',
'vpn_public_port': 22}
self.stub_out('nova.cloudpipe.pipelib.CloudPipe.get_encoded_zip',
get_user_data)
self.stub_out('nova.network.api.API.get',
network_api_get)
def generalize_subs(self, subs, vanilla_regexes):
subs['project_id'] = '[0-9a-f-]+'
return subs
def test_cloud_pipe_create(self):
# Get api samples of cloud pipe extension creation.
self.flags(vpn_image_id=fake.get_valid_image_id())
subs = {'project_id': str(uuid_lib.uuid4().hex)}
response = self._do_post('os-cloudpipe', 'cloud-pipe-create-req',
subs)
subs['image_id'] = CONF.vpn_image_id
self._verify_response('cloud-pipe-create-resp', subs, response, 200)
return subs
def test_cloud_pipe_list(self):
# Get api samples of cloud pipe extension get request.
subs = self.test_cloud_pipe_create()
response = self._do_get('os-cloudpipe')
subs['image_id'] = CONF.vpn_image_id
self._verify_response('cloud-pipe-get-resp', subs, response, 200)
def test_cloud_pipe_update(self):
subs = {'vpn_ip': '192.168.1.1',
'vpn_port': '2000'}
response = self._do_put('os-cloudpipe/configure-project',
'cloud-pipe-update-req',
subs)
self.assertEqual(202, response.status_code)
self.assertEqual("", response.content)
| apache-2.0 | 5,554,966,729,186,697,000 | 38.528736 | 78 | 0.623437 | false |
fubecka/f5-dashboard | flask/lib/python2.6/site-packages/pbr/tests/test_setup.py | 20 | 15025 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import tempfile
try:
import cStringIO as io
BytesIO = io.StringIO
except ImportError:
import io
BytesIO = io.BytesIO
import fixtures
import testscenarios
from pbr import git
from pbr import packaging
from pbr.tests import base
class SkipFileWrites(base.BaseTestCase):
scenarios = [
('changelog_option_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_option_false',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_env_true',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('changelog_both_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=git.write_git_changelog, filename='ChangeLog')),
('authors_option_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_option_false',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_env_true',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=git.generate_authors, filename='AUTHORS')),
('authors_both_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=git.generate_authors, filename='AUTHORS')),
]
def setUp(self):
super(SkipFileWrites, self).setUp()
self.temp_path = self.useFixture(fixtures.TempDir()).path
self.root_dir = os.path.abspath(os.path.curdir)
self.git_dir = os.path.join(self.root_dir, ".git")
if not os.path.exists(self.git_dir):
self.skipTest("%s is missing; skipping git-related checks"
% self.git_dir)
return
self.filename = os.path.join(self.temp_path, self.filename)
self.option_dict = dict()
if self.option_key is not None:
self.option_dict[self.option_key] = ('setup.cfg',
self.option_value)
self.useFixture(
fixtures.EnvironmentVariable(self.env_key, self.env_value))
def test_skip(self):
self.pkg_func(git_dir=self.git_dir,
dest_dir=self.temp_path,
option_dict=self.option_dict)
self.assertEqual(
not os.path.exists(self.filename),
(self.option_value.lower() in packaging.TRUE_VALUES
or self.env_value is not None))
_changelog_content = """04316fe (review/monty_taylor/27519) Make python
378261a Add an integration test script.
3c373ac (HEAD, tag: 2013.2.rc2, tag: 2013.2, milestone-proposed) Merge "Lib
182feb3 (tag: 0.5.17) Fix pip invocation for old versions of pip.
fa4f46e (tag: 0.5.16) Remove explicit depend on distribute.
d1c53dd Use pip instead of easy_install for installation.
a793ea1 Merge "Skip git-checkout related tests when .git is missing"
6c27ce7 Skip git-checkout related tests when .git is missing
04984a5 Refactor hooks file.
a65e8ee (tag: 0.5.14, tag: 0.5.13) Remove jinja pin.
"""
class GitLogsTest(base.BaseTestCase):
def setUp(self):
super(GitLogsTest, self).setUp()
self.temp_path = self.useFixture(fixtures.TempDir()).path
self.root_dir = os.path.abspath(os.path.curdir)
self.git_dir = os.path.join(self.root_dir, ".git")
self.useFixture(
fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
self.useFixture(
fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG'))
def test_write_git_changelog(self):
self.useFixture(fixtures.FakePopen(lambda _: {
"stdout": BytesIO(_changelog_content.encode('utf-8'))
}))
git.write_git_changelog(git_dir=self.git_dir, dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
changelog_contents = ch_fh.read()
self.assertIn("2013.2", changelog_contents)
self.assertIn("0.5.17", changelog_contents)
self.assertIn("------", changelog_contents)
self.assertIn("Refactor hooks file", changelog_contents)
self.assertNotIn("Refactor hooks file.", changelog_contents)
self.assertNotIn("182feb3", changelog_contents)
self.assertNotIn("review/monty_taylor/27519", changelog_contents)
self.assertNotIn("0.5.13", changelog_contents)
self.assertNotIn('Merge "', changelog_contents)
def test_generate_authors(self):
author_old = u"Foo Foo <[email protected]>"
author_new = u"Bar Bar <[email protected]>"
co_author = u"Foo Bar <[email protected]>"
co_author_by = u"Co-authored-by: " + co_author
git_log_cmd = (
"git --git-dir=%s log --format=%%aN <%%aE>"
% self.git_dir)
git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir)
git_top_level = "git rev-parse --show-toplevel"
cmd_map = {
git_log_cmd: author_new,
git_co_log_cmd: co_author_by,
git_top_level: self.root_dir,
}
exist_files = [self.git_dir,
os.path.join(self.temp_path, "AUTHORS.in")]
self.useFixture(fixtures.MonkeyPatch(
"os.path.exists",
lambda path: os.path.abspath(path) in exist_files))
def _fake_run_shell_command(cmd, **kwargs):
return cmd_map[" ".join(cmd)]
self.useFixture(fixtures.MonkeyPatch(
"pbr.git._run_shell_command",
_fake_run_shell_command))
with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
auth_fh.write("%s\n" % author_old)
git.generate_authors(git_dir=self.git_dir, dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
authors = auth_fh.read()
self.assertTrue(author_old in authors)
self.assertTrue(author_new in authors)
self.assertTrue(co_author in authors)
class BuildSphinxTest(base.BaseTestCase):
scenarios = [
('true_autodoc_caps',
dict(has_opt=True, autodoc='True', has_autodoc=True)),
('true_autodoc_lower',
dict(has_opt=True, autodoc='true', has_autodoc=True)),
('false_autodoc',
dict(has_opt=True, autodoc='False', has_autodoc=False)),
('no_autodoc',
dict(has_opt=False, autodoc='False', has_autodoc=False)),
]
def setUp(self):
super(BuildSphinxTest, self).setUp()
self.useFixture(fixtures.MonkeyPatch(
"sphinx.setup_command.BuildDoc.run", lambda self: None))
from distutils import dist
self.distr = dist.Distribution()
self.distr.packages = ("fake_package",)
self.distr.command_options["build_sphinx"] = {
"source_dir": ["a", "."]}
pkg_fixture = fixtures.PythonPackage(
"fake_package", [("fake_module.py", b"")])
self.useFixture(pkg_fixture)
self.useFixture(base.DiveDir(pkg_fixture.base))
def test_build_doc(self):
if self.has_opt:
self.distr.command_options["pbr"] = {
"autodoc_index_modules": ('setup.cfg', self.autodoc)}
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.run()
self.assertTrue(
os.path.exists("api/autoindex.rst") == self.has_autodoc)
self.assertTrue(
os.path.exists(
"api/fake_package.fake_module.rst") == self.has_autodoc)
def test_builders_config(self):
if self.has_opt:
self.distr.command_options["pbr"] = {
"autodoc_index_modules": ('setup.cfg', self.autodoc)}
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.finalize_options()
self.assertEqual(2, len(build_doc.builders))
self.assertIn('html', build_doc.builders)
self.assertIn('man', build_doc.builders)
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.builders = ''
build_doc.finalize_options()
self.assertEqual('', build_doc.builders)
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.builders = 'man'
build_doc.finalize_options()
self.assertEqual(1, len(build_doc.builders))
self.assertIn('man', build_doc.builders)
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.builders = 'html,man,doctest'
build_doc.finalize_options()
self.assertIn('html', build_doc.builders)
self.assertIn('man', build_doc.builders)
self.assertIn('doctest', build_doc.builders)
class ParseRequirementsTest(base.BaseTestCase):
def setUp(self):
super(ParseRequirementsTest, self).setUp()
(fd, self.tmp_file) = tempfile.mkstemp(prefix='openstack',
suffix='.setup')
def test_parse_requirements_normal(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_git_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-e git://foo.com/zipball#egg=bar")
self.assertEqual(['bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_versioned_git_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-e git://foo.com/zipball#egg=bar-1.2.4")
self.assertEqual(['bar>=1.2.4'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_http_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("https://foo.com/zipball#egg=bar")
self.assertEqual(['bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_versioned_http_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("https://foo.com/zipball#egg=bar-4.2.1")
self.assertEqual(['bar>=4.2.1'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_removes_index_lines(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-f foobar")
self.assertEqual([], packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_override_with_env(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.useFixture(
fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
self.tmp_file))
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements())
def test_parse_requirements_override_with_env_multiple_files(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.useFixture(
fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
"no-such-file," + self.tmp_file))
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements())
def test_get_requirement_from_file_empty(self):
actual = packaging.get_reqs_from_files([])
self.assertEqual([], actual)
def test_parse_requirements_with_comments(self):
with open(self.tmp_file, 'w') as fh:
fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
self.assertEqual(['foobar', 'foobaz'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_python_version(self):
with open("requirements-py%d.txt" % sys.version_info[0],
"w") as fh:
fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
self.assertEqual(['foobar', 'foobaz'],
packaging.parse_requirements())
def test_parse_requirements_right_python_version(self):
with open("requirements-py1.txt", "w") as fh:
fh.write("thisisatrap")
with open("requirements-py%d.txt" % sys.version_info[0],
"w") as fh:
fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
self.assertEqual(['foobar', 'foobaz'],
packaging.parse_requirements())
class ParseDependencyLinksTest(base.BaseTestCase):
def setUp(self):
super(ParseDependencyLinksTest, self).setUp()
(fd, self.tmp_file) = tempfile.mkstemp(prefix="openstack",
suffix=".setup")
def test_parse_dependency_normal(self):
with open(self.tmp_file, "w") as fh:
fh.write("http://test.com\n")
self.assertEqual(
["http://test.com"],
packaging.parse_dependency_links([self.tmp_file]))
def test_parse_dependency_with_git_egg_url(self):
with open(self.tmp_file, "w") as fh:
fh.write("-e git://foo.com/zipball#egg=bar")
self.assertEqual(
["git://foo.com/zipball#egg=bar"],
packaging.parse_dependency_links([self.tmp_file]))
def load_tests(loader, in_tests, pattern):
return testscenarios.load_tests_apply_scenarios(loader, in_tests, pattern)
| apache-2.0 | 6,605,694,283,085,214,000 | 39.173797 | 78 | 0.602995 | false |
xijunlee/leetcode | 39.py | 1 | 1578 | #!/usr/bin/env python
# coding=utf-8
import copy
class Solution(object):
def combinationSum(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
ret = []
def dfs(sum, step, stack):
if step>=len(candidates) or sum>target:
return
if sum == target:
flag = True
for s in ret:
if s == stack:
flag = False
break
if flag == True: ret.append(copy.deepcopy(stack))
return
if sum + candidates[step] < target:
stack.append(candidates[step])
dfs(sum+candidates[step],step,stack)
dfs(sum+candidates[step],step+1,stack)
stack.pop()
dfs(sum,step+1,stack)
return
candidates.sort()
dfs(0,0,[])
return ret
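# Note: the backtracking version above only recurses while the running sum
# stays strictly below target, so for a positive target it never reaches a
# state with sum == target and returns an empty list. The redefinition below
# shadows it and is the version actually exercised by the __main__ block at
# the bottom of this file.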
class Solution(object):
def combinationSum(self, candidates, target):
res = []
candidates.sort()
self.dfs(candidates, target, 0, [], res)
return res
def dfs(self, nums, target, index, path, res):
if target < 0:
return # backtracking
if target == 0:
res.append(path)
return
for i in xrange(index, len(nums)):
self.dfs(nums, target-nums[i], i, path+[nums[i]], res)
if __name__ == '__main__':
s = Solution()
print s.combinationSum([2, 3, 6, 7], 7) | mit | -3,387,380,965,697,453,000 | 26.701754 | 66 | 0.479087 | false |
log2timeline/dfvfs | tests/resolver_helpers/qcow_resolver_helper.py | 2 | 1443 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the QCOW image resolver helper implementation."""
import unittest
from dfvfs.lib import definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver_helpers import qcow_resolver_helper
from tests.resolver_helpers import test_lib
class QCOWResolverHelperTest(test_lib.ResolverHelperTestCase):
"""Tests for the QCOW image resolver helper implementation."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
super(QCOWResolverHelperTest, self).setUp()
test_path = self._GetTestFilePath(['ext2.qcow2'])
self._SkipIfPathNotExists(test_path)
test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_OS, location=test_path)
self._qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_QCOW, parent=test_os_path_spec)
def testNewFileObject(self):
"""Tests the NewFileObject function."""
resolver_helper_object = qcow_resolver_helper.QCOWResolverHelper()
self._TestNewFileObject(resolver_helper_object, self._qcow_path_spec)
def testNewFileSystem(self):
"""Tests the NewFileSystem function."""
resolver_helper_object = qcow_resolver_helper.QCOWResolverHelper()
self._TestNewFileSystemRaisesNotSupported(
resolver_helper_object, self._qcow_path_spec)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -6,156,833,488,490,423,000 | 33.357143 | 73 | 0.737353 | false |
mapeveri/django-endless-pagination-angular | endless_pagination/tests/integration/__init__.py | 9 | 5479 | """Integration tests base objects definitions."""
from __future__ import unicode_literals
from contextlib import contextmanager
import os
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.test import LiveServerTestCase
from django.utils import unittest
from selenium.common import exceptions
from selenium.webdriver import Firefox
from selenium.webdriver.support import ui
from xvfbwrapper.xvfbwrapper import Xvfb
from endless_pagination.utils import PYTHON3
SHOW_BROWSER = os.getenv('SHOW_BROWSER', False)
SKIP_SELENIUM = os.getenv('SKIP_SELENIUM', False)
# FIXME: do not exclude integration tests on Python3 once Selenium is updated
# (bug #17).
tests_are_run = not (PYTHON3 or SKIP_SELENIUM)
def setup_package():
"""Set up the Selenium driver once for all tests."""
# Just skipping *setup_package* and *teardown_package* generates an
# uncaught exception under Python 2.6.
if tests_are_run:
if not SHOW_BROWSER:
# Perform all graphical operations in memory.
vdisplay = SeleniumTestCase.vdisplay = Xvfb(width=1280, height=720)
vdisplay.start()
# Create a Selenium browser instance.
selenium = SeleniumTestCase.selenium = Firefox()
SeleniumTestCase.wait = ui.WebDriverWait(selenium, 10)
def teardown_package():
"""Quit the Selenium driver."""
if tests_are_run:
SeleniumTestCase.selenium.quit()
if not SHOW_BROWSER:
SeleniumTestCase.vdisplay.stop()
# FIXME: do not exclude integration tests on Python3 once Selenium is updated
# (bug #17).
@unittest.skipIf(
PYTHON3,
'excluding integration tests: Python 3 tests are still not supported.')
@unittest.skipIf(
SKIP_SELENIUM,
'excluding integration tests: environment variable SKIP_SELENIUM is set.')
class SeleniumTestCase(LiveServerTestCase):
"""Base test class for integration tests."""
PREVIOUS = '<'
NEXT = '>'
MORE = 'More results'
def setUp(self):
self.url = self.live_server_url + reverse(self.view_name)
def get(self, url=None, data=None, **kwargs):
"""Load a web page in the current browser session.
If *url* is None, *self.url* is used.
The querydict can be expressed providing *data* or *kwargs*.
"""
if url is None:
url = self.url
querydict = QueryDict('', mutable=True)
if data is not None:
querydict.update(data)
querydict.update(kwargs)
path = '{0}?{1}'.format(url, querydict.urlencode())
return self.selenium.get(path)
def wait_ajax(self):
"""Wait for the document to be ready."""
def document_ready(driver):
script = """
return (
document.readyState === 'complete' &&
jQuery.active === 0
);
"""
return driver.execute_script(script)
self.wait.until(document_ready)
return self.wait
def click_link(self, text, index=0):
"""Click the link with the given *text* and *index*."""
link = self.selenium.find_elements_by_link_text(str(text))[index]
link.click()
return link
def scroll_down(self):
"""Scroll down to the bottom of the page."""
script = 'window.scrollTo(0, document.body.scrollHeight);'
self.selenium.execute_script(script)
def get_current_elements(self, class_name, driver=None):
"""Return the range of current elements as a list of numbers."""
elements = []
selector = 'div.{0} > h4'.format(class_name)
if driver is None:
driver = self.selenium
for element in driver.find_elements_by_css_selector(selector):
elements.append(int(element.text.split()[1]))
return elements
def asserLinksEqual(self, count, text):
"""Assert the page contains *count* links with given *text*."""
links = self.selenium.find_elements_by_link_text(str(text))
self.assertEqual(count, len(links))
def assertElements(self, class_name, elements):
"""Assert the current page contains the given *elements*."""
current_elements = self.get_current_elements(class_name)
self.assertSequenceEqual(
elements, current_elements, (
'Elements differ: {expected} != {actual}\n'
'Class name: {class_name}\n'
'Expected elements: {expected}\n'
'Actual elements: {actual}'
).format(
actual=current_elements,
expected=elements,
class_name=class_name,
)
)
@contextmanager
def assertNewElements(self, class_name, new_elements):
"""Fail when new elements are not found in the page."""
def new_elements_loaded(driver):
elements = self.get_current_elements(class_name, driver=driver)
return elements == new_elements
yield
try:
self.wait_ajax().until(new_elements_loaded)
except exceptions.TimeoutException:
self.assertElements(class_name, new_elements)
@contextmanager
def assertSameURL(self):
"""Assert the URL does not change after executing the yield block."""
current_url = self.selenium.current_url
yield
self.wait_ajax()
self.assertEqual(current_url, self.selenium.current_url)
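    # Hypothetical usage from a concrete subclass (the 'object' class name and
    # element numbers below are illustrative, not defined in this module):
    #
    #     with self.assertSameURL(), self.assertNewElements('object', [4, 5, 6]):
    #         self.click_link(self.MORE)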
| mit | -6,592,669,038,139,208,000 | 34.577922 | 79 | 0.632232 | false |
rob-smallshire/asq | docs/source/conf.py | 1 | 7442 | # -*- coding: utf-8 -*-
#
# asq documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 17 19:12:20 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
import asq
# on_rtd is whether we are on readthedocs.org
import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'cartouche']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'asq'
copyright = '2015, Sixty North'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = asq.__version__
# The full version, including alpha/beta/rc tags.
release = asq.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'images/asq_logo_150.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'asqdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'asq.tex', 'asq Documentation',
'Sixty North', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'asq', 'asq Documentation',
['Sixty North'], 1)
]
| mit | 5,043,088,478,161,745,000 | 31.356522 | 82 | 0.707068 | false |
doug-wade/AlgorithmsGreatestHits | graph_algorithms/topological_sort.py | 2 | 1397 | from collections import deque
def depth_first_search(graph, start, path, explored):
"""
    Performs a depth-first search from start. Once a node has no unexplored
    successors left, it is prepended to the path deque, so the deque collects
    a topological ordering of the graph.
"""
explored |= {start}
if start in graph.keys():
for x in graph[start]:
if x not in explored:
depth_first_search(graph, x, path, explored)
path.appendleft(start)
def topological_sort(graph):
"""
    Performs a topological sort on a directed acyclic graph. Returns a deque
    whose left-to-right order is a valid topological ordering of the nodes.
"""
explored = set()
path = deque()
for key in graph.keys():
if key not in explored:
depth_first_search(graph, key, path, explored)
return path
def get_graph_from_file(file_path, isReversed=False):
"""
    Returns an adjacency list for a graph represented in a text file, where
    each line of the text file defines an arc pointing from the first element
    to the second element (delimited by a space). If isReversed is True, each
    arc is reversed.
"""
f = open(file_path)
if isReversed == False:
i,j = 0,1
else:
i,j = 1,0
graph = {}
for l in f:
tempArr = l.rstrip().split(' ')
k = tempArr[i]
v = tempArr[j]
if k in graph:
graph[k].append(v)
else:
graph[k] = [v]
return graph
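# Illustrative usage: a small DAG given inline (hypothetical data) instead of
# being read from a file with get_graph_from_file.
if __name__ == '__main__':
    sample_graph = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d']}
    order = topological_sort(sample_graph)
    # 'a' precedes 'b' and 'c', and both precede 'd' in any returned ordering.
    print(list(order))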
| mit | 2,767,040,236,139,109,000 | 28.104167 | 78 | 0.595562 | false |
bartoldeman/easybuild-easyconfigs | easybuild/easyconfigs/l/LAMMPS/lammps_vs_yaff_test_single_point_energy.py | 6 | 109971 | #!/usr/bin/env python
#
# Correctness test for LAMMPS, comparing against the result produced by yaff;
# the single-point energy is calculated with both LAMMPS and yaff and the two results are compared.
# If the energy difference exceeds 1 kJ/mol, an error is raised.
# It should run to completion in a matter of seconds.
#
# If this test passes, there are no guarantees that the installation works fully correctly.
# But if it fails, it's a strong signal that there is something wrong with the (LAMMPS) installation.
#
# author: Veronique Van Speybroeck (Center for Molecular Modeling, Ghent University), April 2020
import numpy as np
import os
from mpi4py import MPI
from yaff import log, System, angstrom, ForceField, swap_noncovalent_lammps
from molmod import kjmol
import tempfile
def main():
# create input files in temporary directory
tmpdir = tempfile.mkdtemp()
init_chk = os.path.join(tmpdir, 'init.chk')
with open(init_chk, 'w') as fp:
fp.write(INIT_CHK)
pars_txt = os.path.join(tmpdir, 'pars.txt')
with open(pars_txt, 'w') as fp:
fp.write(PARS_TXT)
# Setup MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
# Set random seed, important to get the same velocities for all processes
np.random.seed(5)
    # Turn off logging for all processes; it can be turned on for one selected process later on
log.set_level(log.silent)
if rank == 0:
log.set_level(log.medium)
# Load in the structure and the force field
system = System.from_file(init_chk)
# Initialize the Yaff and LAMMPS force fields
rcut = 12 * angstrom
ff_yaff = ForceField.generate(system, pars_txt, rcut=rcut, smooth_ei=False,
gcut_scale=1.5, alpha_scale=3.2) # , tailcorrections=True)
ff = swap_noncovalent_lammps(ff_yaff, fn_system='lammps.dat', fn_log="log.lammps",
suffix='', fn_table='lammps_smoothei2.table', comm=comm)
# Print out the Yaff single-point energy
print('Yaff energy')
energy_yaff = ff_yaff.compute()
print(energy_yaff / kjmol)
for part in ff_yaff.parts:
print('%s: %.3f kJ/mol' % (part.name, part.energy / kjmol))
# Print out the LAMMPS single-point energy
print('LAMMPS energy')
energy_lammps = ff.compute()
print(energy_lammps / kjmol)
for part in ff.parts:
print('%s: %.3f kJ/mol' % (part.name, part.energy / kjmol))
assert np.abs(energy_yaff - energy_lammps) < 1 * kjmol, "The two energies are not the same"
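# Assumed invocation (not stated in the header above): the script builds on
# mpi4py and hands the world communicator to the LAMMPS part, so it is
# presumably meant to run both serially (python <script>.py) and under MPI
# (e.g. mpirun -np 2 python <script>.py).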
INIT_CHK = """bonds kind=intar 592,2
52 14 74 14
130 14 4 52
4 92 112 4
152 71 152 112
176 74 176 127
92 180 130 180
200 176 224 180
152 228 52 348
11 52 11 71
11 127 53 15
75 15 131 15
53 5 93 5
113 5 153 70
153 113 177 75
177 126 181 93
131 181 201 177
225 181 153 229
349 53 10 53
10 70 10 126
12 54 72 12
128 12 6 54
6 94 114 6
154 69 154 114
72 178 178 125
94 182 128 182
202 178 226 182
154 230 54 350
9 54 9 69
9 125 8 41
56 8 8 148
56 165 107 165
173 103 145 173
185 97 185 148
185 213 221 165
233 173 353 41
41 3 41 21
97 21 3 103
3 107 145 21
0 36 19 36
36 22 48 3
48 16 64 16
3 79 3 119
16 140 48 13
81 13 121 13
64 156 156 119
168 81 168 140
196 79 121 196
168 204 216 196
156 244 344 48
240 277 236 301
273 204 216 313
249 301 313 277
273 341 361 249
385 273 389 277
413 301 425 313
453 341 249 220
212 341 3 39
16 39 21 39
42 11 11 59
11 151 59 166
104 166 100 174
146 174 186 98
186 151 186 214
166 222 234 174
354 42 0 42
42 22 98 22
0 100 104 0
146 22 241 276
300 237 272 205
312 217 248 300
312 276 272 340
360 248 384 272
276 388 300 412
424 312 340 452
248 221 340 213
17 47 20 47
17 61 88 20
17 133 136 20
6 47 6 86
6 110 163 61
163 110 86 191
136 191 88 195
195 133 211 195
163 239 243 191
47 359 209 282
241 330 262 310
282 318 330 270
262 374 270 382
394 282 310 422
318 430 442 330
229 262 201 318
217 270 245 310
2 31 9 31
12 31 225 285
305 229 257 245
333 205 289 213
337 233 305 257
289 333 337 285
369 257 397 285
401 289 417 305
445 333 449 337
252 238 210 324
264 202 320 226
296 252 320 292
264 324 252 364
376 264 292 404
408 296 432 320
324 436 234 292
296 222 19 45
45 22 19 63
90 22 19 135
138 22 4 45
4 84 4 108
161 63 161 108
84 189 138 189
193 90 193 135
209 193 161 237
241 189 357 45
27 15 27 5
18 27 25 13
25 7 16 25
1 37 18 37
37 23 242 279
238 303 275 206
218 315 251 303
315 279 275 343
363 251 275 387
279 391 303 415
315 427 343 455
251 222 214 343
227 287 307 231
259 247 207 335
291 215 339 235
259 307 291 335
339 287 259 371
287 399 291 403
307 419 335 447
339 451 280 211
328 243 260 308
280 316 328 268
260 372 268 380
392 280 308 420
316 428 440 328
260 231 203 316
219 268 308 247
226 286 306 230
258 246 206 334
290 214 338 234
306 258 290 334
338 286 370 258
286 398 402 290
418 306 334 446
450 338 49 2
49 17 65 17
2 78 2 118
17 141 49 12
80 12 120 12
65 157 157 118
80 169 169 141
197 78 120 197
169 205 217 197
245 157 345 49
16 46 21 46
16 60 89 21
16 132 137 21
46 7 7 87
7 111 162 60
162 111 190 87
137 190 89 194
194 132 210 194
162 238 242 190
46 358 253 239
211 325 265 203
321 227 297 253
321 293 265 325
365 253 377 265
405 293 409 297
433 321 437 325
235 293 297 223
1 50 50 18
66 18 1 77
1 117 18 142
50 15 83 15
123 15 66 158
117 158 170 83
170 142 77 198
123 198 170 206
218 198 158 246
346 50 208 283
240 331 263 311
283 319 331 271
263 375 271 383
283 395 311 423
319 431 331 443
228 263 200 319
216 271 244 311
26 14 26 4
26 19 18 44
44 23 18 62
91 23 18 134
139 23 44 5
85 5 109 5
160 62 160 109
188 85 139 188
192 91 192 134
208 192 160 236
240 188 44 356
34 10 34 23
34 5 281 210
329 242 309 261
281 317 329 269
373 261 381 269
393 281 421 309
429 317 441 329
261 230 202 317
218 269 309 246
243 278 302 239
274 207 314 219
250 302 314 278
274 342 362 250
386 274 278 390
302 414 426 314
342 454 250 223
342 215 224 284
304 228 256 244
204 332 288 212
336 232 304 256
288 332 336 284
368 256 284 396
400 288 416 304
332 444 448 336
237 255 209 327
201 267 225 323
299 255 323 295
267 327 255 367
267 379 295 407
299 411 323 435
327 439 233 295
299 221 33 9
33 20 33 6
40 9 57 9
9 149 57 164
106 164 172 102
144 172 184 96
184 149 184 212
164 220 232 172
352 40 40 2
40 20 96 20
2 102 106 2
144 20 0 29
11 29 29 14
0 51 19 51
19 67 0 76
0 116 19 143
51 14 82 14
122 14 67 159
116 159 82 171
171 143 76 199
122 199 171 207
219 199 159 247
51 347 2 38
17 38 20 38
10 43 58 10
10 150 58 167
105 167 101 175
147 175 99 187
187 150 187 215
167 223 235 175
43 355 1 43
43 23 99 23
1 101 105 1
147 23 32 8
32 21 32 7
13 55 73 13
129 13 7 55
7 95 115 7
155 68 115 155
73 179 179 124
95 183 129 183
203 179 227 183
155 231 55 351
8 55 8 68
8 124 3 30
8 30 13 30
236 254 208 326
200 266 224 322
298 254 322 294
266 326 254 366
378 266 294 406
410 298 434 322
326 438 232 294
298 220 11 35
35 22 35 4
24 12 24 6
24 17 1 28
10 28 28 15
charges kind=fltar 456
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00 2.361566205200000e+00
-1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00
-1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00
-1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00
-1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00 -1.093870316700000e+00
-1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00
-1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00
-1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00
-1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00 -1.055042603900000e+00
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
-6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01 -6.831393458000000e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01 7.815150188000001e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01 -1.544640324000000e-01
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
-8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02 -8.298140200000001e-02
4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01
4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01
4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01
4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01 4.906092530000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01 1.385876479000000e-01
dipoles kind=none None
ffatype_ids kind=intar 456
0 0 0 0
0 0 0 0
0 0 0 0
0 0 0 0
0 0 0 0
0 0 0 0
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
2 2 2 2
2 2 2 2
2 2 2 2
2 2 2 2
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
3 3 3 3
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
4 4 4 4
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
5 5 5 5
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
7 7 7 7
7 7 7 7
7 7 7 7
7 7 7 7
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
ffatypes kind=strar 9
ZR O_OX O_OH O_CA
C_CA C_PC C_PH H_OH
H_PH
masses kind=none None
numbers kind=intar 456
40 40 40 40
40 40 40 40
40 40 40 40
40 40 40 40
40 40 40 40
40 40 40 40
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
8 8 8 8
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
6 6 6 6
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
1 1 1 1
pos kind=fltar 456,3
3.490725830842442e+01 1.989736115878592e+01 1.989640487320012e+01 3.490808649393055e+01
3.979513994951059e+01 -1.300625055309490e-03 1.501055756938832e+01 1.989535638139978e+01
1.246179555604761e-03 1.501137588505900e+01 3.979320094458823e+01 1.989720602732817e+01
4.874308149304886e+00 1.989583090118305e+01 1.989787247092695e+01 4.874894653776321e+00
3.979257955564209e+01 5.968951244624455e-04 2.477156000121577e+01 1.989794132834076e+01
-6.649172855657153e-04 2.477213204653341e+01 3.979471652734251e+01 1.989575923404384e+01
1.989201946003435e+01 3.491390226935287e+01 1.989723959501853e+01 1.989176072681321e+01
1.501673299775960e+01 -1.129256244541729e-03 3.978764801128609e+01 3.491304735065996e+01
1.057641614954819e-03 3.978733906375515e+01 1.501590858399707e+01 1.989643443478721e+01
1.989021170080069e+01 1.989797529016069e+01 3.491309701053401e+01 1.989131281056547e+01
3.979348778678292e+01 1.501588819750306e+01 3.978803993526803e+01 1.989696868977292e+01
1.501699610198728e+01 3.978913215770860e+01 3.979262010168185e+01 3.491343905174566e+01
1.989046538478880e+01 4.880841585670589e+00 1.989494772283481e+01 1.989057374713003e+01
2.477729070125715e+01 4.719741553780736e-04 3.978878868209046e+01 4.880309341815733e+00
-4.301581744627571e-04 3.978892228577907e+01 2.477678835910098e+01 1.989859264118343e+01
1.989148884038907e+01 1.989658031878211e+01 4.880302130079328e+00 1.989302042109499e+01
3.979559229700724e+01 2.477684310638939e+01 3.978633008797122e+01 1.989499819501557e+01
2.477750655215571e+01 3.978786594705887e+01 3.979386468015720e+01 4.880389304875714e+00
2.202461643802651e+01 2.203189112311701e+01 3.765944899136999e+01 2.202531036798885e+01
2.134740604106842e+00 1.776133298387446e+01 2.129765527659656e+00 2.203148419271726e+01
1.776286517993534e+01 2.130436098341955e+00 2.134407460468690e+00 3.765846622785212e+01
3.765364101627898e+01 3.765888318746777e+01 3.765879402579836e+01 3.765331908779535e+01
1.776255570111377e+01 1.776226963765306e+01 1.775666476589178e+01 3.765859689708251e+01
1.776200250148918e+01 1.775636570738458e+01 1.776230133401189e+01 3.765909028320628e+01
2.202672939433552e+01 3.766037150215158e+01 2.203097986183339e+01 2.202631608352776e+01
1.776234599247950e+01 2.134213937499734e+00 2.128786365030469e+00 3.765879446242553e+01
2.135086538364289e+00 2.128334805551631e+00 1.776082738113674e+01 2.203203177401806e+01
3.765329158533921e+01 2.203073913533792e+01 2.203181153117562e+01 3.765410410304322e+01
2.134932100735816e+00 2.133688919571228e+00 1.775586776786592e+01 2.203127523481187e+01
2.135670833274500e+00 1.775671455087363e+01 2.135559199454761e+00 2.203111303100527e+01
1.762577734966588e+01 1.763000570487011e+01 2.266330822679377e+00 1.762650624498306e+01
3.752875276030682e+01 2.216275038100298e+01 3.752050766793352e+01 1.762948080272583e+01
2.216322617063571e+01 3.752126841974270e+01 3.752811262536633e+01 2.265957839948938e+00
2.261537912485077e+00 2.265729289858605e+00 2.265998740387887e+00 2.260329120181068e+00
2.216117704414238e+01 2.216406386780314e+01 2.215772730325949e+01 2.268087772864094e+00
2.216185216065963e+01 2.215650720606797e+01 2.216346869842136e+01 2.266326751780221e+00
1.762407035216446e+01 2.265842029938293e+00 1.763000274242215e+01 1.762402526074742e+01
2.216292137234051e+01 3.752887572170109e+01 3.752297244443796e+01 2.266345156290114e+00
3.752607068387407e+01 3.752292973172899e+01 2.216340736083256e+01 1.763120267852185e+01
2.260410474901676e+00 1.763088489134654e+01 1.763105821224282e+01 2.261022444448104e+00
3.752716039127341e+01 3.752806178485923e+01 2.215703687890538e+01 1.763106712433468e+01
3.752683455326825e+01 2.215763882359795e+01 3.752726086333593e+01 1.763026158731759e+01
1.612738088836549e+01 3.302533778908620e+01 2.003984794891178e+01 1.612731613417120e+01
1.312766242903430e+01 1.393784446245362e-01 3.602237942245713e+01 3.302688557668290e+01
1.336174039388602e-01 3.602227098524808e+01 1.312927418068616e+01 2.002680785221010e+01
2.365498542535781e+01 6.769074757939987e+00 2.002635382191530e+01 2.365500685717470e+01
2.666617394718741e+01 1.412707297290573e-01 3.760418659459016e+00 6.766650584719018e+00
1.318037843702247e-01 3.760457177349364e+00 2.666375182178951e+01 2.004022690334620e+01
1.612528143643354e+01 6.767097824203487e+00 1.976273718790822e+01 1.612523916539509e+01
2.666352562681059e+01 3.966262386478711e+01 3.602441305318901e+01 6.769318338518314e+00
3.965201139308716e+01 3.602440379697210e+01 2.666578382452883e+01 1.975787140941610e+01
2.365671567575621e+01 3.302657742274238e+01 1.976444893991181e+01 2.365670486219636e+01
1.312989855683247e+01 3.966000342369477e+01 3.758748749953809e+00 3.302470531337752e+01
3.965437653497323e+01 3.758723999249336e+00 1.312801586164986e+01 1.975637385138161e+01
2.002184863407880e+01 1.613346505188802e+01 3.302473883007246e+01 2.002197289124604e+01
3.602810468126447e+01 1.312926947232245e+01 1.367082338189690e-01 1.613261305267060e+01
1.312846150972057e+01 1.368852196166233e-01 3.602742198898699e+01 3.302655767808238e+01
3.302040837247949e+01 2.004004219509145e+01 1.613126321395754e+01 3.302171468849843e+01
1.339550457157747e-01 3.602721125685146e+01 1.312168471554626e+01 2.003596460621149e+01
3.603032797522945e+01 1.312300101336320e+01 1.299388364031086e-01 1.613262606149094e+01
1.974570901927019e+01 2.366325071637388e+01 3.302633042641153e+01 1.975025798565546e+01
3.764227587050050e+00 1.312719456401017e+01 3.965627467935057e+01 2.366228684834167e+01
1.313040914493622e+01 3.966058902913779e+01 3.763443724451319e+00 3.302506543319744e+01
6.761061605966100e+00 2.002909549751064e+01 2.366299714003316e+01 6.762419925076353e+00
1.306631168260614e-01 3.765354530141899e+00 2.665923351994980e+01 2.003919387995704e+01
3.764283077950346e+00 2.666057056535371e+01 1.409459540052401e-01 2.366032672794897e+01
2.002276797307826e+01 2.366128056142727e+01 6.768489650517730e+00 2.002335749192145e+01
3.767118915544936e+00 2.666318227829811e+01 1.353561089701583e-01 2.365954347220269e+01
2.666633018845635e+01 1.359144051724433e-01 3.765186960268605e+00 6.767508539746105e+00
6.762372935307679e+00 1.976218024753378e+01 1.613337749116571e+01 6.763322579988857e+00
3.965965385602520e+01 3.602973569058194e+01 2.665831602525251e+01 1.975794207783412e+01
3.602778715627186e+01 2.665929230927920e+01 3.965505961599392e+01 1.613053989336842e+01
1.975816760699984e+01 1.613154259519901e+01 6.766902427345431e+00 1.974859444413245e+01
3.603079508399954e+01 2.666528581190256e+01 3.965774147924260e+01 1.613010897504268e+01
2.666432519015519e+01 3.964836314388970e+01 3.602918489878213e+01 6.768965429210017e+00
3.301905972503401e+01 1.976430008312319e+01 2.366060689044069e+01 3.302006072637284e+01
3.966224338434107e+01 3.763234929262723e+00 1.312334670151796e+01 1.975504589289441e+01
3.766416647857637e+00 1.312434492210792e+01 3.965324384079992e+01 2.366270507752447e+01
3.301950714223298e+01 1.613276804758171e+01 2.002645820182657e+01 3.301986897404555e+01
3.603051998232966e+01 1.314988135041166e-01 1.312358457612722e+01 1.613025759663716e+01
1.415055264284224e-01 1.312390868917699e+01 3.602808549953281e+01 2.004013952859266e+01
6.763290683819170e+00 2.366025326910892e+01 2.003956523794497e+01 6.763158961257040e+00
3.763205542795933e+00 1.318388397269749e-01 2.665850003634340e+01 2.366337918885094e+01
1.411759155409970e-01 2.665839516949183e+01 3.766344007689240e+00 2.002699494901033e+01
6.762101013310686e+00 1.613097528116441e+01 1.975831801028227e+01 6.762315863480681e+00
3.602748860718660e+01 3.965405243376151e+01 2.665936672231643e+01 1.613336088963971e+01
3.965979670007713e+01 2.665956327231921e+01 3.602986646059846e+01 1.976269607944013e+01
3.302082841722822e+01 2.366306534385745e+01 1.975646236830511e+01 3.302090302941561e+01
3.766520306973559e+00 3.965207390224676e+01 1.312251280487792e+01 2.366008615429296e+01
3.966192258716966e+01 1.312260994416990e+01 3.763585958017277e+00 1.976457660862210e+01
1.612441575700581e+01 2.003737628478283e+01 3.302667362056058e+01 1.612579002578034e+01
1.300943615919166e-01 1.312929183554348e+01 3.602385461209880e+01 2.003980285143647e+01
1.312835912727251e+01 3.602521570582152e+01 1.326126129270848e-01 3.302462604224628e+01
2.002237388759288e+01 3.302542651164779e+01 1.613272634740917e+01 2.002283097144105e+01
1.312989650757725e+01 3.602729144147526e+01 1.359486117124403e-01 3.302472864920428e+01
3.603021827855345e+01 1.363774176440271e-01 1.312908901372393e+01 1.613127922476698e+01
2.365474508978707e+01 1.975805959178482e+01 3.302489375434092e+01 2.365582793051143e+01
3.965333108119757e+01 1.312759813226865e+01 3.759593281615865e+00 1.976353965391668e+01
1.313009378981553e+01 3.760648541044594e+00 3.965951782488528e+01 3.302640175483977e+01
2.002108500568356e+01 6.769639743094411e+00 2.365960668883464e+01 2.002163482344500e+01
2.666450545568627e+01 3.765702798044551e+00 1.371212288279759e-01 6.768226947003763e+00
3.764085113435486e+00 1.376581682393422e-01 2.666323425010762e+01 2.366364349018840e+01
2.365627916904770e+01 2.003828360473675e+01 6.767686206404353e+00 2.365782489480705e+01
1.419852084602276e-01 2.666398547283920e+01 3.757574288722359e+00 2.002804095734190e+01
2.666557499549343e+01 3.759137131165368e+00 1.315951885129683e-01 6.768189733765365e+00
1.975012398629565e+01 6.767812764910553e+00 1.613003602182881e+01 1.974648029687565e+01
2.666604139189277e+01 3.602990672505781e+01 3.965980196493943e+01 6.766864834421964e+00
3.602775511780144e+01 3.965656552988282e+01 2.666497095133764e+01 1.613377602848543e+01
1.612688033848156e+01 1.975583736606142e+01 6.769177718413183e+00 1.612788536211239e+01
3.965478529834676e+01 2.666538980316798e+01 3.602174832606473e+01 1.976282697384082e+01
2.666416652842498e+01 3.602275648993071e+01 3.966118561074479e+01 6.766715134787656e+00
1.974852660756867e+01 3.302723074932664e+01 2.366260586375733e+01 1.975887704013076e+01
1.312889263768425e+01 3.763466875326005e+00 3.964795683860110e+01 3.302561618320703e+01
3.766146355335531e+00 3.965779704774696e+01 1.312740553030542e+01 2.366079595410458e+01
6.070393232326075e+00 1.381933706730796e+01 1.979313958501530e+01 6.070471408146224e+00
3.371591174479433e+01 3.969036761895347e+01 2.596783518999552e+01 1.382208783733355e+01
3.971929472589517e+01 2.596788992018665e+01 3.371866820841480e+01 1.982353971784280e+01
1.381424310577510e+01 6.074865117949615e+00 1.982335856793499e+01 1.381416819778865e+01
2.597129560299399e+01 3.972213953575969e+01 3.371263185842697e+01 6.078150646602359e+00
3.968771830132448e+01 3.371260764874877e+01 2.597463522592599e+01 1.979325482015008e+01
6.071483900165442e+00 6.074449799909492e+00 7.236782374018715e-02 6.072049100844775e+00
2.597200804076883e+01 2.000532892129358e+01 2.596622477296280e+01 6.077413377537295e+00
1.996568623469416e+01 2.596679116572648e+01 2.597498695728957e+01 1.051286467109458e-01
1.381552098889720e+01 1.381872516087529e+01 1.043858578372248e-01 1.381562643317187e+01
3.371647841617332e+01 2.000523014592194e+01 3.371119906842123e+01 1.382154433182282e+01
1.996582988029359e+01 3.371133037630351e+01 3.371919486592358e+01 7.307237473549275e-02
1.978604205769553e+01 6.075965011519122e+00 1.381845043505683e+01 1.978062943042749e+01
2.597484531270559e+01 3.371811247719431e+01 3.972121224358062e+01 6.074671765569149e+00
3.371672196307043e+01 3.971632947131886e+01 2.597339444562062e+01 1.382252554427384e+01
1.381511813506187e+01 1.979081527893156e+01 6.078005149428230e+00 1.381614027082065e+01
3.968998167641033e+01 2.597427225406033e+01 3.371065250110522e+01 1.982316375095708e+01
2.597189458322347e+01 3.371167642967917e+01 3.972166730162139e+01 6.074502304910115e+00
1.017823148820772e-01 1.382083681627463e+01 1.381966838343492e+01 1.016723266520908e-01
3.371584994645583e+01 3.371844212121823e+01 1.996152380957845e+01 1.382219812315616e+01
3.371624729825850e+01 1.996138578455740e+01 3.371705844856085e+01 1.382150204851188e+01
6.070733681004529e+00 1.982221614461082e+01 1.382214115058187e+01 6.071733248281731e+00
3.971946359517061e+01 3.371850189123408e+01 2.596650715229699e+01 1.979367757564411e+01
3.371617677123812e+01 2.596755002315471e+01 3.969016960899948e+01 1.381892306913047e+01
1.981833251995918e+01 1.382045279181060e+01 6.074732363763417e+00 1.978290291603092e+01
3.371899708180666e+01 2.597422355619830e+01 3.971898447817124e+01 1.381907949100311e+01
2.597199366490836e+01 3.968411603194392e+01 3.371744029580114e+01 6.077732087982101e+00
6.070304077918620e+00 7.013824118642557e-02 6.076477281148843e+00 6.068828833688231e+00
1.996866676987030e+01 2.597406230101006e+01 2.596942141712212e+01 1.058151271052866e-01
2.597210393971820e+01 2.596793498137526e+01 2.000417416696340e+01 6.075992250926570e+00
1.019418554937276e-01 6.076832173324530e+00 6.075799373709719e+00 1.016526624744318e-01
2.597132779031803e+01 2.597520360598918e+01 1.996152308122143e+01 6.078150149712796e+00
2.597089180181899e+01 1.996123581569601e+01 2.597246444240781e+01 6.076813840776871e+00
1.381471798005188e+01 6.925573882686667e-02 1.382140747150762e+01 1.381289132850737e+01
2.000057639307594e+01 3.371856733658931e+01 3.371388404817216e+01 7.384027953358849e-02
3.371612406308586e+01 3.371209663413830e+01 2.000512599147392e+01 1.381965040346558e+01
8.672218495339466e-02 1.182113723457815e+01 1.181976662645888e+01 8.647357086259645e-02
3.171574447001204e+01 3.171896211118905e+01 1.995581763268942e+01 1.182249444690000e+01
3.171589190994230e+01 1.995555548181683e+01 3.171691935369907e+01 1.182159907104908e+01
1.980518242918080e+01 8.075723288088817e+00 1.181855888400319e+01 1.979485639753668e+01
2.797486502931759e+01 3.171845572585710e+01 3.972788797135372e+01 8.074480802549155e+00
3.171655894678791e+01 3.971863022968562e+01 2.797343752985148e+01 1.182260057382002e+01
8.681805591030312e-02 8.076748381181810e+00 8.075517086361035e+00 8.649642558696696e-02
2.797105971007662e+01 2.797506979613716e+01 1.995578366624222e+01 8.078250622361793e+00
2.797083786278593e+01 1.995546899629196e+01 2.797236273169486e+01 8.076925168362079e+00
1.982004072849675e+01 1.182032349308245e+01 8.074416886154317e+00 1.979617177880267e+01
3.171930899339469e+01 2.797417314519791e+01 3.972660524613276e+01 1.181895861617269e+01
2.797188572127837e+01 3.970390833615899e+01 3.171775381447850e+01 8.077844263412164e+00
1.181464594528382e+01 6.364404154195957e-02 1.182144072667786e+01 1.181289182593741e+01
1.998131907233046e+01 3.171883045703952e+01 3.171407599961245e+01 7.222992768481579e-02
3.171605315609878e+01 3.171234753579476e+01 1.998982744517894e+01 1.181979456383545e+01
1.181590487102266e+01 1.181868769157720e+01 8.513476984165391e-02 1.181602475200967e+01
3.171643406794604e+01 1.999018376435125e+01 3.171098277688720e+01 1.182176428651959e+01
1.995995491826899e+01 3.171112767781727e+01 3.171943303282861e+01 7.145057903744415e-02
8.070820696763668e+00 1.982298145635792e+01 1.182232316837899e+01 8.071862883416708e+00
3.972183845723185e+01 3.171869179758548e+01 2.796618792385944e+01 1.981221058707145e+01
3.171617637041888e+01 2.796725926203570e+01 3.970967635530060e+01 1.181892138516928e+01
8.070058893997910e+00 1.181934841040094e+01 1.981198824433493e+01 8.070079138860185e+00
3.171585776933837e+01 3.970963272277147e+01 2.796803236167278e+01 1.182233286373358e+01
3.972086493743659e+01 2.796804321000422e+01 3.171885314215038e+01 1.982611246226420e+01
1.181536689045192e+01 1.980868572401337e+01 8.077966288373313e+00 1.181640674181759e+01
3.970865358128022e+01 2.797427141448886e+01 3.171056688977725e+01 1.982540184660307e+01
2.797169546342603e+01 3.171161096692349e+01 3.972412161428723e+01 8.074354740892542e+00
8.071557650782674e+00 8.074247555177745e+00 7.050062415355079e-02 8.071937158277732e+00
2.797179511031450e+01 1.999022016786892e+01 2.796615149304444e+01 8.077453993368954e+00
1.995989620307934e+01 2.796653645085193e+01 2.797498989324110e+01 8.610397791641479e-02
8.070192812496019e+00 6.419249672714625e-02 8.076634488707834e+00 8.068576960257134e+00
1.996676883827650e+01 2.797423034732491e+01 2.796948134292814e+01 8.681848309330077e-02
2.797174613304083e+01 2.796786900495130e+01 1.998923487678351e+01 8.075657974312703e+00
1.181421957081106e+01 8.074719134443027e+00 1.982560272853746e+01 1.181418439204753e+01
2.797118276917622e+01 3.972415117171580e+01 3.171281335341664e+01 8.078079782490921e+00
3.970664144464227e+01 3.171281828807571e+01 2.797458778876385e+01 1.981255358789447e+01
1.250582840368720e+01 2.914666489505505e+01 1.998581534089329e+01 1.250555392452712e+01
9.248897282293649e+00 7.835262295367966e-02 3.240056679838487e+01 2.914956750159762e+01
7.216198845370839e-02 3.240029035817515e+01 9.251877777028364e+00 1.995832468698563e+01
2.727682685020807e+01 1.064733679832643e+01 1.995805679017008e+01 2.727706445068976e+01
3.054483721808866e+01 7.934678771389009e-02 7.381941531267555e+00 1.064405630013955e+01
7.118433105206592e-02 7.382150352822855e+00 3.054156769814290e+01 1.998590571660755e+01
1.250394035343708e+01 1.064451216149219e+01 1.982367589490861e+01 1.250394150469548e+01
3.054096543236675e+01 3.972523888817460e+01 3.240217001267826e+01 1.064795009276426e+01
3.971225738628854e+01 3.240219080493355e+01 3.054444885832707e+01 1.981950221232768e+01
2.727870998158595e+01 2.914897470662509e+01 1.982820340944857e+01 2.727870467008315e+01
9.252447607585127e+00 3.972124509535219e+01 7.380286044239628e+00 2.914610026781122e+01
3.971829193564196e+01 7.380313442766089e+00 9.249563638323313e+00 1.981633403544856e+01
1.995358184209804e+01 1.251219148170519e+01 2.914608716823374e+01 1.995370776589150e+01
3.240640230252035e+01 9.251744125897076e+00 8.244779772052184e-02 1.251074774892251e+01
9.249961791504605e+00 8.262260277302413e-02 3.240515295940634e+01 2.914910926924576e+01
2.914257287367412e+01 1.998549079972283e+01 1.250955281191710e+01 2.914426401275458e+01
7.350297028980025e-02 3.240568646705869e+01 9.243054471566870e+00 1.997402188651311e+01
3.240830698776938e+01 9.244761484098495e+00 6.189148939201462e-02 1.251077883534666e+01
1.979825586453108e+01 2.728530134522020e+01 2.914863567309117e+01 1.981218172973174e+01
7.386247757270069e+00 9.248727792595064e+00 3.971809964485507e+01 2.728381138884808e+01
9.252779292377982e+00 3.973057336130524e+01 7.384935655012308e+00 2.914672192417138e+01
1.063843546879799e+01 1.996787866602766e+01 2.728478595396909e+01 1.064007827185785e+01
6.248503884654118e-02 7.387304932994692e+00 3.053764264773101e+01 1.998488054863796e+01
7.385891618028505e+00 3.053927242401543e+01 7.958304302831659e-02 2.728208107966552e+01
1.995389339736400e+01 2.728286897274880e+01 1.064677747535507e+01 1.995465237533794e+01
7.388576086402149e+00 3.054065125537959e+01 8.156017581817152e-02 2.728163032850796e+01
3.054491321143102e+01 8.237271191933480e-02 7.387147515844949e+00 1.064532433867220e+01
1.064064144911362e+01 1.982117919575010e+01 1.251193295058655e+01 1.064169543905434e+01
3.972171227461700e+01 3.240826441885544e+01 3.053600713271579e+01 1.981846056839645e+01
3.240571637965510e+01 3.053708318279578e+01 3.971870503406586e+01 1.250841471409844e+01
1.981917140243515e+01 1.250976775384085e+01 1.064427029807278e+01 1.979937368957093e+01
3.240888823917486e+01 3.054399175844921e+01 3.972946762609432e+01 1.250848906475585e+01
3.054172390240114e+01 3.970975878197266e+01 3.240740815167378e+01 1.064764652507601e+01
2.914074075046528e+01 1.982519073371558e+01 2.728215597735856e+01 2.914177144439387e+01
3.972410873815303e+01 7.384869490924587e+00 9.245549147409207e+00 1.981431908474011e+01
7.388356027877606e+00 9.246581232590794e+00 3.971488949564753e+01 2.728469747473889e+01
2.914117494752227e+01 1.251141673142221e+01 1.995809668274219e+01 2.914130571311315e+01
3.240900361129270e+01 7.183463838087101e-02 9.246077592444484e+00 1.250823884053288e+01
7.868624859847995e-02 9.246172523504816e+00 3.240587342867793e+01 1.998587772349170e+01
1.064178459359191e+01 2.728234418637113e+01 1.998584040987460e+01 1.064138989865301e+01
7.384695265489253e+00 7.093542359955422e-02 3.053635310448776e+01 2.728542024374890e+01
7.958732361744429e-02 3.053596410725073e+01 7.387813094763260e+00 1.995828969742503e+01
1.063989242559662e+01 1.250880220686596e+01 1.982033324739158e+01 1.063993698584806e+01
3.240526724604702e+01 3.971423354355418e+01 3.053783607612447e+01 1.251200900396604e+01
3.972176911631418e+01 3.053785409584417e+01 3.240851832576362e+01 1.982573833443115e+01
2.914301260858894e+01 2.728492033812162e+01 1.981829124652232e+01 2.914301309702387e+01
7.388401395221556e+00 3.971173552946184e+01 9.244326650155632e+00 2.728176840956382e+01
3.972209528333754e+01 9.244371446135403e+00 7.385277513573646e+00 1.982694757587705e+01
1.250253644651036e+01 1.997532718677503e+01 2.914902816664720e+01 1.250420067224088e+01
6.225125755118907e-02 9.251603872392659e+00 3.240189573532817e+01 1.998513356865633e+01
9.249973964243997e+00 3.240353477187163e+01 7.211970693129093e-02 2.914620118676577e+01
1.995393787550432e+01 2.914706199942743e+01 1.251102892710483e+01 1.995465351697644e+01
9.252576110802481e+00 3.240506344656923e+01 8.156487948945704e-02 2.914600353430419e+01
3.240888336278172e+01 8.232440982266527e-02 9.251338817625795e+00 1.250944608926462e+01
2.727657950440097e+01 1.981839115286423e+01 2.914636593689280e+01 2.727763665353707e+01
3.971602095410041e+01 9.249111685914333e+00 7.381327631726466e+00 1.982165962042658e+01
9.252482255200558e+00 7.382371631377547e+00 3.972168759197840e+01 2.914884752711017e+01
1.995356941647684e+01 1.064816574879485e+01 2.728164498581899e+01 1.995365955642835e+01
3.054222718582404e+01 7.387503871390357e+00 8.250265477260228e-02 1.064654019438886e+01
7.385830461303763e+00 8.263054034687108e-02 3.054080418095189e+01 2.728516779888928e+01
2.727833052046908e+01 1.998519974506595e+01 1.064548531502190e+01 2.728002383833265e+01
8.094385793664415e-02 3.054159192237094e+01 7.378931128107247e+00 1.996660099670826e+01
3.054403909115141e+01 7.380628325579808e+00 6.215919869969651e-02 1.064646509416267e+01
1.981159637597344e+01 1.064549576767454e+01 1.250830977419344e+01 1.979873853028092e+01
3.054466173689181e+01 3.240812916897033e+01 3.973008654417936e+01 1.064434535235626e+01
3.240597389691740e+01 3.972019207480745e+01 3.054331817249836e+01 1.251195501748814e+01
1.250488334235093e+01 1.981389887899715e+01 1.064779539996564e+01 1.250593009569637e+01
3.971496496937775e+01 3.054410045150013e+01 3.240014293036852e+01 1.982521256729928e+01
3.054152070995434e+01 3.240120559029220e+01 3.972313179566277e+01 1.064417987929424e+01
1.979887820564971e+01 2.914950054984696e+01 2.728451372266434e+01 1.981975232771712e+01
9.250539573816612e+00 7.384667021286449e+00 3.971126979682839e+01 2.914788966709210e+01
7.388480626223573e+00 3.972996575126965e+01 9.249105286255935e+00 2.728245773618650e+01
1.656140352983275e+01 3.323475259180652e+00 1.657133431343251e+01 1.656228486168100e+01
2.322349657026098e+01 3.647247979787997e+01 3.646546169865879e+01 3.325015483781570e+00
3.646325433420166e+01 3.646639285916446e+01 2.322505375654974e+01 1.657064509644183e+01
3.321383769169834e+00 1.657394415365840e+01 1.657024029142375e+01 3.322075242770350e+00
3.646992948505324e+01 3.646762546863646e+01 2.321523144537045e+01 1.656909322726821e+01
3.646800862694339e+01 2.321590504485378e+01 3.646489593582192e+01 1.657191337305349e+01
1.656956411173918e+01 1.656842604137276e+01 3.327274985978196e+00 1.656729775746637e+01
3.646973268922548e+01 2.322301421175649e+01 3.646045320739267e+01 1.656928168460218e+01
2.322219907283236e+01 3.645832042111462e+01 3.647038251516410e+01 3.324250667262286e+00
3.324106003631590e+00 3.323659830263010e+00 3.324461885732755e+00 3.321163002993453e+00
2.321719133232705e+01 2.322591938551336e+01 2.321612567307243e+01 3.331304292322689e+00
2.321923913577702e+01 2.321318355210028e+01 2.322475152249319e+01 3.327115786599175e+00
1.448944722131224e+01 2.861313146337814e+01 1.998477602906750e+01 1.448914595640550e+01
8.715294775229181e+00 7.347930002723484e-02 3.438410506576655e+01 2.861576865638980e+01
6.835783933128936e-02 3.438380019536345e+01 8.718022713931383e+00 1.995076562189102e+01
2.529331214772399e+01 1.118116209974402e+01 1.995028455263344e+01 2.529350580934633e+01
3.107854588758818e+01 7.431730396495140e-02 5.398369723788479e+00 1.117774779924335e+01
6.754427777628247e-02 5.398536456241224e+00 3.107513017847854e+01 1.998492936122963e+01
1.448752526350660e+01 1.117810789195383e+01 1.983223375976018e+01 1.448754052791589e+01
3.107454260602082e+01 3.973070753958378e+01 3.438570500463899e+01 1.118175374742984e+01
3.971629871794456e+01 3.438572084205047e+01 3.107827581579216e+01 1.981814105818846e+01
2.529519522967835e+01 2.861511043460138e+01 1.983375422213745e+01 2.529519432716770e+01
8.718561551334329e+00 3.972578965580740e+01 5.396661366581465e+00 2.861258996078253e+01
3.971860137850852e+01 5.396699257838535e+00 8.716022068188586e+00 1.981927884398634e+01
1.994557227591399e+01 1.449580384176039e+01 2.861260018181787e+01 1.994603400832985e+01
3.438996210394035e+01 8.718040855201783e+00 8.140600259372471e-02 1.449431250987591e+01
8.716262550446167e+00 8.195334433150248e-02 3.438866709098891e+01 2.861520523568056e+01
2.860897736649211e+01 1.998473304627838e+01 1.449315335058396e+01 2.861058284317422e+01
7.030640141129860e-02 3.438925690276886e+01 8.709353203848824e+00 1.996857236937689e+01
3.439187149971026e+01 8.710955403386045e+00 5.397955675313217e-02 1.449430252135673e+01
1.979889453654178e+01 2.530173130694460e+01 2.861492197952455e+01 1.981730763602466e+01
5.402690622395350e+00 8.715004614110915e+00 3.972146634985789e+01 2.530023522028510e+01
8.719112665180688e+00 3.973860242769609e+01 5.401378818002596e+00 2.861304640443996e+01
1.117224999094351e+01 1.996445895034417e+01 2.530124906059843e+01 1.117389193699622e+01
5.506063887368379e-02 5.403779633604668e+00 3.107122842464413e+01 1.998362619065168e+01
5.402290683161406e+00 3.107284097543882e+01 7.429214717454556e-02 2.529848300738154e+01
1.994626036922956e+01 2.529931081918375e+01 1.118047396761855e+01 1.994744260682303e+01
5.404985504065141e+00 3.107422728283204e+01 8.009945833070133e-02 2.529809253594331e+01
3.107873149054796e+01 8.138996301420534e-02 5.403580781310575e+00 1.117902882087334e+01
1.117430183085210e+01 1.983127498759274e+01 1.449549065276872e+01 1.117537287197525e+01
3.972529112469727e+01 3.439184251765676e+01 3.106971199112278e+01 1.982358958190448e+01
3.438927339756147e+01 3.107083668155762e+01 3.971882266901002e+01 1.449196789548167e+01
1.982289427475211e+01 1.449328911631026e+01 1.117812731481820e+01 1.980757029975721e+01
3.439244183073311e+01 3.107768990432324e+01 3.972994805186935e+01 1.449206274981191e+01
3.107541068049627e+01 3.971322023016205e+01 3.439102177946479e+01 1.118118583417689e+01
2.860704662505399e+01 1.982863249291954e+01 2.529859432522522e+01 2.860804032355126e+01
3.972805580124251e+01 5.401315354072053e+00 8.711884134688830e+00 1.981918386718058e+01
5.404778464414528e+00 8.712894517999343e+00 3.972007939532279e+01 2.530112347079487e+01
2.860762657046938e+01 1.449500932353360e+01 1.995037807371602e+01 2.860765200227069e+01
3.439255741510998e+01 6.117377466542123e-02 8.712331150749863e+00 1.449179550555223e+01
8.066921826109379e-02 8.712316304769622e+00 3.438940463304745e+01 1.998486149867214e+01
1.117557555703258e+01 2.529879722374251e+01 1.998487359367005e+01 1.117510202059823e+01
5.401156931422038e+00 6.046255371340203e-02 3.107004125837593e+01 2.530184687063480e+01
8.138735662856671e-02 3.106957659572577e+01 5.404235763710312e+00 1.995065584102628e+01
1.117371768190502e+01 1.449233662233708e+01 1.982058890873824e+01 1.117376249240272e+01
3.438880267116292e+01 3.971707009601584e+01 3.107141545264543e+01 1.449560439057040e+01
3.972681490687355e+01 3.107142551856906e+01 3.439212175814145e+01 1.982890862870563e+01
2.860941476294594e+01 2.530132334939649e+01 1.982231444211803e+01 2.860943175299674e+01
5.404802634144197e+00 3.971530375689093e+01 8.710506146768992e+00 2.529824904529780e+01
3.973096383937591e+01 8.710588049014270e+00 5.401739802105893e+00 1.983220109754306e+01
1.448612795718051e+01 1.997760194380987e+01 2.861539319297428e+01 1.448777140403815e+01
5.468794276042826e-02 8.717947822965622e+00 3.438545169889156e+01 1.998403416131029e+01
8.716233837084291e+00 3.438704759885889e+01 6.112822590070287e-02 2.861242175624176e+01
1.994639747038860e+01 2.861334390527322e+01 1.449458222796956e+01 1.994745440452426e+01
8.718638184610272e+00 3.438855636067932e+01 8.009428117476187e-02 2.861253683590428e+01
3.439251573952119e+01 8.123890205283871e-02 8.717668900520691e+00 1.449302189603736e+01
2.529300666519455e+01 1.981668167179750e+01 2.861271002750497e+01 2.529405600684820e+01
3.971358111022431e+01 8.715474507432207e+00 5.397799631182405e+00 1.983209962291707e+01
8.718708903917502e+00 5.398836011260602e+00 3.973202722728961e+01 2.861508879625010e+01
1.994564297311250e+01 1.118206545198795e+01 2.529814577709530e+01 1.994594638320838e+01
3.107600590862849e+01 5.403966678364231e+00 8.150620387871944e-02 1.118015379270882e+01
5.402240269580677e+00 8.187321997526248e-02 3.107430332794916e+01 2.530154235992309e+01
2.529478888771636e+01 1.998429020532284e+01 1.117928023340339e+01 2.529648723728984e+01
8.340923829511278e-02 3.107539346562041e+01 5.395363897422516e+00 1.995550159858810e+01
3.107765062999068e+01 5.397043036597902e+00 5.437882981761424e-02 1.118006350374238e+01
1.980951094406628e+01 1.117907606281405e+01 1.449191065228317e+01 1.980642453543212e+01
3.107824402343124e+01 3.439171603939867e+01 3.973106480495004e+01 1.117816253786496e+01
3.438951215306260e+01 3.972568211781174e+01 3.107715274144377e+01 1.449548053904397e+01
1.448844913065645e+01 1.981837869800219e+01 1.118150206992924e+01 1.448950294886975e+01
3.971340045275402e+01 3.107781423344316e+01 3.438367885356273e+01 1.983548859363656e+01
3.107522232019939e+01 3.438475872468907e+01 3.973265346741821e+01 1.117785437229317e+01
1.979993063384021e+01 2.861579935031427e+01 2.530094509441142e+01 1.983061637004195e+01
8.716948815700588e+00 5.401098791563760e+00 3.970971124743446e+01 2.861407731715894e+01
5.404939826039998e+00 3.973759256575831e+01 8.715403587862335e+00 2.529890195423824e+01
radii kind=fltar 456
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00 4.472981758991604e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00 2.112713817723960e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00 2.197751493750416e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00 1.368161720958986e+00
radii2 kind=none None
rvecs kind=fltar 3,3
3.979364985026868e+01 7.614451550399652e-06 -6.349718219381480e-06 7.617082337342254e-06
3.979363720174106e+01 1.466093923475890e-05 -6.348208809181665e-06 1.466769197263813e-05
3.979365172754711e+01
scope_ids kind=none None
scopes kind=none None
valence_charges kind=none None
"""
PARS_TXT = """# BONDHARM
#---------
BONDHARM:UNIT K kjmol/A**2
BONDHARM:UNIT R0 A
BONDHARM:PARS O_OH ZR 3.7856874859e+02 2.2345796544e+00
BONDHARM:PARS O_CA ZR 8.4085734618e+02 2.1879092596e+00
BONDHARM:PARS O_OX ZR 7.5654881429e+02 2.1161540051e+00
BONDHARM:PARS C_CA O_CA 4.4101463252e+03 1.2792436607e+00
BONDHARM:PARS C_CA C_PC 2.1038566297e+03 1.4832348712e+00
BONDHARM:PARS C_PC C_PH 3.2247609625e+03 1.4015678878e+00
BONDHARM:PARS C_PH C_PH 3.3880065753e+03 1.3929483296e+00
BONDHARM:PARS C_PH H_PH 3.3762726364e+03 1.0876572905e+00
BONDHARM:PARS H_OH O_OH 5.0216717525e+03 9.9986798534e-01
# BENDAHARM
#----------
BENDAHARM:UNIT K kjmol/rad**2
BENDAHARM:UNIT THETA0 deg
BENDAHARM:PARS ZR O_OH ZR 4.1362556428e+02 1.0775418131e+02
BENDAHARM:PARS C_CA O_CA ZR 2.6430192082e+02 1.3689725294e+02
BENDAHARM:PARS C_PH C_PH H_PH 3.0835003755e+02 1.2017556632e+02
BENDAHARM:PARS C_PC C_CA O_CA 1.6684448679e+02 1.1975013868e+02
BENDAHARM:PARS C_CA C_PC C_PH 6.0256074066e+02 1.1990380480e+02
BENDAHARM:PARS C_PC C_PH C_PH 4.7103023496e+02 1.2003765295e+02
BENDAHARM:PARS O_CA C_CA O_CA 6.8388715389e+02 1.2311437675e+02
BENDAHARM:PARS O_OH ZR O_OX 5.3052365716e+02 6.4534430940e+01
BENDAHARM:PARS C_PH C_PC C_PH 4.2496973028e+02 1.1972403242e+02
BENDAHARM:PARS C_PC C_PH H_PH 2.9895866247e+02 1.1956474865e+02
BENDAHARM:PARS ZR O_OX ZR 5.0533812788e+02 1.2046678892e+02
BENDAHARM:PARS H_OH O_OH ZR 1.7902855792e+02 1.1039262816e+02
BENDAHARM:PARS O_OX ZR O_OX 1.0039193248e+02 6.3800382263e+01
# TORSION
#--------
TORSION:UNIT A kjmol
TORSION:UNIT PHI0 deg
TORSION:PARS C_PH C_PC C_PH C_PH 2 3.4910522540e+01 0.0000000000e+00
TORSION:PARS O_CA C_CA O_CA ZR 2 7.0218267368e+00 0.0000000000e+00
TORSION:PARS C_CA C_PC C_PH C_PH 2 4.6013908971e+01 0.0000000000e+00
TORSION:PARS C_PH C_PC C_PH H_PH 2 2.8550678889e+01 0.0000000000e+00
TORSION:PARS C_CA C_PC C_PH H_PH 2 1.8865768343e+01 0.0000000000e+00
TORSION:PARS C_PC C_CA O_CA ZR 2 3.2834550916e+01 0.0000000000e+00
TORSION:PARS C_PC C_PH C_PH H_PH 2 3.3870440544e+01 0.0000000000e+00
TORSION:PARS H_PH C_PH C_PH H_PH 2 1.7461761843e+01 0.0000000000e+00
TORSION:PARS C_PH C_PC C_CA O_CA 2 1.3072558996e+01 0.0000000000e+00
# OOPDIST
#--------
OOPDIST:UNIT K kjmol/A**2
OOPDIST:UNIT D0 A
OOPDIST:PARS C_PC O_CA O_CA C_CA 1.3685283732e+03 0.0000000000e+00
OOPDIST:PARS C_PC C_PH H_PH C_PH 2.0695840516e+02 0.0000000000e+00
#Fixed charges
#---------------
FIXQ:UNIT Q0 e
FIXQ:UNIT P e
FIXQ:UNIT R angstrom
FIXQ:SCALE 1 1.0
FIXQ:SCALE 2 1.0
FIXQ:SCALE 3 1.0
FIXQ:DIELECTRIC 1.0
# Atomic parameters
# ----------------------------------------------------
# KEY label Q_0A R_A
# ----------------------------------------------------
FIXQ:ATOM ZR 0.0000000000000 2.3670000000
FIXQ:ATOM CE 0.0000000000000 2.3670000000
FIXQ:ATOM O_OH 0.0000000000000 1.1180000000
FIXQ:ATOM O_OX 0.0000000000000 1.1180000000
FIXQ:ATOM O_CA 0.0000000000000 1.1180000000
FIXQ:ATOM C_PH 0.0000000000000 1.1630000000
FIXQ:ATOM C_PC 0.0000000000000 1.1630000000
FIXQ:ATOM C_CA 0.0000000000000 1.1630000000
FIXQ:ATOM H_PH 0.0000000000000 0.7240000000
FIXQ:ATOM H_OH 0.0000000000000 0.7240000000
# Bond parameters
# ----------------------------------------------------
# KEY label0 label1 P_AB
# ----------------------------------------------------
FIXQ:BOND C_PH C_PH 0.0000928607
FIXQ:BOND C_CA C_PC 0.0432515406
FIXQ:BOND O_CA ZR -0.3140076067
FIXQ:BOND O_OX ZR -0.3646234389
FIXQ:BOND C_PH H_PH -0.1385876479
FIXQ:BOND H_OH O_OH 0.4906092530
FIXQ:BOND C_CA O_CA 0.3691317391
FIXQ:BOND C_PC C_PH -0.0556062459
FIXQ:BOND O_OH ZR -0.1881444503
# van der Waals
#==============
# The following mathematical forms are supported:
# - MM3: EPSILON*(1.84e5*exp(-12*r/SIGMA)-2.25*(SIGMA/r)^6)
# - LJ: 4.0*EPSILON*((SIGMA/r)^12 - (SIGMA/r)^6)
#
# Remark:
# In MM3, if ONLYPAULI=1 then only the Pauli (exponential repulsion) term will be used.
# If ONLYPAULI=0, the full MM3 expression is used (with the factor 12 in the exponential).
MM3:UNIT SIGMA angstrom
MM3:UNIT EPSILON kcalmol
MM3:SCALE 1 0.0
MM3:SCALE 2 0.0
MM3:SCALE 3 1.0
# ---------------------------------------------
# KEY ffatype SIGMA EPSILON ONLYPAULI
# ---------------------------------------------
MM3:PARS ZR 2.540 0.300 0
MM3:PARS CE 2.740 0.340 0
MM3:PARS HF 2.530 0.516 0
MM3:PARS O_CA 1.820 0.059 0
MM3:PARS O_OX 1.820 0.059 0
MM3:PARS O_OH 1.820 0.059 0
MM3:PARS C_CA 1.940 0.056 0
MM3:PARS C_PC 1.960 0.056 0
MM3:PARS C_PH 1.960 0.056 0
MM3:PARS H_OH 1.600 0.016 0
MM3:PARS H_PH 1.620 0.020 0
MM3:PARS C_FO 1.940 0.056 0
MM3:PARS H_FO 1.620 0.020 0
"""
main()
| gpl-2.0 | 4,154,871,986,466,351,600 | 81.996981 | 101 | 0.478535 | false |
keishi/chromium | third_party/mesa/MesaLib/src/gallium/tests/graw/fragment-shader/fragment-shader.py | 32 | 7321 | #!/usr/bin/env python
##########################################################################
#
# Copyright 2009 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sub license, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
# IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
##########################################################################
import struct
from gallium import *
def make_image(surface):
data = surface.get_tile_rgba8(0, 0, surface.width, surface.height)
import Image
outimage = Image.fromstring('RGBA', (surface.width, surface.height), data, "raw", 'RGBA', 0, 1)
return outimage
def save_image(filename, surface):
outimage = make_image(surface)
outimage.save(filename, "PNG")
def test(dev, name):
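    # Renders a single position+color triangle through the gallium pipeline:
    # fixed-function state is configured below, a pass-through vertex shader
    # feeds the fragment shader loaded from 'frag-<name>.sh', two constant
    # buffers are filled with fixed test vectors, and the result is written
    # to 'frag-<name>.png'.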
ctx = dev.context_create()
width = 320
height = 320
minz = 0.0
maxz = 1.0
# disabled blending/masking
blend = Blend()
blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_ONE
blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE
blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO
blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO
blend.rt[0].colormask = PIPE_MASK_RGBA
ctx.set_blend(blend)
# depth/stencil/alpha
depth_stencil_alpha = DepthStencilAlpha()
depth_stencil_alpha.depth.enabled = 0
depth_stencil_alpha.depth.writemask = 1
depth_stencil_alpha.depth.func = PIPE_FUNC_LESS
ctx.set_depth_stencil_alpha(depth_stencil_alpha)
# rasterizer
rasterizer = Rasterizer()
rasterizer.front_winding = PIPE_WINDING_CW
rasterizer.cull_mode = PIPE_WINDING_NONE
rasterizer.scissor = 1
ctx.set_rasterizer(rasterizer)
# viewport
viewport = Viewport()
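    # The scale/translate pair maps clip-space [-1, 1] onto the window: x and y
    # are scaled to pixels (with y flipped), depth is mapped into [minz, maxz].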
scale = FloatArray(4)
scale[0] = width / 2.0
scale[1] = -height / 2.0
scale[2] = (maxz - minz) / 2.0
scale[3] = 1.0
viewport.scale = scale
translate = FloatArray(4)
translate[0] = width / 2.0
translate[1] = height / 2.0
translate[2] = (maxz - minz) / 2.0
translate[3] = 0.0
viewport.translate = translate
ctx.set_viewport(viewport)
# samplers
sampler = Sampler()
sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE
sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE
sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE
sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE
sampler.min_img_filter = PIPE_TEX_MIPFILTER_NEAREST
sampler.mag_img_filter = PIPE_TEX_MIPFILTER_NEAREST
sampler.normalized_coords = 1
ctx.set_fragment_sampler(0, sampler)
# scissor
scissor = Scissor()
scissor.minx = 0
scissor.miny = 0
scissor.maxx = width
scissor.maxy = height
ctx.set_scissor(scissor)
clip = Clip()
clip.nr = 0
ctx.set_clip(clip)
# framebuffer
cbuf = dev.resource_create(
PIPE_FORMAT_B8G8R8X8_UNORM,
width, height,
bind=PIPE_BIND_RENDER_TARGET,
).get_surface()
fb = Framebuffer()
fb.width = width
fb.height = height
fb.nr_cbufs = 1
fb.set_cbuf(0, cbuf)
ctx.set_framebuffer(fb)
    rgba = FloatArray(4)
rgba[0] = 0.5
rgba[1] = 0.5
rgba[2] = 0.5
rgba[3] = 0.5
ctx.clear(PIPE_CLEAR_COLOR, rgba, 0.0, 0)
# vertex shader
vs = Shader('''
VERT
DCL IN[0], POSITION
DCL IN[1], COLOR
DCL OUT[0], POSITION
DCL OUT[1], COLOR
MOV OUT[0], IN[0]
MOV OUT[1], IN[1]
END
''')
ctx.set_vertex_shader(vs)
# fragment shader
fs = Shader(file('frag-' + name + '.sh', 'rt').read())
ctx.set_fragment_shader(fs)
constbuf0 = dev.buffer_create(64,
(PIPE_BUFFER_USAGE_CONSTANT |
PIPE_BUFFER_USAGE_GPU_READ |
PIPE_BUFFER_USAGE_CPU_WRITE),
4 * 4 * 4)
cbdata = ''
cbdata += struct.pack('4f', 0.4, 0.0, 0.0, 1.0)
cbdata += struct.pack('4f', 1.0, 1.0, 1.0, 1.0)
cbdata += struct.pack('4f', 2.0, 2.0, 2.0, 2.0)
cbdata += struct.pack('4f', 4.0, 8.0, 16.0, 32.0)
constbuf0.write(cbdata, 0)
ctx.set_constant_buffer(PIPE_SHADER_FRAGMENT,
0,
constbuf0)
constbuf1 = dev.buffer_create(64,
(PIPE_BUFFER_USAGE_CONSTANT |
PIPE_BUFFER_USAGE_GPU_READ |
PIPE_BUFFER_USAGE_CPU_WRITE),
4 * 4 * 4)
cbdata = ''
cbdata += struct.pack('4f', 0.1, 0.1, 0.1, 0.1)
cbdata += struct.pack('4f', 0.25, 0.25, 0.25, 0.25)
cbdata += struct.pack('4f', 0.5, 0.5, 0.5, 0.5)
cbdata += struct.pack('4f', 0.75, 0.75, 0.75, 0.75)
constbuf1.write(cbdata, 0)
ctx.set_constant_buffer(PIPE_SHADER_FRAGMENT,
1,
constbuf1)
xy = [
-0.8, -0.8,
0.8, -0.8,
0.0, 0.8,
]
color = [
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
]
nverts = 3
nattrs = 2
verts = FloatArray(nverts * nattrs * 4)
for i in range(0, nverts):
verts[i * nattrs * 4 + 0] = xy[i * 2 + 0] # x
verts[i * nattrs * 4 + 1] = xy[i * 2 + 1] # y
verts[i * nattrs * 4 + 2] = 0.5 # z
verts[i * nattrs * 4 + 3] = 1.0 # w
verts[i * nattrs * 4 + 4] = color[i * 3 + 0] # r
verts[i * nattrs * 4 + 5] = color[i * 3 + 1] # g
verts[i * nattrs * 4 + 6] = color[i * 3 + 2] # b
verts[i * nattrs * 4 + 7] = 1.0 # a
ctx.draw_vertices(PIPE_PRIM_TRIANGLES,
nverts,
nattrs,
verts)
ctx.flush()
save_image('frag-' + name + '.png', cbuf)
def main():
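    # Each name below selects one TGSI opcode test: frag-<name>.sh is loaded as
    # the fragment shader and the rendered output is saved as frag-<name>.png.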
tests = [
'abs',
'add',
'cb-1d',
'cb-2d',
'dp3',
'dp4',
'dst',
'ex2',
'flr',
'frc',
'lg2',
'lit',
'lrp',
'mad',
'max',
'min',
'mov',
'mul',
'rcp',
'rsq',
'sge',
'slt',
'srcmod-abs',
'srcmod-absneg',
'srcmod-neg',
'srcmod-swz',
'sub',
'xpd',
]
dev = Device()
for t in tests:
test(dev, t)
if __name__ == '__main__':
main()
| bsd-3-clause | 7,435,287,178,502,534,000 | 27.486381 | 99 | 0.542002 | false |
grundic/yagocd | tests/test_package_repository.py | 1 | 7501 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import pytest
from mock import mock
from six import string_types
from tests import AbstractTestManager, ReturnValueMixin
from yagocd.resources import package_repository
@pytest.fixture()
def manager(session_fixture):
return package_repository.PackageRepositoryManager(session=session_fixture)
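# The tests below replay recorded HTTP interactions through VCR cassettes: the
# shared prepare fixture creates two package repositories ('repository-foo' and
# 'repository-bar') used by the list/get/update/delete cases, while TestCreate
# records its own 'repository-baz'.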
class BaseManager(AbstractTestManager):
@pytest.fixture()
def prepare_package_repositories(self, manager, my_vcr):
with my_vcr.use_cassette("package_repositories/prepare"):
manager.create(dict(
repo_id='repository-foo',
name='repo.foo',
plugin_metadata=dict(id="yum", version="1"),
configuration=[
dict(key="REPO_URL", value="http://foo.example.com")
]
))
manager.create(dict(
repo_id='repository-bar',
name='repo.bar',
plugin_metadata=dict(id="yum", version="1"),
configuration=[
dict(key="REPO_URL", value="http://bar.example.com")
]
))
class TestList(BaseManager, ReturnValueMixin):
@pytest.fixture()
def _execute_test_action(self, manager, my_vcr, prepare_package_repositories):
with my_vcr.use_cassette("package_repositories/list") as cass:
return cass, manager.list()
@pytest.fixture()
def expected_request_url(self):
return '/go/api/admin/repositories'
@pytest.fixture()
def expected_request_method(self):
return 'GET'
@pytest.fixture()
def expected_return_type(self):
return list
@pytest.fixture()
def expected_return_value(self):
def check_value(result):
assert all(isinstance(i, package_repository.PackageRepository) for i in result)
return check_value
class TestGet(BaseManager, ReturnValueMixin):
ID = 'repository-foo'
@pytest.fixture()
def _execute_test_action(self, manager, my_vcr, prepare_package_repositories):
with my_vcr.use_cassette("package_repositories/get_{}".format(self.ID)) as cass:
return cass, manager.get(self.ID)
@pytest.fixture()
def expected_request_url(self):
return '/go/api/admin/repositories/{}'.format(self.ID)
@pytest.fixture()
def expected_request_method(self):
return 'GET'
@pytest.fixture()
def expected_return_type(self):
return package_repository.PackageRepository
@pytest.fixture()
def expected_return_value(self):
def check_value(result):
assert result.data.repo_id == self.ID
return check_value
class TestCreate(BaseManager, ReturnValueMixin):
ID = 'repository-baz'
@pytest.fixture()
def _execute_test_action(self, manager, my_vcr):
with my_vcr.use_cassette("package_repositories/create_{}".format(self.ID)) as cass:
return cass, manager.create(dict(
repo_id=self.ID,
name='repo.baz',
plugin_metadata=dict(id="yum", version="1"),
configuration=[
dict(key="REPO_URL", value="http://baz.example.com")
]
))
@pytest.fixture()
def expected_request_url(self):
return '/go/api/admin/repositories'
@pytest.fixture()
def expected_request_method(self):
return 'POST'
@pytest.fixture()
def expected_return_type(self):
return package_repository.PackageRepository
@pytest.fixture()
def expected_return_value(self):
def check_value(result):
assert result.data.repo_id == self.ID
assert result.data.name == 'repo.baz'
return check_value
class TestUpdate(BaseManager, ReturnValueMixin):
ID = 'repository-bar'
@pytest.fixture()
def _execute_test_action(self, manager, my_vcr, prepare_package_repositories):
with my_vcr.use_cassette("package_repositories/prepare_update_{}".format(self.ID)):
repository = manager.get(self.ID)
with my_vcr.use_cassette("package_repositories/update_{}".format(self.ID)) as cass:
repository.data.name = 'updated-name'
return cass, manager.update(repo_id=self.ID, repository=repository.data, etag=repository.etag)
@pytest.fixture()
def expected_request_url(self):
return '/go/api/admin/repositories/{}'.format(self.ID)
@pytest.fixture()
def expected_request_method(self, manager):
return 'PUT'
@pytest.fixture()
def expected_return_type(self):
return package_repository.PackageRepository
@pytest.fixture()
def expected_return_value(self):
def check_value(result):
assert result.data.name == 'updated-name'
return check_value
class TestDelete(BaseManager, ReturnValueMixin):
ID = 'repository-foo'
@pytest.fixture()
def _execute_test_action(self, manager, my_vcr, prepare_package_repositories):
with my_vcr.use_cassette("package_repositories/delete_{}".format(self.ID)) as cass:
return cass, manager.delete(self.ID)
@pytest.fixture()
def expected_request_url(self):
return '/go/api/admin/repositories/{}'.format(self.ID)
@pytest.fixture()
def expected_request_method(self):
return 'DELETE'
@pytest.fixture()
def expected_return_type(self):
return string_types
@pytest.fixture()
def expected_return_value(self):
def check_value(result):
assert result == "The package repository '{}' was deleted successfully.".format(self.ID)
return check_value
class TestMagicMethods(object):
@mock.patch('yagocd.resources.package_repository.PackageRepositoryManager.get')
def test_indexed_based_access(self, get_mock, manager):
repo_id = mock.MagicMock()
_ = manager[repo_id] # noqa
get_mock.assert_called_once_with(repo_id=repo_id)
@mock.patch('yagocd.resources.package_repository.PackageRepositoryManager.list')
def test_iterator_access(self, list_mock, manager):
for _ in manager:
pass
list_mock.assert_called_once_with()
| isc | -2,367,280,102,324,830,700 | 32.337778 | 106 | 0.639781 | false |
cyberplant/scrapy | tests/test_spiderstate.py | 17 | 1292 | import os
from datetime import datetime
from twisted.trial import unittest
from scrapy.extensions.spiderstate import SpiderState
from scrapy.spiders import Spider
from scrapy.exceptions import NotConfigured
from scrapy.utils.test import get_crawler
class SpiderStateTest(unittest.TestCase):
def test_store_load(self):
jobdir = self.mktemp()
os.mkdir(jobdir)
spider = Spider(name='default')
dt = datetime.now()
ss = SpiderState(jobdir)
ss.spider_opened(spider)
spider.state['one'] = 1
spider.state['dt'] = dt
ss.spider_closed(spider)
spider2 = Spider(name='default')
ss2 = SpiderState(jobdir)
ss2.spider_opened(spider2)
self.assertEqual(spider.state, {'one': 1, 'dt': dt})
ss2.spider_closed(spider2)
def test_state_attribute(self):
# state attribute must be present if jobdir is not set, to provide a
# consistent interface
spider = Spider(name='default')
ss = SpiderState()
ss.spider_opened(spider)
self.assertEqual(spider.state, {})
ss.spider_closed(spider)
def test_not_configured(self):
crawler = get_crawler(Spider)
self.assertRaises(NotConfigured, SpiderState.from_crawler, crawler)
| bsd-3-clause | 5,931,210,219,580,721,000 | 29.761905 | 76 | 0.658669 | false |
olakiril/pipeline | python/pipeline/settings.py | 6 | 3063 | """
Settings for the pipeline package.
"""
import json
from collections import OrderedDict
from .exceptions import PipelineException
import collections
from pprint import pformat
LOCALCONFIG = 'pipeline_config.json'
GLOBALCONFIG = '.pipeline_config.json'
validators = collections.defaultdict(lambda: lambda value: True)
default = OrderedDict({
'path.mounts': '/mnt/',
'display.tracking': False
})
class Config(collections.MutableMapping):
instance = None
def __init__(self, *args, **kwargs):
if not Config.instance:
Config.instance = Config.__Config(*args, **kwargs)
else:
Config.instance._conf.update(dict(*args, **kwargs))
def __getattr__(self, name):
return getattr(self.instance, name)
def __getitem__(self, item):
return self.instance.__getitem__(item)
def __setitem__(self, item, value):
self.instance.__setitem__(item, value)
def __str__(self):
return pformat(self.instance._conf, indent=4)
def __repr__(self):
return self.__str__()
def __delitem__(self, key):
del self.instance._conf[key]
def __iter__(self):
return iter(self.instance._conf)
def __len__(self):
return len(self.instance._conf)
class __Config:
"""
        Stores pipeline settings. Behaves like a dictionary, but applies validator functions
when certain keys are set.
The default parameters are stored in pipeline.settings.default . If a local config file
exists, the settings specified in this file override the default settings.
"""
def __init__(self, *args, **kwargs):
self._conf = dict(default)
self._conf.update(dict(*args, **kwargs)) # use the free update to set keys
def __getitem__(self, key):
return self._conf[key]
def __setitem__(self, key, value):
if isinstance(value, collections.Mapping):
raise ValueError("Nested settings are not supported!")
if validators[key](value):
self._conf[key] = value
else:
raise PipelineException(u'Validator for {0:s} did not pass'.format(key, ))
def save(self, filename=None):
"""
Saves the settings in JSON format to the given file path.
:param filename: filename of the local JSON settings file. If None, the local config file is used.
"""
if filename is None:
filename = LOCALCONFIG
with open(filename, 'w') as fid:
json.dump(self._conf, fid, indent=4)
def load(self, filename):
"""
Updates the setting from config file in JSON format.
:param filename: filename of the local JSON settings file. If None, the local config file is used.
"""
if filename is None:
filename = LOCALCONFIG
with open(filename, 'r') as fid:
                self._conf.update(json.load(fid))
| lgpl-3.0 | -8,438,406,022,688,926,000 | 30.265306 | 110 | 0.589944 | false |
trolldbois/python-haystack-reverse | test/haystack/reverse/test_structure.py | 1 | 6644 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for haystack.reverse.structure."""
import logging
import os
import unittest
from haystack.mappings import folder
from haystack.reverse import config
from haystack.reverse import context
from haystack.reverse import fieldtypes
from haystack.reverse import structure
from haystack.reverse.heuristics import dsa
from haystack.reverse.heuristics import pointertypes
__author__ = "Loic Jaquemet"
__copyright__ = "Copyright (C) 2012 Loic Jaquemet"
__license__ = "GPL"
__maintainer__ = "Loic Jaquemet"
__email__ = "[email protected]"
__status__ = "Production"
log = logging.getLogger("test_structure")
class TestStructure(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.dumpname = 'test/src/test-ctypes3.32.dump'
config.remove_cache_folder(cls.dumpname)
cls.memory_handler = folder.load(cls.dumpname)
finder = cls.memory_handler.get_heap_finder()
heap_walker = finder.list_heap_walkers()[0]
heap_addr = heap_walker.get_heap_address()
cls.context = context.get_context_for_address(cls.memory_handler, heap_addr)
cls.target = cls.context.memory_handler.get_target_platform()
cls.dsa = dsa.FieldReverser(cls.context.memory_handler)
cls.pta = pointertypes.PointerFieldReverser(cls.context.memory_handler)
return
@classmethod
def tearDownClass(cls):
config.remove_cache_folder(cls.dumpname)
cls.context = None
cls.target = None
cls.dsa = None
cls.pta = None
return
def setUp(self):
return
def tearDown(self):
return
def test_decodeFields(self):
for s in self.context.listStructures():
self.dsa.reverse_record(self.context, s)
pointer_fields = [f for f in s.get_fields() if f.type.is_pointer()]
if len(s) == 12: # Node + padding, 1 pointer on create
self.assertEqual(len(s.get_fields()), 3) # 1, 2 and padding
self.assertEqual(len(pointer_fields), 2)
elif len(s) == 20: # test3, 1 pointer on create
# fields, no heuristic to detect medium sized int
# TODO untyped of size < 8 == int * x
# print s.toString()
self.assertEqual(len(s.get_fields()), 3) # discutable
self.assertEqual(len(pointer_fields), 1)
return
def test_resolvePointers(self):
for s in self.context.listStructures():
self.dsa.reverse_record(self.context, s)
for s in self.context.listStructures():
self.pta.reverse_record(self.context, s)
self.assertTrue(True) # test no error
def test_resolvePointers2(self):
for s in self.context.listStructures():
self.dsa.reverse_record(self.context, s)
self.assertEqual(s.get_reverse_level(), 10)
for s in self.context.listStructures():
log.debug('RLEVEL: %d' % s.get_reverse_level())
self.pta.reverse_record(self.context, s)
pointer_fields = [f for f in s.get_fields() if f.type.is_pointer()]
if len(s) == 12: # Node + padding, 1 pointer on create
self.assertEqual(len(s.get_fields()), 3) # 1, 2 and padding
self.assertEqual(len(pointer_fields), 2)
def test_reset(self):
for s in self.context.listStructures():
s.reset()
if isinstance(s, structure.CacheWrapper):
members = s.obj().__dict__
else:
members = s.__dict__
for name, value in members.items():
if name in ['_size', '_memory_handler', '_name', '_vaddr', '_target']:
self.assertNotIn(value, [None, False])
elif name in ['_dirty', '_AnonymousRecord__address', '_AnonymousRecord__record_type']:
self.assertTrue(value)
elif name in ['_fields']:
self.assertEqual(value, None)
elif name in ['dumpname']:
self.assertTrue(os.access(value, os.F_OK))
else:
                    self.assertIn(value, [None, False], name + ' not reset')
class TestStructure2(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.dumpname = 'test/src/test-ctypes6.32.dump'
config.remove_cache_folder(cls.dumpname)
cls.memory_handler = folder.load(cls.dumpname)
finder = cls.memory_handler.get_heap_finder()
heap_walker = finder.list_heap_walkers()[0]
heap_addr = heap_walker.get_heap_address()
cls.context = context.get_context_for_address(cls.memory_handler, heap_addr)
cls.target = cls.context.memory_handler.get_target_platform()
cls.dsa = dsa.FieldReverser(cls.context.memory_handler)
cls.pta = pointertypes.PointerFieldReverser(cls.context.memory_handler)
return
@classmethod
def tearDownClass(cls):
config.remove_cache_folder(cls.dumpname)
cls.context = None
cls.target = None
cls.dsa = None
cls.pta = None
return
def setUp(self):
return
def tearDown(self):
return
def test_string_overlap(self):
for s in self.context.listStructures():
# s.resolvePointers()
self.dsa.reverse_record(self.context, s)
log.debug(s.to_string())
self.assertTrue(True) # test no error
def test_get_fields(self):
_record = structure.AnonymousRecord(self.memory_handler, 0xdeadbeef, 40)
word_size = self.target.get_word_size()
f1 = fieldtypes.Field('f1', 0*word_size, fieldtypes.ZEROES, word_size, False)
f2 = fieldtypes.Field('f2', 1*word_size, fieldtypes.ZEROES, word_size, False)
fields = [f1, f2]
_record_type = fieldtypes.RecordType('struct_test', 2 * word_size, fields)
_record.set_record_type(_record_type)
# same fields
self.assertEqual(f1, _record.get_fields()[0].type)
self.assertEqual(f1, _record.get_field('f1').type)
# get_fields return a new list of fields
x = _record.get_fields()
self.assertEqual(x, _record.get_fields())
x.pop(0)
self.assertNotEqual(x, _record.get_fields())
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
# logging.getLogger("test_structure").setLevel(logging.DEBUG)
#suite = unittest.TestLoader().loadTestsFromTestCase(TestFunctions)
# unittest.TextTestRunner(verbosity=2).run(suite)
unittest.main(verbosity=2)
| gpl-3.0 | -5,797,053,801,691,626,000 | 36.965714 | 102 | 0.613486 | false |
khchine5/xl | lino_xl/lib/ledger/accounts.py | 1 | 1481 | # -*- coding: UTF-8 -*-
# Copyright 2012-2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""Defines referrable names for certain well-known accounts.
These names depend on :attr:`lino_xl.lib.ledger.Plugin.use_pcmn`
Currently used by the :mod:`minimal_ledger
<lino_xl.lib.ledger.fixtures.minimal_ledger>` and :mod:`euvatrates
<lino_xl.lib.vat.fixtures.euvatrates>` fixtures.
"""
raise Exception("No longer used (20171008)")
from django.conf import settings
def pcmnref(ref, pcmn):
if settings.SITE.plugins.ledger.use_pcmn:
return pcmn
return ref
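# For example: with plugins.ledger.use_pcmn enabled, pcmnref('customers', '4000')
# returns '4000'; with it disabled, it returns 'customers'.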
# partner centralization accounts:
CUSTOMERS_ACCOUNT = pcmnref('customers', '4000')
SUPPLIERS_ACCOUNT = pcmnref('suppliers', '4400')
TAX_OFFICES_ACCOUNT = pcmnref('tax_offices', '4500')
BANK_PO_ACCOUNT = pcmnref('bank_po', '4600')
# VAT to declare:
VAT_DUE_ACCOUNT = pcmnref('vat_due', '4510')
VAT_RETURNABLE_ACCOUNT = pcmnref('vat_returnable', '4511')
VAT_DEDUCTIBLE_ACCOUT = pcmnref('vat_deductible', '4512')
# declared VAT:
VATDCL_ACCOUNT = pcmnref('vatdcl', '4513')
# financial accounts
BESTBANK_ACCOUNT = pcmnref('bestbank', '5500')
CASH_ACCOUNT = pcmnref('cash', '5700')
PURCHASE_OF_GOODS = pcmnref('goods', '6040')
PURCHASE_OF_SERVICES = pcmnref('services', '6010')
PURCHASE_OF_INVESTMENTS = pcmnref('investments', '6020')
# PO_BESTBANK_ACCOUNT = pcmnref('bestbankpo', '5810')
SALES_ACCOUNT = pcmnref('sales', '7000')
MEMBERSHIP_FEE_ACCOUNT = pcmnref('membership_fee', '7310')
| bsd-2-clause | -913,734,980,422,852,400 | 28.62 | 66 | 0.717083 | false |
nathanross/amiens | src/amiens/core/util.py | 1 | 6740 | #!/usr/bin/python3
# Copyright 2015 Nathan Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import amiens.core.asserts
from enum import Enum, IntEnum
import os
from os import path
import inspect
import sys
import json
def debug_open(goal, l, opentype):
try:
return open(l, opentype)
except IOError as e:
Log.fatal(goal)
except:
Log.fatal(goal)
def full_read(goal, l):
f=debug_open(goal, l, 'r')
d=f.read()
f.close()
return d
def full_read_utf8(goal, l):
f=debug_open(goal, l, 'rb')
d=f.read().decode('utf8')
f.close()
return d
def full_write(goal, l, d):
f=debug_open(goal, l, 'w')
f.write(d)
f.flush()
f.close()
def full_write_utf8(goal, l, d):
f=debug_open(goal, l, 'wb')
f.write(d.encode('utf8'))
f.flush()
f.close()
def json_read(goal, l):
return json.loads(full_read_utf8(goal, l))
def json_write(goal, l, d):
full_write_utf8(goal, l, json.dumps(d, ensure_ascii=False))
def indent(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
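# indent() pretty-prints an ElementTree element in place by inserting newline
# and two-space padding into the text/tail nodes. Illustrative use (hypothetical tree):
#   import xml.etree.ElementTree as ET
#   root = ET.fromstring('<a><b/><c/></a>')
#   indent(root)
#   ET.tostring(root)   # now spans several indented lines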
class LogSeverity(IntEnum):
DATA_LOSS=0
FATAL=1
ERROR=2
LIKELY_ERROR=3
WARNING=4
WRITES=5
OUTLINE=6
DATA=7
DEBUG=8
FORCE=9
class Bcolors(Enum):
NORMAL = '\033[0m'
BLACK = '\033[0;30m'
D_RED ='\033[0;31m'
D_GREEN = '\033[0;32m'
BROWN = '\033[0;33m'
D_BLUE = '\033[0;34m'
D_PURPLE = '\033[0;35m'
D_CYAN ='\033[0;36m'
D_GRAY ='\033[1;30m'
L_GRAY ='\033[0;37m'
L_RED ='\033[0;31m'
L_GREEN = '\033[1;32m'
YELLOW = '\033[1;33m'
L_BLUE = '\033[1;34m'
L_PURPLE = '\033[1;35m'
L_CYAN ='\033[1;36m'
WHITE = '\033[1;37m'
U_RED ='\033[4;31m'
class Log(object):
@staticmethod
def logColor(level):
if level == LogSeverity.DATA_LOSS:
return Bcolors.U_RED
elif level == LogSeverity.FATAL:
return Bcolors.L_RED
elif level <= LogSeverity.LIKELY_ERROR:
return Bcolors.D_RED
elif level == LogSeverity.WARNING:
return Bcolors.YELLOW
elif level == LogSeverity.DATA:
return Bcolors.D_BLUE
elif level == LogSeverity.DEBUG:
return Bcolors.D_GREEN
elif level == LogSeverity.FORCE:
return Bcolors.L_CYAN
return Bcolors.NORMAL
@staticmethod
def getTrace(level=1):
#by default gets the file, funcname, line, of calling
#function.
stack_full=inspect.stack(level)
tb_item= stack_full[-1]
if len(stack_full) > level:
tb_item= stack_full[level]
return {
'filename':tb_item[1],
'line_num':tb_item[2],
'func_name':tb_item[3]
}
@staticmethod
def log(level, message, trace_distance=0):
if type(message) != str:
Log.fatal('error, you must provide a string to log!')
display_up_to=LogSeverity.LIKELY_ERROR
if 'VERBOSE' in os.environ:
display_up_to=int(os.environ['VERBOSE'])
if level == LogSeverity.FORCE or level <= display_up_to:
context_segment=''
if level == LogSeverity.FORCE or \
display_up_to >= LogSeverity.DATA:
context=Log.getTrace(trace_distance+2)
#rm .py
pathstr=context['filename'][:-3]
patharr=[path.basename(pathstr)]
pathstr=path.dirname(pathstr)
try:
while not os.access(pathstr + '/__init__.py',0):
patharr.insert(0, path.basename(pathstr))
pathstr=path.dirname(pathstr)
except:
                    raise Exception('error! your package must have an '
                                    '__init__.py file at its root')
pathstr='/'.join(patharr)
context_segment=''.join([
str(level.value), '``',
pathstr,
' @ ',
str(context['line_num']),
' ',
context['func_name'],
'(..) '
])
print(''.join([
(Log.logColor(level)).value,
context_segment,
message,
Bcolors.NORMAL.value
]))
if level <= LogSeverity.FATAL:
raise Exception(message)
@staticmethod
def data_loss(text, trace_distance=0):
Log.log(LogSeverity.DATA_LOSS, text, trace_distance+1)
@staticmethod
def fatal(text, trace_distance=0):
Log.log(LogSeverity.FATAL, text, trace_distance+1)
@staticmethod
def error(text, trace_distance=0):
Log.log(LogSeverity.ERROR, text, trace_distance+1)
@staticmethod
def likely_error(text, trace_distance=0):
Log.log(LogSeverity.LIKELY_ERROR, text, trace_distance+1)
@staticmethod
def warning(text, trace_distance=0):
Log.log(LogSeverity.WARNING, text, trace_distance+1)
@staticmethod
def writes(text, trace_distance=0):
Log.log(LogSeverity.WRITES, text, trace_distance+1)
@staticmethod
def outline(text, trace_distance=0):
Log.log(LogSeverity.OUTLINE, text, trace_distance+1)
@staticmethod
def data(text, trace_distance=0):
Log.log(LogSeverity.DATA, text, trace_distance+1)
@staticmethod
def debug(text, trace_distance=0):
Log.log(LogSeverity.DEBUG, text, trace_distance+1)
@staticmethod
def force(text, trace_distance=0):
Log.log(LogSeverity.FORCE, text, trace_distance+1)
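# Illustrative use (comments only, not executed): the VERBOSE environment
# variable selects the most verbose LogSeverity level that gets printed.
#
#   import os
#   os.environ['VERBOSE'] = str(int(LogSeverity.DEBUG))  # show everything up to DEBUG
#   Log.outline('starting job')
#   Log.warning('file is large, this may take a while')
#   Log.fatal('cannot continue')   # also raises an Exception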
| apache-2.0 | -7,644,443,310,563,183,000 | 28.432314 | 84 | 0.549407 | false |
roadmapper/ansible | lib/ansible/modules/cloud/vmware/vmware_host_vmhba_info.py | 6 | 8764 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Christian Kotte <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_vmhba_info
short_description: Gathers info about vmhbas available on the given ESXi host
description:
- This module can be used to gather information about vmhbas available on the given ESXi host.
- If C(cluster_name) is provided, then vmhba information about all hosts from given cluster will be returned.
- If C(esxi_hostname) is provided, then vmhba information about given host system will be returned.
version_added: '2.9'
author:
- Christian Kotte (@ckotte)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
esxi_hostname:
description:
- Name of the host system to work with.
- Vmhba information about this ESXi server will be returned.
- This parameter is required if C(cluster_name) is not specified.
type: str
cluster_name:
description:
- Name of the cluster from which all host systems will be used.
- Vmhba information about each ESXi server will be returned for the given cluster.
- This parameter is required if C(esxi_hostname) is not specified.
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather info about vmhbas of all ESXi Host in the given Cluster
vmware_host_vmhba_info:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: '{{ cluster_name }}'
delegate_to: localhost
register: cluster_host_vmhbas
- name: Gather info about vmhbas of an ESXi Host
vmware_host_vmhba_info:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
register: host_vmhbas
'''
RETURN = r'''
hosts_vmhbas_info:
description:
- dict with hostname as key and dict with vmhbas information as value.
returned: hosts_vmhbas_info
type: dict
sample:
{
"10.76.33.204": {
"vmhba_details": [
{
"adapter": "HPE Smart Array P440ar",
"bus": 3,
"device": "vmhba0",
"driver": "nhpsa",
"location": "0000:03:00.0",
"model": "Smart Array P440ar",
"node_wwn": "50:01:43:80:37:18:9e:a0",
"status": "unknown",
"type": "SAS"
},
{
"adapter": "QLogic Corp ISP2532-based 8Gb Fibre Channel to PCI Express HBA",
"bus": 5,
"device": "vmhba1",
"driver": "qlnativefc",
"location": "0000:05:00.0",
"model": "ISP2532-based 8Gb Fibre Channel to PCI Express HBA",
"node_wwn": "57:64:96:32:15:90:23:95:82",
"port_type": "unknown",
"port_wwn": "57:64:96:32:15:90:23:95:82",
"speed": 8,
"status": "online",
"type": "Fibre Channel"
},
{
"adapter": "QLogic Corp ISP2532-based 8Gb Fibre Channel to PCI Express HBA",
"bus": 8,
"device": "vmhba2",
"driver": "qlnativefc",
"location": "0000:08:00.0",
"model": "ISP2532-based 8Gb Fibre Channel to PCI Express HBA",
"node_wwn": "57:64:96:32:15:90:23:95:21",
"port_type": "unknown",
"port_wwn": "57:64:96:32:15:90:23:95:21",
"speed": 8,
"status": "online",
"type": "Fibre Channel"
}
],
}
}
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
class HostVmhbaMgr(PyVmomi):
"""Class to manage vmhba info"""
def __init__(self, module):
super(HostVmhbaMgr, self).__init__(module)
cluster_name = self.params.get('cluster_name', None)
esxi_host_name = self.params.get('esxi_hostname', None)
self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
if not self.hosts:
self.module.fail_json(msg="Failed to find host system.")
def gather_host_vmhba_info(self):
"""Gather vmhba info"""
hosts_vmhba_info = {}
for host in self.hosts:
host_vmhba_info = dict()
host_st_system = host.configManager.storageSystem
if host_st_system:
device_info = host_st_system.storageDeviceInfo
host_vmhba_info['vmhba_details'] = []
for hba in device_info.hostBusAdapter:
hba_info = dict()
if hba.pci:
hba_info['location'] = hba.pci
for pci_device in host.hardware.pciDevice:
if pci_device.id == hba.pci:
hba_info['adapter'] = pci_device.vendorName + ' ' + pci_device.deviceName
break
else:
hba_info['location'] = 'PCI'
hba_info['device'] = hba.device
# contains type as string in format of 'key-vim.host.FibreChannelHba-vmhba1'
hba_type = hba.key.split(".")[-1].split("-")[0]
if hba_type == 'SerialAttachedHba':
hba_info['type'] = 'SAS'
elif hba_type == 'FibreChannelHba':
hba_info['type'] = 'Fibre Channel'
else:
hba_info['type'] = hba_type
hba_info['bus'] = hba.bus
hba_info['status'] = hba.status
hba_info['model'] = hba.model
hba_info['driver'] = hba.driver
try:
if isinstance(hba, (vim.host.FibreChannelHba, vim.host.FibreChannelOverEthernetHba)):
hba_info['node_wwn'] = self.format_number('%X' % hba.nodeWorldWideName)
else:
hba_info['node_wwn'] = self.format_number(hba.nodeWorldWideName)
except AttributeError:
pass
try:
if isinstance(hba, (vim.host.FibreChannelHba, vim.host.FibreChannelOverEthernetHba)):
hba_info['port_wwn'] = self.format_number('%X' % hba.portWorldWideName)
else:
hba_info['port_wwn'] = self.format_number(hba.portWorldWideName)
except AttributeError:
pass
try:
hba_info['port_type'] = hba.portType
except AttributeError:
pass
try:
hba_info['speed'] = hba.speed
except AttributeError:
pass
host_vmhba_info['vmhba_details'].append(hba_info)
hosts_vmhba_info[host.name] = host_vmhba_info
return hosts_vmhba_info
@staticmethod
def format_number(number):
"""Format number"""
string = str(number)
return ':'.join(a + b for a, b in zip(string[::2], string[1::2]))
def main():
"""Main"""
argument_spec = vmware_argument_spec()
argument_spec.update(
cluster_name=dict(type='str', required=False),
esxi_hostname=dict(type='str', required=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'esxi_hostname'],
],
supports_check_mode=True,
)
host_vmhba_mgr = HostVmhbaMgr(module)
module.exit_json(changed=False, hosts_vmhbas_info=host_vmhba_mgr.gather_host_vmhba_info())
if __name__ == "__main__":
main()
| gpl-3.0 | 2,828,567,438,932,037,600 | 37.270742 | 109 | 0.516203 | false |
sealcode/gpandoc | settings.py | 1 | 2909 | import os
import sys
import configparser
import settings_dialog
import PyQt5
from PyQt5 import QtGui
from PyQt5.QtGui import QFont
"""
Settings with global variables
"""
pathsOfDocuments = []
listPaths = []
selectedRecipe = ""
loadedRecipe = ""
defaultRecipe = ""
zipsFolder = ""
tempFolder = ""
localPath = ""
sets = ""
font = ""
def getValue(form_field):
value = form_field
print("GET defalut Value: ", value)
return value
def getDefaultRecipe(form_field): # settings_dialog.settings_ui.combo_box_1.currentText()
defaultRecipe = form_field
print(defaultRecipe)
return defaultRecipe
"""
def getDefaultFontName():
return defaultFontName
def getDefaultFontSize():
return defaultFontSize
def getDefaultOutputName():
return defaultOutputName
"""
"""
def buildConfiguration():
confWriter = configparser.ConfigParser()
recipe = str(getValue(settings_dialog.settings_ui.combo_box_1.currentText()))
size = int(getValue(settings_dialog.settings_ui.spin_box_1.currentText()))
recipe = str(getValue(settings_dialog.settings_ui.combo_box_1.currentText()))
size = int(getValue(settings_dialog.settings_ui.spin_box_1.value()))
font = size = int(getValue(settings_dialog.settings_ui.spin_box_1.currentText()))
size = getDefaultValue()
font = getDefaultFontName()
outputName = getDefaultOutputName()
confWriter['user'] = {
'default-recipe': recipe,
'font-name': font,
'font-size': size,
'default-book-name': outputName
}
return confWriter
"""
def saveConfiguration(defaultRecipe, fontName, fontSize, bookName):
config = configparser.ConfigParser()
config['user'] = {'default-recipe': str(defaultRecipe),
'font-name': str(fontName),
'font-size': int(fontSize),
'default-book-name': str(bookName)}
with open('configuration.ini', 'w') as configfile:
config.write(configfile)
def loadConfiguration(configfile='configuration.ini'):
confReader = configparser.ConfigParser()
confReader.read(configfile)
return confReader
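# Illustrative round trip (the argument values below are made-up examples):
#   saveConfiguration('default.zip', 'DejaVu Serif', 12, 'book.pdf')
#   cfg = loadConfiguration()
#   cfg['user']['font-name']   # -> 'DejaVu Serif'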
def prepareGlobalVariables():
global localPath
localPath = os.path.dirname(__file__)
global sets
sets = loadConfiguration()
global pathsOfDocuments
pathsOfDocuments = []
global zipsFolder
zipsFolder = "/zips/"
global tempFolder
tempFolder = "/temp/"
global listPaths
listPaths = []
global selectedRecipe
selectedRecipe = str(localPath + zipsFolder
+ sets['user']['default-recipe'])
global defaultRecipe
defaultRecipe = str(sets['user']['default-recipe'])
global font
font = QFont(sets['user']['font-name'],
int(sets['user']['font-size']))
prepareGlobalVariables()
sets = configparser.ConfigParser()
sets = loadConfiguration()
| lgpl-3.0 | -4,959,815,925,449,559,000 | 24.517544 | 89 | 0.66449 | false |
platinhom/CADDHom | python/algorithm/Data2D.py | 1 | 20820 | '''Module for 2D data based on numpy.ndarray'''
__author__="Zhixiong Zhao"
__date__="2015.12.08"
import string
#import traceback
import numpy as np
### Test data: see the usage sketch in the __main__ block after the class
### definitions below; building Data2D instances at this point would raise a
### NameError because the class has not been defined yet.
class mlarray(np.ndarray):
@property
def I(self):
''' Inverse of the array.'''
        return np.linalg.inv(self)
class Data2D(object):
"""Class to deal with the 2D data in numpy array."""
def InitData(self, data, dtype="float64", name='Data'):
'''Initial data from list to array'''
if (isinstance(data,list) or isinstance(data,tuple)):
arr=np.array(data,dtype=dtype);
elif (isinstance(data,np.ndarray)):
arr=data
if arr.ndim >2:
raise ValueError("Data dimension > 2!");
exit();
else:
# set the object property
self.name=name;
self.data=arr;
# No data but show ndim=1
if self.data.size==0:
print "Warning: Blank data!"
self.data.shape=(0,0);
# Dimension corect, translate 1D to one row
# [1,2,3..]
if self.data.ndim==1:
self.data.shape=(1,self.data.shape[0]);
return self.data
def __init__(self,data=None,
filename=None, dtype="float64", delim=None,name=None):
if (filename):
self.readfile(filename,delim=delim,dtype=dtype);
if (isinstance(name,str)):
self.name=name;
elif (data is not None):
if (isinstance(data,list) or isinstance(data,tuple) or isinstance(data,np.ndarray)):
self.InitData(data,name=name,dtype=dtype)
else:
#Todo: optimize
raise TypeError("Error input type")
else:
self.data=None;
self.name=""
if (name):
self.name=name
self.dtype=dtype;
def __len__(self):
return self.data.__len__();
def __str__(self):
return self.data.__str__();
def __repr__(self):
return self.data.__repr__();
    # important for passing Data2D to numpy
def __getitem__(self,i):
return self.data.__getitem__(i);
def __delitem__(self,i):
return self.data.__delitem__(i);
def __setitem__(self,i,y):
return self.data.__setitem__(i,y);
# for slice
def __getslice__(self,i,j):
return self.data.__getslice__(i,j);
def __delslice__(self,i,j):
return self.data.__delslice__(i,j);
def __setslice__(self,i,j,y):
return self.data.__setslice__(i,j,y);
def __iter__(self):
return self.data.__iter__();
def __add__(self,y):
return self.data.__add__(y);
def __sub__(self,y):
return self.data.__sub__(y);
# ndarray-based
@property
def ndim(self):
return 2
@property
def size(self):
return self.data.size
@property
def shape(self):
return self.data.shape
@shape.setter
def shape(self,value):
self.resize(value);
@property
def row(self):
return self.data.shape[0]
@property
def col(self):
return self.data.shape[1]
@property
def T(self):
'''Transpose of the array.'''
return Data2D(data=self.data.T)
@property
def I(self):
''' Inverse of the array.'''
return Data2D(np.linalg.inv(self.data))
@property
def ADT(self):
'''array.dot(array.T)'''
return Data2D(self.data.dot(self.data.T))
@property
def TDA(self):
'''array.T.dot(array)'''
return Data2D(self.data.T.dot(self.data))
def resize(self,row=1,col=1):
if (isinstance(row,tuple) or isinstance(row,list)):
if (len(row)==2):
col=row[1];
row=row[0];
elif (len(row)==1):
row=row[0]
else:
raise ValueError("Error shape tuple dimension!")
self.data.resize((row,col));
def reshape(self,row=1,col=1):
if (isinstance(row,tuple) or isinstance(row,list)):
if (len(row)==2):
col=row[1];
row=row[0];
elif (len(row)==1):
row=row[0]
else:
raise ValueError("Error shape tuple dimension!")
return self.data.reshape((row,col));
def dot(self, data):
if (isinstance(data,np.ndarray)):
return Data2D(self.data.dot(data))
if (not isinstance(data,Data2D)):
data=self.convert(data)
return Data2D(self.data.dot(data.data))
def tolist(self):
'''Convert to a list'''
return self.data.tolist();
    ##### Custom Functions #####
## array related
def append(self,data,column=False):
if (isinstance(data,list) or isinstance(data,tuple)):
data=np.array(data)
elif (isinstance(data,np.ndarray) or isinstance(data,Data2D)):
pass
else:
raise TypeError("Type error for data:"+str(type(data)));
# Perceive number of row/column of given data
nrow=1;ncol=1;
if data.ndim is 1:
if column:
nrow=len(data)
data.shape=(nrow,1)
else:
ncol=len(data)
data.shape=(1,ncol)
else:
nrow=data.shape[0]
ncol=data.shape[1]
# Check length
axis=0
if (column):
axis=1
if (self.row!=nrow):
raise ValueError("Length Data Row "+str(self.row)+"is not equal to given data lengt:"+str(nrow));
else:
if (self.col!=ncol):
raise ValueError("Length Data Column "+str(self.col)+"is not equal to given data lengt:"+str(ncol));
if data.ndim <= 2:
self.data=np.append(self.data,data,axis)
#olddata=self.data
#if (not column):
# self.data=np.zeros((self.row+nrow,self.col),dtype=self.dtype)
# self.data[:self.row,:]=olddata
# self.data[self.row:,:]=data
# self.row+=nrow;
#else:
# self.data=np.zeros((self.row,self.col+ncol),dtype=self.dtype)
# self.data[:,:self.col]=olddata
# self.data[:,self.col:]=data
# self.col+=ncol;
#del olddata
else:
raise ValueError("Error dimension! "+str(data.ndim));
def func4each(self,func,column=False, tolist=False):
'''Return Data2D/list containing result based on func for each row(default)/column'''
if (tolist):
if (not column):
return [ func(i) for i in self.data]
else:
return [ func(i) for i in self.data.T]
else:
if (not column):
return Data2D( data=[ [func(i)] for i in self.data],name=func.__name__)
else:
return Data2D(data=[[ func(i) for i in self.data.T]],name=func.__name__)
def getchild(self, childlist, column=False, name="Child", dtype="float64"):
"""Creat a child Data2D object
Given a list for index, starting from zero"""
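        # Illustrative calls (sketch), assuming d is a 4x3 Data2D:
        #   d.getchild([0, 2])             # rows 0 and 2 as a new Data2D
        #   d.getchild("0,2-3")            # rows 0, 2 and 3; ranges are inclusive
        #   d.getchild([1], column=True)   # only column 1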
# Todo: Optimize algorithm
if (isinstance(childlist,list) or isinstance(childlist,tuple) ):
if (not column):
childdatas=[ self.data[i].tolist() for i in childlist ];
return Data2D(data=childdatas, name=name, dtype=dtype);
else:
tdata=self.data.T
childdatas=[ tdata[i].tolist() for i in childlist ];
return Data2D(data=childdatas, name=name, dtype=dtype).T;
elif (isinstance(childlist,str)):
clist=[]
# parse the string, 0,2-4 mean [0], [2], [3],[4]
# use , as delimiter
tmp=[ string.strip(i) for i in childlist.strip().split(',') ]
for i in tmp:
if (i is ''):
continue;
if ('-' in i):
tmp2= i.split('-')
if tmp2[0]=='':tmp2[0]=0
if tmp2[1]=='':tmp2[1]=self.row-1
if int(tmp2[1])>=self.row:
tmp2[1]=self.row-1
## Don't support negative number
#if int(tmp2[0])<0:
# tmp2[0]=self.row+int(tmp2[0])
#if int(tmp2[1])<0:
# tmp2[1]=self.row+int(tmp2[1])
if (int(tmp2[1])>=int(tmp2[0]) and int(tmp2[0])>=0 and int(tmp2[1])<self.row):
for j in range(int(tmp2[0]),int(tmp2[1])+1):
clist.append(j)
else:
print tmp2
else:
clist.append(int(i))
clist.sort()
if (not column):
return Data2D(data=[ self.data[i].tolist() for i in clist], name=name, dtype=dtype);
else:
tdata=self.data.T
return Data2D(data=[ tdata[i].tolist() for i in clist], name=name, dtype=dtype).T;
else:
raise TypeError("Error given child type, should be list/tuple or string")
return None;
## User define
def convert(self,data, dtype="float64", name='Data'):
if (isinstance(data,list) or isinstance(data,tuple) or isinstance(data,np.ndarray)):
data=Data2D(data,dtype=dtype,name=name);
if (isinstance(data,Data2D)):
return data
else:
raise TypeError("Can't convert to Data2D type!");
return None
def readfile(self,filename,delim=None,dtype="float64"):
"""Read data from file.
delim is the delimiter for data.
        dtype is the numpy dtype used to convert the parsed values."""
if (not filename):
raise IOError("No file was found!");
# read file
f=open(filename);
datas=[]
for line in f:
data=line.strip().split(delim)
datas.append(data)
f.close()
# Create New data from list
self.InitData(datas,dtype=dtype,name=filename);
del datas[:]
return self.data
def savefile(self,filename,delim=" ",outformat=""):
'''outformat is the format formula in format function'''
f=open(filename,'w');
for i in range(self.row):
f.write(delim.join( \
[ format(item, outformat) for item in self.data[i].tolist()]) \
+"\n");
f.close()
## test function for filter
#def testfunc(self,d):
# if d[0]>1:return True;
# else: return False
def filter(self,func, column=False):
'''Use a filter function to filter the data.
        When func(data[i]) returns True, that row/column is kept.
        The filter is applied row by row (default) or column by column.'''
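        # Illustrative calls (sketch):
        #   kept_rows = d.filter(lambda row: row[0] > 0)
        #   kept_cols = d.filter(lambda col: col.sum() > 1, column=True)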
copydata=None
if column:
copydata=self.T
else:
copydata=self
outdata=np.zeros(copydata.shape)
savenum=0;
for d in copydata:
if (func(d)):
outdata[savenum]=d
savenum+=1;
finaldata=outdata[:savenum,:]
if (column):
return Data2D(finaldata.T, dtype=self.dtype)
else:
return Data2D(finaldata, dtype=self.dtype)
def each4min(self,column=False, tolist=False):
'''Min for each row/column'''
out= np.amin(self.data,axis=0) if column else np.amin(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4max(self,column=False, tolist=False):
'''Max for each row/column'''
out= np.amax(self.data,axis=0) if column else np.amax(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4sum(self,column=False, tolist=False):
'''Sum for each row/column'''
out= np.sum(self.data,axis=0) if column else np.sum(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4median(self,column=False, tolist=False):
'''Median for each row/column.
Given a vector V of length N, the median of V is the middle value of a sorted copy of V,
V_sorted - i.e., V_sorted[(N-1)/2], when N is odd.
When N is even, it is the average of the two middle values of V_sorted.'''
out= np.median(self.data,axis=0) if column else np.median(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4average(self,weights=None, column=False, tolist=False):
'''Average for each row/column.
Different to mean, it can be given a weight for each element!'''
out= np.average(self.data,weights=weights,axis=0) if column else np.average(self.data,weights=weights,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4mean(self,column=False, tolist=False):
'''Mean for each row/column'''
out= np.mean(self.data,axis=0) if column else np.mean(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4std(self,column=False, tolist=False):
'''Standard deviation for each row/column'''
out= np.std(self.data,axis=0) if column else np.std(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4var(self,column=False, tolist=False):
'''Variance for each row/column'''
out= np.var(self.data,axis=0) if column else np.var(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4ptp(self,column=False, tolist=False):
'''Max-Min for each row/column'''
out= np.ptp(self.data,axis=0) if column else np.ptp(self.data,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def each4percentile(self, q, column=False, tolist=False):
'''(Max-Min)*q/100 for each row/column'''
out= np.percentile(self.data,q,axis=0) if column else np.percentile(self.data,q,axis=1).reshape(self.data.shape[0],1)
return out.ravel().tolist() if tolist else Data2D(out,dtype=self.dtype)
def LeastSquares(self,dataY,rlamda=500):
'''Least-Squares method to calculate weight W'''
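        # Ridge-regularised least squares: the code below computes
        #   W = (X^T X + rlamda * I)^{-1} X^T Y
        # where X is self.data (m x n) and Y is dataY (m x 1); rlamda=0 would
        # reduce this to ordinary least squares.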
mx, nx=self.data.shape;
# data: (my, 1)
if (dataY.ndim==1):
my=dataY.size;
ny=1
# data: (my, ny)
elif (dataY.ndim==2):
my, ny=dataY.shape;
else:
raise ValueError, "data for y dimension > 2!";
wts=np.zeros((nx,1))
# data: (mx, nx) * (my, 1)
if (mx==my and ny==1):
# (self.TDA+rlamda*np.eye(nx)).I.dot(self.T.dot(Y)).T
wts=(np.linalg.inv(self.data.T.dot(self.data)+rlamda*np.eye(nx)).dot(self.data.T.dot(dataY)))
# data: (mx, nx) * (1, my)
elif( mx==ny and my==1):
wts=(np.linalg.inv(self.data.T.dot(self.data)+rlamda*np.eye(nx)).dot(self.data.T.dot(dataY.T)))
else:
raise ValueError, "Not match array size for x and y!";
return Data2D(data=wts,name="weight");
#########################################
class LT2D(object):
"""Linear Transformation for ndarray/Data2D"""
def __init__(self, X, Y, rlamda=500):
X=self.normX(X);
self.X=X;
Y=self.normY(Y,checkXY=True);
if (self.checkXY(X,Y)):
self.Y=Y
self.rlamda=rlamda
# W for weights for input X/Y
self.W=self.LeastSquares(self.X, self.Y, rlamda=self.rlamda)
# train for training LT2D
self._train=None
else:
raise ValueError("Error for matching X-Y size..")
@property
def row(self):
'''row number of X'''
return self.X.shape[0]
@property
def col(self):
'''column number of X'''
return self.X.shape[1]
@property
def error(self):
'''Error array from current Y'=XW and real Y'''
return self.calcError()
@property
def rms(self):
'''Self RMS'''
return self.calcRMS()
@property
def WT(self):
'''WT for weights from training set for test set/validation'''
if (self.train): return self.train.W
@property
def train(self):
# Saving a training LT2D object
if (not self._train):
raise ValueError('Train model is not set!')
return None
return self._train
@train.setter
def train(self,value):
if (not isinstance(value,LT2D)):
raise ValueError("Error, argument should be LT2D class")
self._train=value;
def normX(self, X):
'''Normalize X as ndarray'''
if (not (isinstance(X,Data2D) or isinstance(X,np.ndarray))):
X=np.array(X)
return X
def normY(self, Y, checkXY=False):
'''Normalize Y as ndarray ~ (n,1) '''
if (not (isinstance(Y,Data2D) or isinstance(Y,np.ndarray))):
Y=np.array(Y)
if (Y.shape[1]!=1 and Y.shape[0]==1):
Y.resize(Y.shape[1],Y.shape[0]);
if (Y.shape[1]!=1):
raise ValueError("Y column is not 1!")
# When set Y, check whether consistent to X.
# Need checkXY=True and set X before.
if (checkXY and not self.checkXY(X=self.X,Y=Y)):
raise ValueError("Y row is not equal to X row!")
return Y
def checkXY(self, X, Y):
'''Check X-Y shape consistent'''
return X.row==Y.row
###### Reset some value and recalculate
def resetX(self, X, rlamda=None):
'''reset X and W'''
if (rlamda is None):
rlamda=self.rlamda;
self.X=self.normX(X);
self.W=self.LeastSquares(self.X,self.Y,rlamda=rlamda)
def resetY(self,Y, rlamda=None):
'''reset Y and W'''
if (not rlamda): rlamda=self.rlamda;
self.Y=self.normY(Y)
self.W=self.LeastSquares(self.X, self.Y, rlamda=rlamda)
def resetR(self,rlamda):
'''recalculate weight based on given rlamda'''
self.W=self.LeastSquares(self.X, self.Y, rlamda=rlamda)
##### Calculate Linear Tranformation methods
def LeastSquares(self,dataX=None,dataY=None,rlamda=500):
'''Least Sqaures for y data and x data.
- dataX/Y is ndarray/Data2D object
- Return the weight for x'''
#try:
if (dataX is None):
dataX=self.X
if (dataY is None):
dataY=self.Y
# data: (mx,1)
if (dataX.ndim==1):
mx=dataX.size;
nx=1
# data: (mx, nx)
elif (dataX.ndim==2):
mx, nx=dataX.shape;
else:
raise ValueError, "data for x dimension > 2!";
# data: (my, 1)
if (dataY.ndim==1):
my=dataY.size;
ny=1
# data: (my, ny)
elif (dataY.ndim==2):
my, ny=dataY.shape;
else:
raise ValueError, "data for y dimension > 2!";
# data: (mx, nx) * (my, 1)
wts=None
if (mx==my and ny==1):
wts=np.zeros((nx,1))
wts=(np.linalg.inv(dataX.T.dot(dataX)+rlamda*np.eye(nx)).dot(dataX.T.dot(dataY)))
#wts=np.zeros((1,nx))
#wts[:, 0]=(np.linalg.inv(dataX.T.dot(dataX)+rlamda*np.eye(nx)).dot(dataX.T.dot(dataY))).T
# data: (mx, nx) * (1, my)
elif (mx==ny and my==1):
#wts=np.zeros((1,nx))
#wts[:,0]=(np.linalg.inv(dataX.T.dot(dataX)+rlamda*np.eye(nx)).dot(dataX.T.dot(dataY.T))).T
wts=np.zeros((nx,1))
wts=(np.linalg.inv(dataX.T.dot(dataX)+rlamda*np.eye(nx)).dot(dataX.T.dot(dataY.T)))
### Should never happen if give correct X and Y
# data: (my, ny)*(mx, 1)
elif ( mx==ny and nx==1):
#wts=np.zeros((1,ny))
#wts[:,0]=(np.linalg.inv(dataY.T.dot(dataY)+rlamda*np.eye(ny)).dot(y.T.dot(dataX))).T
wts=np.zeros((ny,1))
            wts=(np.linalg.inv(dataY.T.dot(dataY)+rlamda*np.eye(ny)).dot(dataY.T.dot(dataX)))
# data: (mx, nx) * (1, my)
elif (mx==my and mx==1):
#wts=np.zeros((1,ny))
#wts[:,0]=(np.linalg.inv(dataY.T.dot(dataY)+rlamda*np.eye(ny)).dot(y.T.dot(dataX.T))).T
wts=np.zeros((ny,1))
            wts=(np.linalg.inv(dataY.T.dot(dataY)+rlamda*np.eye(ny)).dot(dataY.T.dot(dataX.T)))
else:
raise ValueError, "Not match array size for x and y!";
return Data2D(data=wts,name="weight");
#except:
# traceback.print_exc()
# exit(1)
def calcY(self,weight):
'''Calculate the Y' for given Weight based on current X'''
if (isinstance(weight,list) or isinstance(weight,tuple)):
weight=Data2D(weight,name="weight")
if (weight.shape[0] is 1):
weight.resize((weight.size,1));
Y=self.X.dot(weight)
return Y
def calcWeight(self,Y, rlamda=None):
'''Calculate the Weight for given Y based on current X'''
if (isinstance(Y,list) or isinstance(Y,tuple)):
Y=Data2D(Y,name="Y")
if (rlamda is None):
rlamda=self.rlamda;
if (Y.shape[0] is 1):
Y.resize((Y.size,1));
return self.LeastSquares(self.X, Y, rlamda=rlamda);
def calcError(self, Y=None, Abs=False):
'''Calculate the Error array for given Y and current Y
- Abs: Calculate Absolute Error array.
- if Y is not given, calculate current delta Y'''
if (isinstance(Y,list) or isinstance(Y,tuple)):
Y=Data2D(Y,name="Y")
if (Y is None):
Y=self.calcY(self.W);
if (Y.shape[0] is 1):
Y.resize((Y.size,1));
if (Abs):
return np.abs(Y-self.Y)
else:
return Y-self.Y
def calcRMS(self,Err=None, Y=None, weight=None):
'''Calculate the RMS based on given data
- If Err is given, calculate RMS directly.
- If no Err and Y given, calculate Error based on given Y firstly
- If no Err & Y, but weight given, calculate Y based on given weight firstly
- If no Err & Y & weight, calculate RMS based on current Y'=XW '''
if (isinstance(Err,list) or isinstance(Err,tuple)):
Err=Data2D(Err,name="Error")
if (isinstance(Y,list) or isinstance(Y,tuple)):
Y=Data2D(Y,name="Y")
if (isinstance(weight,list) or isinstance(weight,tuple)):
weight=Data2D(weight,name="weight")
if (Err is None and Y is None and weight is None):
weight=self.W
if (weight.shape[0] is 1): weight.resize((weight.size,1));
Err=self.X.dot(weight)-self.Y;
if (Err is None and Y is None and weight is not None):
if (weight.shape[0] is 1): weight.resize((weight.size,1));
Y=self.calcY(weight)
if (Err is None and Y is not None):
if (Y.shape[0] is 1): Y.resize((Y.size,1));
Err=self.calcError(Y)
# Calculate the RMS for deltaY Error Array
return np.sqrt((Err.T.dot(Err)).ravel()[0]/Err.shape[0])
def getErrorRMS(self,weight=None,rlamda=None):
'''Calculate Error and RMS based on current X and Y.
- When weight is given, using the given weight;
- When weight is not given and rlamda is given, recalculate the weight
- When weight and rlamda is not given, using the current W'''
if ( weight is None and rlamda is None):
weight=self.W;
elif ( weight is None and rlamda is not None):
weight=self.calcWeight(self.Y,rlamda);
return self.calcError(self.calcY(weight)),self.calcRMS(weight=weight)
##### Methods for train-test set
def testY(self, train=None):
'''Calculate the Y' based on training set W'''
if (train is None):
WT=self.WT
else:
WT=train.W
return self.calcY(WT)
def testError(self, train=None):
'''Calculate the Error array based on training set W'''
return self.calcError(self.testY(train));
def testRMS(self, train=None):
'''Calculate the RMS based on training set W'''
Err=self.testError(train);
return np.sqrt((Err.T.dot(Err)).ravel()[0]/Err.shape[0])
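if __name__ == '__main__':
    # Minimal usage sketch assembled from the module's own sample data; it is
    # kept down here (rather than at the top of the file) so that Data2D is
    # already defined when the instances are created.
    a = [[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]]
    b = Data2D(a)
    c = Data2D([1, 1.2, 1.4, 1.6]).T
    print b.shape                        # (4, 3)
    print b.each4mean()                  # mean of each row as a 4x1 Data2D
    print b.LeastSquares(c, rlamda=500)  # 3x1 ridge least-squares weights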
| gpl-2.0 | 3,786,618,927,551,604,700 | 29.261628 | 141 | 0.651633 | false |
nrz/ylikuutio | external/bullet3/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_randomize_terrain_gym_env_example.py | 2 | 2208 | """An example to run minitaur gym environment with randomized terrain.
"""
import math
import numpy as np
import tensorflow.compat.v1 as tf
from absl import app  # stand-in for the original google3.pyglib import
from absl import flags  # stand-in for the original google3.pyglib import
from pybullet_envs.minitaur.envs import minitaur_randomize_terrain_gym_env
FLAGS = flags.FLAGS
flags.DEFINE_enum("example_name", "reset", ["sine", "reset"],
"The name of the example: sine or reset.")
def ResetTerrainExample():
"""An example showing resetting random terrain env."""
num_reset = 10
steps = 100
env = minitaur_randomize_terrain_gym_env.MinitaurRandomizeTerrainGymEnv(
render=True, leg_model_enabled=False, motor_velocity_limit=np.inf, pd_control_enabled=True)
action = [math.pi / 2] * 8
  for _ in range(num_reset):
    env.reset()
    for _ in range(steps):
_, _, done, _ = env.step(action)
if done:
break
def SinePolicyExample():
"""An example of minitaur walking with a sine gait."""
env = minitaur_randomize_terrain_gym_env.MinitaurRandomizeTerrainGymEnv(
render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, on_rack=False)
sum_reward = 0
steps = 200
amplitude_1_bound = 0.5
amplitude_2_bound = 0.5
speed = 40
  for step_counter in range(steps):
time_step = 0.01
t = step_counter * time_step
amplitude1 = amplitude_1_bound
amplitude2 = amplitude_2_bound
steering_amplitude = 0
if t < 10:
steering_amplitude = 0.5
elif t < 20:
steering_amplitude = -0.5
else:
steering_amplitude = 0
# Applying asymmetrical sine gaits to different legs can steer the minitaur.
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude)
a2 = math.sin(t * speed + math.pi) * (amplitude1 - steering_amplitude)
a3 = math.sin(t * speed) * amplitude2
a4 = math.sin(t * speed + math.pi) * amplitude2
action = [a1, a2, a2, a1, a3, a4, a4, a3]
_, reward, _, _ = env.step(action)
sum_reward += reward
def main(unused_argv):
if FLAGS.example_name == "sine":
SinePolicyExample()
elif FLAGS.example_name == "reset":
ResetTerrainExample()
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
  app.run(main)
| agpl-3.0 | 8,645,204,355,993,900,000 | 27.675325 | 97 | 0.663496 | false |
drlagos/unify-playpen | playpen.py | 1 | 1433 | import subprocess
def playpen(command, arguments):
return subprocess.Popen(("playpen",
"root",
"--mount-proc",
"--user=rust",
"--timeout=5",
"--syscalls-file=whitelist",
"--devices=/dev/urandom:r",
"--memory-limit=128",
"--",
command) + arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
def echo(command, arguments):
return subprocess.Popen(("echo", command) + arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
def raw_exec(command, arguments):
return subprocess.Popen((command,) + arguments,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
def execute(command, arguments, data):
with raw_exec(command, arguments) as p:
if data is None:
out = p.communicate()[0]
else:
out = p.communicate(data.encode('utf-8'))[0]
return (out, p.returncode)
| mit | 6,710,819,243,707,378,000 | 37.72973 | 58 | 0.432659 | false |
tedder/ansible | lib/ansible/modules/network/checkpoint/checkpoint_host_facts.py | 30 | 2740 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: checkpoint_host_facts
short_description: Get host objects facts on Checkpoint over Web Services API
description:
- Get host objects facts on Checkpoint devices.
All operations are performed over Web Services API.
version_added: "2.8"
author: "Ansible by Red Hat (@rcarrillocruz)"
options:
name:
description:
- Name of the host object. If name is not provided, UID is required.
type: str
uid:
description:
- UID of the host object. If UID is not provided, name is required.
type: str
"""
EXAMPLES = """
- name: Get host object facts
checkpoint_host_facts:
name: attacker
"""
RETURN = """
ansible_hosts:
description: The checkpoint host object facts.
returned: always.
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.six.moves.urllib.error import HTTPError
import json
def get_host(module, connection):
name = module.params['name']
uid = module.params['uid']
if uid:
payload = {'uid': uid}
elif name:
payload = {'name': name}
code, result = connection.send_request('/web_api/show-host', payload)
return code, result
def main():
argument_spec = dict(
name=dict(type='str'),
uid=dict(type='str'),
)
required_one_of = [('name', 'uid')]
module = AnsibleModule(argument_spec=argument_spec, required_one_of=required_one_of)
connection = Connection(module._socket_path)
code, response = get_host(module, connection)
if code == 200:
module.exit_json(ansible_facts=dict(checkpoint_hosts=response))
else:
module.fail_json(msg='Checkpoint device returned error {0} with message {1}'.format(code, response))
if __name__ == '__main__':
main()
| gpl-3.0 | -1,846,599,469,748,569,900 | 25.862745 | 108 | 0.683212 | false |
Bismarrck/tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/map_vectorization_test.py | 3 | 16516 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `MapVectorization` optimization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.data.experimental.ops import optimization
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import bitwise_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test
def _generate_unary_cwise_math_cases():
# TODO(rachelim): Consolidate tests with pfor when APIs are somewhat shared.
bitwise_cases = [("Invert", bitwise_ops.invert)]
logical_cases = [("LogicalNot", math_ops.logical_not)]
complex_cases = [
("Angle", math_ops.angle),
("ComplexAbs", math_ops.abs),
("Conj", math_ops.conj),
("Imag", math_ops.imag),
("Real", math_ops.real),
]
real_cases = [
("Abs", math_ops.abs),
("Acos", math_ops.acos),
("Acosh", lambda x: math_ops.acosh(1 + math_ops.square(x))),
("Asin", math_ops.asin),
("Asinh", math_ops.asinh),
("Atan", math_ops.atan),
("Atanh", math_ops.atanh),
("BesselI0e", math_ops.bessel_i0e),
("BesselI1e", math_ops.bessel_i1e),
("Ceil", math_ops.ceil),
("Cos", math_ops.cos),
("Cosh", math_ops.cosh),
("Digamma", math_ops.digamma),
("Elu", nn.elu),
("Erf", math_ops.erf),
("Erfc", math_ops.erfc),
("Exp", math_ops.exp),
("Expm1", math_ops.expm1),
("Floor", math_ops.floor),
("Inv", math_ops.inv),
("IsFinite", math_ops.is_finite),
("IsInf", math_ops.is_inf),
("Lgamma", math_ops.lgamma),
("Log", math_ops.log),
("Log1p", math_ops.log1p),
("Neg", math_ops.negative),
("Reciprocal", math_ops.reciprocal),
("Relu", nn.relu),
("Relu6", nn.relu6),
("Rint", math_ops.rint),
("Round", math_ops.round),
("Rsqrt", math_ops.rsqrt),
("Selu", nn.selu),
("Sigmoid", math_ops.sigmoid),
("Sign", math_ops.sign),
("Sin", math_ops.sin),
("Sinh", math_ops.sinh),
("Softplus", nn.softplus),
("Softsign", nn.softsign),
("Sqrt", math_ops.sqrt),
("Square", math_ops.square),
("Tan", math_ops.tan),
("Tanh", math_ops.tanh),
]
random_input = np.random.rand(3, 5)
complex_component = np.random.rand(3, 5)
random_int = np.random.randint(0, 10, (7, 3, 5))
def bitwise_dataset_factory():
return dataset_ops.Dataset.from_tensor_slices(random_int)
def logical_dataset_factory():
return dataset_ops.Dataset.from_tensor_slices(random_input > 0)
def random_dataset_factory():
return dataset_ops.Dataset.from_tensor_slices(random_input)
def complex_dataset_factory():
return dataset_ops.Dataset.from_tensor_slices(
math_ops.complex(random_input, complex_component))
case_factory_pairs = [
(bitwise_cases, bitwise_dataset_factory),
(logical_cases, logical_dataset_factory),
(complex_cases, complex_dataset_factory),
(real_cases, random_dataset_factory),
]
return [(case[0], case[1], factory)
for cases, factory in case_factory_pairs
for case in cases]
def _generate_binary_cwise_math_cases():
bitwise_cases = [("BitwiseAnd", bitwise_ops.bitwise_and),
("BitwiseOr", bitwise_ops.bitwise_or),
("BitwiseXor", bitwise_ops.bitwise_xor),
("LeftShift", bitwise_ops.left_shift),
("RightShift", bitwise_ops.right_shift)]
logical_cases = [("LogicalAnd", math_ops.logical_and),
("LogicalOr", math_ops.logical_or)]
# Wrapper functions restricting the range of inputs of zeta and polygamma.
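  # polygamma(n, x) needs a non-negative integer order n and x > 0, while
  # zeta(x, q) is only defined for x > 1 and q > 0, so the wrappers round,
  # clip and shift their inputs into those ranges.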
def safe_polygamma(x, y):
return math_ops.polygamma(
math_ops.round(clip_ops.clip_by_value(y, 1, 10)), x * x + 1)
def safe_zeta(x, y):
return math_ops.zeta(x * x + 1, y * y)
real_cases = [
("Add", math_ops.add),
("AddV2", math_ops.add_v2),
("Atan2", math_ops.atan2),
("Complex", math_ops.complex),
("DivNoNan", math_ops.div_no_nan),
("Equal", math_ops.equal),
("FloorDiv", math_ops.floor_div),
("FloorMod", math_ops.floor_mod),
("Greater", math_ops.greater),
("GreaterEqual", math_ops.greater_equal),
("Igamma", math_ops.igamma),
("Igammac", math_ops.igammac),
("IgammaGradA", math_ops.igamma_grad_a),
("Less", math_ops.less),
("LessEqual", math_ops.less_equal),
("Maximum", math_ops.maximum),
("Minimum", math_ops.minimum),
("Mod", math_ops.mod),
("Mul", math_ops.multiply),
("NotEqual", math_ops.not_equal),
("Polygamma", safe_polygamma),
("Pow", math_ops.pow),
("RealDiv", math_ops.divide),
("SquareDifference", math_ops.squared_difference),
("Sub", math_ops.subtract),
("TruncateMod", math_ops.truncate_mod),
("Zeta", safe_zeta),
]
# Exercises broadcasting capabilities
x = np.random.rand(7, 3, 5)
y = np.random.rand(3, 5)
x_int = np.random.randint(0, 10, (7, 3, 5))
y_int = np.random.randint(0, 10, (3, 5))
def bitwise_dataset_factory():
return dataset_ops.Dataset.from_tensors((x_int, y_int))
def logical_dataset_factory():
return dataset_ops.Dataset.from_tensors((x > 0, y > 0))
def random_dataset_factory():
return dataset_ops.Dataset.from_tensors((x, y))
case_factory_pairs = [
(bitwise_cases, bitwise_dataset_factory),
(logical_cases, logical_dataset_factory),
(real_cases, random_dataset_factory),
]
return [(case[0], case[1], factory)
for cases, factory in case_factory_pairs
for case in cases]
def _generate_cwise_test_cases():
  return (_generate_unary_cwise_math_cases() +
          _generate_binary_cwise_math_cases())
def _generate_csv_test_case():
def csv_factory():
return dataset_ops.Dataset.from_tensor_slices(["1.0:2:a",
"2.4:5:c"]).repeat(5)
def decode_csv_fn(x):
return parsing_ops.decode_csv(
x,
record_defaults=[
constant_op.constant([], dtypes.float32),
constant_op.constant([], dtypes.int32),
constant_op.constant([], dtypes.string)
],
field_delim=":")
return decode_csv_fn, csv_factory
def _generate_parse_single_example_test_case():
def parse_example_factory():
def _int64_feature(*values):
return feature_pb2.Feature(int64_list=feature_pb2.Int64List(value=values))
def _bytes_feature(*values):
return feature_pb2.Feature(
bytes_list=feature_pb2.BytesList(
value=[v.encode("utf-8") for v in values]))
return dataset_ops.Dataset.from_tensor_slices(
constant_op.constant([
example_pb2.Example(
features=feature_pb2.Features(
feature={
"dense_int": _int64_feature(i),
"dense_str": _bytes_feature(str(i)),
"sparse_int": _int64_feature(i, i * 2, i * 4, i * 8),
"sparse_str": _bytes_feature(*["abc"] * i)
})).SerializeToString() for i in range(10)
]))
def parse_single_example_fn(x):
features = {
"dense_int": parsing_ops.FixedLenFeature((), dtypes.int64, 0),
"dense_str": parsing_ops.FixedLenFeature((), dtypes.string, ""),
"sparse_int": parsing_ops.VarLenFeature(dtypes.int64),
"sparse_str": parsing_ops.VarLenFeature(dtypes.string),
}
return parsing_ops.parse_single_example(x, features)
return parse_single_example_fn, parse_example_factory
def _generate_optimization_test_cases():
def base_dataset_factory():
return dataset_ops.Dataset.from_tensors(np.random.rand(10, 3)).repeat(5)
rand_val = np.random.rand(1, 1, 1, 1, 1, 1)
csv_test_case = _generate_csv_test_case()
parse_fn, parse_base = _generate_parse_single_example_test_case()
def dense_output_only_parse_fn(x):
# Since we haven't implemented a vectorizer for SerializeSparse, any
# function with sparse outputs will only be naively vectorized.
parse_result = parse_fn(x)
return [
y for y in parse_result if not isinstance(y, sparse_tensor.SparseTensor)
]
def map_fn_with_cycle(x):
c = lambda i: math_ops.less(i, 10)
b = lambda i: math_ops.add(i, 1)
return control_flow_ops.while_loop(c, b, [x])
# Misc test cases
test_cases = [
("Basic", lambda x: (x, x + 1), base_dataset_factory),
("Broadcast", lambda x: x + rand_val, base_dataset_factory),
("Cycle", map_fn_with_cycle, lambda: dataset_ops.Dataset.from_tensors(1)),
("Const", lambda x: 2, base_dataset_factory),
("Cast", lambda x: math_ops.cast(x, dtypes.float64),
base_dataset_factory),
("Reshape", lambda x: array_ops.reshape(x, (-1, 30)),
base_dataset_factory),
("Transpose", array_ops.transpose, base_dataset_factory),
("Unpack", array_ops.unstack, base_dataset_factory),
("UnpackNegativeAxis", lambda x: array_ops.unstack(x, axis=-1),
base_dataset_factory),
# Parsing ops
("DecodeCSV", csv_test_case[0], csv_test_case[1]),
("ParseSingleExample", parse_fn, parse_base),
("ParseSingleExampleDenseOutputOnly", dense_output_only_parse_fn,
parse_base),
] + _generate_cwise_test_cases()
return [{
"testcase_name":
x[0] + "Parallel" if num_parallel_calls is not None else x[0],
"map_fn":
x[1],
"base_dataset_factory":
x[2],
"num_parallel_calls":
num_parallel_calls
} for x in test_cases for num_parallel_calls in (None, 12)]
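# The map_vectorization optimization swaps Map and Batch so that the map
# function runs once per batch on stacked inputs instead of once per element.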
@test_util.run_all_in_graph_and_eager_modes
class MapVectorizationTest(test_base.DatasetTestBase, parameterized.TestCase):
def _get_test_datasets(self,
base_dataset,
map_fn,
num_parallel_calls=None,
expect_optimized=True):
"""Given base dataset and map fn, creates test datasets.
Returns a tuple of (unoptimized dataset, optimized dataset). The
unoptimized dataset has the assertion that Batch follows Map. The optimized
dataset has the assertion that Map follows Batch, and has the
"map_vectorization" optimization applied.
Args:
base_dataset: Input dataset to map->batch
map_fn: Map function to use
num_parallel_calls: (Optional.) num_parallel_calls argument for map
expect_optimized: (Optional.) Whether we expect the optimization to take
place, in which case we will assert that Batch is followed by Map,
otherwise Map followed by Batch. Defaults to True.
Returns:
Tuple of (unoptimized dataset, optimized dataset).
"""
map_node_name = "Map" if num_parallel_calls is None else "ParallelMap"
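    # `assert_next` raises at runtime if the upcoming transformations do not
    # match `node_names`, which lets the test verify whether the optimizer
    # reordered the Map and Batch nodes.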
def _make_dataset(node_names):
dataset = base_dataset.apply(optimization.assert_next(node_names))
dataset = dataset.map(map_fn, num_parallel_calls)
dataset = dataset.batch(100)
options = dataset_ops.Options()
options.experimental_optimization.map_and_batch_fusion = False
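      # Disable map-and-batch fusion so Map and Batch stay separate nodes and
      # assert_next can observe their order.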
dataset = dataset.with_options(options)
return dataset
unoptimized = _make_dataset([map_node_name, "Batch"])
optimized = _make_dataset(["Batch", map_node_name]
if expect_optimized else [map_node_name, "Batch"])
options = dataset_ops.Options()
options.experimental_optimization.map_vectorization = True
optimized = optimized.with_options(options)
return unoptimized, optimized
@parameterized.named_parameters(_generate_optimization_test_cases())
def testOptimization(self, map_fn, base_dataset_factory, num_parallel_calls):
base_dataset = base_dataset_factory()
unoptimized, optimized = self._get_test_datasets(base_dataset, map_fn,
num_parallel_calls)
self.assertDatasetsEqual(unoptimized, optimized)
# TODO(b/117581999): Add eager coverage for the following tests.
def testSkipEagerOptimizationBadMapFn(self):
# Test map functions that give an error
def map_fn(x):
# x has leading dimension 5, this will raise an error
return array_ops.gather(x, 10)
base_dataset = dataset_ops.Dataset.range(5).repeat(5).batch(
5, drop_remainder=True)
_, optimized = self._get_test_datasets(base_dataset, map_fn)
nxt = dataset_ops.make_one_shot_iterator(optimized).get_next()
with self.assertRaisesRegexp(errors.InvalidArgumentError,
r"indices = 10 is not in \[0, 5\)"):
self.evaluate(nxt)
def testOptimizationWithCapturedInputs(self):
# Tests that vectorization works with captured inputs
y = constant_op.constant(1, shape=(2,))
z = constant_op.constant(2, shape=(2,))
def map_fn(x):
return x, y, z
base_dataset = dataset_ops.Dataset.from_tensor_slices([[1, 2],
[3, 4]]).repeat(5)
unoptimized, optimized = self._get_test_datasets(
base_dataset, map_fn, expect_optimized=True)
self.assertDatasetsEqual(optimized, unoptimized)
# TODO(b/117581999): Add eager coverage for the following tests.
def testSkipEagerOptimizationIgnoreStateful(self):
def map_fn(x):
with ops.control_dependencies([check_ops.assert_equal(x, 0)]):
return array_ops.identity(x)
base_dataset = dataset_ops.Dataset.from_tensor_slices([[1, 2],
[3, 4]]).repeat(5)
unoptimized, optimized = self._get_test_datasets(
base_dataset, map_fn, expect_optimized=False)
self.assertDatasetsRaiseSameError(
unoptimized, optimized, errors.InvalidArgumentError,
[("OneShotIterator", "OneShotIterator_1", 1),
("IteratorGetNext", "IteratorGetNext_1", 1)])
def testOptimizationIgnoreRagged(self):
# Make sure we ignore inputs that might not be uniformly sized
def map_fn(x):
return array_ops.gather(x, 0)
# output_shape = (?,)
base_dataset = dataset_ops.Dataset.range(20).batch(3, drop_remainder=False)
unoptimized, optimized = self._get_test_datasets(
base_dataset, map_fn, expect_optimized=False)
self.assertDatasetsEqual(unoptimized, optimized)
# TODO(b/117581999): Add eager coverage for the following tests.
def testSkipEagerOptimizationIgnoreRaggedMap(self):
# Don't optimize when the output of the map fn shapes are unknown.
def map_fn(x):
return array_ops.tile(x, x)
base_dataset = dataset_ops.Dataset.range(20).batch(1, drop_remainder=True)
unoptimized, optimized = self._get_test_datasets(
base_dataset, map_fn, expect_optimized=False)
self.assertDatasetsRaiseSameError(
unoptimized, optimized, errors.InvalidArgumentError,
[("OneShotIterator", "OneShotIterator_1", 1),
("IteratorGetNext", "IteratorGetNext_1", 1)])
if __name__ == "__main__":
test.main()
| apache-2.0 | -4,651,972,429,710,658,000 | 36.282167 | 80 | 0.635989 | false |
virtualopensystems/nova | nova/tests/virt/vmwareapi/test_driver_api.py | 1 | 125894 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for VMwareAPI.
"""
import collections
import contextlib
import copy
import datetime
import time
import mock
import mox
from oslo.config import cfg
import suds
from nova import block_device
from nova.compute import api as compute_api
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.image import glance
from nova.network import model as network_model
from nova.openstack.common import jsonutils
from nova.openstack.common import timeutils
from nova.openstack.common import units
from nova.openstack.common import uuidutils
from nova import test
from nova.tests import fake_instance
import nova.tests.image.fake
from nova.tests import matchers
from nova.tests import test_flavors
from nova.tests import utils
from nova.tests.virt import test_driver
from nova.tests.virt.vmwareapi import fake as vmwareapi_fake
from nova.tests.virt.vmwareapi import stubs
from nova import utils as nova_utils
from nova.virt import driver as v_driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import ds_util
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import imagecache
from nova.virt.vmwareapi import read_write_util
from nova.virt.vmwareapi import vif
from nova.virt.vmwareapi import vim
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import vmware_images
from nova.virt.vmwareapi import volumeops
CONF = cfg.CONF
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('remove_unused_original_minimum_age_seconds',
'nova.virt.imagecache')
class fake_vm_ref(object):
def __init__(self):
self.value = 4
self._type = 'VirtualMachine'
class fake_service_content(object):
def __init__(self):
self.ServiceContent = vmwareapi_fake.DataObject()
self.ServiceContent.fake = 'fake'
class VMwareSudsTest(test.NoDBTestCase):
def setUp(self):
super(VMwareSudsTest, self).setUp()
def new_client_init(self, url, **kwargs):
return
mock.patch.object(suds.client.Client,
'__init__', new=new_client_init).start()
self.vim = self._vim_create()
self.addCleanup(mock.patch.stopall)
def _vim_create(self):
def fake_retrieve_service_content(fake):
return fake_service_content()
self.stubs.Set(vim.Vim, 'retrieve_service_content',
fake_retrieve_service_content)
return vim.Vim()
def test_exception_with_deepcopy(self):
self.assertIsNotNone(self.vim)
self.assertRaises(error_util.VimException,
copy.deepcopy, self.vim)
def _fake_create_session(inst):
session = vmwareapi_fake.DataObject()
session.key = 'fake_key'
session.userName = 'fake_username'
inst._session = session
class VMwareDriverStartupTestCase(test.NoDBTestCase):
def _start_driver_with_flags(self, expected_exception_type, startup_flags):
self.flags(**startup_flags)
with mock.patch(
'nova.virt.vmwareapi.driver.VMwareAPISession.__init__'):
e = self.assertRaises(
Exception, driver.VMwareVCDriver, None) # noqa
self.assertIs(type(e), expected_exception_type)
def test_start_driver_no_user(self):
self._start_driver_with_flags(
Exception,
dict(host_ip='ip', host_password='password',
group='vmware'))
def test_start_driver_no_host(self):
self._start_driver_with_flags(
Exception,
dict(host_username='username', host_password='password',
group='vmware'))
def test_start_driver_no_password(self):
self._start_driver_with_flags(
Exception,
dict(host_ip='ip', host_username='username',
group='vmware'))
def test_start_driver_with_user_host_password(self):
        # Receiving InvalidInput (raised for the malformed datastore_regex)
        # shows that no exception was raised about a missing
        # user/password/host.
self._start_driver_with_flags(
nova.exception.InvalidInput,
dict(host_ip='ip', host_password='password',
host_username="user", datastore_regex="bad(regex",
group='vmware'))
class VMwareSessionTestCase(test.NoDBTestCase):
def _fake_is_vim_object(self, module):
return True
@mock.patch('time.sleep')
def test_call_method_vim_fault(self, mock_sleep):
def _fake_session_is_active(self):
return False
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_is_vim_object',
self._fake_is_vim_object),
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
mock.patch.object(driver.VMwareAPISession, '_session_is_active',
_fake_session_is_active)
) as (_fake_vim, _fake_create, _fake_is_active):
api_session = driver.VMwareAPISession()
args = ()
kwargs = {}
self.assertRaises(error_util.VimFaultException,
api_session._call_method,
stubs, 'fake_temp_method_exception',
*args, **kwargs)
def test_call_method_vim_empty(self):
def _fake_session_is_active(self):
return True
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_is_vim_object',
self._fake_is_vim_object),
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
mock.patch.object(driver.VMwareAPISession, '_session_is_active',
_fake_session_is_active)
) as (_fake_vim, _fake_create, _fake_is_active):
api_session = driver.VMwareAPISession()
args = ()
kwargs = {}
res = api_session._call_method(stubs, 'fake_temp_method_exception',
*args, **kwargs)
self.assertEqual([], res)
@mock.patch('time.sleep')
def test_call_method_session_exception(self, mock_sleep):
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_is_vim_object',
self._fake_is_vim_object),
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
) as (_fake_vim, _fake_create):
api_session = driver.VMwareAPISession()
args = ()
kwargs = {}
self.assertRaises(error_util.SessionConnectionException,
api_session._call_method,
stubs, 'fake_temp_session_exception',
*args, **kwargs)
def test_call_method_session_file_exists_exception(self):
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_is_vim_object',
self._fake_is_vim_object),
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
) as (_fake_vim, _fake_create):
api_session = driver.VMwareAPISession()
args = ()
kwargs = {}
self.assertRaises(error_util.FileAlreadyExistsException,
api_session._call_method,
stubs, 'fake_session_file_exception',
*args, **kwargs)
def test_call_method_session_no_permission_exception(self):
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_is_vim_object',
self._fake_is_vim_object),
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
) as (_fake_vim, _fake_create):
api_session = driver.VMwareAPISession()
args = ()
kwargs = {}
e = self.assertRaises(error_util.NoPermissionException,
api_session._call_method,
stubs, 'fake_session_permission_exception',
*args, **kwargs)
fault_string = 'Permission to perform this operation was denied.'
details = {'privilegeId': 'Resource.AssignVMToPool',
'object': 'domain-c7'}
exception_string = '%s %s' % (fault_string, details)
self.assertEqual(exception_string, str(e))
class VMwareAPIConfTestCase(test.NoDBTestCase):
"""Unit tests for VMWare API configurations."""
def setUp(self):
super(VMwareAPIConfTestCase, self).setUp()
vm_util.vm_refs_cache_reset()
def tearDown(self):
super(VMwareAPIConfTestCase, self).tearDown()
def test_configure_without_wsdl_loc_override(self):
# Test the default configuration behavior. By default,
# use the WSDL sitting on the host we are talking to in
# order to bind the SOAP client.
wsdl_loc = cfg.CONF.vmware.wsdl_location
self.assertIsNone(wsdl_loc)
wsdl_url = vim.Vim.get_wsdl_url("https", "www.example.com", 443)
url = vim.Vim.get_soap_url("https", "www.example.com", 443)
self.assertEqual("https://www.example.com:443/sdk/vimService.wsdl",
wsdl_url)
self.assertEqual("https://www.example.com:443/sdk", url)
def test_configure_without_wsdl_loc_override_using_ipv6(self):
# Same as above but with ipv6 based host ip
wsdl_loc = cfg.CONF.vmware.wsdl_location
self.assertIsNone(wsdl_loc)
wsdl_url = vim.Vim.get_wsdl_url("https", "::1", 443)
url = vim.Vim.get_soap_url("https", "::1", 443)
self.assertEqual("https://[::1]:443/sdk/vimService.wsdl",
wsdl_url)
self.assertEqual("https://[::1]:443/sdk", url)
def test_configure_with_wsdl_loc_override(self):
        # Use the vmware.wsdl_location setting to override the
        # default path to the WSDL.
#
# This is useful as a work-around for XML parsing issues
# found when using some WSDL in combination with some XML
# parsers.
#
# The wsdl_url should point to a different host than the one we
# are actually going to send commands to.
fake_wsdl = "https://www.test.com:443/sdk/foo.wsdl"
self.flags(wsdl_location=fake_wsdl, group='vmware')
wsdl_loc = cfg.CONF.vmware.wsdl_location
self.assertIsNotNone(wsdl_loc)
self.assertEqual(fake_wsdl, wsdl_loc)
wsdl_url = vim.Vim.get_wsdl_url("https", "www.example.com", 443)
url = vim.Vim.get_soap_url("https", "www.example.com", 443)
self.assertEqual(fake_wsdl, wsdl_url)
self.assertEqual("https://www.example.com:443/sdk", url)
def test_configure_non_default_host_port(self):
def _fake_create_session(self):
pass
def _fake_retrieve_service_content(self):
return None
with contextlib.nested(
mock.patch.object(driver.VMwareAPISession, '_create_session',
_fake_create_session),
mock.patch.object(vim.Vim, 'retrieve_service_content',
_fake_retrieve_service_content),
mock.patch('suds.client.Client')
):
self.flags(host_ip='www.test.com',
host_port=12345, group='vmware')
host_ip = cfg.CONF.vmware.host_ip
host_port = cfg.CONF.vmware.host_port
self.assertEqual('www.test.com', host_ip)
self.assertEqual(12345, host_port)
api_session = driver.VMwareAPISession(host_ip=host_ip,
host_port=host_port)
vim_obj = api_session._get_vim_object()
self.assertEqual("https://www.test.com:12345/sdk/vimService.wsdl",
vim_obj.wsdl_url)
self.assertEqual("https://www.test.com:12345/sdk", vim_obj.url)
@mock.patch.object(driver, 'TIME_BETWEEN_API_CALL_RETRIES', 0)
class VMwareAPIVMTestCase(test.NoDBTestCase):
"""Unit tests for Vmware API connection calls."""
def setUp(self):
super(VMwareAPIVMTestCase, self).setUp()
vm_util.vm_refs_cache_reset()
self.context = context.RequestContext('fake', 'fake', is_admin=False)
cluster_name = 'test_cluster'
cluster_name2 = 'test_cluster2'
self.flags(cluster_name=[cluster_name, cluster_name2],
host_ip='test_url',
host_username='test_username',
host_password='test_pass',
api_retry_count=1,
use_linked_clone=False, group='vmware')
self.flags(vnc_enabled=False,
image_cache_subdirectory_name='vmware_base',
my_ip='')
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
stubs.set_stubs(self.stubs)
vmwareapi_fake.reset()
nova.tests.image.fake.stub_out_image_service(self.stubs)
self.conn = driver.VMwareVCDriver(None, False)
self.node_name = self.conn._resources.keys()[0]
self.node_name2 = self.conn._resources.keys()[1]
if cluster_name2 in self.node_name2:
self.ds = 'ds1'
else:
self.ds = 'ds2'
self.vim = vmwareapi_fake.FakeVim()
# NOTE(vish): none of the network plugging code is actually
# being tested
self.network_info = utils.get_test_network_info()
image_ref = nova.tests.image.fake.get_valid_image_id()
(image_service, image_id) = glance.get_remote_image_service(
self.context, image_ref)
metadata = image_service.show(self.context, image_id)
self.image = {
'id': image_ref,
'disk_format': 'vmdk',
'size': int(metadata['size']),
}
self.fake_image_uuid = self.image['id']
nova.tests.image.fake.stub_out_image_service(self.stubs)
self.vnc_host = 'ha-host'
self._set_exception_vars()
self.instance_without_compute = {'node': None,
'vm_state': 'building',
'project_id': 'fake',
'user_id': 'fake',
'name': '1',
'display_description': '1',
'kernel_id': '1',
'ramdisk_id': '1',
'mac_addresses': [
{'address': 'de:ad:be:ef:be:ef'}
],
'memory_mb': 8192,
'instance_type': 'm1.large',
'vcpus': 4,
'root_gb': 80,
'image_ref': self.image['id'],
'host': 'fake_host',
'task_state':
'scheduling',
'reservation_id': 'r-3t8muvr0',
'id': 1,
'uuid': 'fake-uuid',
'metadata': []}
def tearDown(self):
super(VMwareAPIVMTestCase, self).tearDown()
vmwareapi_fake.cleanup()
nova.tests.image.fake.FakeImageService_reset()
def test_get_host_ip_addr(self):
self.assertEqual('test_url', self.conn.get_host_ip_addr())
def test_init_host_with_no_session(self):
self.conn._session = mock.Mock()
self.conn._session.vim = None
self.conn.init_host('fake_host')
self.conn._session._create_session.assert_called_once_with()
def test_init_host(self):
try:
self.conn.init_host("fake_host")
except Exception as ex:
self.fail("init_host raised: %s" % ex)
def _set_exception_vars(self):
self.wait_task = self.conn._session._wait_for_task
self.call_method = self.conn._session._call_method
self.task_ref = None
self.exception = False
def test_cleanup_host(self):
self.conn.init_host("fake_host")
try:
self.conn.cleanup_host("fake_host")
except Exception as ex:
self.fail("cleanup_host raised: %s" % ex)
@mock.patch('nova.virt.vmwareapi.driver.VMwareVCDriver.__init__')
def test_cleanup_host_direct(self, mock_init):
mock_init.return_value = None
vcdriver = driver.VMwareVCDriver(None, False)
vcdriver._session = mock.Mock()
vcdriver.cleanup_host("foo")
vcdriver._session.vim.get_service_content.assert_called_once_with()
vcdriver._session.vim.client.service.Logout.assert_called_once_with(
vcdriver._session.vim.get_service_content().sessionManager
)
@mock.patch('nova.virt.vmwareapi.driver.VMwareVCDriver.__init__')
def test_cleanup_host_direct_with_bad_logout(self, mock_init):
mock_init.return_value = None
vcdriver = driver.VMwareVCDriver(None, False)
vcdriver._session = mock.Mock()
fault = suds.WebFault(mock.Mock(), mock.Mock())
vcdriver._session.vim.client.service.Logout.side_effect = fault
vcdriver.cleanup_host("foo")
def test_driver_capabilities(self):
self.assertTrue(self.conn.capabilities['has_imagecache'])
self.assertFalse(self.conn.capabilities['supports_recreate'])
def test_login_retries(self):
self.attempts = 0
self.login_session = vmwareapi_fake.FakeVim()._login()
def _fake_login(_self):
self.attempts += 1
if self.attempts == 1:
raise exception.NovaException('Here is my fake exception')
return self.login_session
def _fake_check_session(_self):
return True
self.stubs.Set(vmwareapi_fake.FakeVim, '_login', _fake_login)
self.stubs.Set(time, 'sleep', lambda x: None)
self.stubs.Set(vmwareapi_fake.FakeVim, '_check_session',
_fake_check_session)
self.conn = driver.VMwareAPISession()
self.assertEqual(self.attempts, 2)
def test_wait_for_task_exception(self):
self.flags(task_poll_interval=1, group='vmware')
self.login_session = vmwareapi_fake.FakeVim()._login()
self.stop_called = 0
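        # _wait_for_task should propagate the exception delivered by the poll
        # task and stop the polling loop exactly once.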
def _fake_login(_self):
return self.login_session
self.stubs.Set(vmwareapi_fake.FakeVim, '_login', _fake_login)
def fake_poll_task(task_ref, done):
done.send_exception(exception.NovaException('fake exception'))
def fake_stop_loop(loop):
self.stop_called += 1
return loop.stop()
self.conn = driver.VMwareAPISession()
self.stubs.Set(self.conn, "_poll_task",
fake_poll_task)
self.stubs.Set(self.conn, "_stop_loop",
fake_stop_loop)
self.assertRaises(exception.NovaException,
self.conn._wait_for_task, 'fake-ref')
self.assertEqual(self.stop_called, 1)
def _get_instance_type_by_name(self, type):
for instance_type in test_flavors.DEFAULT_FLAVORS:
if instance_type['name'] == type:
return instance_type
if type == 'm1.micro':
return {'memory_mb': 128, 'root_gb': 0, 'deleted_at': None,
'name': 'm1.micro', 'deleted': 0, 'created_at': None,
'ephemeral_gb': 0, 'updated_at': None,
'disabled': False, 'vcpus': 1, 'extra_specs': {},
'swap': 0, 'rxtx_factor': 1.0, 'is_public': True,
'flavorid': '1', 'vcpu_weight': None, 'id': 2}
def _create_instance(self, node=None, set_image_ref=True,
uuid=None, instance_type='m1.large'):
if not node:
node = self.node_name
if not uuid:
uuid = uuidutils.generate_uuid()
self.type_data = self._get_instance_type_by_name(instance_type)
values = {'name': 'fake_name',
'id': 1,
'uuid': uuid,
'project_id': self.project_id,
'user_id': self.user_id,
'kernel_id': "fake_kernel_uuid",
'ramdisk_id': "fake_ramdisk_uuid",
'mac_address': "de:ad:be:ef:be:ef",
'flavor': instance_type,
'node': node,
'memory_mb': self.type_data['memory_mb'],
'root_gb': self.type_data['root_gb'],
'ephemeral_gb': self.type_data['ephemeral_gb'],
'vcpus': self.type_data['vcpus'],
'swap': self.type_data['swap'],
}
if set_image_ref:
values['image_ref'] = self.fake_image_uuid
self.instance_node = node
self.uuid = uuid
self.instance = fake_instance.fake_instance_obj(
self.context, **values)
def _create_vm(self, node=None, num_instances=1, uuid=None,
instance_type='m1.large', powered_on=True):
"""Create and spawn the VM."""
read_file_handle = mock.MagicMock()
write_file_handle = mock.MagicMock()
def fake_read_handle(read_iter):
return read_file_handle
def _fake_write_mock(host, dc_name, ds_name, cookies,
file_path, file_size, scheme="https"):
self.vim.fake_transfer_file(ds_name=ds_name,
file_path=file_path)
image_ref = self.image['id']
self.assertIn(dc_name, ['dc1', 'dc2'])
self.assertIn(ds_name, ['ds1', 'ds2'])
self.assertEqual('Fake-CookieJar', cookies)
split_file_path = file_path.split('/')
self.assertEqual('vmware_temp', split_file_path[0])
self.assertEqual(image_ref, split_file_path[2])
self.assertEqual(int(self.image['size']), file_size)
return write_file_handle
if not node:
node = self.node_name
self._create_instance(node=node, uuid=uuid,
instance_type=instance_type)
self.assertIsNone(vm_util.vm_ref_cache_get(self.uuid))
with contextlib.nested(
mock.patch.object(read_write_util, 'VMwareHTTPWriteFile',
_fake_write_mock),
mock.patch.object(read_write_util, 'GlanceFileRead',
fake_read_handle),
mock.patch.object(vmware_images, 'start_transfer')
) as (fake_http_write, fake_glance_read, fake_start_transfer):
self.conn.spawn(self.context, self.instance, self.image,
injected_files=[], admin_password=None,
network_info=self.network_info,
block_device_info=None)
fake_start_transfer.assert_called_once_with(self.context,
read_file_handle, self.image['size'],
write_file_handle=write_file_handle)
self._check_vm_record(num_instances=num_instances,
powered_on=powered_on)
self.assertIsNotNone(vm_util.vm_ref_cache_get(self.uuid))
def _get_vm_record(self):
# Get record for VM
vms = vmwareapi_fake._get_objects("VirtualMachine")
for vm in vms.objects:
if vm.get('name') == self.uuid:
return vm
self.fail('Unable to find VM backing!')
def _check_vm_record(self, num_instances=1, powered_on=True):
"""Check if the spawned VM's properties correspond to the instance in
the db.
"""
instances = self.conn.list_instances()
self.assertEqual(len(instances), num_instances)
# Get Nova record for VM
vm_info = self.conn.get_info({'uuid': self.uuid,
'name': 1,
'node': self.instance_node})
vm = self._get_vm_record()
# Check that m1.large above turned into the right thing.
mem_kib = long(self.type_data['memory_mb']) << 10
vcpus = self.type_data['vcpus']
self.assertEqual(vm_info['max_mem'], mem_kib)
self.assertEqual(vm_info['mem'], mem_kib)
self.assertEqual(vm.get("summary.config.instanceUuid"), self.uuid)
self.assertEqual(vm.get("summary.config.numCpu"), vcpus)
self.assertEqual(vm.get("summary.config.memorySizeMB"),
self.type_data['memory_mb'])
self.assertEqual(
vm.get("config.hardware.device").VirtualDevice[2].obj_name,
"ns0:VirtualE1000")
if powered_on:
# Check that the VM is running according to Nova
self.assertEqual(power_state.RUNNING, vm_info['state'])
# Check that the VM is running according to vSphere API.
self.assertEqual('poweredOn', vm.get("runtime.powerState"))
else:
# Check that the VM is not running according to Nova
self.assertEqual(power_state.SHUTDOWN, vm_info['state'])
# Check that the VM is not running according to vSphere API.
self.assertEqual('poweredOff', vm.get("runtime.powerState"))
found_vm_uuid = False
found_iface_id = False
extras = vm.get("config.extraConfig")
for c in extras.OptionValue:
if (c.key == "nvp.vm-uuid" and c.value == self.instance['uuid']):
found_vm_uuid = True
if (c.key == "nvp.iface-id.0" and c.value == "vif-xxx-yyy-zzz"):
found_iface_id = True
self.assertTrue(found_vm_uuid)
self.assertTrue(found_iface_id)
def _check_vm_info(self, info, pwr_state=power_state.RUNNING):
"""Check if the get_info returned values correspond to the instance
object in the db.
"""
mem_kib = long(self.type_data['memory_mb']) << 10
self.assertEqual(info["state"], pwr_state)
self.assertEqual(info["max_mem"], mem_kib)
self.assertEqual(info["mem"], mem_kib)
self.assertEqual(info["num_cpu"], self.type_data['vcpus'])
def test_instance_exists(self):
self._create_vm()
self.assertTrue(self.conn.instance_exists(self.instance))
invalid_instance = dict(uuid='foo', name='bar', node=self.node_name)
self.assertFalse(self.conn.instance_exists(invalid_instance))
def test_list_instances(self):
instances = self.conn.list_instances()
self.assertEqual(len(instances), 0)
def test_list_instances_1(self):
self._create_vm()
instances = self.conn.list_instances()
self.assertEqual(len(instances), 1)
def test_list_instance_uuids(self):
self._create_vm()
uuids = self.conn.list_instance_uuids()
self.assertEqual(len(uuids), 1)
def test_list_instance_uuids_invalid_uuid(self):
self._create_vm(uuid='fake_id')
uuids = self.conn.list_instance_uuids()
self.assertEqual(len(uuids), 0)
def _cached_files_exist(self, exists=True):
cache = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.vmdk' % self.fake_image_uuid)
if exists:
self.assertTrue(vmwareapi_fake.get_file(str(cache)))
else:
self.assertFalse(vmwareapi_fake.get_file(str(cache)))
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_instance_dir_disk_created(self, mock_from_image):
"""Test image file is cached when even when use_linked_clone
is False
"""
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
linked_clone=False)
mock_from_image.return_value = img_props
self._create_vm()
path = ds_util.DatastorePath(self.ds, self.uuid, '%s.vmdk' % self.uuid)
self.assertTrue(vmwareapi_fake.get_file(str(path)))
self._cached_files_exist()
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_cache_dir_disk_created(self, mock_from_image):
"""Test image disk is cached when use_linked_clone is True."""
self.flags(use_linked_clone=True, group='vmware')
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=1 * units.Ki,
disk_type=constants.DISK_TYPE_SPARSE)
mock_from_image.return_value = img_props
self._create_vm()
path = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.vmdk' % self.fake_image_uuid)
root = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.80.vmdk' % self.fake_image_uuid)
self.assertTrue(vmwareapi_fake.get_file(str(path)))
self.assertTrue(vmwareapi_fake.get_file(str(root)))
def _iso_disk_type_created(self, instance_type='m1.large'):
self.image['disk_format'] = 'iso'
self._create_vm(instance_type=instance_type)
path = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.iso' % self.fake_image_uuid)
self.assertTrue(vmwareapi_fake.get_file(str(path)))
def test_iso_disk_type_created(self):
self._iso_disk_type_created()
path = ds_util.DatastorePath(self.ds, self.uuid, '%s.vmdk' % self.uuid)
self.assertTrue(vmwareapi_fake.get_file(str(path)))
def test_iso_disk_type_created_with_root_gb_0(self):
self._iso_disk_type_created(instance_type='m1.micro')
path = ds_util.DatastorePath(self.ds, self.uuid, '%s.vmdk' % self.uuid)
self.assertFalse(vmwareapi_fake.get_file(str(path)))
def test_iso_disk_cdrom_attach(self):
iso_path = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.iso' % self.fake_image_uuid)
def fake_attach_cdrom(vm_ref, instance, data_store_ref,
iso_uploaded_path):
self.assertEqual(iso_uploaded_path, str(iso_path))
self.stubs.Set(self.conn._vmops, "_attach_cdrom_to_vm",
fake_attach_cdrom)
self.image['disk_format'] = 'iso'
self._create_vm()
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_iso_disk_cdrom_attach_with_config_drive(self,
mock_from_image):
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=80 * units.Gi,
file_type='iso',
linked_clone=False)
mock_from_image.return_value = img_props
self.flags(force_config_drive=True)
iso_path = [
ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.iso' % self.fake_image_uuid),
ds_util.DatastorePath(self.ds, 'fake-config-drive')]
self.iso_index = 0
def fake_create_config_drive(instance, injected_files, password,
data_store_name, folder, uuid, cookies):
return 'fake-config-drive'
def fake_attach_cdrom(vm_ref, instance, data_store_ref,
iso_uploaded_path):
self.assertEqual(iso_uploaded_path, str(iso_path[self.iso_index]))
self.iso_index += 1
self.stubs.Set(self.conn._vmops, "_attach_cdrom_to_vm",
fake_attach_cdrom)
self.stubs.Set(self.conn._vmops, '_create_config_drive',
fake_create_config_drive)
self.image['disk_format'] = 'iso'
self._create_vm()
self.assertEqual(self.iso_index, 2)
def test_cdrom_attach_with_config_drive(self):
self.flags(force_config_drive=True)
iso_path = ds_util.DatastorePath(self.ds, 'fake-config-drive')
self.cd_attach_called = False
def fake_create_config_drive(instance, injected_files, password,
data_store_name, folder, uuid, cookies):
return 'fake-config-drive'
def fake_attach_cdrom(vm_ref, instance, data_store_ref,
iso_uploaded_path):
self.assertEqual(iso_uploaded_path, str(iso_path))
self.cd_attach_called = True
self.stubs.Set(self.conn._vmops, "_attach_cdrom_to_vm",
fake_attach_cdrom)
self.stubs.Set(self.conn._vmops, '_create_config_drive',
fake_create_config_drive)
self._create_vm()
self.assertTrue(self.cd_attach_called)
def test_spawn(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_spawn_vm_ref_cached(self):
uuid = uuidutils.generate_uuid()
self.assertIsNone(vm_util.vm_ref_cache_get(uuid))
self._create_vm(uuid=uuid)
self.assertIsNotNone(vm_util.vm_ref_cache_get(uuid))
def _spawn_power_state(self, power_on):
self._spawn = self.conn._vmops.spawn
self._power_on = power_on
def _fake_spawn(context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info=None,
instance_name=None, power_on=True):
return self._spawn(context, instance, image_meta,
injected_files, admin_password, network_info,
block_device_info=block_device_info,
instance_name=instance_name,
power_on=self._power_on)
with (
mock.patch.object(self.conn._vmops, 'spawn', _fake_spawn)
):
self._create_vm(powered_on=power_on)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
if power_on:
self._check_vm_info(info, power_state.RUNNING)
else:
self._check_vm_info(info, power_state.SHUTDOWN)
def test_spawn_no_power_on(self):
self._spawn_power_state(False)
def test_spawn_power_on(self):
self._spawn_power_state(True)
def test_spawn_root_size_0(self):
self._create_vm(instance_type='m1.micro')
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
cache = ('[%s] vmware_base/%s/%s.vmdk' %
(self.ds, self.fake_image_uuid, self.fake_image_uuid))
gb_cache = ('[%s] vmware_base/%s/%s.0.vmdk' %
(self.ds, self.fake_image_uuid, self.fake_image_uuid))
self.assertTrue(vmwareapi_fake.get_file(cache))
self.assertFalse(vmwareapi_fake.get_file(gb_cache))
def _spawn_with_delete_exception(self, fault=None):
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "DeleteDatastoreFile_Task":
self.exception = True
task_mdo = vmwareapi_fake.create_task(method, "error",
error_fault=fault)
return task_mdo.obj
return task_ref
with (
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
):
if fault:
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
else:
self.assertRaises(error_util.VMwareDriverException,
self._create_vm)
self.assertTrue(self.exception)
def test_spawn_with_delete_exception_not_found(self):
self._spawn_with_delete_exception(vmwareapi_fake.FileNotFound())
def test_spawn_with_delete_exception_file_fault(self):
self._spawn_with_delete_exception(vmwareapi_fake.FileFault())
def test_spawn_with_delete_exception_cannot_delete_file(self):
self._spawn_with_delete_exception(vmwareapi_fake.CannotDeleteFile())
def test_spawn_with_delete_exception_file_locked(self):
self._spawn_with_delete_exception(vmwareapi_fake.FileLocked())
def test_spawn_with_delete_exception_general(self):
self._spawn_with_delete_exception()
def test_spawn_disk_extend(self):
self.mox.StubOutWithMock(self.conn._vmops, '_extend_virtual_disk')
requested_size = 80 * units.Mi
self.conn._vmops._extend_virtual_disk(mox.IgnoreArg(),
requested_size, mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_spawn_disk_extend_exists(self):
root = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.80.vmdk' % self.fake_image_uuid)
def _fake_extend(instance, requested_size, name, dc_ref):
vmwareapi_fake._add_file(str(root))
self.stubs.Set(self.conn._vmops, '_extend_virtual_disk',
_fake_extend)
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.assertTrue(vmwareapi_fake.get_file(str(root)))
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_spawn_disk_extend_sparse(self, mock_from_image):
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=units.Ki,
disk_type=constants.DISK_TYPE_SPARSE,
linked_clone=True)
mock_from_image.return_value = img_props
with contextlib.nested(
mock.patch.object(self.conn._vmops, '_extend_virtual_disk'),
mock.patch.object(self.conn._vmops, 'get_datacenter_ref_and_name'),
) as (mock_extend, mock_get_dc):
dc_val = mock.Mock()
dc_val.ref = "fake_dc_ref"
dc_val.name = "dc1"
mock_get_dc.return_value = dc_val
self._create_vm()
iid = img_props.image_id
cached_image = ds_util.DatastorePath(self.ds, 'vmware_base',
iid, '%s.80.vmdk' % iid)
mock_extend.assert_called_once_with(
self.instance, self.instance.root_gb * units.Mi,
str(cached_image), "fake_dc_ref")
def test_spawn_disk_extend_failed_copy(self):
# Spawn instance
# copy for extend fails without creating a file
#
# Expect the copy error to be raised
self.flags(use_linked_clone=True, group='vmware')
self.wait_task = self.conn._session._wait_for_task
self.call_method = self.conn._session._call_method
CopyError = error_util.FileFaultException
def fake_wait_for_task(task_ref):
if task_ref == 'fake-copy-task':
raise CopyError('Copy failed!')
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
if method == "CopyVirtualDisk_Task":
return 'fake-copy-task'
return self.call_method(module, method, *args, **kwargs)
with contextlib.nested(
mock.patch.object(self.conn._session, '_call_method',
new=fake_call_method),
mock.patch.object(self.conn._session, '_wait_for_task',
new=fake_wait_for_task)):
self.assertRaises(CopyError, self._create_vm)
def test_spawn_disk_extend_failed_partial_copy(self):
# Spawn instance
# Copy for extend fails, leaving a file behind
#
# Expect the file to be cleaned up
# Expect the copy error to be raised
self.flags(use_linked_clone=True, group='vmware')
self.wait_task = self.conn._session._wait_for_task
self.call_method = self.conn._session._call_method
self.task_ref = None
uuid = self.fake_image_uuid
cached_image = '[%s] vmware_base/%s/%s.80.vmdk' % (self.ds,
uuid, uuid)
CopyError = error_util.FileFaultException
def fake_wait_for_task(task_ref):
if task_ref == self.task_ref:
self.task_ref = None
self.assertTrue(vmwareapi_fake.get_file(cached_image))
# N.B. We don't test for -flat here because real
# CopyVirtualDisk_Task doesn't actually create it
raise CopyError('Copy failed!')
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "CopyVirtualDisk_Task":
self.task_ref = task_ref
return task_ref
with contextlib.nested(
mock.patch.object(self.conn._session, '_call_method',
new=fake_call_method),
mock.patch.object(self.conn._session, '_wait_for_task',
new=fake_wait_for_task)):
self.assertRaises(CopyError, self._create_vm)
self.assertFalse(vmwareapi_fake.get_file(cached_image))
def test_spawn_disk_extend_failed_partial_copy_failed_cleanup(self):
# Spawn instance
# Copy for extend fails, leaves file behind
# File cleanup fails
#
# Expect file to be left behind
# Expect file cleanup error to be raised
self.flags(use_linked_clone=True, group='vmware')
self.wait_task = self.conn._session._wait_for_task
self.call_method = self.conn._session._call_method
self.task_ref = None
uuid = self.fake_image_uuid
cached_image = '[%s] vmware_base/%s/%s.80.vmdk' % (self.ds,
uuid, uuid)
CopyError = error_util.FileFaultException
DeleteError = error_util.CannotDeleteFileException
def fake_wait_for_task(task_ref):
if task_ref == self.task_ref:
self.task_ref = None
self.assertTrue(vmwareapi_fake.get_file(cached_image))
# N.B. We don't test for -flat here because real
# CopyVirtualDisk_Task doesn't actually create it
raise CopyError('Copy failed!')
elif task_ref == 'fake-delete-task':
raise DeleteError('Delete failed!')
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
if method == "DeleteDatastoreFile_Task":
return 'fake-delete-task'
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "CopyVirtualDisk_Task":
self.task_ref = task_ref
return task_ref
with contextlib.nested(
mock.patch.object(self.conn._session, '_wait_for_task',
new=fake_wait_for_task),
mock.patch.object(self.conn._session, '_call_method',
new=fake_call_method)):
self.assertRaises(DeleteError, self._create_vm)
self.assertTrue(vmwareapi_fake.get_file(cached_image))
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_spawn_disk_invalid_disk_size(self, mock_from_image):
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=82 * units.Gi,
disk_type=constants.DISK_TYPE_SPARSE,
linked_clone=True)
mock_from_image.return_value = img_props
self.assertRaises(exception.InstanceUnacceptable,
self._create_vm)
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
def test_spawn_disk_extend_insufficient_disk_space(self, mock_from_image):
img_props = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=1024,
disk_type=constants.DISK_TYPE_SPARSE,
linked_clone=True)
mock_from_image.return_value = img_props
cached_image = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.80.vmdk' %
self.fake_image_uuid)
tmp_file = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
'%s.80-flat.vmdk' %
self.fake_image_uuid)
NoDiskSpace = error_util.get_fault_class('NoDiskSpace')
def fake_wait_for_task(task_ref):
if task_ref == self.task_ref:
self.task_ref = None
raise NoDiskSpace()
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == 'ExtendVirtualDisk_Task':
self.task_ref = task_ref
return task_ref
with contextlib.nested(
mock.patch.object(self.conn._session, '_wait_for_task',
fake_wait_for_task),
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
) as (mock_wait_for_task, mock_call_method):
self.assertRaises(NoDiskSpace, self._create_vm)
self.assertFalse(vmwareapi_fake.get_file(str(cached_image)))
self.assertFalse(vmwareapi_fake.get_file(str(tmp_file)))
def test_spawn_with_move_file_exists_exception(self):
# The test will validate that the spawn completes
# successfully. The "MoveDatastoreFile_Task" will
        # raise a file-exists exception. The flag
# self.exception will be checked to see that
# the exception has indeed been raised.
def fake_wait_for_task(task_ref):
if task_ref == self.task_ref:
self.task_ref = None
self.exception = True
raise error_util.FileAlreadyExistsException()
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "MoveDatastoreFile_Task":
self.task_ref = task_ref
return task_ref
with contextlib.nested(
mock.patch.object(self.conn._session, '_wait_for_task',
fake_wait_for_task),
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
) as (_wait_for_task, _call_method):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.assertTrue(self.exception)
def test_spawn_with_move_general_exception(self):
        # The test will validate that the spawn fails when
        # "MoveDatastoreFile_Task" raises a general exception.
        # The flag self.exception will be checked to see that
        # the exception has indeed been raised.
def fake_wait_for_task(task_ref):
if task_ref == self.task_ref:
self.task_ref = None
self.exception = True
raise error_util.VMwareDriverException('Exception!')
return self.wait_task(task_ref)
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "MoveDatastoreFile_Task":
self.task_ref = task_ref
return task_ref
with contextlib.nested(
mock.patch.object(self.conn._session, '_wait_for_task',
fake_wait_for_task),
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
) as (_wait_for_task, _call_method):
self.assertRaises(error_util.VMwareDriverException,
self._create_vm)
self.assertTrue(self.exception)
def test_spawn_with_move_poll_exception(self):
self.call_method = self.conn._session._call_method
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "MoveDatastoreFile_Task":
task_mdo = vmwareapi_fake.create_task(method, "error")
return task_mdo.obj
return task_ref
with (
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
):
self.assertRaises(error_util.VMwareDriverException,
self._create_vm)
def test_spawn_with_move_file_exists_poll_exception(self):
# The test will validate that the spawn completes
# successfully. The "MoveDatastoreFile_Task" will
# raise a file exists exception. The flag self.exception
# will be checked to see that the exception has
# indeed been raised.
def fake_call_method(module, method, *args, **kwargs):
task_ref = self.call_method(module, method, *args, **kwargs)
if method == "MoveDatastoreFile_Task":
self.exception = True
task_mdo = vmwareapi_fake.create_task(method, "error",
error_fault=vmwareapi_fake.FileAlreadyExists())
return task_mdo.obj
return task_ref
with (
mock.patch.object(self.conn._session, '_call_method',
fake_call_method)
):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.assertTrue(self.exception)
def _spawn_attach_volume_vmdk(self, set_image_ref=True, vc_support=False):
self._create_instance(set_image_ref=set_image_ref)
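        # The root disk is supplied as a vmdk volume via block_device_info;
        # volumeops is stubbed out so the test only checks that the volume is
        # (optionally) relocated and then attached during spawn.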
self.mox.StubOutWithMock(block_device, 'volume_in_mapping')
self.mox.StubOutWithMock(v_driver, 'block_device_info_get_mapping')
connection_info = self._test_vmdk_connection_info('vmdk')
root_disk = [{'connection_info': connection_info}]
v_driver.block_device_info_get_mapping(
mox.IgnoreArg()).AndReturn(root_disk)
if vc_support:
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_get_res_pool_of_vm')
volumeops.VMwareVolumeOps._get_res_pool_of_vm(
mox.IgnoreArg()).AndReturn('fake_res_pool')
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_relocate_vmdk_volume')
volumeops.VMwareVolumeOps._relocate_vmdk_volume(mox.IgnoreArg(),
'fake_res_pool', mox.IgnoreArg())
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'attach_volume')
volumeops.VMwareVolumeOps.attach_volume(connection_info,
self.instance, mox.IgnoreArg())
self.mox.ReplayAll()
block_device_info = {'mount_device': 'vda'}
self.conn.spawn(self.context, self.instance, self.image,
injected_files=[], admin_password=None,
network_info=self.network_info,
block_device_info=block_device_info)
def test_spawn_attach_volume_iscsi(self):
self._create_instance()
self.mox.StubOutWithMock(block_device, 'volume_in_mapping')
self.mox.StubOutWithMock(v_driver, 'block_device_info_get_mapping')
connection_info = self._test_vmdk_connection_info('iscsi')
root_disk = [{'connection_info': connection_info}]
v_driver.block_device_info_get_mapping(
mox.IgnoreArg()).AndReturn(root_disk)
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'attach_volume')
volumeops.VMwareVolumeOps.attach_volume(connection_info,
self.instance, mox.IgnoreArg())
self.mox.ReplayAll()
block_device_info = {'mount_device': 'vda'}
self.conn.spawn(self.context, self.instance, self.image,
injected_files=[], admin_password=None,
network_info=self.network_info,
block_device_info=block_device_info)
def mock_upload_image(self, context, image, instance, **kwargs):
self.assertEqual(image, 'Test-Snapshot')
self.assertEqual(instance, self.instance)
self.assertEqual(kwargs['disk_type'], 'preallocated')
def test_get_vm_ref_using_extra_config(self):
self._create_vm()
vm_ref = vm_util._get_vm_ref_from_extraconfig(self.conn._session,
self.instance['uuid'])
self.assertIsNotNone(vm_ref, 'VM Reference cannot be none')
# Disrupt the fake Virtual Machine object so that extraConfig
# cannot be matched.
fake_vm = self._get_vm_record()
fake_vm.get('config.extraConfig["nvp.vm-uuid"]').value = ""
# We should not get a Virtual Machine through extraConfig.
vm_ref = vm_util._get_vm_ref_from_extraconfig(self.conn._session,
self.instance['uuid'])
self.assertIsNone(vm_ref, 'VM Reference should be none')
# Check if we can find the Virtual Machine using the name.
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
self.assertIsNotNone(vm_ref, 'VM Reference cannot be none')
def test_search_vm_ref_by_identifier(self):
self._create_vm()
vm_ref = vm_util.search_vm_ref_by_identifier(self.conn._session,
self.instance['uuid'])
self.assertIsNotNone(vm_ref, 'VM Reference cannot be none')
fake_vm = self._get_vm_record()
fake_vm.set("summary.config.instanceUuid", "foo")
fake_vm.set("name", "foo")
fake_vm.get('config.extraConfig["nvp.vm-uuid"]').value = "foo"
self.assertIsNone(vm_util.search_vm_ref_by_identifier(
self.conn._session, self.instance['uuid']),
"VM Reference should be none")
self.assertIsNotNone(
vm_util.search_vm_ref_by_identifier(self.conn._session, "foo"),
"VM Reference should not be none")
def test_get_object_for_optionvalue(self):
self._create_vm()
vms = self.conn._session._call_method(vim_util, "get_objects",
"VirtualMachine", ['config.extraConfig["nvp.vm-uuid"]'])
vm_ref = vm_util._get_object_for_optionvalue(vms,
self.instance["uuid"])
self.assertIsNotNone(vm_ref, 'VM Reference cannot be none')
def _test_snapshot(self):
expected_calls = [
{'args': (),
'kwargs':
{'task_state': task_states.IMAGE_PENDING_UPLOAD}},
{'args': (),
'kwargs':
{'task_state': task_states.IMAGE_UPLOADING,
'expected_state': task_states.IMAGE_PENDING_UPLOAD}}]
func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
with mock.patch.object(vmware_images, 'upload_image',
self.mock_upload_image):
self.conn.snapshot(self.context, self.instance, "Test-Snapshot",
func_call_matcher.call)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.assertIsNone(func_call_matcher.match())
def test_snapshot(self):
self._create_vm()
self._test_snapshot()
def test_snapshot_no_root_disk(self):
self._iso_disk_type_created(instance_type='m1.micro')
self.assertRaises(error_util.NoRootDiskDefined, self.conn.snapshot,
self.context, self.instance, "Test-Snapshot",
lambda *args, **kwargs: None)
def test_snapshot_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.snapshot,
self.context, self.instance, "Test-Snapshot",
lambda *args, **kwargs: None)
def test_snapshot_delete_vm_snapshot(self):
self._create_vm()
fake_vm = self._get_vm_record()
snapshot_ref = vmwareapi_fake.ManagedObjectReference(
value="Snapshot-123",
name="VirtualMachineSnapshot")
self.mox.StubOutWithMock(vmops.VMwareVMOps,
'_create_vm_snapshot')
self.conn._vmops._create_vm_snapshot(
self.instance, fake_vm.obj).AndReturn(snapshot_ref)
self.mox.StubOutWithMock(vmops.VMwareVMOps,
'_delete_vm_snapshot')
self.conn._vmops._delete_vm_snapshot(
self.instance, fake_vm.obj, snapshot_ref).AndReturn(None)
self.mox.ReplayAll()
self._test_snapshot()
def _snapshot_delete_vm_snapshot_exception(self, exception, call_count=1):
self._create_vm()
fake_vm = vmwareapi_fake._get_objects("VirtualMachine").objects[0].obj
snapshot_ref = vmwareapi_fake.ManagedObjectReference(
value="Snapshot-123",
name="VirtualMachineSnapshot")
with contextlib.nested(
mock.patch.object(self.conn._session, '_wait_for_task',
side_effect=exception),
mock.patch.object(time, 'sleep')
) as (_fake_wait, _fake_sleep):
if exception != error_util.TaskInProgress:
self.assertRaises(exception,
self.conn._vmops._delete_vm_snapshot,
self.instance, fake_vm, snapshot_ref)
self.assertEqual(0, _fake_sleep.call_count)
else:
self.conn._vmops._delete_vm_snapshot(self.instance, fake_vm,
snapshot_ref)
self.assertEqual(call_count - 1, _fake_sleep.call_count)
self.assertEqual(call_count, _fake_wait.call_count)
def test_snapshot_delete_vm_snapshot_exception(self):
self._snapshot_delete_vm_snapshot_exception(exception.NovaException)
def test_snapshot_delete_vm_snapshot_exception_retry(self):
self.flags(api_retry_count=5, group='vmware')
self._snapshot_delete_vm_snapshot_exception(error_util.TaskInProgress,
5)
def test_reboot(self):
self._create_vm()
info = self.conn.get_info({'name': 1, 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
reboot_type = "SOFT"
self.conn.reboot(self.context, self.instance, self.network_info,
reboot_type)
info = self.conn.get_info({'name': 1, 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_reboot_with_uuid(self):
"""Test fall back to use name when can't find by uuid."""
self._create_vm()
info = self.conn.get_info({'name': 'fake-name', 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
reboot_type = "SOFT"
self.conn.reboot(self.context, self.instance, self.network_info,
reboot_type)
info = self.conn.get_info({'name': 'fake-name', 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_reboot_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.reboot,
self.context, self.instance, self.network_info,
'SOFT')
def test_poll_rebooting_instances(self):
self.mox.StubOutWithMock(compute_api.API, 'reboot')
compute_api.API.reboot(mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg())
self.mox.ReplayAll()
self._create_vm()
instances = [self.instance]
self.conn.poll_rebooting_instances(60, instances)
def test_reboot_not_poweredon(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SUSPENDED)
self.assertRaises(exception.InstanceRebootFailure, self.conn.reboot,
self.context, self.instance, self.network_info,
'SOFT')
def test_suspend(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SUSPENDED)
def test_suspend_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.suspend,
self.instance)
def test_resume(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SUSPENDED)
self.conn.resume(self.context, self.instance, self.network_info)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_resume_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.resume,
self.context, self.instance, self.network_info)
def test_resume_not_suspended(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.assertRaises(exception.InstanceResumeFailure, self.conn.resume,
self.context, self.instance, self.network_info)
def test_power_on(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.power_off(self.instance)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SHUTDOWN)
self.conn.power_on(self.context, self.instance, self.network_info)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_power_on_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.power_on,
self.context, self.instance, self.network_info)
def test_power_off(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.power_off(self.instance)
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SHUTDOWN)
def test_power_off_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.power_off,
self.instance)
def test_resume_state_on_host_boot(self):
self._create_vm()
self.mox.StubOutWithMock(vm_util, 'get_vm_state_from_name')
self.mox.StubOutWithMock(self.conn, "reboot")
vm_util.get_vm_state_from_name(mox.IgnoreArg(),
self.instance['uuid']).AndReturn("poweredOff")
self.conn.reboot(self.context, self.instance, 'network_info',
'hard', None)
self.mox.ReplayAll()
self.conn.resume_state_on_host_boot(self.context, self.instance,
'network_info')
def test_resume_state_on_host_boot_no_reboot_1(self):
"""Don't call reboot on instance which is poweredon."""
self._create_vm()
self.mox.StubOutWithMock(vm_util, 'get_vm_state_from_name')
self.mox.StubOutWithMock(self.conn, 'reboot')
vm_util.get_vm_state_from_name(mox.IgnoreArg(),
self.instance['uuid']).AndReturn("poweredOn")
self.mox.ReplayAll()
self.conn.resume_state_on_host_boot(self.context, self.instance,
'network_info')
def test_resume_state_on_host_boot_no_reboot_2(self):
"""Don't call reboot on instance which is suspended."""
self._create_vm()
self.mox.StubOutWithMock(vm_util, 'get_vm_state_from_name')
self.mox.StubOutWithMock(self.conn, 'reboot')
vm_util.get_vm_state_from_name(mox.IgnoreArg(),
self.instance['uuid']).AndReturn("suspended")
self.mox.ReplayAll()
self.conn.resume_state_on_host_boot(self.context, self.instance,
'network_info')
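    # Helper: rescue the instance, then destroy it while it is in the RESCUED
    # vm_state and verify that both the original and the -rescue disks are
    # removed and that the destroy/unrescue path does not power the VM back on.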
def destroy_rescued(self, fake_method):
self._rescue()
with contextlib.nested(
mock.patch.object(self.conn._volumeops, "detach_disk_from_vm",
fake_method),
mock.patch.object(vm_util, "power_on_instance"),
) as (fake_detach, fake_power_on):
self.instance['vm_state'] = vm_states.RESCUED
self.conn.destroy(self.context, self.instance, self.network_info)
inst_path = ds_util.DatastorePath(self.ds, self.uuid,
'%s.vmdk' % self.uuid)
self.assertFalse(vmwareapi_fake.get_file(str(inst_path)))
rescue_file_path = ds_util.DatastorePath(
self.ds, '%s-rescue' % self.uuid, '%s-rescue.vmdk' % self.uuid)
self.assertFalse(vmwareapi_fake.get_file(str(rescue_file_path)))
# Unrescue does not power on with destroy
self.assertFalse(fake_power_on.called)
def test_destroy_rescued(self):
def fake_detach_disk_from_vm(*args, **kwargs):
pass
self.destroy_rescued(fake_detach_disk_from_vm)
def test_destroy_rescued_with_exception(self):
def fake_detach_disk_from_vm(*args, **kwargs):
raise exception.NovaException('Here is my fake exception')
self.destroy_rescued(fake_detach_disk_from_vm)
def test_destroy(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
instances = self.conn.list_instances()
self.assertEqual(len(instances), 1)
self.conn.destroy(self.context, self.instance, self.network_info)
instances = self.conn.list_instances()
self.assertEqual(len(instances), 0)
self.assertIsNone(vm_util.vm_ref_cache_get(self.uuid))
def test_destroy_no_datastore(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
instances = self.conn.list_instances()
self.assertEqual(len(instances), 1)
# Overwrite the vmPathName
vm = self._get_vm_record()
vm.set("config.files.vmPathName", None)
self.conn.destroy(self.context, self.instance, self.network_info)
instances = self.conn.list_instances()
self.assertEqual(len(instances), 0)
def test_destroy_non_existent(self):
self.destroy_disks = True
with mock.patch.object(self.conn._vmops,
"destroy") as mock_destroy:
self._create_instance()
self.conn.destroy(self.context, self.instance,
self.network_info,
None, self.destroy_disks)
mock_destroy.assert_called_once_with(self.instance,
self.destroy_disks)
def test_destroy_instance_without_compute(self):
self.destroy_disks = True
with mock.patch.object(self.conn._vmops,
"destroy") as mock_destroy:
self.conn.destroy(self.context, self.instance_without_compute,
self.network_info,
None, self.destroy_disks)
self.assertFalse(mock_destroy.called)
def test_destroy_instance_without_vm_ref(self):
self._create_instance()
with contextlib.nested(
mock.patch.object(vm_util, 'get_vm_ref_from_name',
return_value=None),
mock.patch.object(self.conn._session,
'_call_method')
) as (mock_get, mock_call):
self.conn.destroy(self.context, self.instance,
self.network_info,
None, True)
mock_get.assert_called_once_with(self.conn._vmops._session,
self.instance['uuid'])
self.assertFalse(mock_call.called)
def _rescue(self, config_drive=False):
# validate that the power on is only called once
self._power_on = vm_util.power_on_instance
self._power_on_called = 0
def fake_attach_disk_to_vm(vm_ref, instance,
adapter_type, disk_type, vmdk_path=None,
disk_size=None, linked_clone=False,
controller_key=None, unit_number=None,
device_name=None):
info = self.conn.get_info(instance)
self._check_vm_info(info, power_state.SHUTDOWN)
if config_drive:
def fake_create_config_drive(instance, injected_files, password,
data_store_name, folder,
instance_uuid, cookies):
self.assertTrue(uuidutils.is_uuid_like(instance['uuid']))
return str(ds_util.DatastorePath(data_store_name,
instance_uuid, 'fake.iso'))
self.stubs.Set(self.conn._vmops, '_create_config_drive',
fake_create_config_drive)
self._create_vm()
def fake_power_on_instance(session, instance, vm_ref=None):
self._power_on_called += 1
return self._power_on(session, instance, vm_ref=vm_ref)
info = self.conn.get_info({'name': 1, 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.stubs.Set(vm_util, "power_on_instance",
fake_power_on_instance)
self.stubs.Set(self.conn._volumeops, "attach_disk_to_vm",
fake_attach_disk_to_vm)
def _fake_http_write(host, data_center_name, datastore_name,
cookies, file_path, file_size, scheme="https"):
self.vim.fake_transfer_file(ds_name=datastore_name,
file_path=file_path)
with contextlib.nested(
mock.patch.object(read_write_util, 'VMwareHTTPWriteFile',
_fake_http_write),
mock.patch.object(read_write_util, 'GlanceFileRead'),
mock.patch.object(vmware_images, 'start_transfer')
) as (http_write, glance_read, fake_start_transfer):
self.conn.rescue(self.context, self.instance, self.network_info,
self.image, 'fake-password')
info = self.conn.get_info({'name': '1-rescue',
'uuid': '%s-rescue' % self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
info = self.conn.get_info({'name': 1, 'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.SHUTDOWN)
self.assertIsNotNone(vm_util.vm_ref_cache_get('%s-rescue' % self.uuid))
self.assertEqual(1, self._power_on_called)
def test_rescue(self):
self._rescue()
inst_file_path = ds_util.DatastorePath(self.ds, self.uuid,
'%s.vmdk' % self.uuid)
self.assertTrue(vmwareapi_fake.get_file(str(inst_file_path)))
rescue_file_path = ds_util.DatastorePath(self.ds,
'%s-rescue' % self.uuid,
'%s-rescue.vmdk' % self.uuid)
self.assertTrue(vmwareapi_fake.get_file(str(rescue_file_path)))
def test_rescue_with_config_drive(self):
self.flags(force_config_drive=True)
self._rescue(config_drive=True)
def test_unrescue(self):
# NOTE(dims): driver unrescue ends up eventually in vmops.unrescue
# with power_on=True, the test_destroy_rescued tests the
# vmops.unrescue with power_on=False
self._rescue()
vm_ref = vm_util.get_vm_ref(self.conn._session,
self.instance)
vm_rescue_ref = vm_util.get_vm_ref_from_name(self.conn._session,
'%s-rescue' % self.uuid)
self.poweroff_instance = vm_util.power_off_instance
def fake_power_off_instance(session, instance, vm_ref):
# This is called so that we actually poweroff the simulated vm.
# The reason for this is that there is a validation in destroy
# that the instance is not powered on.
self.poweroff_instance(session, instance, vm_ref)
def fake_detach_disk_from_vm(vm_ref, instance,
device_name, destroy_disk=False):
self.test_device_name = device_name
info = self.conn.get_info(instance)
self._check_vm_info(info, power_state.SHUTDOWN)
with contextlib.nested(
mock.patch.object(vm_util, "power_off_instance",
side_effect=fake_power_off_instance),
mock.patch.object(self.conn._volumeops, "detach_disk_from_vm",
side_effect=fake_detach_disk_from_vm),
mock.patch.object(vm_util, "power_on_instance"),
) as (poweroff, detach, fake_power_on):
self.conn.unrescue(self.instance, None)
poweroff.assert_called_once_with(self.conn._session, mock.ANY,
vm_rescue_ref)
detach.assert_called_once_with(vm_rescue_ref, mock.ANY,
self.test_device_name)
fake_power_on.assert_called_once_with(self.conn._session,
self.instance,
vm_ref=vm_ref)
self.test_vm_ref = None
self.test_device_name = None
def test_get_diagnostics(self):
self._create_vm()
expected = {'memoryReservation': 0, 'suspendInterval': 0,
'maxCpuUsage': 2000, 'toolsInstallerMounted': False,
'consumedOverheadMemory': 20, 'numEthernetCards': 1,
'numCpu': 1, 'featureRequirement': [{'key': 'cpuid.AES'}],
'memoryOverhead': 21417984,
'guestMemoryUsage': 0, 'connectionState': 'connected',
'memorySizeMB': 512, 'balloonedMemory': 0,
'vmPathName': 'fake_path', 'template': False,
'overallCpuUsage': 0, 'powerState': 'poweredOn',
'cpuReservation': 0, 'overallCpuDemand': 0,
'numVirtualDisks': 1, 'hostMemoryUsage': 141}
expected = dict([('vmware:' + k, v) for k, v in expected.items()])
self.assertThat(
self.conn.get_diagnostics({'name': 1, 'uuid': self.uuid,
'node': self.instance_node}),
matchers.DictMatches(expected))
def test_get_instance_diagnostics(self):
self._create_vm()
expected = {'uptime': 0,
'memory_details': {'used': 0, 'maximum': 512},
'nic_details': [],
'driver': 'vmwareapi',
'state': 'running',
'version': '1.0',
'cpu_details': [],
'disk_details': [],
'hypervisor_os': 'esxi',
'config_drive': False}
actual = self.conn.get_instance_diagnostics(
{'name': 1, 'uuid': self.uuid, 'node': self.instance_node})
self.assertThat(actual.serialize(), matchers.DictMatches(expected))
def test_get_console_output(self):
self.assertRaises(NotImplementedError, self.conn.get_console_output,
None, None)
def _test_finish_migration(self, power_on, resize_instance=False):
self._create_vm()
self.conn.finish_migration(context=self.context,
migration=None,
instance=self.instance,
disk_info=None,
network_info=None,
block_device_info=None,
resize_instance=resize_instance,
image_meta=None,
power_on=power_on)
def _test_finish_revert_migration(self, power_on):
self._create_vm()
# Ensure ESX driver throws an error
self.assertRaises(NotImplementedError,
self.conn.finish_revert_migration,
self.context,
instance=self.instance,
network_info=None)
def test_get_vnc_console_non_existent(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound,
self.conn.get_vnc_console,
self.context,
self.instance)
def _test_get_vnc_console(self):
self._create_vm()
fake_vm = self._get_vm_record()
OptionValue = collections.namedtuple('OptionValue', ['key', 'value'])
opt_val = OptionValue(key='', value=5906)
fake_vm.set(vm_util.VNC_CONFIG_KEY, opt_val)
vnc_dict = self.conn.get_vnc_console(self.context, self.instance)
self.assertEqual(vnc_dict['host'], self.vnc_host)
self.assertEqual(vnc_dict['port'], 5906)
def test_get_vnc_console(self):
self._test_get_vnc_console()
def test_get_vnc_console_noport(self):
self._create_vm()
self.assertRaises(exception.ConsoleTypeUnavailable,
self.conn.get_vnc_console,
self.context,
self.instance)
def test_get_volume_connector(self):
self._create_vm()
connector_dict = self.conn.get_volume_connector(self.instance)
fake_vm = self._get_vm_record()
fake_vm_id = fake_vm.obj.value
self.assertEqual(connector_dict['ip'], 'test_url')
self.assertEqual(connector_dict['initiator'], 'iscsi-name')
self.assertEqual(connector_dict['host'], 'test_url')
self.assertEqual(connector_dict['instance'], fake_vm_id)
def _test_vmdk_connection_info(self, type):
return {'driver_volume_type': type,
'serial': 'volume-fake-id',
'data': {'volume': 'vm-10',
'volume_id': 'volume-fake-id'}}
def test_volume_attach_vmdk(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('vmdk')
mount_point = '/dev/vdc'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_attach_volume_vmdk')
volumeops.VMwareVolumeOps._attach_volume_vmdk(connection_info,
self.instance, mount_point)
self.mox.ReplayAll()
self.conn.attach_volume(None, connection_info, self.instance,
mount_point)
def test_volume_detach_vmdk(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('vmdk')
mount_point = '/dev/vdc'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_detach_volume_vmdk')
volumeops.VMwareVolumeOps._detach_volume_vmdk(connection_info,
self.instance, mount_point)
self.mox.ReplayAll()
self.conn.detach_volume(connection_info, self.instance, mount_point,
encryption=None)
def test_attach_vmdk_disk_to_vm(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('vmdk')
mount_point = '/dev/vdc'
# create fake backing info
volume_device = vmwareapi_fake.DataObject()
volume_device.backing = vmwareapi_fake.DataObject()
volume_device.backing.fileName = 'fake_path'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_get_vmdk_base_volume_device')
volumeops.VMwareVolumeOps._get_vmdk_base_volume_device(
mox.IgnoreArg()).AndReturn(volume_device)
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'attach_disk_to_vm')
volumeops.VMwareVolumeOps.attach_disk_to_vm(mox.IgnoreArg(),
self.instance, mox.IgnoreArg(), mox.IgnoreArg(),
vmdk_path='fake_path')
self.mox.ReplayAll()
self.conn.attach_volume(None, connection_info, self.instance,
mount_point)
def test_detach_vmdk_disk_from_vm(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('vmdk')
mount_point = '/dev/vdc'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_get_volume_uuid')
volumeops.VMwareVolumeOps._get_volume_uuid(mox.IgnoreArg(),
'volume-fake-id').AndReturn('fake_disk_uuid')
self.mox.StubOutWithMock(vm_util, 'get_vmdk_backed_disk_device')
vm_util.get_vmdk_backed_disk_device(mox.IgnoreArg(),
'fake_disk_uuid').AndReturn('fake_device')
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_consolidate_vmdk_volume')
volumeops.VMwareVolumeOps._consolidate_vmdk_volume(self.instance,
mox.IgnoreArg(), 'fake_device', mox.IgnoreArg())
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'detach_disk_from_vm')
volumeops.VMwareVolumeOps.detach_disk_from_vm(mox.IgnoreArg(),
self.instance, mox.IgnoreArg())
self.mox.ReplayAll()
self.conn.detach_volume(connection_info, self.instance, mount_point,
encryption=None)
def test_volume_attach_iscsi(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('iscsi')
mount_point = '/dev/vdc'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_attach_volume_iscsi')
volumeops.VMwareVolumeOps._attach_volume_iscsi(connection_info,
self.instance, mount_point)
self.mox.ReplayAll()
self.conn.attach_volume(None, connection_info, self.instance,
mount_point)
def test_volume_detach_iscsi(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('iscsi')
mount_point = '/dev/vdc'
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_detach_volume_iscsi')
volumeops.VMwareVolumeOps._detach_volume_iscsi(connection_info,
self.instance, mount_point)
self.mox.ReplayAll()
self.conn.detach_volume(connection_info, self.instance, mount_point,
encryption=None)
def test_attach_iscsi_disk_to_vm(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('iscsi')
connection_info['data']['target_portal'] = 'fake_target_host:port'
connection_info['data']['target_iqn'] = 'fake_target_iqn'
mount_point = '/dev/vdc'
discover = ('fake_name', 'fake_uuid')
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_iscsi_get_target')
# simulate target not found
volumeops.VMwareVolumeOps._iscsi_get_target(
connection_info['data']).AndReturn((None, None))
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_iscsi_add_send_target_host')
# rescan gets called with target portal
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_iscsi_rescan_hba')
volumeops.VMwareVolumeOps._iscsi_rescan_hba(
connection_info['data']['target_portal'])
# simulate target found
volumeops.VMwareVolumeOps._iscsi_get_target(
connection_info['data']).AndReturn(discover)
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'attach_disk_to_vm')
volumeops.VMwareVolumeOps.attach_disk_to_vm(mox.IgnoreArg(),
self.instance, mox.IgnoreArg(), 'rdmp',
device_name=mox.IgnoreArg())
self.mox.ReplayAll()
self.conn.attach_volume(None, connection_info, self.instance,
mount_point)
def test_iscsi_rescan_hba(self):
fake_target_portal = 'fake_target_host:port'
host_storage_sys = vmwareapi_fake._get_objects(
"HostStorageSystem").objects[0]
iscsi_hba_array = host_storage_sys.get('storageDeviceInfo'
'.hostBusAdapter')
iscsi_hba = iscsi_hba_array.HostHostBusAdapter[0]
# Check the host system does not have the send target
self.assertRaises(AttributeError, getattr, iscsi_hba,
'configuredSendTarget')
# Rescan HBA with the target portal
vops = volumeops.VMwareVolumeOps(self.conn._session)
vops._iscsi_rescan_hba(fake_target_portal)
# Check if HBA has the target portal configured
self.assertEqual('fake_target_host',
iscsi_hba.configuredSendTarget[0].address)
# Rescan HBA with same portal
vops._iscsi_rescan_hba(fake_target_portal)
self.assertEqual(1, len(iscsi_hba.configuredSendTarget))
def test_iscsi_get_target(self):
data = {'target_portal': 'fake_target_host:port',
'target_iqn': 'fake_target_iqn'}
host = vmwareapi_fake._get_objects('HostSystem').objects[0]
host._add_iscsi_target(data)
vops = volumeops.VMwareVolumeOps(self.conn._session)
result = vops._iscsi_get_target(data)
self.assertEqual(('fake-device', 'fake-uuid'), result)
def test_detach_iscsi_disk_from_vm(self):
self._create_vm()
connection_info = self._test_vmdk_connection_info('iscsi')
connection_info['data']['target_portal'] = 'fake_target_portal'
connection_info['data']['target_iqn'] = 'fake_target_iqn'
mount_point = '/dev/vdc'
find = ('fake_name', 'fake_uuid')
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'_iscsi_get_target')
volumeops.VMwareVolumeOps._iscsi_get_target(
connection_info['data']).AndReturn(find)
self.mox.StubOutWithMock(vm_util, 'get_rdm_disk')
device = 'fake_device'
vm_util.get_rdm_disk(mox.IgnoreArg(), 'fake_uuid').AndReturn(device)
self.mox.StubOutWithMock(volumeops.VMwareVolumeOps,
'detach_disk_from_vm')
volumeops.VMwareVolumeOps.detach_disk_from_vm(mox.IgnoreArg(),
self.instance, device, destroy_disk=True)
self.mox.ReplayAll()
self.conn.detach_volume(connection_info, self.instance, mount_point,
encryption=None)
def test_connection_info_get(self):
self._create_vm()
connector = self.conn.get_volume_connector(self.instance)
self.assertEqual(connector['ip'], 'test_url')
self.assertEqual(connector['host'], 'test_url')
self.assertEqual(connector['initiator'], 'iscsi-name')
self.assertIn('instance', connector)
def test_connection_info_get_after_destroy(self):
self._create_vm()
self.conn.destroy(self.context, self.instance, self.network_info)
connector = self.conn.get_volume_connector(self.instance)
self.assertEqual(connector['ip'], 'test_url')
self.assertEqual(connector['host'], 'test_url')
self.assertEqual(connector['initiator'], 'iscsi-name')
self.assertNotIn('instance', connector)
def test_refresh_instance_security_rules(self):
self.assertRaises(NotImplementedError,
self.conn.refresh_instance_security_rules,
instance=None)
def test_image_aging_image_used(self):
self._create_vm()
all_instances = [self.instance]
self.conn.manage_image_cache(self.context, all_instances)
self._cached_files_exist()
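    # The image-cache aging tests below pin the cache timestamp file to a
    # fixed datetime (self.old_time) so that its name -- and therefore its
    # apparent age -- is deterministic.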
def _get_timestamp_filename(self):
return '%s%s' % (imagecache.TIMESTAMP_PREFIX,
timeutils.strtime(at=self.old_time,
fmt=imagecache.TIMESTAMP_FORMAT))
def _override_time(self):
self.old_time = datetime.datetime(2012, 11, 22, 12, 00, 00)
def _fake_get_timestamp_filename(fake):
return self._get_timestamp_filename()
self.stubs.Set(imagecache.ImageCacheManager, '_get_timestamp_filename',
_fake_get_timestamp_filename)
def _timestamp_file_exists(self, exists=True):
timestamp = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid,
self._get_timestamp_filename() + '/')
if exists:
self.assertTrue(vmwareapi_fake.get_file(str(timestamp)))
else:
self.assertFalse(vmwareapi_fake.get_file(str(timestamp)))
def _image_aging_image_marked_for_deletion(self):
self._create_vm(uuid=uuidutils.generate_uuid())
self._cached_files_exist()
all_instances = []
self.conn.manage_image_cache(self.context, all_instances)
self._cached_files_exist()
self._timestamp_file_exists()
def test_image_aging_image_marked_for_deletion(self):
self._override_time()
self._image_aging_image_marked_for_deletion()
def _timestamp_file_removed(self):
self._override_time()
self._image_aging_image_marked_for_deletion()
self._create_vm(num_instances=2,
uuid=uuidutils.generate_uuid())
self._timestamp_file_exists(exists=False)
def test_timestamp_file_removed_spawn(self):
self._timestamp_file_removed()
def test_timestamp_file_removed_aging(self):
self._timestamp_file_removed()
ts = self._get_timestamp_filename()
ts_path = ds_util.DatastorePath(self.ds, 'vmware_base',
self.fake_image_uuid, ts + '/')
vmwareapi_fake._add_file(str(ts_path))
self._timestamp_file_exists()
all_instances = [self.instance]
self.conn.manage_image_cache(self.context, all_instances)
self._timestamp_file_exists(exists=False)
def test_image_aging_disabled(self):
self._override_time()
self.flags(remove_unused_base_images=False)
self._create_vm()
self._cached_files_exist()
all_instances = []
self.conn.manage_image_cache(self.context, all_instances)
self._cached_files_exist(exists=True)
self._timestamp_file_exists(exists=False)
def _image_aging_aged(self, aging_time=100):
self._override_time()
cur_time = datetime.datetime(2012, 11, 22, 12, 00, 10)
self.flags(remove_unused_original_minimum_age_seconds=aging_time)
self._image_aging_image_marked_for_deletion()
all_instances = []
timeutils.set_time_override(cur_time)
self.conn.manage_image_cache(self.context, all_instances)
def test_image_aging_aged(self):
self._image_aging_aged(aging_time=8)
self._cached_files_exist(exists=False)
def test_image_aging_not_aged(self):
self._image_aging_aged()
self._cached_files_exist()
class VMwareAPIVCDriverTestCase(VMwareAPIVMTestCase,
test_driver.DriverAPITestHelper):
def setUp(self):
super(VMwareAPIVCDriverTestCase, self).setUp()
cluster_name = 'test_cluster'
cluster_name2 = 'test_cluster2'
self.flags(cluster_name=[cluster_name, cluster_name2],
api_retry_count=1,
task_poll_interval=10, datastore_regex='.*', group='vmware')
self.flags(vnc_enabled=False,
image_cache_subdirectory_name='vmware_base')
vmwareapi_fake.reset()
self.conn = driver.VMwareVCDriver(None, False)
self.node_name = self.conn._resources.keys()[0]
self.node_name2 = self.conn._resources.keys()[1]
if cluster_name2 in self.node_name2:
self.ds = 'ds1'
else:
self.ds = 'ds2'
self.vnc_host = 'ha-host'
def tearDown(self):
super(VMwareAPIVCDriverTestCase, self).tearDown()
vmwareapi_fake.cleanup()
def test_public_api_signatures(self):
self.assertPublicAPISignatures(self.conn)
def test_list_instances(self):
instances = self.conn.list_instances()
self.assertEqual(0, len(instances))
def test_list_instances_from_nodes(self):
# Create instance on node1
self._create_vm(self.node_name)
# Create instances on the other node
self._create_vm(self.node_name2, num_instances=2)
self._create_vm(self.node_name2, num_instances=3)
node1_vmops = self.conn._get_vmops_for_compute_node(self.node_name)
node2_vmops = self.conn._get_vmops_for_compute_node(self.node_name2)
self.assertEqual(1, len(node1_vmops.list_instances()))
self.assertEqual(2, len(node2_vmops.list_instances()))
self.assertEqual(3, len(self.conn.list_instances()))
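    # Helper: build a VMwareVCDriver with its __init__ mocked out (mock_init)
    # and a mock session whose vim handle starts as None; the _create_session
    # side effect installs a fresh mock vim, which lets tests exercise the
    # session re-creation path.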
def _setup_mocks_for_session(self, mock_init):
mock_init.return_value = None
vcdriver = driver.VMwareVCDriver(None, False)
vcdriver._session = mock.Mock()
vcdriver._session.vim = None
def side_effect():
vcdriver._session.vim = mock.Mock()
vcdriver._session._create_session.side_effect = side_effect
return vcdriver
def test_host_power_action(self):
self.assertRaises(NotImplementedError,
self.conn.host_power_action, 'host', 'action')
def test_host_maintenance_mode(self):
self.assertRaises(NotImplementedError,
self.conn.host_maintenance_mode, 'host', 'mode')
def test_set_host_enabled(self):
self.assertRaises(NotImplementedError,
self.conn.set_host_enabled, 'host', 'state')
def test_datastore_regex_configured(self):
for node in self.conn._resources.keys():
self.assertEqual(self.conn._datastore_regex,
self.conn._resources[node]['vmops']._datastore_regex)
def test_get_available_resource(self):
stats = self.conn.get_available_resource(self.node_name)
cpu_info = {"model": ["Intel(R) Xeon(R)", "Intel(R) Xeon(R)"],
"vendor": ["Intel", "Intel"],
"topology": {"cores": 16,
"threads": 32}}
self.assertEqual(stats['vcpus'], 32)
self.assertEqual(stats['local_gb'], 1024)
self.assertEqual(stats['local_gb_used'], 1024 - 500)
self.assertEqual(stats['memory_mb'], 1000)
self.assertEqual(stats['memory_mb_used'], 500)
self.assertEqual(stats['hypervisor_type'], 'VMware vCenter Server')
self.assertEqual(stats['hypervisor_version'], 5001000)
self.assertEqual(stats['hypervisor_hostname'], self.node_name)
self.assertEqual(stats['cpu_info'], jsonutils.dumps(cpu_info))
self.assertEqual(stats['supported_instances'],
'[["i686", "vmware", "hvm"], ["x86_64", "vmware", "hvm"]]')
def test_invalid_datastore_regex(self):
# Tests if we raise an exception for Invalid Regular Expression in
# vmware_datastore_regex
self.flags(cluster_name=['test_cluster'], datastore_regex='fake-ds(01',
group='vmware')
self.assertRaises(exception.InvalidInput, driver.VMwareVCDriver, None)
def test_get_available_nodes(self):
nodelist = self.conn.get_available_nodes()
self.assertEqual(len(nodelist), 2)
self.assertIn(self.node_name, nodelist)
self.assertIn(self.node_name2, nodelist)
def test_spawn_multiple_node(self):
def fake_is_neutron():
return False
self.stubs.Set(nova_utils, 'is_neutron', fake_is_neutron)
uuid1 = uuidutils.generate_uuid()
uuid2 = uuidutils.generate_uuid()
self._create_vm(node=self.node_name, num_instances=1,
uuid=uuid1)
info = self.conn.get_info({'uuid': uuid1,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
self.conn.destroy(self.context, self.instance, self.network_info)
self._create_vm(node=self.node_name2, num_instances=1,
uuid=uuid2)
info = self.conn.get_info({'uuid': uuid2,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_snapshot(self):
        # Ensure VMwareVCVMOps's get_copy_virtual_disk_spec is called exactly
        # two times
self.mox.StubOutWithMock(vmops.VMwareVCVMOps,
'get_copy_virtual_disk_spec')
self.conn._vmops.get_copy_virtual_disk_spec(
mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(None)
self.conn._vmops.get_copy_virtual_disk_spec(
mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(None)
self.mox.ReplayAll()
self._create_vm()
self._test_snapshot()
def test_snapshot_using_file_manager(self):
self._create_vm()
uuid_str = uuidutils.generate_uuid()
self.mox.StubOutWithMock(uuidutils,
'generate_uuid')
uuidutils.generate_uuid().AndReturn(uuid_str)
self.mox.StubOutWithMock(ds_util, 'file_delete')
disk_ds_path = ds_util.DatastorePath(
self.ds, "vmware_temp", "%s.vmdk" % uuid_str)
disk_ds_flat_path = ds_util.DatastorePath(
self.ds, "vmware_temp", "%s-flat.vmdk" % uuid_str)
# Check calls for delete vmdk and -flat.vmdk pair
ds_util.file_delete(
mox.IgnoreArg(), disk_ds_flat_path,
mox.IgnoreArg()).AndReturn(None)
ds_util.file_delete(
mox.IgnoreArg(), disk_ds_path, mox.IgnoreArg()).AndReturn(None)
self.mox.ReplayAll()
self._test_snapshot()
def test_spawn_invalid_node(self):
self._create_instance(node='InvalidNodeName')
self.assertRaises(exception.NotFound, self.conn.spawn,
self.context, self.instance, self.image,
injected_files=[], admin_password=None,
network_info=self.network_info,
block_device_info=None)
@mock.patch.object(nova.virt.vmwareapi.vmware_images.VMwareImage,
'from_image')
@mock.patch.object(vmops.VMwareVCVMOps, 'get_copy_virtual_disk_spec')
def test_spawn_with_sparse_image(self, mock_get_copy_virtual_disk_spec,
mock_from_image):
img_info = vmware_images.VMwareImage(
image_id=self.fake_image_uuid,
file_size=1024,
disk_type=constants.DISK_TYPE_SPARSE,
linked_clone=False)
mock_from_image.return_value = img_info
mock_get_copy_virtual_disk_spec.return_value = None
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
def test_plug_vifs(self):
# Check to make sure the method raises NotImplementedError.
self._create_instance()
self.assertRaises(NotImplementedError,
self.conn.plug_vifs,
instance=self.instance, network_info=None)
def test_unplug_vifs(self):
# Check to make sure the method raises NotImplementedError.
self._create_instance()
self.assertRaises(NotImplementedError,
self.conn.unplug_vifs,
instance=self.instance, network_info=None)
def _create_vif(self):
gw_4 = network_model.IP(address='101.168.1.1', type='gateway')
dns_4 = network_model.IP(address='8.8.8.8', type=None)
subnet_4 = network_model.Subnet(cidr='101.168.1.0/24',
dns=[dns_4],
gateway=gw_4,
routes=None,
dhcp_server='191.168.1.1')
gw_6 = network_model.IP(address='101:1db9::1', type='gateway')
subnet_6 = network_model.Subnet(cidr='101:1db9::/64',
dns=None,
gateway=gw_6,
ips=None,
routes=None)
network_neutron = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_4,
subnet_6],
bridge_interface='eth0',
vlan=99)
vif_bridge_neutron = network_model.VIF(id='new-vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_neutron,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
return vif_bridge_neutron
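    # Helper: scan the VM's config.extraConfig for "nvp.iface-id.<n>" entries
    # and assert both that the entry at `index` carries `id` and that the
    # total number of iface-id entries equals `num_iface_ids`.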
def _validate_interfaces(self, id, index, num_iface_ids):
vm = self._get_vm_record()
found_iface_id = False
extras = vm.get("config.extraConfig")
key = "nvp.iface-id.%s" % index
num_found = 0
for c in extras.OptionValue:
if c.key.startswith("nvp.iface-id."):
num_found += 1
if c.key == key and c.value == id:
found_iface_id = True
self.assertTrue(found_iface_id)
self.assertEqual(num_found, num_iface_ids)
def _attach_interface(self, vif):
self.conn.attach_interface(self.instance, self.image, vif)
self._validate_interfaces(vif['id'], 1, 2)
def test_attach_interface(self):
self._create_vm()
vif = self._create_vif()
self._attach_interface(vif)
def test_attach_interface_with_exception(self):
self._create_vm()
vif = self._create_vif()
with mock.patch.object(self.conn._session, '_wait_for_task',
side_effect=Exception):
self.assertRaises(exception.InterfaceAttachFailed,
self.conn.attach_interface,
self.instance, self.image, vif)
@mock.patch.object(vif, 'get_network_device',
return_value='fake_device')
def _detach_interface(self, vif, mock_get_device):
self._create_vm()
self._attach_interface(vif)
self.conn.detach_interface(self.instance, vif)
self._validate_interfaces('free', 1, 2)
def test_detach_interface(self):
vif = self._create_vif()
self._detach_interface(vif)
def test_detach_interface_and_attach(self):
vif = self._create_vif()
self._detach_interface(vif)
self.conn.attach_interface(self.instance, self.image, vif)
self._validate_interfaces(vif['id'], 1, 2)
def test_detach_interface_no_device(self):
self._create_vm()
vif = self._create_vif()
self._attach_interface(vif)
self.assertRaises(exception.NotFound, self.conn.detach_interface,
self.instance, vif)
def test_detach_interface_no_vif_match(self):
self._create_vm()
vif = self._create_vif()
self._attach_interface(vif)
vif['id'] = 'bad-id'
self.assertRaises(exception.NotFound, self.conn.detach_interface,
self.instance, vif)
@mock.patch.object(vif, 'get_network_device',
return_value='fake_device')
def test_detach_interface_with_exception(self, mock_get_device):
self._create_vm()
vif = self._create_vif()
self._attach_interface(vif)
with mock.patch.object(self.conn._session, '_wait_for_task',
side_effect=Exception):
self.assertRaises(exception.InterfaceDetachFailed,
self.conn.detach_interface,
self.instance, vif)
def test_migrate_disk_and_power_off(self):
def fake_update_instance_progress(context, instance, step,
total_steps):
pass
def fake_get_host_ref_from_name(dest):
return None
self._create_vm(instance_type='m1.large')
vm_ref_orig = vm_util.get_vm_ref(self.conn._session, self.instance)
flavor = self._get_instance_type_by_name('m1.large')
self.stubs.Set(self.conn._vmops, "_update_instance_progress",
fake_update_instance_progress)
self.stubs.Set(self.conn._vmops, "_get_host_ref_from_name",
fake_get_host_ref_from_name)
self.conn.migrate_disk_and_power_off(self.context, self.instance,
'fake_dest', flavor,
None)
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
self.assertNotEqual(vm_ref_orig.value, vm_ref.value,
"These should be different")
def test_disassociate_vmref_from_instance(self):
self._create_vm()
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
vm_util.disassociate_vmref_from_instance(self.conn._session,
self.instance, vm_ref, "-backup")
self.assertRaises(exception.InstanceNotFound,
vm_util.get_vm_ref, self.conn._session, self.instance)
def test_clone_vmref_for_instance(self):
self._create_vm()
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
vm_util.disassociate_vmref_from_instance(self.conn._session,
self.instance, vm_ref, "-backup")
host_ref = vmwareapi_fake._get_object_refs("HostSystem")[0]
ds_ref = vmwareapi_fake._get_object_refs("Datastore")[0]
dc_obj = vmwareapi_fake._get_objects("Datacenter").objects[0]
vm_util.clone_vmref_for_instance(self.conn._session, self.instance,
vm_ref, host_ref, ds_ref,
dc_obj.get("vmFolder"))
self.assertIsNotNone(
vm_util.get_vm_ref(self.conn._session, self.instance),
"No VM found")
cloned_vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
self.assertNotEqual(vm_ref.value, cloned_vm_ref.value,
"Reference for the cloned VM should be different")
vm_obj = vmwareapi_fake._get_vm_mdo(vm_ref)
cloned_vm_obj = vmwareapi_fake._get_vm_mdo(cloned_vm_ref)
self.assertEqual(vm_obj.name, self.instance['uuid'] + "-backup",
"Original VM name should be with suffix -backup")
self.assertEqual(cloned_vm_obj.name, self.instance['uuid'],
"VM name does not match instance['uuid']")
self.assertRaises(error_util.MissingParameter,
vm_util.clone_vmref_for_instance, self.conn._session,
self.instance, None, host_ref, ds_ref,
dc_obj.get("vmFolder"))
def test_associate_vmref_for_instance(self):
self._create_vm()
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
# First disassociate the VM from the instance so that we have a VM
# to later associate using the associate_vmref_for_instance method
vm_util.disassociate_vmref_from_instance(self.conn._session,
self.instance, vm_ref, "-backup")
# Ensure that the VM is indeed disassociated and that we cannot find
# the VM using the get_vm_ref method
self.assertRaises(exception.InstanceNotFound,
vm_util.get_vm_ref, self.conn._session, self.instance)
# Associate the VM back to the instance
vm_util.associate_vmref_for_instance(self.conn._session, self.instance,
suffix="-backup")
# Verify if we can get the VM reference
self.assertIsNotNone(
vm_util.get_vm_ref(self.conn._session, self.instance),
"No VM found")
def test_confirm_migration(self):
self._create_vm()
self.conn.confirm_migration(self.context, self.instance, None)
def test_resize_to_smaller_disk(self):
self._create_vm(instance_type='m1.large')
flavor = self._get_instance_type_by_name('m1.small')
self.assertRaises(exception.InstanceFaultRollback,
self.conn.migrate_disk_and_power_off, self.context,
self.instance, 'fake_dest', flavor, None)
def test_spawn_attach_volume_vmdk(self):
self._spawn_attach_volume_vmdk(vc_support=True)
def test_spawn_attach_volume_vmdk_no_image_ref(self):
self._spawn_attach_volume_vmdk(set_image_ref=False, vc_support=True)
def test_pause(self):
# Tests that the VMwareVCDriver does not implement the pause method.
self._create_instance()
self.assertRaises(NotImplementedError, self.conn.pause, self.instance)
def test_unpause(self):
# Tests that the VMwareVCDriver does not implement the unpause method.
self._create_instance()
self.assertRaises(NotImplementedError, self.conn.unpause,
self.instance)
def test_datastore_dc_map(self):
vmops = self.conn._resources[self.node_name]['vmops']
self.assertEqual({}, vmops._datastore_dc_mapping)
self._create_vm()
# currently there are 2 data stores
self.assertEqual(2, len(vmops._datastore_dc_mapping))
def test_rollback_live_migration_at_destination(self):
with mock.patch.object(self.conn, "destroy") as mock_destroy:
self.conn.rollback_live_migration_at_destination(self.context,
"instance", [], None)
mock_destroy.assert_called_once_with(self.context,
"instance", [], None)
def test_get_instance_disk_info_is_implemented(self):
# Ensure that the method has been implemented in the driver
try:
disk_info = self.conn.get_instance_disk_info('fake_instance_name')
self.assertIsNone(disk_info)
except NotImplementedError:
self.fail("test_get_instance_disk_info() should not raise "
"NotImplementedError")
def test_destroy(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
instances = self.conn.list_instances()
self.assertEqual(1, len(instances))
self.conn.destroy(self.context, self.instance, self.network_info)
instances = self.conn.list_instances()
self.assertEqual(0, len(instances))
self.assertIsNone(vm_util.vm_ref_cache_get(self.uuid))
def test_destroy_no_datastore(self):
self._create_vm()
info = self.conn.get_info({'uuid': self.uuid,
'node': self.instance_node})
self._check_vm_info(info, power_state.RUNNING)
instances = self.conn.list_instances()
self.assertEqual(1, len(instances))
# Overwrite the vmPathName
vm = self._get_vm_record()
vm.set("config.files.vmPathName", None)
self.conn.destroy(self.context, self.instance, self.network_info)
instances = self.conn.list_instances()
self.assertEqual(0, len(instances))
def test_destroy_non_existent(self):
self.destroy_disks = True
with mock.patch.object(self.conn._vmops,
"destroy") as mock_destroy:
self._create_instance()
self.conn.destroy(self.context, self.instance,
self.network_info,
None, self.destroy_disks)
mock_destroy.assert_called_once_with(self.instance,
self.destroy_disks)
def test_destroy_instance_without_compute(self):
self.destroy_disks = True
with mock.patch.object(self.conn._vmops,
"destroy") as mock_destroy:
self.conn.destroy(self.context, self.instance_without_compute,
self.network_info,
None, self.destroy_disks)
self.assertFalse(mock_destroy.called)
def test_get_host_uptime(self):
self.assertRaises(NotImplementedError,
self.conn.get_host_uptime, 'host')
def _test_finish_migration(self, power_on, resize_instance=False):
"""Tests the finish_migration method on VC Driver."""
# setup the test instance in the database
self._create_vm()
if resize_instance:
self.instance.system_metadata = {'old_instance_type_root_gb': '0'}
vm_ref = vm_util.get_vm_ref(self.conn._session, self.instance)
datastore = ds_util.Datastore(ref='fake-ref', name='fake')
dc_info = vmops.DcInfo(ref='fake_ref', name='fake',
vmFolder='fake_folder')
with contextlib.nested(
mock.patch.object(self.conn._session, "_call_method",
return_value='fake-task'),
mock.patch.object(self.conn._vmops,
"_update_instance_progress"),
mock.patch.object(self.conn._session, "_wait_for_task"),
mock.patch.object(vm_util, "get_vm_resize_spec",
return_value='fake-spec'),
mock.patch.object(ds_util, "get_datastore",
return_value=datastore),
mock.patch.object(self.conn._vmops,
'get_datacenter_ref_and_name',
return_value=dc_info),
mock.patch.object(self.conn._vmops, '_extend_virtual_disk'),
mock.patch.object(vm_util, "power_on_instance")
) as (fake_call_method, fake_update_instance_progress,
fake_wait_for_task, fake_vm_resize_spec,
fake_get_datastore, fake_get_datacenter_ref_and_name,
fake_extend_virtual_disk, fake_power_on):
self.conn.finish_migration(context=self.context,
migration=None,
instance=self.instance,
disk_info=None,
network_info=None,
block_device_info=None,
resize_instance=resize_instance,
image_meta=None,
power_on=power_on)
if resize_instance:
fake_vm_resize_spec.assert_called_once_with(
self.conn._session._get_vim().client.factory,
self.instance)
fake_call_method.assert_any_call(
self.conn._session._get_vim(),
"ReconfigVM_Task",
vm_ref,
spec='fake-spec')
fake_wait_for_task.assert_called_once_with('fake-task')
fake_extend_virtual_disk.assert_called_once_with(
self.instance, self.instance['root_gb'] * units.Mi,
None, dc_info.ref)
else:
self.assertFalse(fake_vm_resize_spec.called)
self.assertFalse(fake_call_method.called)
self.assertFalse(fake_wait_for_task.called)
self.assertFalse(fake_extend_virtual_disk.called)
if power_on:
fake_power_on.assert_called_once_with(self.conn._session,
self.instance,
vm_ref=vm_ref)
else:
self.assertFalse(fake_power_on.called)
fake_update_instance_progress.called_once_with(
self.context, self.instance, 4, vmops.RESIZE_TOTAL_STEPS)
def test_finish_migration_power_on(self):
self._test_finish_migration(power_on=True)
def test_finish_migration_power_off(self):
self._test_finish_migration(power_on=False)
def test_finish_migration_power_on_resize(self):
self._test_finish_migration(power_on=True,
resize_instance=True)
@mock.patch.object(vm_util, 'associate_vmref_for_instance')
@mock.patch.object(vm_util, 'power_on_instance')
def _test_finish_revert_migration(self, fake_power_on,
fake_associate_vmref, power_on):
"""Tests the finish_revert_migration method on VC Driver."""
# setup the test instance in the database
self._create_instance()
self.conn.finish_revert_migration(self.context,
instance=self.instance,
network_info=None,
block_device_info=None,
power_on=power_on)
fake_associate_vmref.assert_called_once_with(self.conn._session,
self.instance,
suffix='-orig')
if power_on:
fake_power_on.assert_called_once_with(self.conn._session,
self.instance)
else:
self.assertFalse(fake_power_on.called)
def test_finish_revert_migration_power_on(self):
self._test_finish_revert_migration(power_on=True)
def test_finish_revert_migration_power_off(self):
self._test_finish_revert_migration(power_on=False)
| apache-2.0 | 8,041,043,002,991,961,000 | 43.834046 | 79 | 0.560479 | false |
apple/swift-lldb | packages/Python/lldbsuite/test/functionalities/memory/read/TestMemoryRead.py | 5 | 5028 | """
Test the 'memory read' command.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
class MemoryReadTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.cpp', '// Set break point at this line.')
def test_memory_read(self):
"""Test the 'memory read' command with plain and vector formats."""
self.build()
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Break in main() after the variables are assigned values.
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped', 'stop reason = breakpoint'])
# The breakpoint should have a hit count of 1.
self.expect("breakpoint list -f", BREAKPOINT_HIT_ONCE,
substrs=[' resolved, hit count = 1'])
# Test the memory read commands.
# (lldb) memory read -f d -c 1 `&argc`
# 0x7fff5fbff9a0: 1
self.runCmd("memory read -f d -c 1 `&argc`")
# Find the starting address for variable 'argc' to verify later that the
# '--format uint32_t[] --size 4 --count 4' option increments the address
# correctly.
line = self.res.GetOutput().splitlines()[0]
items = line.split(':')
address = int(items[0], 0)
argc = int(items[1], 0)
self.assertTrue(address > 0 and argc == 1)
# (lldb) memory read --format uint32_t[] --size 4 --count 4 `&argc`
# 0x7fff5fbff9a0: {0x00000001}
# 0x7fff5fbff9a4: {0x00000000}
# 0x7fff5fbff9a8: {0x0ec0bf27}
# 0x7fff5fbff9ac: {0x215db505}
self.runCmd(
"memory read --format uint32_t[] --size 4 --count 4 `&argc`")
lines = self.res.GetOutput().splitlines()
for i in range(4):
if i == 0:
# Verify that the printout for argc is correct.
self.assertTrue(
argc == int(
lines[i].split(':')[1].strip(' {}'), 0))
addr = int(lines[i].split(':')[0], 0)
# Verify that the printout for addr is incremented correctly.
self.assertTrue(addr == (address + i * 4))
# (lldb) memory read --format char[] --size 7 --count 1 `&my_string`
# 0x7fff5fbff990: {abcdefg}
self.expect(
"memory read --format char[] --size 7 --count 1 `&my_string`",
substrs=['abcdefg'])
# (lldb) memory read --format 'hex float' --size 16 `&argc`
# 0x7fff5fbff5b0: error: unsupported byte size (16) for hex float
# format
self.expect(
"memory read --format 'hex float' --size 16 `&argc`",
substrs=['unsupported byte size (16) for hex float format'])
self.expect(
"memory read --format 'float' --count 1 --size 8 `&my_double`",
substrs=['1234.'])
# (lldb) memory read --format 'float' --count 1 --size 20 `&my_double`
# 0x7fff5fbff598: error: unsupported byte size (20) for float format
self.expect(
"memory read --format 'float' --count 1 --size 20 `&my_double`",
substrs=['unsupported byte size (20) for float format'])
self.expect('memory read --type int --count 5 `&my_ints[0]`',
substrs=['(int) 0x', '2', '4', '6', '8', '10'])
self.expect(
'memory read --type int --count 5 --format hex `&my_ints[0]`',
substrs=[
'(int) 0x',
'0x',
'0a'])
self.expect(
'memory read --type int --count 5 --offset 5 `&my_ints[0]`',
substrs=[
'(int) 0x',
'12',
'14',
'16',
'18',
'20'])
        # Each variation pairs a gdb format specifier with the expected width,
        # in characters, of every value printed back (including the 0x prefix).
variations = [['b', 4], ['h', 6], ['w', 10], ['g', 18]]
for v in variations:
formatter = v[0]
expected_object_length = v[1]
self.runCmd(
"memory read --gdb-format 4%s &my_uint64s" % formatter)
lines = self.res.GetOutput().splitlines()
objects_read = []
for l in lines:
objects_read.extend(l.split(':')[1].split())
# Check that we got back 4 0x0000 etc bytes
for o in objects_read:
                self.assertTrue(len(o) == expected_object_length)
self.assertTrue(len(objects_read) == 4)
| apache-2.0 | -4,204,341,505,956,777,500 | 36.522388 | 82 | 0.538385 | false |
guardicore/monkey | monkey/infection_monkey/system_info/collectors/scoutsuite_collector/scoutsuite_collector.py | 1 | 1280 | import logging
from typing import Union
import ScoutSuite.api_run
from ScoutSuite.providers.base.provider import BaseProvider
from common.cloud.scoutsuite_consts import CloudProviders
from common.utils.exceptions import ScoutSuiteScanError
from infection_monkey.config import WormConfiguration
from infection_monkey.telemetry.scoutsuite_telem import ScoutSuiteTelem
logger = logging.getLogger(__name__)
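# Run a ScoutSuite audit against the given cloud provider and send the results
# back as a ScoutSuiteTelem telemetry message.  A result dict carrying a
# truthy "error" key is treated as a failed scan and only logged.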
def scan_cloud_security(cloud_type: CloudProviders):
try:
results = run_scoutsuite(cloud_type.value)
if isinstance(results, dict) and "error" in results and results["error"]:
raise ScoutSuiteScanError(results["error"])
send_scoutsuite_run_results(results)
except (Exception, ScoutSuiteScanError) as e:
logger.error(f"ScoutSuite didn't scan {cloud_type.value} security because: {e}")
def run_scoutsuite(cloud_type: str) -> Union[BaseProvider, dict]:
return ScoutSuite.api_run.run(
provider=cloud_type,
aws_access_key_id=WormConfiguration.aws_access_key_id,
aws_secret_access_key=WormConfiguration.aws_secret_access_key,
aws_session_token=WormConfiguration.aws_session_token,
)
def send_scoutsuite_run_results(run_results: BaseProvider):
ScoutSuiteTelem(run_results).send()
| gpl-3.0 | -3,070,371,654,368,405,500 | 35.571429 | 88 | 0.751563 | false |
sneeu/little | little/migrations/0003_auto__add_visit.py | 1 | 6090 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Visit'
db.create_table(u'little_visit', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('short', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['little.Short'])),
('remote_addr', self.gf('django.db.models.fields.CharField')(max_length=15)),
('user_agent', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('referrer', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'little', ['Visit'])
def backwards(self, orm):
# Deleting model 'Visit'
db.delete_table(u'little_visit')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'little.apikey': {
'Meta': {'object_name': 'APIKey'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'little.short': {
'Meta': {'object_name': 'Short'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'destination': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'little.visit': {
'Meta': {'object_name': 'Visit'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'referrer': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'remote_addr': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'short': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['little.Short']"}),
'user_agent': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['little'] | mit | -474,116,276,274,267,140 | 65.934066 | 195 | 0.556979 | false |
agussman/talon | talon/signature/learning/featurespace.py | 8 | 2965 | # -*- coding: utf-8 -*-
""" The module provides functions for conversion of a message body/body lines
into the classifier's feature space.
The body and the message sender string are converted into unicode before
applying features to them.
"""
from talon.signature.constants import (SIGNATURE_MAX_LINES,
TOO_LONG_SIGNATURE_LINE)
from talon.signature.learning.helpers import *
def features(sender=''):
'''Returns a list of signature features.'''
return [
# This one isn't from paper.
# Meant to match companies names, sender's names, address.
many_capitalized_words,
# This one is not from paper.
# Line is too long.
# This one is less aggressive than `Line is too short`
lambda line: 1 if len(line) > TOO_LONG_SIGNATURE_LINE else 0,
# Line contains email pattern.
binary_regex_search(RE_EMAIL),
# Line contains url.
binary_regex_search(RE_URL),
# Line contains phone number pattern.
binary_regex_search(RE_RELAX_PHONE),
# Line matches the regular expression "^[\s]*---*[\s]*$".
binary_regex_match(RE_SEPARATOR),
# Line has a sequence of 10 or more special characters.
binary_regex_search(RE_SPECIAL_CHARS),
# Line contains any typical signature words.
binary_regex_search(RE_SIGNATURE_WORDS),
# Line contains a pattern like Vitor R. Carvalho or William W. Cohen.
binary_regex_search(RE_NAME),
# Percentage of punctuation symbols in the line is larger than 50%
lambda line: 1 if punctuation_percent(line) > 50 else 0,
# Percentage of punctuation symbols in the line is larger than 90%
lambda line: 1 if punctuation_percent(line) > 90 else 0,
contains_sender_names(sender)
]
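# Note added for clarity: features() returns a list of callables, each mapping one
# body line to 0 or 1. For instance, binary_regex_search(RE_EMAIL) would be expected
# to yield 1 for a line such as "reach me at jane.doe@example.com" and 0 otherwise;
# the actual regular expressions are defined in talon.signature.learning.helpers.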
def apply_features(body, features):
'''Applies features to message body lines.
Returns list of lists. Each of the lists corresponds to the body line
    and contains, for each feature, its occurrence indicator (0 or 1).
E.g. if element j of list i equals 1 this means that
feature j occurred in line i (counting from the last line of the body).
'''
# collect all non empty lines
lines = [line for line in body.splitlines() if line.strip()]
# take the last SIGNATURE_MAX_LINES
last_lines = lines[-SIGNATURE_MAX_LINES:]
# apply features, fallback to zeros
return ([[f(line) for f in features] for line in last_lines] or
[[0 for f in features]])
def build_pattern(body, features):
'''Converts body into a pattern i.e. a point in the features space.
Applies features to the body lines and sums up the results.
Elements of the pattern indicate how many times a certain feature occurred
in the last lines of the body.
'''
line_patterns = apply_features(body, features)
return reduce(lambda x, y: [i + j for i, j in zip(x, y)], line_patterns)
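# Illustrative usage sketch (not part of the original module; it only combines the
# functions defined above):
#
#   fs = features("John Doe <john.doe@example.com>")
#   pattern = build_pattern("Thanks,\nJohn Doe\njohn.doe@example.com", fs)
#
# 'pattern' has one element per feature, summing that feature's occurrences over the
# last SIGNATURE_MAX_LINES non-empty lines of the body.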
| apache-2.0 | -314,456,891,696,160,000 | 39.067568 | 78 | 0.661046 | false |
mohland/dogetipbot | src/ctb/ctb_action.py | 2 | 56747 | """
This file is part of ALTcointip.
ALTcointip is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ALTcointip is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ALTcointip. If not, see <http://www.gnu.org/licenses/>.
"""
import ctb_user, ctb_misc, ctb_stats
import logging, praw, re, time
from random import randint
lg = logging.getLogger('cointipbot')
class CtbAction(object):
"""
Action class for cointip bot
"""
type=None # 'accept', 'decline', 'history', 'info', 'register', 'givetip', 'withdraw', 'redeem', 'rates'
state=None # 'completed', 'pending', 'failed', 'declined'
txid=None # cryptocoin transaction id, a 64-char string, if applicable
u_from=None # CtbUser instance
u_to=None # CtbUser instance, if applicable
addr_to=None # destination cryptocoin address of 'givetip' and 'withdraw' actions, if applicable
coin=None # coin for this action (for example, 'ltc')
fiat=None # fiat for this action (for example, 'usd'), if applicable
coinval=None # coin value of 'givetip' and 'withdraw' actions
fiatval=None # fiat value of the 'givetip' or 'withdraw' action
keyword=None # keyword that's used instead of coinval/fiatval
subreddit=None # subreddit that originated the action, if applicable
msg=None # Reddit object pointing to originating message/comment
ctb=None # CointipBot instance
deleted_msg_id=None # Used for accepting tips if the original message was deleted
deleted_created_utc=None # Used for accepting tips if the original message was deleted
def __init__(self, atype=None, msg=None, deleted_msg_id=None, deleted_created_utc=None, from_user=None, to_user=None, to_addr=None, coin=None, fiat=None, coin_val=None, fiat_val=None, subr=None, ctb=None, keyword=None):
"""
Initialize CtbAction object with given parameters and run basic checks
"""
lg.debug("> CtbAction::__init__(type=%s)", atype)
self.type = atype
self.coin = coin.lower() if coin else None
self.fiat = fiat.lower() if fiat else None
self.coinval = coin_val
self.fiatval = fiat_val
self.keyword = keyword.lower() if keyword else None
self.msg = msg
self.ctb = ctb
self.deleted_msg_id = deleted_msg_id
self.deleted_created_utc = deleted_created_utc
self.addr_to = to_addr
self.u_to = ctb_user.CtbUser(name=to_user, ctb=ctb) if to_user else None
self.u_from = ctb_user.CtbUser(name=msg.author.name, redditobj=msg.author, ctb=ctb) if (msg and msg.author) else ctb_user.CtbUser(name=from_user, ctb=ctb)
self.subreddit = subr
# Do some checks
if not self.type:
raise Exception("CtbAction::__init__(type=?): type not set")
if not self.ctb:
raise Exception("CtbAction::__init__(type=%s): no reference to CointipBot", self.type)
# if not self.msg:
# raise Exception("CtbAction::__init__(type=%s): no reference to Reddit message/comment", self.type)
if self.type in ['givetip', 'withdraw']:
if not (bool(self.u_to) ^ bool(self.addr_to)):
raise Exception("CtbAction::__init__(atype=%s, from_user=%s): u_to xor addr_to must be set" % (self.type, self.u_from.name))
if not (bool(self.coin) or bool(self.fiat) or bool(self.keyword)):
raise Exception("CtbAction::__init__(atype=%s, from_user=%s): coin or fiat or keyword must be set" % (self.type, self.u_from.name))
if not (bool(self.coinval) or bool(self.fiatval) or bool(self.keyword)):
raise Exception("CtbAction::__init__(atype=%s, from_user=%s): coinval or fiatval or keyword must be set" % (self.type, self.u_from.name))
# Convert coinval and fiat to float, if necesary
if self.coinval and type(self.coinval) == unicode and self.coinval.replace('.', '').isnumeric():
self.coinval = float(self.coinval)
if self.fiatval and type(self.fiatval) == unicode and self.fiatval.replace('.', '').isnumeric():
self.fiatval = float(self.fiatval)
lg.debug("CtbAction::__init__(): %s", self)
# Determine coinval or fiatval, if keyword is given instead of numeric value
if self.type in ['givetip', 'withdraw']:
if self.keyword:
if not self.ctb.conf.keywords[self.keyword].for_coin and not self.fiat:
# If fiat-only, set fiat to 'usd' if missing
self.fiat = 'usd'
if not self.ctb.conf.keywords[self.keyword].for_coin and not self.fiatval:
# If fiat-only, set fiatval as coinval, and clear coinval
self.fiatval = self.coinval
self.coinval = None
if not self.coin and not self.fiat:
# If both coin and fiat missing, set fiat to 'usd'
self.fiat = 'usd'
if self.keyword and self.fiat and not self.coin and not self.ctb.conf.keywords[self.keyword].for_fiat:
# If keyword is coin-only but only fiat is set, give up
return None
if self.keyword and self.fiat and not type(self.fiatval) in [float, int]:
# Determine fiat value
lg.debug("CtbAction::__init__(): determining fiat value given '%s'", self.keyword)
val = self.ctb.conf.keywords[self.keyword].value
if type(val) == float:
self.fiatval = val
elif type(val) == str:
lg.debug("CtbAction::__init__(): evaluating '%s'", val)
self.fiatval = eval(val)
if not type(self.fiatval) == float:
lg.warning("CtbAction::__init__(atype=%s, from_user=%s): couldn't determine fiatval from keyword '%s' (not float)" % (self.type, self.u_from.name, self.keyword))
return None
else:
lg.warning("CtbAction::__init__(atype=%s, from_user=%s): couldn't determine fiatval from keyword '%s' (not float or str)" % (self.type, self.u_from.name, self.keyword))
return None
elif self.keyword and self.coin and not type(self.coinval) in [float, int]:
# Determine coin value
lg.debug("CtbAction::__init__(): determining coin value given '%s'", self.keyword)
val = self.ctb.conf.keywords[self.keyword].value
if type(val) == float:
self.coinval = val
elif type(val) == str:
lg.debug("CtbAction::__init__(): evaluating '%s'", val)
self.coinval = eval(val)
if not type(self.coinval) == float:
lg.warning("CtbAction::__init__(atype=%s, from_user=%s): couldn't determine coinval from keyword '%s' (not float)" % (self.type, self.u_from.name, self.keyword))
return None
else:
lg.warning("CtbAction::__init__(atype=%s, from_user=%s): couldn't determine coinval from keyword '%s' (not float or str)" % (self.type, self.u_from.name, self.keyword))
return None
# By this point we should have a proper coinval or fiatval
if not type(self.coinval) in [float, int] and not type(self.fiatval) in [float, int]:
raise Exception("CtbAction::__init__(atype=%s, from_user=%s): coinval or fiatval isn't determined" % (self.type, self.u_from.name))
# Determine coin, if given only fiat, using exchange rates
if self.type in ['givetip']:
if self.fiat and not self.coin:
lg.debug("CtbAction::__init__(atype=%s, from_user=%s): determining coin..." % (self.type, self.u_from.name))
if not self.u_from.is_registered():
# Can't proceed, abort
lg.warning("CtbAction::__init__(): can't determine coin for un-registered user %s", self.u_from.name)
return None
# Choose a coin based on from_user's available balance (pick first one that can satisfy the amount)
cc = self.ctb.conf.coins
for c in sorted(self.ctb.coins):
lg.debug("CtbAction::__init__(atype=%s, from_user=%s): considering %s" % (self.type, self.u_from.name, c))
# First, check if we have a ticker value for this coin and fiat
if not self.ctb.coin_value(cc[c].unit, self.fiat) > 0.0:
continue
# Compare available and needed coin balances
coin_balance_avail = self.u_from.get_balance(coin=cc[c].unit, kind='givetip')
coin_balance_need = self.fiatval / self.ctb.coin_value(cc[c].unit, self.fiat)
if coin_balance_avail > coin_balance_need or abs(coin_balance_avail - coin_balance_need) < 0.000001:
# Found coin with enough balance
self.coin = cc[c].unit
break
if not self.coin:
                    # Couldn't determine coin, abort
lg.warning("CtbAction::__init__(): can't determine coin for user %s", self.u_from.name)
return None
# Calculate fiat or coin value with exchange rates
if self.type in ['givetip', 'withdraw']:
if not self.fiat:
# Set fiat to 'usd' if not specified
self.fiat = 'usd'
if not self.fiatval:
# Determine fiat value
self.fiatval = self.coinval * self.ctb.coin_value(self.ctb.conf.coins[self.coin].unit, self.fiat)
elif not self.coinval:
# Determine coin value
self.coinval = self.fiatval / self.ctb.coin_value(self.ctb.conf.coins[self.coin].unit, self.fiat)
lg.debug("< CtbAction::__init__(atype=%s, from_user=%s) DONE", self.type, self.u_from.name)
def __str__(self):
"""""
Return string representation of self
"""
me = "<CtbAction: type=%s, msg=%s, from_user=%s, to_user=%s, to_addr=%s, coin=%s, fiat=%s, coin_val=%s, fiat_val=%s, subr=%s, ctb=%s>"
me = me % (self.type, self.msg.body, self.u_from, self.u_to, self.addr_to, self.coin, self.fiat, self.coinval, self.fiatval, self.subreddit, self.ctb)
return me
def save(self, state=None):
"""
Save action to database
"""
lg.debug("> CtbAction::save(%s)", state)
# Make sure no negative values exist
if self.coinval < 0.0:
self.coinval = 0.0
if self.fiatval < 0.0:
self.fiatval = 0.0
realutc = None
realmsgid = None
        if self.msg:
            realmsgid = self.msg.id
            realutc = self.msg.created_utc
        else:
            realmsgid = self.deleted_msg_id
            realutc = self.deleted_created_utc
conn = self.ctb.db
sql = "REPLACE INTO t_action (type, state, created_utc, from_user, to_user, to_addr, coin_val, fiat_val, txid, coin, fiat, subreddit, msg_id, msg_link)"
sql += " values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
try:
mysqlexec = conn.execute(sql,
(self.type,
state,
realutc,
self.u_from.name.lower(),
self.u_to.name.lower() if self.u_to else None,
self.addr_to,
self.coinval,
self.fiatval,
self.txid,
self.coin,
self.fiat,
self.subreddit,
realmsgid,
self.msg.permalink if hasattr(self.msg, 'permalink') else None))
if mysqlexec.rowcount <= 0:
raise Exception("query didn't affect any rows")
except Exception as e:
lg.error("CtbAction::save(%s): error executing query <%s>: %s", state, sql % (
self.type,
state,
                realutc,
self.u_from.name.lower(),
self.u_to.name.lower() if self.u_to else None,
self.addr_to,
self.coinval,
self.fiatval,
self.txid,
self.coin,
self.fiat,
self.subreddit,
realmsgid,
self.msg.permalink if hasattr(self.msg, 'permalink') else None), e)
raise
lg.debug("< CtbAction::save() DONE")
return True
def do(self):
"""
Call appropriate function depending on action type
"""
lg.debug("> CtbAction::do()")
if not self.ctb.conf.regex.actions[self.type].enabled:
msg = self.ctb.jenv.get_template('command-disabled.tpl').render(a=self, ctb=self.ctb)
lg.info("CtbAction::do(): action %s is disabled", self.type)
ctb_misc.praw_call(self.msg.reply, msg)
return False
if self.type == 'accept':
if self.accept():
self.type = 'info'
return self.info()
else:
return False
if self.type == 'decline':
return self.decline()
if self.type == 'givetip':
result = self.givetip()
ctb_stats.update_user_stats(ctb=self.ctb, username=self.u_from.name)
if self.u_to:
ctb_stats.update_user_stats(ctb=self.ctb, username=self.u_to.name)
return result
if self.type == 'history':
return self.history()
if self.type == 'info':
return self.info()
if self.type == 'register':
if self.register():
self.type = 'info'
return self.info()
else:
return False
if self.type == 'withdraw':
return self.givetip()
if self.type == 'redeem':
return self.redeem()
if self.type == 'rates':
return self.rates()
lg.debug("< CtbAction::do() DONE")
return None
def history(self):
"""
Provide user with transaction history
"""
# Generate history array
history = []
sql_history = self.ctb.conf.db.sql.userhistory.sql
limit = int(self.ctb.conf.db.sql.userhistory.limit)
mysqlexec = self.ctb.db.execute(sql_history, (self.u_from.name.lower(), self.u_from.name.lower(), limit))
for m in mysqlexec:
history_entry = []
for k in mysqlexec.keys():
history_entry.append(ctb_stats.format_value(m, k, self.u_from.name.lower(), self.ctb, compact=True))
history.append(history_entry)
# Send message to user
msg = self.ctb.jenv.get_template('history.tpl').render(history=history, keys=mysqlexec.keys(), limit=limit, a=self, ctb=self.ctb)
lg.debug("CtbAction::history(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
return True
def accept(self):
"""
Accept pending tip
"""
lg.debug("> CtbAction::accept()")
# Register as new user if necessary
if not self.u_from.is_registered():
if not self.u_from.register():
lg.warning("CtbAction::accept(): self.u_from.register() failed")
self.save('failed')
return False
# Get pending actions
actions = get_actions(atype='givetip', to_user=self.u_from.name, state='pending', ctb=self.ctb)
if actions:
# Accept each action
for a in actions:
a.givetip(is_pending=True)
# Update user stats
ctb_stats.update_user_stats(ctb=a.ctb, username=a.u_from.name)
ctb_stats.update_user_stats(ctb=a.ctb, username=a.u_to.name)
else:
# No pending actions found, reply with error message
msg = self.ctb.jenv.get_template('no-pending-tips.tpl').render(user_from=self.u_from.name, a=self, ctb=self.ctb)
lg.debug("CtbAction::accept(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
# Save action to database
self.save('completed')
lg.debug("< CtbAction::accept() DONE")
return True
def decline(self):
"""
Decline pending tips
"""
lg.debug("> CtbAction::decline()")
actions = get_actions(atype='givetip', to_user=self.u_from.name, state='pending', ctb=self.ctb)
if actions:
for a in actions:
# Move coins back into a.u_from account
lg.info("CtbAction::decline(): moving %s %s from %s to %s", a.coinval, a.coin.upper(), self.ctb.conf.reddit.auth.user, a.u_from.name)
if not self.ctb.coins[a.coin].sendtouser(_userfrom=self.ctb.conf.reddit.auth.user, _userto=a.u_from.name, _amount=a.coinval):
raise Exception("CtbAction::decline(): failed to sendtouser()")
# Save transaction as declined
a.save('declined')
# Update user stats
ctb_stats.update_user_stats(ctb=a.ctb, username=a.u_from.name)
ctb_stats.update_user_stats(ctb=a.ctb, username=a.u_to.name)
# Respond to tip comment
msg = self.ctb.jenv.get_template('confirmation.tpl').render(title='Declined', a=a, ctb=a.ctb, source_link=a.msg.permalink if a.msg else None)
lg.debug("CtbAction::decline(): " + msg)
if self.ctb.conf.reddit.messages.declined:
if not ctb_misc.praw_call(a.msg.reply, msg):
a.u_from.tell(subj="+tip declined", msg=msg)
else:
a.u_from.tell(subj="+tip declined", msg=msg)
# Notify self.u_from
msg = self.ctb.jenv.get_template('pending-tips-declined.tpl').render(user_from=self.u_from.name, ctb=self.ctb)
lg.debug("CtbAction::decline(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
else:
msg = self.ctb.jenv.get_template('no-pending-tips.tpl').render(user_from=self.u_from.name, ctb=self.ctb)
lg.debug("CtbAction::decline(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
# Save action to database
self.save('completed')
lg.debug("< CtbAction::decline() DONE")
return True
def expire(self):
"""
Expire a pending tip
"""
lg.debug("> CtbAction::expire()")
# Move coins back into self.u_from account
lg.info("CtbAction::expire(): moving %s %s from %s to %s", self.coinval, self.coin.upper(), self.ctb.conf.reddit.auth.user, self.u_from.name)
if not self.ctb.coins[self.coin].sendtouser(_userfrom=self.ctb.conf.reddit.auth.user, _userto=self.u_from.name, _amount=self.coinval):
raise Exception("CtbAction::expire(): sendtouser() failed")
# Save transaction as expired
self.save('expired')
# Update user stats
ctb_stats.update_user_stats(ctb=self.ctb, username=self.u_from.name)
ctb_stats.update_user_stats(ctb=self.ctb, username=self.u_to.name)
# Respond to tip comment
msg = self.ctb.jenv.get_template('confirmation.tpl').render(title='Expired', a=self, ctb=self.ctb, source_link=self.msg.permalink if self.msg else None)
lg.debug("CtbAction::expire(): " + msg)
if self.ctb.conf.reddit.messages.expired:
if not ctb_misc.praw_call(self.msg.reply, msg):
self.u_from.tell(subj="+tip expired", msg=msg)
else:
self.u_from.tell(subj="+tip expired", msg=msg)
lg.debug("< CtbAction::expire() DONE")
return True
def validate(self, is_pending=False):
"""
Validate an action
"""
lg.debug("> CtbAction::validate()")
if self.type in ['givetip', 'withdraw']:
# Check if u_from has registered
if not self.u_from.is_registered():
msg = self.ctb.jenv.get_template('not-registered.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): %s", msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
if self.u_to and not self.u_to.is_on_reddit():
msg = self.ctb.jenv.get_template('not-on-reddit.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): %s", msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Verify that coin type is set
if not self.coin:
msg = self.ctb.jenv.get_template('no-coin-balances.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): %s", msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Verify that u_from has coin address
if not self.u_from.get_addr(coin=self.coin):
lg.error("CtbAction::validate(): user %s doesn't have %s address", self.u_from.name, self.coin.upper())
self.save('failed')
raise Exception
# Verify minimum transaction size
txkind = 'givetip' if self.u_to else 'withdraw'
if self.coinval < self.ctb.conf.coins[self.coin].txmin[txkind]:
msg = self.ctb.jenv.get_template('tip-below-minimum.tpl').render(min_value=self.ctb.conf.coins[self.coin].txmin[txkind], a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Verify balance (unless it's a pending transaction being processed, in which case coins have been already moved to pending acct)
if self.u_to and not is_pending:
# Tip to user (requires less confirmations)
balance_avail = self.u_from.get_balance(coin=self.coin, kind='givetip')
if not ( balance_avail > self.coinval or abs(balance_avail - self.coinval) < 0.000001 ):
msg = self.ctb.jenv.get_template('tip-low-balance.tpl').render(balance=balance_avail, action_name='tip', a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
elif self.addr_to:
# Tip/withdrawal to address (requires more confirmations)
balance_avail = self.u_from.get_balance(coin=self.coin, kind='withdraw')
balance_need = self.coinval
# Add mandatory network transaction fee
balance_need += self.ctb.conf.coins[self.coin].txfee
if not ( balance_avail > balance_need or abs(balance_avail - balance_need) < 0.000001 ):
msg = self.ctb.jenv.get_template('tip-low-balance.tpl').render(balance=balance_avail, action_name='withdraw', a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Check if u_to has any pending coin tips from u_from
if self.u_to and not is_pending:
if check_action(atype='givetip', state='pending', to_user=self.u_to.name, from_user=self.u_from.name, coin=self.coin, ctb=self.ctb):
# Send notice to u_from
msg = self.ctb.jenv.get_template('tip-already-pending.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Check if u_to has registered, if applicable
if self.u_to and not self.u_to.is_registered():
# u_to not registered:
# - move tip into pending account
# - save action as 'pending'
# - notify u_to to accept tip
# Move coins into pending account
minconf = self.ctb.coins[self.coin].conf.minconf.givetip
lg.info("CtbAction::validate(): moving %s %s from %s to %s (minconf=%s)...", self.coinval, self.coin.upper(), self.u_from.name, self.ctb.conf.reddit.auth.user, minconf)
if not self.ctb.coins[self.coin].sendtouser(_userfrom=self.u_from.name, _userto=self.ctb.conf.reddit.auth.user, _amount=self.coinval, _minconf=minconf):
raise Exception("CtbAction::validate(): sendtouser() failed")
# Save action as pending
self.save('pending')
# Respond to tip comment
msg = self.ctb.jenv.get_template('confirmation.tpl').render(title='Verified', a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
if self.ctb.conf.reddit.messages.verified:
if not ctb_misc.praw_call(self.msg.reply, msg):
self.u_from.tell(subj="+tip pending +accept", msg=msg)
else:
self.u_from.tell(subj="+tip pending +accept", msg=msg)
# Send notice to u_to
msg = self.ctb.jenv.get_template('tip-incoming.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): %s", msg)
self.u_to.tell(subj="+tip pending", msg=msg)
# Action saved as 'pending', return false to avoid processing it further
return False
# Validate addr_to, if applicable
if self.addr_to:
if not self.ctb.coins[self.coin].validateaddr(_addr=self.addr_to):
msg = self.ctb.jenv.get_template('address-invalid.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::validate(): " + msg)
self.u_from.tell(subj="+tip failed", msg=msg)
self.save('failed')
return False
# Action is valid
lg.debug("< CtbAction::validate() DONE")
return True
def givetip(self, is_pending=False):
"""
Initiate tip
"""
lg.debug("> CtbAction::givetip()")
        if self.msg:
            my_id = self.msg.id
        else:
            my_id = self.deleted_msg_id
            deleted_created_utc = self.deleted_created_utc
# Check if action has been processed
if check_action(atype=self.type, msg_id=my_id, ctb=self.ctb, is_pending=is_pending):
# Found action in database, returning
lg.warning("CtbAction::givetipt(): duplicate action %s (msg.id %s), ignoring", self.type, my_id)
return False
# Validate action
if not self.validate(is_pending=is_pending):
# Couldn't validate action, returning
return False
if self.u_to:
# Process tip to user
res = False
if is_pending:
# This is accept() of pending transaction, so move coins from pending account to receiver
lg.info("CtbAction::givetip(): moving %f %s from %s to %s...", self.coinval, self.coin.upper(), self.ctb.conf.reddit.auth.user, self.u_to.name)
res = self.ctb.coins[self.coin].sendtouser(_userfrom=self.ctb.conf.reddit.auth.user, _userto=self.u_to.name, _amount=self.coinval)
else:
# This is not accept() of pending transaction, so move coins from tipper to receiver
lg.info("CtbAction::givetip(): moving %f %s from %s to %s...", self.coinval, self.coin.upper(), self.u_from.name, self.u_to.name)
res = self.ctb.coins[self.coin].sendtouser(_userfrom=self.u_from.name, _userto=self.u_to.name, _amount=self.coinval)
if not res:
# Transaction failed
self.save('failed')
# Send notice to u_from
msg = self.ctb.jenv.get_template('tip-went-wrong.tpl').render(a=self, ctb=self.ctb)
self.u_from.tell(subj="+tip failed", msg=msg)
raise Exception("CtbAction::givetip(): sendtouser() failed")
# Transaction succeeded
self.save('completed')
# Send confirmation to u_to
msg = self.ctb.jenv.get_template('tip-received.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::givetip(): " + msg)
self.u_to.tell(subj="+tip received", msg=msg)
# Send confirmation to u_from
msg = self.ctb.jenv.get_template('tip-sent.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::givetip(): " + msg)
self.u_from.tell(subj="+tip sent", msg=msg)
# This is not accept() of pending transaction, so post verification comment
if not is_pending:
msg = self.ctb.jenv.get_template('confirmation.tpl').render(title='wow so verify', a=self, ctb=self.ctb)
lg.debug("CtbAction::givetip(): " + msg)
if self.ctb.conf.reddit.messages.verified:
if not ctb_misc.praw_call(self.msg.reply, msg):
self.u_from.tell(subj="+tip succeeded", msg=msg)
else:
self.u_from.tell(subj="+tip succeeded", msg=msg)
lg.debug("< CtbAction::givetip() DONE")
return True
elif self.addr_to:
# Process tip to address
try:
lg.info("CtbAction::givetip(): sending %f %s to %s...", self.coinval, self.coin, self.addr_to)
self.txid = self.ctb.coins[self.coin].sendtoaddr(_userfrom=self.u_from.name, _addrto=self.addr_to, _amount=self.coinval)
except Exception as e:
# Transaction failed
self.save('failed')
lg.error("CtbAction::givetip(): sendtoaddr() failed")
# Send notice to u_from
msg = self.ctb.jenv.get_template('tip-went-wrong.tpl').render(a=self, ctb=self.ctb)
self.u_from.tell(subj="+tip failed", msg=msg)
raise
# Transaction succeeded
self.save('completed')
# Post verification comment
msg = self.ctb.jenv.get_template('confirmation.tpl').render(title='wow so verify', a=self, ctb=self.ctb)
lg.debug("CtbAction::givetip(): " + msg)
if self.ctb.conf.reddit.messages.verified:
if not ctb_misc.praw_call(self.msg.reply, msg):
self.u_from.tell(subj="+tip succeeded", msg=msg)
else:
self.u_from.tell(subj="+tip succeeded", msg=msg)
lg.debug("< CtbAction::givetip() DONE")
return True
lg.debug("< CtbAction::givetip() DONE")
return None
def info(self):
"""
Send user info about account
"""
lg.debug("> CtbAction::info()")
# Check if user exists
if not self.u_from.is_registered():
msg = self.ctb.jenv.get_template('not-registered.tpl').render(a=self, ctb=self.ctb)
self.u_from.tell(subj="+info failed", msg=msg)
return False
# Info array to pass to template
info = []
# Get coin balances
for c in sorted(self.ctb.coins):
coininfo = ctb_misc.DotDict({})
coininfo.coin = c
try:
# Get tip balance
coininfo.balance = self.ctb.coins[c].getbalance(_user=self.u_from.name, _minconf=self.ctb.conf.coins[c].minconf.givetip)
info.append(coininfo)
except Exception as e:
lg.error("CtbAction::info(%s): error retrieving %s coininfo: %s", self.u_from.name, c, e)
raise
# Get fiat balances
fiat_total = 0.0
for i in info:
i.fiat_symbol = self.ctb.conf.fiat.usd.symbol
if self.ctb.coin_value(self.ctb.conf.coins[i.coin].unit, 'usd') > 0.0:
i.fiat_balance = i.balance * self.ctb.coin_value(self.ctb.conf.coins[i.coin].unit, 'usd')
fiat_total += i.fiat_balance
# Get coin addresses from MySQL
for i in info:
sql = "SELECT address FROM t_addrs WHERE username = '%s' AND coin = '%s'" % (self.u_from.name.lower(), i.coin)
mysqlrow = self.ctb.db.execute(sql).fetchone()
if not mysqlrow:
raise Exception("CtbAction::info(%s): no result from <%s>" % (self.u_from.name, sql))
i.address = mysqlrow['address']
# Format and send message
msg = self.ctb.jenv.get_template('info.tpl').render(info=info, fiat_symbol=self.ctb.conf.fiat.usd.symbol, fiat_total=fiat_total, a=self, ctb=self.ctb)
ctb_misc.praw_call(self.msg.reply, msg)
# Save action to database
self.save('completed')
lg.debug("< CtbAction::info() DONE")
return True
def register(self):
"""
Register a new user
"""
lg.debug("> CtbAction::register()")
# If user exists, do nothing
if self.u_from.is_registered():
lg.debug("CtbAction::register(%s): user already exists; ignoring request", self.u_from.name)
self.save('failed')
return True
result = self.u_from.register()
# Save action to database
self.save('completed')
lg.debug("< CtbAction::register() DONE")
return result
def redeem(self):
"""
Redeem karma for coins
"""
lg.debug("> CtbAction::redeem()")
# Check if user is registered
if not self.u_from.is_registered():
msg = self.ctb.jenv.get_template('not-registered.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
# Check if this user has redeemed karma in the past
has_redeemed = False
if self.ctb.conf.reddit.redeem.multicoin:
# Check if self.coin has been redeemed
has_redeemed = check_action(atype='redeem', from_user=self.u_from.name, state='completed', coin=self.coin, ctb=self.ctb)
else:
# Check if any coin has been redeemed
has_redeemed = check_action(atype='redeem', from_user=self.u_from.name, state='completed', ctb=self.ctb)
if has_redeemed:
msg = self.ctb.jenv.get_template('redeem-already-done.tpl').render(coin=self.ctb.conf.coins[self.coin].name if self.ctb.conf.reddit.redeem.multicoin else None, a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
# Check if this user has > minimum karma
user_karma = int(self.u_from.prawobj.link_karma) + int(self.u_from.prawobj.comment_karma)
if user_karma < self.ctb.conf.reddit.redeem.min_karma:
msg = self.ctb.jenv.get_template('redeem-low-karma.tpl').render(user_karma=user_karma, a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
# Determine amount
self.fiat = self.ctb.conf.reddit.redeem.unit
self.coinval, self.fiatval = self.u_from.get_redeem_amount(coin=self.coin, fiat=self.fiat)
# Check if coinval and fiatval are valid
if not self.coinval or not self.fiatval or not self.coinval > 0.0 or not self.fiatval > 0.0:
msg = self.ctb.jenv.get_template('redeem-cant-compute.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
# Check if redeem account has enough balance
funds = self.ctb.coins[self.coin].getbalance(_user=self.ctb.conf.reddit.redeem.account, _minconf=1)
if self.coinval > funds or abs(self.coinval - funds) < 0.000001:
# Reply with 'not enough funds' message
msg = self.ctb.jenv.get_template('redeem-low-funds.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
# Transfer coins
if self.ctb.coins[self.coin].sendtouser(_userfrom=self.ctb.conf.reddit.redeem.account, _userto=self.u_from.name, _amount=self.coinval, _minconf=1):
# Success, send confirmation
msg = self.ctb.jenv.get_template('redeem-confirmation.tpl').render(a=self, ctb=self.ctb)
lg.debug("CtbAction::redeem(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('completed')
return True
else:
raise Exception("CtbAction::redeem(): sendtouser failed")
def rates(self, fiat='usd'):
"""
Send info on coin exchange rates
"""
lg.debug("> CtbAction::rates()")
coins = []
exchanges = []
rates = {}
# Get exchange rates
for coin in self.ctb.coins:
coins.append(coin)
rates[coin] = {'average': {}}
rates[coin]['average']['btc'] = self.ctb.runtime['ev'][coin]['btc']
rates[coin]['average'][fiat] = self.ctb.runtime['ev'][coin]['btc'] * self.ctb.runtime['ev']['btc'][fiat]
for exchange in self.ctb.exchanges:
try:
rates[coin][exchange] = {}
if self.ctb.exchanges[exchange].supports_pair(_name1=coin, _name2='btc'):
rates[coin][exchange]['btc'] = self.ctb.exchanges[exchange].get_ticker_value(_name1=coin, _name2='btc')
if coin == 'btc' and self.ctb.exchanges[exchange].supports_pair(_name1='btc', _name2=fiat):
# Use exchange value to calculate btc's fiat value
rates[coin][exchange][fiat] = rates[coin][exchange]['btc'] * self.ctb.exchanges[exchange].get_ticker_value(_name1='btc', _name2=fiat)
else:
# Use average value to calculate coin's fiat value
rates[coin][exchange][fiat] = rates[coin][exchange]['btc'] * self.ctb.runtime['ev']['btc'][fiat]
else:
rates[coin][exchange]['btc'] = None
rates[coin][exchange][fiat] = None
except TypeError as e:
msg = self.ctb.jenv.get_template('rates-error.tpl').render(exchange=exchange, a=self, ctb=self.ctb)
lg.debug("CtbAction::rates(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('failed')
return False
for exchange in self.ctb.exchanges:
exchanges.append(exchange)
lg.debug("CtbAction::rates(): %s", rates)
# Send message
msg = self.ctb.jenv.get_template('rates.tpl').render(coins=sorted(coins), exchanges=sorted(exchanges), rates=rates, fiat=fiat, a=self, ctb=self.ctb)
lg.debug("CtbAction::rates(): %s", msg)
ctb_misc.praw_call(self.msg.reply, msg)
self.save('completed')
return True
def init_regex(ctb):
"""
Initialize regular expressions used to match messages and comments
"""
lg.debug("> init_regex()")
cc = ctb.conf.coins
fiat = ctb.conf.fiat
actions = ctb.conf.regex.actions
ctb.runtime['regex'] = []
for a in vars(actions):
if actions[a].simple:
# Add simple message actions (info, register, accept, decline, history, rates)
entry = ctb_misc.DotDict(
{'regex': actions[a].regex,
'action': a,
'rg_amount': 0,
'rg_keyword': 0,
'rg_address': 0,
'rg_to_user': 0,
'coin': None,
'fiat': None,
'keyword': None
})
lg.debug("init_regex(): ADDED %s: %s", entry.action, entry.regex)
ctb.runtime['regex'].append(entry)
else:
# Add non-simple actions (givetip, redeem, withdraw)
for r in sorted(vars(actions[a].regex)):
lg.debug("init_regex(): processing regex %s", actions[a].regex[r].value)
rval1 = actions[a].regex[r].value
rval1 = rval1.replace('{REGEX_TIP_INIT}', ctb.conf.regex.values.tip_init.regex)
rval1 = rval1.replace('{REGEX_USER}', ctb.conf.regex.values.username.regex)
rval1 = rval1.replace('{REGEX_AMOUNT}', ctb.conf.regex.values.amount.regex)
rval1 = rval1.replace('{REGEX_KEYWORD}', ctb.conf.regex.values.keywords.regex)
if actions[a].regex[r].rg_coin > 0:
for c in sorted(vars(cc)):
if not cc[c].enabled:
continue
# lg.debug("init_regex(): processing coin %s", c)
rval2 = rval1.replace('{REGEX_COIN}', cc[c].regex.units)
rval2 = rval2.replace('{REGEX_ADDRESS}', cc[c].regex.address)
if actions[a].regex[r].rg_fiat > 0:
for f in sorted(vars(fiat)):
if not fiat[f].enabled:
continue
# lg.debug("init_regex(): processing fiat %s", f)
rval3 = rval2.replace('{REGEX_FIAT}', fiat[f].regex.units)
entry = ctb_misc.DotDict(
{'regex': rval3,
'action': a,
'rg_amount': actions[a].regex[r].rg_amount,
'rg_keyword': actions[a].regex[r].rg_keyword,
'rg_address': actions[a].regex[r].rg_address,
'rg_to_user': actions[a].regex[r].rg_to_user,
'coin': cc[c].unit,
'fiat': fiat[f].unit
})
lg.debug("init_regex(): ADDED %s: %s", entry.action, entry.regex)
ctb.runtime['regex'].append(entry)
else:
entry = ctb_misc.DotDict(
{'regex': rval2,
'action': a,
'rg_amount': actions[a].regex[r].rg_amount,
'rg_keyword': actions[a].regex[r].rg_keyword,
'rg_address': actions[a].regex[r].rg_address,
'rg_to_user': actions[a].regex[r].rg_to_user,
'coin': cc[c].unit,
'fiat': None
})
lg.debug("init_regex(): ADDED %s: %s", entry.action, entry.regex)
ctb.runtime['regex'].append(entry)
elif actions[a].regex[r].rg_fiat > 0:
for f in sorted(vars(fiat)):
if not fiat[f].enabled:
continue
# lg.debug("init_regex(): processing fiat %s", f)
rval2 = rval1.replace('{REGEX_FIAT}', fiat[f].regex.units)
entry = ctb_misc.DotDict(
{'regex': rval2,
'action': a,
'rg_amount': actions[a].regex[r].rg_amount,
'rg_keyword': actions[a].regex[r].rg_keyword,
'rg_address': actions[a].regex[r].rg_address,
'rg_to_user': actions[a].regex[r].rg_to_user,
'coin': None,
'fiat': fiat[f].unit
})
lg.debug("init_regex(): ADDED %s: %s", entry.action, entry.regex)
ctb.runtime['regex'].append(entry)
elif actions[a].regex[r].rg_keyword > 0:
entry = ctb_misc.DotDict(
{'regex': rval1,
'action': a,
'rg_amount': actions[a].regex[r].rg_amount,
'rg_keyword': actions[a].regex[r].rg_keyword,
'rg_address': actions[a].regex[r].rg_address,
'rg_to_user': actions[a].regex[r].rg_to_user,
'coin': None,
'fiat': None
})
lg.debug("init_regex(): ADDED %s: %s", entry.action, entry.regex)
ctb.runtime['regex'].append(entry)
lg.info("< init_regex() DONE (%s expressions)", len(ctb.runtime['regex']))
return None
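# Illustrative note (added for clarity; the concrete pattern below is an assumption,
# since the real regexes are built from the bot's configuration): each entry appended
# to ctb.runtime['regex'] is a DotDict shaped roughly like
#   {'regex': r'(\+)(tip)\s+(\S+)\s+([\d\.]+)\s+(doge)', 'action': 'givetip',
#    'rg_amount': 4, 'rg_keyword': 0, 'rg_address': 0, 'rg_to_user': 3,
#    'coin': 'doge', 'fiat': None}
# where the rg_* fields are the regex group numbers consulted by eval_message() and
# eval_comment() below.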
def eval_message(msg, ctb):
"""
Evaluate message body and return a CtbAction
object if successful
"""
lg.debug("> eval_message()")
body = msg.body
#lg.info(vars(msg)) #debug
for r in ctb.runtime['regex']:
# Attempt a match
rg = re.compile(r.regex, re.IGNORECASE|re.DOTALL)
#lg.debug("matching '%s' with '%s'", msg.body, r.regex)
m = rg.search(body)
if m:
# Match found
lg.debug("eval_message(): match found")
# Extract matched fields into variables
to_addr = m.group(r.rg_address) if r.rg_address > 0 else None
amount = m.group(r.rg_amount) if r.rg_amount > 0 else None
keyword = m.group(r.rg_keyword) if r.rg_keyword > 0 else None
if ((to_addr == None) and (r.action == 'givetip')):
lg.debug("eval_message(): can't tip with no to_addr")
return None
# Return CtbAction instance with given variables
return CtbAction( atype=r.action,
msg=msg,
from_user=msg.author,
to_user=None,
to_addr=to_addr,
coin=r.coin,
coin_val=amount if not r.fiat else None,
fiat=r.fiat,
fiat_val=amount if r.fiat else None,
keyword=keyword,
ctb=ctb)
# No match found
lg.debug("eval_message(): no match found")
return None
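# Example (illustrative only; the exact command syntax depends on the regex
# configuration): a private message whose body looks like
#   "+withdraw <coin_address> 100 doge"
# would be expected to match a 'withdraw' entry and produce a CtbAction with
# to_addr, coin_val and coin extracted from the matched groups.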
def eval_comment(comment, ctb):
"""
Evaluate comment body and return a CtbAction object if successful
"""
lg.debug("> eval_comment()")
body = comment.body
for r in ctb.runtime['regex']:
# Skip non-public actions
if not ctb.conf.regex.actions[r.action].public:
continue
# Attempt a match
rg = re.compile(r.regex, re.IGNORECASE|re.DOTALL)
#lg.debug("eval_comment(): matching '%s' with <%s>", comment.body, r.regex)
m = rg.search(body)
if m:
# Match found
lg.debug("eval_comment(): match found")
# Extract matched fields into variables
u_to = m.group(r.rg_to_user)[1:] if r.rg_to_user > 0 else None
to_addr = m.group(r.rg_address) if r.rg_address > 0 else None
amount = m.group(r.rg_amount) if r.rg_amount > 0 else None
keyword = m.group(r.rg_keyword) if r.rg_keyword > 0 else None
# Check if subreddit is promos
if comment.subreddit == 'promos':
return None
# If no destination mentioned, find parent submission's author
if not u_to and not to_addr:
# set u_to to author of parent comment
u_to = ctb_misc.reddit_get_parent_author(comment, ctb.reddit, ctb)
if not u_to:
# couldn't determine u_to, giving up
return None
# Check if from_user == to_user
if u_to and comment.author.name.lower() == u_to.lower():
lg.warning("eval_comment(): comment.author.name == u_to, ignoring comment", comment.author.name)
return None
# Return CtbAction instance with given variables
lg.debug("eval_comment(): creating action %s: to_user=%s, to_addr=%s, amount=%s, coin=%s, fiat=%s" % (r.action, u_to, to_addr, amount, r.coin, r.fiat))
#lg.debug("< eval_comment() DONE (yes)")
return CtbAction( atype=r.action,
msg=comment,
to_user=u_to,
to_addr=to_addr,
coin=r.coin,
coin_val=amount if not r.fiat else None,
fiat=r.fiat,
fiat_val=amount if r.fiat else None,
keyword=keyword,
subr=comment.subreddit,
ctb=ctb)
# No match found
lg.debug("< eval_comment() DONE (no match)")
return None
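# Example (illustrative only; the exact syntax is configuration-dependent): a public
# comment such as "+/u/some_redditor 100 doge" would yield a 'givetip' CtbAction with
# to_user, amount and coin taken from the matched groups, while a bare "+tip 100 doge"
# reply falls back to the parent comment's author via reddit_get_parent_author().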
def check_action(atype=None, state=None, coin=None, msg_id=None, created_utc=None, from_user=None, to_user=None, subr=None, ctb=None, is_pending=False):
"""
Return True if action with given attributes exists in database
"""
lg.debug("> check_action(%s)", atype)
# Build SQL query
sql = "SELECT * FROM t_action"
sql_terms = []
if atype or state or coin or msg_id or created_utc or from_user or to_user or subr or is_pending:
sql += " WHERE "
if atype:
sql_terms.append("type = '%s'" % atype)
if state:
sql_terms.append("state = '%s'" % state)
if coin:
sql_terms.append("coin = '%s'" % coin)
if msg_id:
sql_terms.append("msg_id = '%s'" % msg_id)
if created_utc:
sql_terms.append("created_utc = %s" % created_utc)
if from_user:
sql_terms.append("from_user = '%s'" % from_user.lower())
if to_user:
sql_terms.append("to_user = '%s'" % to_user.lower())
if subr:
sql_terms.append("subreddit = '%s'" % subr)
if is_pending:
sql_terms.append("state <> 'pending'")
sql += ' AND '.join(sql_terms)
try:
lg.debug("check_action(): <%s>", sql)
mysqlexec = ctb.db.execute(sql)
if mysqlexec.rowcount <= 0:
lg.debug("< check_action() DONE (no)")
return False
else:
lg.debug("< check_action() DONE (yes)")
return True
except Exception as e:
lg.error("check_action(): error executing <%s>: %s", sql, e)
raise
lg.warning("< check_action() DONE (should not get here)")
return None
def get_actions(atype=None, state=None, deleted_msg_id=None, deleted_created_utc=None, coin=None, msg_id=None, created_utc=None, from_user=None, to_user=None, subr=None, ctb=None):
"""
Return an array of CtbAction objects from database with given attributes
"""
lg.debug("> get_actions(%s)", atype)
# Build SQL query
sql = "SELECT * FROM t_action"
sql_terms = []
if atype or state or coin or msg_id or created_utc or from_user or to_user or subr:
sql += " WHERE "
if atype:
sql_terms.append("type = '%s'" % atype)
if state:
sql_terms.append("state = '%s'" % state)
if coin:
sql_terms.append("coin = '%s'" % coin)
if msg_id:
sql_terms.append("msg_id = '%s'" % msg_id)
if created_utc:
sql_terms.append("created_utc %s" % created_utc)
if from_user:
sql_terms.append("from_user = '%s'" % from_user.lower())
if to_user:
sql_terms.append("to_user = '%s'" % to_user.lower())
if subr:
sql_terms.append("subreddit = '%s'" % subr)
sql += ' AND '.join(sql_terms)
#throttle ALL THE THINGS!
# if created_utc:
# sql += ' LIMIT 100'
while True:
try:
r = []
lg.debug("get_actions(): <%s>", sql)
mysqlexec = ctb.db.execute(sql)
if mysqlexec.rowcount <= 0:
lg.debug("< get_actions() DONE (no)")
return r
for m in mysqlexec:
lg.debug("get_actions(): found %s", m['msg_link'])
# Get PRAW message (msg) and author (msg.author) objects
submission = ctb_misc.praw_call(ctb.reddit.get_submission, m['msg_link'])
msg = None
if not submission:
lg.warning("get_actions(): submission not found for %s . msgid %s", m['msg_link'], m['msg_id'])
else:
if not len(submission.comments) > 0:
lg.warning("get_actions(): could not fetch msg (deleted?) from msg_link %s", m['msg_link'])
lg.warning("get_actions(): setting deleted_msg_id %s", m['msg_id'])
deleted_msg_id=m['msg_id']
deleted_created_utc=m['created_utc']
else:
msg = submission.comments[0]
if not msg.author:
lg.warning("get_actions(): could not fetch msg.author (deleted?) from msg_link %s", m['msg_link'])
lg.warning("get_actions(): setting msg.author to original tipper %s", m['from_user'])
r.append( CtbAction( atype=atype,
msg=msg,
deleted_msg_id=deleted_msg_id,
deleted_created_utc=deleted_created_utc,
from_user=m['from_user'],
to_user=m['to_user'],
to_addr=m['to_addr'] if not m['to_user'] else None,
coin=m['coin'],
fiat=m['fiat'],
coin_val=float(m['coin_val']) if m['coin_val'] else None,
fiat_val=float(m['fiat_val']) if m['fiat_val'] else None,
subr=m['subreddit'],
ctb=ctb))
lg.debug("< get_actions() DONE (yes)")
return r
except Exception as e:
lg.error("get_actions(): error executing <%s>: %s", sql, e)
raise
lg.warning("< get_actions() DONE (should not get here)")
return None
| gpl-2.0 | 9,034,789,442,223,852,000 | 44.144789 | 223 | 0.527693 | false |
Devyani-Divs/twython | setup.py | 1 | 1293 | #!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__author__ = 'Ryan McGrath <[email protected]>'
__version__ = '3.2.0'
packages = [
'twython',
'twython.streaming'
]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
name='twython',
version=__version__,
install_requires=['requests>=2.1.0', 'requests_oauthlib>=0.4.0'],
author='Ryan McGrath',
author_email='[email protected]',
license=open('LICENSE').read(),
url='https://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython stream',
description='Actively maintained, pure Python wrapper for the \
Twitter API. Supports both normal and streaming Twitter APIs',
long_description=open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| mit | 866,624,229,310,748,300 | 27.108696 | 71 | 0.641145 | false |
Fat-Zer/FreeCAD_sf_master | src/Mod/Fem/femtest/app/support_utils.py | 12 | 13251 | # ***************************************************************************
# * Copyright (c) 2018 Bernd Hahnebach <[email protected]> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "Tools for FEM unit tests"
__author__ = "Bernd Hahnebach"
__url__ = "https://www.freecadweb.org"
import os
import sys
import tempfile
import unittest
import FreeCAD
from os.path import join
def get_fem_test_home_dir(
):
return join(FreeCAD.getHomePath(), "Mod", "Fem", "femtest", "data")
def get_fem_test_tmp_dir(
dirname=None
):
from uuid import uuid4
_unique_id = str(uuid4())[-12:]
# print(_unique_id)
if dirname is None:
temp_dir = join(tempfile.gettempdir(), "FEM_unittests", _unique_id)
else:
temp_dir = join(tempfile.gettempdir(), "FEM_unittests", dirname + "_" + _unique_id)
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
    return temp_dir
def get_unit_test_tmp_dir(
temp_dir,
unittestdir
):
testdir = join(temp_dir, unittestdir)
if not os.path.exists(testdir):
os.makedirs(testdir)
return testdir
def fcc_print(
message
):
FreeCAD.Console.PrintMessage("{} \n".format(message))
def get_namefromdef(strdel="", stradd=""):
# https://code.activestate.com/recipes/66062-determining-current-function-name/
return (sys._getframe(1).f_code.co_name).replace(strdel, stradd)
def get_defmake_count(
fem_vtk_post=True
):
"""
count the def make in module ObjectsFem
could also be done in bash with
grep -c "def make" src/Mod/Fem/ObjectsFem.py
"""
name_modfile = join(FreeCAD.getHomePath(), "Mod", "Fem", "ObjectsFem.py")
modfile = open(name_modfile, "r")
lines_modefile = modfile.readlines()
modfile.close()
lines_defmake = [l for l in lines_modefile if l.startswith("def make")]
if not fem_vtk_post:
# FEM VTK post processing is disabled
# we are not able to create VTK post objects
new_lines = []
for l in lines_defmake:
if "PostVtk" not in l:
new_lines.append(l)
lines_defmake = new_lines
return len(lines_defmake)
def get_fem_test_defs(
):
test_path = join(FreeCAD.getHomePath(), "Mod", "Fem", "femtest", "app")
print("Modules, classes, methods taken from: {}".format(test_path))
collected_test_module_paths = []
for tfile in sorted(os.listdir(test_path)):
if tfile.startswith("test") and tfile.endswith(".py"):
collected_test_module_paths.append(join(test_path, tfile))
collected_test_modules = []
collected_test_classes = []
collected_test_methods = []
for f in collected_test_module_paths:
module_name = os.path.splitext(os.path.basename(f))[0]
module_path = "femtest.app.{}".format(module_name)
if module_path not in collected_test_modules:
collected_test_modules.append(module_path)
class_name = ""
tfile = open(f, "r")
for ln in tfile:
ln = ln.lstrip()
ln = ln.rstrip()
if ln.startswith("class "):
ln = ln.lstrip("class ")
ln = ln.split("(")[0]
class_name = ln
class_path = "femtest.app.{}.{}".format(module_name, class_name)
if class_path not in collected_test_classes:
collected_test_classes.append(class_path)
if ln.startswith("def test"):
ln = ln.lstrip("def ")
ln = ln.split("(")[0]
if ln == "test_00print":
continue
method_path = "femtest.app.{}.{}.{}".format(module_name, class_name, ln)
collected_test_methods.append(method_path)
tfile.close()
# write to file
file_path = join(tempfile.gettempdir(), "test_commands.sh")
cf = open(file_path, "w")
cf.write("# created by Python\n")
cf.write("'''\n")
cf.write("from femtest.app.support_utils import get_fem_test_defs\n")
cf.write("get_fem_test_defs()\n")
cf.write("\n")
cf.write("\n")
cf.write("'''\n")
cf.write("\n")
cf.write("# modules\n")
for m in collected_test_modules:
cf.write("make -j 4 && ./bin/FreeCADCmd -t {}\n".format(m))
cf.write("\n")
cf.write("\n")
cf.write("# classes\n")
for m in collected_test_classes:
cf.write("make -j 4 && ./bin/FreeCADCmd -t {}\n".format(m))
cf.write("\n")
cf.write("\n")
cf.write("# methods\n")
for m in collected_test_methods:
cf.write("make -j 4 && ./bin/FreeCADCmd -t {}\n".format(m))
cf.write("\n")
cf.write("\n")
cf.write("# methods in FreeCAD\n")
for m in collected_test_methods:
cf.write(
"\nimport unittest\n"
"unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(\n"
" '{}'\n"
"))\n"
.format(m)
)
cf.close()
print("The file was saved in:{}".format(file_path))
def compare_inp_files(
file_name1,
file_name2
):
file1 = open(file_name1, "r")
f1 = file1.readlines()
file1.close()
# l.startswith("17671.0,1") is a temporary workaround
# for python3 problem with 1DFlow input
# TODO as soon as the 1DFlow result reading is fixed
# this should be triggered in the 1DFlow unit test
lf1 = [l for l in f1 if not (
l.startswith("** written ") or l.startswith("** file ") or l.startswith("17671.0,1")
)]
lf1 = force_unix_line_ends(lf1)
file2 = open(file_name2, "r")
f2 = file2.readlines()
file2.close()
# TODO see comment on file1
lf2 = [l for l in f2 if not (
l.startswith("** written ") or l.startswith("** file ") or l.startswith("17671.0,1")
)]
lf2 = force_unix_line_ends(lf2)
import difflib
diff = difflib.unified_diff(lf1, lf2, n=0)
result = ""
for l in diff:
result += l
if result:
result = (
"Comparing {} to {} failed!\n"
.format(file_name1, file_name2) + result
)
return result
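# Typical use in a unit test (illustrative sketch; the reference file name here is an
# assumption, not taken from this module):
#
#   ret = compare_inp_files(fea.inp_file_name, join(get_fem_test_home_dir(), "cube_static.inp"))
#   self.assertFalse(ret, ret)  # compare_inp_files returns an empty string when the files match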
def compare_files(
file_name1,
file_name2
):
file1 = open(file_name1, "r")
f1 = file1.readlines()
file1.close()
# workaround to compare geos of elmer test and temporary file path
# (not only names change, path changes with operating system)
lf1 = [l for l in f1 if not (
l.startswith('Merge "') or l.startswith('Save "') or l.startswith("// ")
)]
lf1 = force_unix_line_ends(lf1)
file2 = open(file_name2, "r")
f2 = file2.readlines()
file2.close()
lf2 = [l for l in f2 if not (
l.startswith('Merge "') or l.startswith('Save "') or l.startswith("// ")
)]
lf2 = force_unix_line_ends(lf2)
import difflib
diff = difflib.unified_diff(lf1, lf2, n=0)
result = ""
for l in diff:
result += l
if result:
result = "Comparing {} to {} failed!\n".format(file_name1, file_name2) + result
return result
def compare_stats(
fea,
stat_file,
res_obj_name,
loc_stat_types=None
):
import femresult.resulttools as resulttools
# get the stat types which should be compared
stat_types = [
"U1",
"U2",
"U3",
"Uabs",
"Sabs",
"MaxPrin",
"MidPrin",
"MinPrin",
"MaxShear",
"Peeq",
"Temp",
"MFlow",
"NPress"
]
if not loc_stat_types:
loc_stat_types = stat_types
# get stats from result obj which should be compared
obj = fea.analysis.Document.getObject(res_obj_name)
# fcc_print(obj)
if obj:
# fcc_print(obj.Name)
stats = []
for s in loc_stat_types:
statval = resulttools.get_stats(obj, s)
stats.append(
"{}: ({:.10f}, {:.10f})\n"
.format(s, statval[0], statval[1])
)
else:
fcc_print("Result object not found. Name: {}".format(res_obj_name))
return True
# get stats to compare with, the expected ones
sf = open(stat_file, "r")
sf_content = []
for l in sf.readlines():
for st in loc_stat_types:
if l.startswith(st):
sf_content.append(l)
sf.close()
sf_content = force_unix_line_ends(sf_content)
if sf_content == []:
return True
# compare stats
if stats != sf_content:
fcc_print("Stats read from {}.frd file".format(fea.base_name))
fcc_print("!=")
fcc_print("Expected stats from {}".format(stat_file))
for i in range(len(stats)):
if stats[i] != sf_content[i]:
fcc_print("{} != {}".format(stats[i].rstrip(), sf_content[i].rstrip()))
return True
return False
def force_unix_line_ends(
line_list
):
new_line_list = []
for ln in line_list:
if ln.endswith("\r\n"):
ln = ln[:-2] + "\n"
new_line_list.append(ln)
return new_line_list
def collect_python_modules(
femsubdir=None
):
if not femsubdir:
pydir = join(FreeCAD.ConfigGet("AppHomePath"), "Mod", "Fem")
else:
pydir = join(FreeCAD.ConfigGet("AppHomePath"), "Mod", "Fem", femsubdir)
collected_modules = []
fcc_print(pydir)
for pyfile in sorted(os.listdir(pydir)):
if pyfile.endswith(".py") and not pyfile.startswith("Init"):
if not femsubdir:
collected_modules.append(
os.path.splitext(os.path.basename(pyfile))[0]
)
else:
collected_modules.append(
femsubdir.replace("/", ".") + "." + os.path.splitext(
os.path.basename(pyfile)
)[0]
)
return collected_modules
def all_test_files(
):
# open all files
cube_frequency()
cube_static()
Flow1D_thermomech()
multimat()
spine_thermomech()
# run the specific test case of the file
# open the file in FreeCAD GUI and return the doc identifier
def cube_frequency(
):
testname = "femtest.testccxtools.TestCcxTools.test_3_freq_analysis"
unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(testname))
doc = FreeCAD.open(join(
get_fem_test_tmp_dir(),
"FEM_ccx_frequency",
"cube_frequency.FCStd")
)
return doc
def cube_static(
):
testname = "femtest.testccxtools.TestCcxTools.test_1_static_analysis"
unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(testname))
doc = FreeCAD.open(
join(get_fem_test_tmp_dir(),
"FEM_ccx_static",
"cube_static.FCStd")
)
return doc
def Flow1D_thermomech(
):
testname = "femtest.testccxtools.TestCcxTools.test_5_Flow1D_thermomech_analysis"
unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(testname))
doc = FreeCAD.open(join(
get_fem_test_tmp_dir(),
"FEM_ccx_Flow1D_thermomech",
"Flow1D_thermomech.FCStd")
)
return doc
def multimat(
):
testname = "femtest.testccxtools.TestCcxTools.test_2_static_multiple_material"
unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(testname))
doc = FreeCAD.open(join(
get_fem_test_tmp_dir(),
"FEM_ccx_multimat",
"multimat.FCStd")
)
return doc
def spine_thermomech(
):
testname = "femtest.testccxtools.TestCcxTools.test_4_thermomech_analysis"
unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromName(testname))
doc = FreeCAD.open(join(
get_fem_test_tmp_dir(),
"FEM_ccx_thermomech",
"spine_thermomech.FCStd")
)
return doc
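# Illustrative use from the FreeCAD Python console (a sketch; the module name under
# which these helpers are importable is not shown in this file, so adjust the import
# to your setup):
#   doc = cube_static()     # runs the static CalculiX test case and opens its result file
#   all_test_files()        # runs every test case listed above and opens each file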
| lgpl-2.1 | 7,238,724,692,584,307,000 | 30.55 | 96 | 0.556713 | false |
prophile/django_split | tests/test_overlapping.py | 1 | 1674 | import pytest
from django_split.utils import overlapping
class OverlappingTests(object):
def test_identity(self):
assert overlapping(
(10, 30),
(10, 30),
)
def test_contains(self):
assert overlapping(
(10, 30),
(0, 40),
)
def test_contained(self):
assert overlapping(
(0, 40),
(10, 30),
)
def test_left_equal_right_greater(self):
assert overlapping(
(0, 10),
(0, 20),
)
def test_left_equal_right_less(self):
assert overlapping(
(0, 10),
(0, 5),
)
def test_left_less_right_equal(self):
assert overlapping(
(0, 10),
(-5, 10),
)
def test_left_greater_right_equal(self):
assert overlapping(
(0, 10),
(5, 10),
)
def test_entirely_greater(self):
assert not overlapping(
(0, 10),
(20, 30),
)
def test_entirely_less(self):
assert not overlapping(
(0, 10),
(-20, -10),
)
def test_swapped_elements_in_first_argument_raises_valueerror(self):
with pytest.raises(ValueError):
overlapping(
(10, 0),
(0, 10),
)
def test_swapped_elements_in_second_argument_raises_valueerror(self):
with pytest.raises(ValueError):
overlapping(
(0, 10),
(10, 0),
)
def test_equal_arguments_do_not_raise_valueerror(self):
overlapping((0, 0), (10, 10))
| mit | -3,989,410,077,130,463,000 | 21.32 | 73 | 0.469534 | false |
bancek/egradebook | src/lib/debug_toolbar/panels/template.py | 11 | 5162 | from os.path import normpath
from pprint import pformat
from django import http
from django.conf import settings
from django.core.signals import request_started
from django.dispatch import Signal
from django.template.context import get_standard_processors
from django.template.loader import render_to_string
from django.test.signals import template_rendered
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
# Code taken and adapted from Simon Willison and Django Snippets:
# http://www.djangosnippets.org/snippets/766/
# Monkeypatch instrumented test renderer from django.test.utils - we could use
# django.test.utils.setup_test_environment for this but that would also set up
# e-mail interception, which we don't want
from django.test.utils import instrumented_test_render
from django.template import Template
if not hasattr(Template, '_render'): # Django < 1.2
if Template.render != instrumented_test_render:
Template.original_render = Template.render
Template.render = instrumented_test_render
else:
if Template._render != instrumented_test_render:
Template.original_render = Template._render
Template._render = instrumented_test_render
# MONSTER monkey-patch
old_template_init = Template.__init__
def new_template_init(self, template_string, origin=None, name='<Unknown Template>'):
old_template_init(self, template_string, origin, name)
self.origin = origin
Template.__init__ = new_template_init
class TemplateDebugPanel(DebugPanel):
"""
A panel that lists all templates used during processing of a response.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def nav_title(self):
return _('Templates')
def title(self):
num_templates = len([t for t in self.templates
if not t['template'].name.startswith('debug_toolbar/')])
return _('Templates (%(num_templates)s rendered)') % {'num_templates': num_templates}
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context_processors = dict(
[
("%s.%s" % (k.__module__, k.__name__),
pformat(k(self.request))) for k in get_standard_processors()
]
)
template_context = []
for template_data in self.templates:
info = {}
# Clean up some info about templates
template = template_data.get('template', None)
# Skip templates that we are generating through the debug toolbar.
if template.name.startswith('debug_toolbar/'):
continue
if template.origin and template.origin.name:
template.origin_name = template.origin.name
else:
template.origin_name = 'No origin'
info['template'] = template
# Clean up context for better readability
if getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('SHOW_TEMPLATE_CONTEXT', True):
context_data = template_data.get('context', None)
context_list = []
for context_layer in context_data.dicts:
if hasattr(context_layer, 'items'):
for key, value in context_layer.items():
# Replace any request elements - they have a large
# unicode representation and the request data is
# already made available from the Request Vars panel.
if isinstance(value, http.HttpRequest):
context_layer[key] = '<<request>>'
# Replace the debugging sql_queries element. The SQL
# data is already made available from the SQL panel.
elif key == 'sql_queries' and isinstance(value, list):
context_layer[key] = '<<sql_queries>>'
# Replace LANGUAGES, which is available in i18n context processor
elif key == 'LANGUAGES' and isinstance(value, tuple):
context_layer[key] = '<<languages>>'
try:
context_list.append(pformat(context_layer))
except UnicodeEncodeError:
pass
info['context'] = '\n'.join(context_list)
template_context.append(info)
context = self.context.copy()
context.update({
'templates': template_context,
'template_dirs': [normpath(x) for x in settings.TEMPLATE_DIRS],
'context_processors': context_processors,
})
return render_to_string('debug_toolbar/panels/templates.html', context)
| gpl-3.0 | 6,607,130,047,074,682,000 | 41.311475 | 96 | 0.603255 | false |
ESOedX/edx-platform | openedx/core/djangoapps/user_api/accounts/utils.py | 1 | 7905 | """
Utility methods for the account settings.
"""
from __future__ import absolute_import, unicode_literals
import random
import re
import string
import waffle
from completion import waffle as completion_waffle
from completion.models import BlockCompletion
from django.conf import settings
from django.utils.translation import ugettext as _
from six import text_type
from six.moves import range
from six.moves.urllib.parse import urlparse # pylint: disable=import-error
from openedx.core.djangoapps.site_configuration.models import SiteConfiguration
from openedx.core.djangoapps.theming.helpers import get_config_value_from_site_or_settings, get_current_site
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
ENABLE_SECONDARY_EMAIL_FEATURE_SWITCH = 'enable_secondary_email_feature'
def validate_social_link(platform_name, new_social_link):
"""
Given a new social link for a user, ensure that the link takes one of the
following forms:
1) A valid url that comes from the correct social site.
2) A valid username.
3) A blank value.
"""
formatted_social_link = format_social_link(platform_name, new_social_link)
# Ensure that the new link is valid.
if formatted_social_link is None:
required_url_stub = settings.SOCIAL_PLATFORMS[platform_name]['url_stub']
raise ValueError(_('Make sure that you are providing a valid username or a URL that contains "{url_stub}". '
'To remove the link from your edX profile, '
'leave this field blank.').format(url_stub=required_url_stub))
def format_social_link(platform_name, new_social_link):
"""
Given a user's social link, returns a safe absolute url for the social link.
Returns the following based on the provided new_social_link:
    1) Given an empty string, returns ''
    2) Given a valid username, return 'https://www.[platform_name_base][username]'
    3) Given a valid URL, return 'https://www.[platform_name_base][username]'
    4) Given anything unparseable, returns None
"""
# Blank social links should return '' or None as was passed in.
if not new_social_link:
return new_social_link
url_stub = settings.SOCIAL_PLATFORMS[platform_name]['url_stub']
username = _get_username_from_social_link(platform_name, new_social_link)
if not username:
return None
# For security purposes, always build up the url rather than using input from user.
return 'https://www.{}{}'.format(url_stub, username)
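# Illustrative behaviour (a sketch; assumes a SOCIAL_PLATFORMS entry such as
# {'twitter': {'url_stub': 'twitter.com/'}} -- the platform key and url_stub below
# are examples only, not necessarily the configured values):
#   format_social_link('twitter', 'someuser')                     -> 'https://www.twitter.com/someuser'
#   format_social_link('twitter', 'https://twitter.com/someuser') -> 'https://www.twitter.com/someuser'
#   format_social_link('twitter', 'not/a/valid/username')         -> None
#   format_social_link('twitter', '')                             -> ''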
def _get_username_from_social_link(platform_name, new_social_link):
"""
Returns the username given a social link.
Uses the following logic to parse new_social_link into a username:
1) If an empty string, returns it as the username.
2) Given a URL, attempts to parse the username from the url and return it.
3) Given a non-URL, returns the entire string as username if valid.
4) If no valid username is found, returns None.
"""
# Blank social links should return '' or None as was passed in.
if not new_social_link:
return new_social_link
# Parse the social link as if it were a URL.
parse_result = urlparse(new_social_link)
url_domain_and_path = parse_result[1] + parse_result[2]
url_stub = re.escape(settings.SOCIAL_PLATFORMS[platform_name]['url_stub'])
username_match = re.search(r'(www\.)?' + url_stub + r'(?P<username>.*?)[/]?$', url_domain_and_path, re.IGNORECASE)
if username_match:
username = username_match.group('username')
else:
username = new_social_link
# Ensure the username is a valid username.
if not _is_valid_social_username(username):
return None
return username
def _is_valid_social_username(value):
"""
Given a particular string, returns whether the string can be considered a safe username.
This is a very liberal validation step, simply assuring forward slashes do not exist
in the username.
"""
return '/' not in value
def retrieve_last_sitewide_block_completed(user):
"""
Completion utility
From a string 'username' or object User retrieve
the last course block marked as 'completed' and construct a URL
:param user: obj(User)
:return: block_lms_url
"""
if not completion_waffle.waffle().is_enabled(completion_waffle.ENABLE_COMPLETION_TRACKING):
return
latest_completions_by_course = BlockCompletion.latest_blocks_completed_all_courses(user)
known_site_configs = [
other_site_config.get_value('course_org_filter') for other_site_config in SiteConfiguration.objects.all()
if other_site_config.get_value('course_org_filter')
]
current_site_configuration = get_config_value_from_site_or_settings(
name='course_org_filter',
site=get_current_site()
)
# courses.edx.org has no 'course_org_filter'
# however the courses within DO, but those entries are not found in
# known_site_configs, which are White Label sites
# This is necessary because the WL sites and courses.edx.org
# have the same AWS RDS mySQL instance
candidate_course = None
candidate_block_key = None
latest_date = None
# Go through dict, find latest
for course, [modified_date, block_key] in latest_completions_by_course.items():
if not current_site_configuration:
# This is a edx.org
if course.org in known_site_configs:
continue
if not latest_date or modified_date > latest_date:
candidate_course = course
candidate_block_key = block_key
latest_date = modified_date
else:
# This is a White Label site, and we should find candidates from the same site
if course.org not in current_site_configuration:
# Not the same White Label, or a edx.org course
continue
if not latest_date or modified_date > latest_date:
candidate_course = course
candidate_block_key = block_key
latest_date = modified_date
if not candidate_course:
return
lms_root = SiteConfiguration.get_value_for_org(candidate_course.org, "LMS_ROOT_URL", settings.LMS_ROOT_URL)
try:
item = modulestore().get_item(candidate_block_key, depth=1)
except ItemNotFoundError:
item = None
if not (lms_root and item):
return
return u"{lms_root}/courses/{course_key}/jump_to/{location}".format(
lms_root=lms_root,
course_key=text_type(item.location.course_key),
location=text_type(item.location),
)
def generate_password(length=12, chars=string.ascii_letters + string.digits):
"""Generate a valid random password"""
if length < 8:
raise ValueError("password must be at least 8 characters")
choice = random.SystemRandom().choice
password = ''
password += choice(string.digits)
password += choice(string.ascii_letters)
password += ''.join([choice(chars) for _i in range(length - 2)])
return password
def is_secondary_email_feature_enabled():
"""
Checks to see if the django-waffle switch for enabling the secondary email feature is active
Returns:
Boolean value representing switch status
"""
return waffle.switch_is_active(ENABLE_SECONDARY_EMAIL_FEATURE_SWITCH)
def is_secondary_email_feature_enabled_for_user(user):
"""
Checks to see if secondary email feature is enabled for the given user.
Returns:
Boolean value representing the status of secondary email feature.
"""
# import is placed here to avoid cyclic import.
from openedx.features.enterprise_support.utils import is_enterprise_learner
return is_secondary_email_feature_enabled() and is_enterprise_learner(user)
| agpl-3.0 | -1,499,620,048,383,449,000 | 35.597222 | 118 | 0.68425 | false |
noironetworks/heat | heat/tests/openstack/sahara/test_cluster.py | 1 | 9642 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
import six
from heat.common import exception
from heat.common import template_format
from heat.engine.clients.os import glance
from heat.engine.clients.os import neutron
from heat.engine.clients.os import sahara
from heat.engine.resources.openstack.sahara import cluster as sc
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
cluster_stack_template = """
heat_template_version: 2013-05-23
description: Hadoop Cluster by Sahara
resources:
super-cluster:
type: OS::Sahara::Cluster
properties:
name: super-cluster
plugin_name: vanilla
hadoop_version: 2.3.0
cluster_template_id: some_cluster_template_id
default_image_id: some_image
key_name: admin
neutron_management_network: some_network
shares:
- id: some_share_id
access_level: ro
"""
# NOTE(jfreud): the resource name contains an invalid character
cluster_stack_template_without_name = """
heat_template_version: 2013-05-23
description: Hadoop Cluster by Sahara
resources:
lots_of_underscore_name:
type: OS::Sahara::Cluster
properties:
plugin_name: vanilla
hadoop_version: 2.3.0
cluster_template_id: some_cluster_template_id
default_image_id: some_image
key_name: admin
neutron_management_network: some_network
shares:
- id: some_share_id
access_level: ro
"""
class FakeCluster(object):
def __init__(self, status='Active'):
self.status = status
self.id = "some_id"
self.name = "super-cluster"
self.info = {"HDFS": {"NameNode": "hdfs://hostname:port",
"Web UI": "http://host_ip:port"}}
self.to_dict = lambda: {"cluster": "info"}
class SaharaClusterTest(common.HeatTestCase):
def setUp(self):
super(SaharaClusterTest, self).setUp()
self.patchobject(sc.constraints.CustomConstraint, '_is_valid'
).return_value = True
self.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id'
).return_value = 'some_image_id'
self.patchobject(neutron.NeutronClientPlugin, '_create')
self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id',
return_value='some_network_id')
self.sahara_mock = mock.MagicMock()
self.patchobject(sahara.SaharaClientPlugin, '_create'
).return_value = self.sahara_mock
self.patchobject(sahara.SaharaClientPlugin, 'validate_hadoop_version'
).return_value = None
self.cl_mgr = self.sahara_mock.clusters
self.fake_cl = FakeCluster()
self.t = template_format.parse(cluster_stack_template)
self.t2 = template_format.parse(cluster_stack_template_without_name)
def _init_cluster(self, template, name='super-cluster'):
self.stack = utils.parse_stack(template)
cluster = self.stack[name]
return cluster
def _create_cluster(self, template):
cluster = self._init_cluster(template)
self.cl_mgr.create.return_value = self.fake_cl
self.cl_mgr.get.return_value = self.fake_cl
scheduler.TaskRunner(cluster.create)()
self.assertEqual((cluster.CREATE, cluster.COMPLETE),
cluster.state)
self.assertEqual(self.fake_cl.id, cluster.resource_id)
return cluster
def test_cluster_create(self):
self._create_cluster(self.t)
expected_args = ('super-cluster', 'vanilla', '2.3.0')
expected_kwargs = {'cluster_template_id': 'some_cluster_template_id',
'user_keypair_id': 'admin',
'default_image_id': 'some_image_id',
'net_id': 'some_network_id',
'use_autoconfig': None,
'shares': [{'id': 'some_share_id',
'access_level': 'ro',
'path': None}]}
self.cl_mgr.create.assert_called_once_with(*expected_args,
**expected_kwargs)
self.cl_mgr.get.assert_called_once_with(self.fake_cl.id)
def test_cluster_create_invalid_name(self):
cluster = self._init_cluster(self.t2, 'lots_of_underscore_name')
self.cl_mgr.create.return_value = self.fake_cl
self.cl_mgr.get.return_value = self.fake_cl
scheduler.TaskRunner(cluster.create)()
name = self.cl_mgr.create.call_args[0][0]
self.assertIn('lotsofunderscorename', name)
def test_cluster_create_fails(self):
cfg.CONF.set_override('action_retry_limit', 0)
cluster = self._init_cluster(self.t)
self.cl_mgr.create.return_value = self.fake_cl
self.cl_mgr.get.return_value = FakeCluster(status='Error')
create_task = scheduler.TaskRunner(cluster.create)
ex = self.assertRaises(exception.ResourceFailure, create_task)
expected = ('ResourceInError: resources.super-cluster: '
'Went to status Error due to "Unknown"')
self.assertEqual(expected, six.text_type(ex))
def test_cluster_check_delete_complete_error(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.side_effect = [
self.fake_cl,
sahara.sahara_base.APIException()]
self.cl_mgr.get.reset_mock()
delete_task = scheduler.TaskRunner(cluster.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = "APIException: resources.super-cluster: None"
self.assertEqual(expected, six.text_type(ex))
self.cl_mgr.delete.assert_called_once_with(self.fake_cl.id)
self.assertEqual(2, self.cl_mgr.get.call_count)
def test_cluster_delete_cluster_in_error(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.side_effect = [
self.fake_cl,
FakeCluster(status='Error')]
self.cl_mgr.get.reset_mock()
delete_task = scheduler.TaskRunner(cluster.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = ('ResourceInError: resources.super-cluster: '
'Went to status Error due to "Unknown"')
self.assertEqual(expected, six.text_type(ex))
self.cl_mgr.delete.assert_called_once_with(self.fake_cl.id)
self.assertEqual(2, self.cl_mgr.get.call_count)
def test_cluster_resolve_attribute(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.reset_mock()
self.assertEqual(self.fake_cl.info,
cluster._resolve_attribute('info'))
self.assertEqual(self.fake_cl.status,
cluster._resolve_attribute('status'))
self.assertEqual({"cluster": "info"}, cluster.FnGetAtt('show'))
self.assertEqual(3, self.cl_mgr.get.call_count)
def test_cluster_create_no_image_anywhere_fails(self):
self.t['resources']['super-cluster']['properties'].pop(
'default_image_id')
self.sahara_mock.cluster_templates.get.return_value = mock.Mock(
default_image_id=None)
cluster = self._init_cluster(self.t)
ex = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(cluster.create))
self.assertIsInstance(ex.exc, exception.StackValidationFailed)
self.assertIn("default_image_id must be provided: "
"Referenced cluster template some_cluster_template_id "
"has no default_image_id defined.",
six.text_type(ex.message))
def test_cluster_validate_no_network_on_neutron_fails(self):
self.t['resources']['super-cluster']['properties'].pop(
'neutron_management_network')
cluster = self._init_cluster(self.t)
ex = self.assertRaises(exception.StackValidationFailed,
cluster.validate)
error_msg = ('Property error: resources.super-cluster.properties: '
'Property neutron_management_network not assigned')
self.assertEqual(error_msg, six.text_type(ex))
def test_deprecated_properties_correctly_translates(self):
tmpl = '''
heat_template_version: 2013-05-23
description: Hadoop Cluster by Sahara
resources:
super-cluster:
type: OS::Sahara::Cluster
properties:
name: super-cluster
plugin_name: vanilla
hadoop_version: 2.3.0
cluster_template_id: some_cluster_template_id
image: some_image
key_name: admin
neutron_management_network: some_network
'''
ct = self._create_cluster(template_format.parse(tmpl))
self.assertEqual('some_image_id',
ct.properties.get('default_image_id'))
self.assertIsNone(ct.properties.get('image_id'))
| apache-2.0 | 5,846,905,977,214,334,000 | 40.560345 | 77 | 0.627671 | false |
moiseslorap/RIT | Computer Science 1/Miscellaneous/slList.py | 4 | 7414 | """
File: slList.py
Purpose: rit_object-based single-linked list for CS141 LECTURE.
Author: ben k steele <[email protected]>
Author: sean strout <[email protected]>
Language: Python 3
Description: Implementation of a single-linked list data structure.
"""
from slCursor import *
###########################################################
# LINKED LIST CLASS DEFINITION
###########################################################
class SlList( struct ):
"""
SlList class encapsulates a node based linked list.
'head' slot refers to a Node instance.
'size' slot holds the number of nodes in the list.
"""
_slots = ( ((Node, NoneType), 'head'), (int, 'size' ))
def getCursor(self):
return Cursor(self.head)
###########################################################
# LINKED LIST FUNCTIONS
###########################################################
def clear( self ):
"""
Make a list empty.
Parameters:
lst ( SlList ) - the linked list
Returns:
None
"""
self.head = None
self.size = 0
def toString(self):
"""
Converts our linked list into a string form that is similar to Python's
printed list.
Parameters:
lst (SlList) - The linked list
Returns:
A string representation of the list (e.g. '[1,2,3]')
"""
result = '['
curr = self.head
while not curr == None :
if curr.next == None :
result += str(curr.data)
else:
result += str(curr.data) + ', '
curr = curr.next
result += ']'
return result
def append( self, value ):
"""
Add a node containing the value to the end of the list.
Parameters:
lst ( SlList ) - The linked list
value ( any type ) - The data to append to the end of the list
Returns:
None
"""
if self.head == None :
self.head = Node( value, None )
else:
curr = self.head
while curr.next != None :
curr = curr.next
curr.next = Node( value, None )
self.size += 1
def insertAt( self, index, value ):
"""
Insert a new element before the index.
Parameters:
lst ( SlList ) - The list to insert value into
index ( int ) - The 0-based index to insert before
value ( any type ) - The data to be inserted into the list
Preconditions:
0 <= index <= lst.size, raises IndexError exception
Returns:
None
"""
if index < 0 or index > self.size:
raise IndexError( str( index ) + ' is out of range.' )
if index == 0:
self.head = Node( value, self.head )
else:
prev = self.head
while index > 1:
prev = prev.next
index -= 1
prev.next = Node( value, prev.next )
self.size += 1
def get( self, index ):
"""
Returns the element that is at index in the list.
Parameters:
lst ( SlList ) - The list to insert value into
index ( int ) - The 0-based index to get
Preconditions:
0 <= index < lst.size, raises IndexError exception
Returns:
value at the index
"""
if index < 0 or index >= self.size:
raise IndexError( str( index ) + ' is out of range.' )
curr = self.head
while index > 0:
curr = curr.next
index -= 1
return curr.data
def set( self, index, value ):
"""
Sets the element that is at index in the list to the value.
Parameters:
lst ( SlList ) - The list to insert value into
index ( int ) - The 0-based index to set
value ( any type )
Preconditions:
0 <= index < lst.size, raises IndexError exception
Returns:
None
"""
if index < 0 or index >= self.size:
raise IndexError( str( index ) + ' is out of range.' )
curr = self.head
while index > 0:
curr = curr.next
index -= 1
curr.data = value
def pop( self, index ):
"""
pop removes and returns the element at index.
Parameters:
lst ( SlList ) - The list from which to remove
index ( int ) - The 0-based index to remove
Preconditions:
0 <= index < lst.size, raises IndexError exception
Returns:
The value ( any type ) being popped
"""
if index < 0 or index >= self.size:
raise IndexError( str( index ) + ' is out of range.' )
if index == 0:
value = self.head.data
self.head = self.head.next
else:
prev = self.head
while index > 1:
prev = prev.next
index -= 1
value = prev.next.data
prev.next = prev.next.next
self.size -=1
return value
def index( self, value ):
"""
Returns the index of the first occurrence of a value in the list
Parameters:
lst ( SlList ) - The list to insert value into
value ( any type ) - The data being searched for
Preconditions:
value exists in list, otherwise raises ValueError exception
Returns:
The 0-based index of value
"""
pos = 0
curr = self.head
while not curr == None :
if curr.data == value:
return pos
pos += 1
curr = curr.next
raise ValueError( str( value ) + " is not present in the list" )
###########################################################
# LINKED LIST CLASS CONSTRUCTOR
###########################################################
def createList():
"""
Create and return an instance
of an empty node-based, single-linked list.
Parameters:
None
Returns:
An empty list
"""
return SlList( None, 0 )
#FUNCTIONS NEEDED FOR THE HOMEWORK BELOW
def swap(node1, node2):
"""
This function swaps the data pertaining to two separate nodes.
"""
node1.data, node2.data = node2.data, node1.data
def findMinFrom(node):
"""
This function finds the node with the minimum value in the linked list and then returns it.
"""
minNode = node
while node is not None:
if minNode.data > node.data:
minNode = node
node = node.next
return minNode
def linkSort(lst):
"""
This function sorts the linked list with the selection sort algorithm implemented to be used with linked lists.
    This function is then used to sort a linked list input by the user.
"""
currentNode = lst.head
while currentNode is not None:
minNode = findMinFrom(currentNode)
swap(minNode,currentNode)
currentNode = currentNode.next
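# Illustrative usage (a sketch, not part of the original lecture code); it exercises
# the helpers defined above and only runs when the module is executed directly:
if __name__ == "__main__":
    demo = createList()
    for value in [3, 1, 2]:
        demo.append(value)
    linkSort(demo)
    print(demo.toString())   # expected output: [1, 2, 3]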
| mit | -8,660,290,868,300,033,000 | 27.40613 | 115 | 0.4884 | false |
tuxology/bcc | examples/tracing/hello_perf_output_using_ns.py | 2 | 1843 | #!/usr/bin/python
# Carlos Neira <[email protected]>
# This is a Hello World example that uses BPF_PERF_OUTPUT.
# in this example bpf_get_ns_current_pid_tgid(), this helper
# works inside pid namespaces.
# bpf_get_current_pid_tgid() only returns the host pid outside any
# namespace and this will not work when the script is run inside a pid namespace.
from bcc import BPF
from bcc.utils import printb
import sys, os
from stat import *
# define BPF program
prog = """
#include <linux/sched.h>
// define output data structure in C
struct data_t {
u32 pid;
u64 ts;
char comm[TASK_COMM_LEN];
};
BPF_PERF_OUTPUT(events);
int hello(struct pt_regs *ctx) {
struct data_t data = {};
struct bpf_pidns_info ns = {};
if(bpf_get_ns_current_pid_tgid(DEV, INO, &ns, sizeof(struct bpf_pidns_info)))
return 0;
data.pid = ns.pid;
data.ts = bpf_ktime_get_ns();
bpf_get_current_comm(&data.comm, sizeof(data.comm));
events.perf_submit(ctx, &data, sizeof(data));
return 0;
}
"""
devinfo = os.stat("/proc/self/ns/pid")
for r in (("DEV", str(devinfo.st_dev)), ("INO", str(devinfo.st_ino))):
prog = prog.replace(*r)
# load BPF program
b = BPF(text=prog)
b.attach_kprobe(event=b.get_syscall_fnname("clone"), fn_name="hello")
# header
print("%-18s %-16s %-6s %s" % ("TIME(s)", "COMM", "PID", "MESSAGE"))
# process event
start = 0
def print_event(cpu, data, size):
global start
event = b["events"].event(data)
if start == 0:
start = event.ts
time_s = (float(event.ts - start)) / 1000000000
printb(
b"%-18.9f %-16s %-6d %s"
% (time_s, event.comm, event.pid, b"Hello, perf_output!")
)
# loop with callback to print_event
b["events"].open_perf_buffer(print_event)
while 1:
try:
b.perf_buffer_poll()
except KeyboardInterrupt:
exit()
| apache-2.0 | -1,083,691,668,696,982,900 | 23.573333 | 81 | 0.642973 | false |
ulrichard/electrum | lib/daemon.py | 1 | 5794 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ast, os
import jsonrpclib
from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer, SimpleJSONRPCRequestHandler
from util import json_decode, DaemonThread
from wallet import WalletStorage, Wallet
from commands import known_commands, Commands
from simple_config import SimpleConfig
def lockfile(config):
return os.path.join(config.path, 'daemon')
def get_daemon(config):
try:
with open(lockfile(config)) as f:
host, port = ast.literal_eval(f.read())
except:
return
server = jsonrpclib.Server('http://%s:%d' % (host, port))
# check if daemon is running
try:
server.ping()
return server
except:
pass
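# Illustrative use (a sketch): probe for a running daemon from another process and,
# if one answers, ask it for its status over JSON-RPC.
#   config = SimpleConfig()
#   server = get_daemon(config)
#   if server is not None:
#       status = server.daemon({'subcommand': 'status'})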
class RequestHandler(SimpleJSONRPCRequestHandler):
def do_OPTIONS(self):
self.send_response(200)
self.end_headers()
def end_headers(self):
self.send_header("Access-Control-Allow-Headers",
"Origin, X-Requested-With, Content-Type, Accept")
self.send_header("Access-Control-Allow-Origin", "*")
SimpleJSONRPCRequestHandler.end_headers(self)
class Daemon(DaemonThread):
def __init__(self, config, network, gui=None):
DaemonThread.__init__(self)
self.config = config
self.network = network
self.gui = gui
self.wallets = {}
if gui is None:
self.wallet = self.load_wallet(config)
else:
self.wallet = None
self.cmd_runner = Commands(self.config, self.wallet, self.network)
host = config.get('rpchost', 'localhost')
port = config.get('rpcport', 0)
self.server = SimpleJSONRPCServer((host, port), requestHandler=RequestHandler, logRequests=False)
with open(lockfile(config), 'w') as f:
f.write(repr(self.server.socket.getsockname()))
self.server.timeout = 0.1
for cmdname in known_commands:
self.server.register_function(getattr(self.cmd_runner, cmdname), cmdname)
self.server.register_function(self.run_cmdline, 'run_cmdline')
self.server.register_function(self.ping, 'ping')
self.server.register_function(self.run_daemon, 'daemon')
self.server.register_function(self.run_gui, 'gui')
def ping(self):
return True
def run_daemon(self, config):
sub = config.get('subcommand')
assert sub in ['start', 'stop', 'status']
if sub == 'start':
response = "Daemon already running"
elif sub == 'status':
p = self.network.get_parameters()
response = {
'path': self.network.config.path,
'server': p[0],
'blockchain_height': self.network.get_local_height(),
'server_height': self.network.get_server_height(),
'nodes': self.network.get_interfaces(),
'connected': self.network.is_connected(),
'auto_connect': p[4],
'wallets': dict([ (k, w.is_up_to_date()) for k, w in self.wallets.items()]),
}
elif sub == 'stop':
self.stop()
response = "Daemon stopped"
return response
def run_gui(self, config_options):
config = SimpleConfig(config_options)
if self.gui:
if hasattr(self.gui, 'new_window'):
path = config.get_wallet_path()
self.gui.new_window(path, config.get('url'))
response = "ok"
else:
response = "error: current GUI does not support multiple windows"
else:
response = "Error: Electrum is running in daemon mode. Please stop the daemon first."
return response
def load_wallet(self, config):
path = config.get_wallet_path()
if path in self.wallets:
wallet = self.wallets[path]
else:
storage = WalletStorage(path)
wallet = Wallet(storage)
wallet.start_threads(self.network)
self.wallets[path] = wallet
return wallet
def run_cmdline(self, config_options):
password = config_options.get('password')
config = SimpleConfig(config_options)
cmdname = config.get('cmd')
cmd = known_commands[cmdname]
wallet = self.load_wallet(config) if cmd.requires_wallet else None
# arguments passed to function
args = map(lambda x: config.get(x), cmd.params)
# decode json arguments
args = map(json_decode, args)
# options
args += map(lambda x: config.get(x), cmd.options)
cmd_runner = Commands(config, wallet, self.network)
cmd_runner.password = password
func = getattr(cmd_runner, cmd.name)
result = func(*args)
return result
def run(self):
while self.is_running():
self.server.handle_request()
os.unlink(lockfile(self.config))
def stop(self):
for k, wallet in self.wallets.items():
wallet.stop_threads()
DaemonThread.stop(self)
| gpl-3.0 | -4,235,953,728,070,261,000 | 34.546012 | 105 | 0.616155 | false |
nagyistoce/edx-platform | lms/djangoapps/instructor/views/api.py | 11 | 100710 | """
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import StringIO
import json
import logging
import re
import time
import requests
from django.conf import settings
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_POST
from django.views.decorators.cache import cache_control
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.mail.message import EmailMessage
from django.db import IntegrityError
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.utils.html import strip_tags
from django.shortcuts import redirect
import string # pylint: disable=deprecated-module
import random
import unicodecsv
import urllib
import decimal
from student import auth
from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole
from util.file import (
store_uploaded_file, course_and_time_based_filename_generator,
FileValidationException, UniversalNewlineIterator
)
from util.json_request import JsonResponse
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from microsite_configuration import microsite
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from django.contrib.auth.models import User
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
Role,
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_response, render_to_string
from courseware.models import StudentModule
from shoppingcart.models import (
Coupon,
CourseRegistrationCode,
RegistrationCodeRedemption,
Invoice,
CourseMode,
CourseRegistrationCodeInvoiceItem,
)
from student.models import (
CourseEnrollment, unique_id_for_user, anonymous_id_for_user,
UserProfile, Registration, EntranceExamConfiguration,
ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED,
ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED,
UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE
)
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
import instructor.enrollment as enrollment
from instructor.enrollment import (
get_user_email_language,
enroll_email,
send_mail_to_student,
get_email_params,
send_beta_role_email,
unenroll_email,
)
from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role
from instructor.offline_gradecalc import student_grades
import instructor_analytics.basic
import instructor_analytics.distributions
import instructor_analytics.csvs
import csv
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference
from instructor.views import INVOICE_KEY
from submissions import api as sub_api # installed from the edx-submissions repository
from certificates import api as certs_api
from bulk_email.models import CourseEmail
from .tools import (
dump_student_extensions,
dump_module_extensions,
find_unit,
get_student_from_identifier,
require_student_from_identifier,
handle_dashboard_error,
parse_datetime,
set_due_date_extension,
strip_if_string,
bulk_email_is_enabled_for_course,
add_block_ids,
)
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
log = logging.getLogger(__name__)
def common_exceptions_400(func):
"""
Catches common exceptions and renders matching 400 errors.
(decorator without arguments)
"""
def wrapped(request, *args, **kwargs): # pylint: disable=missing-docstring
use_json = (request.is_ajax() or
request.META.get("HTTP_ACCEPT", "").startswith("application/json"))
try:
return func(request, *args, **kwargs)
except User.DoesNotExist:
message = _("User does not exist.")
if use_json:
return JsonResponse({"error": message}, 400)
else:
return HttpResponseBadRequest(message)
except AlreadyRunningError:
message = _("Task is already running.")
if use_json:
return JsonResponse({"error": message}, 400)
else:
return HttpResponseBadRequest(message)
return wrapped
def require_query_params(*args, **kwargs):
"""
    Checks for required parameters or renders a 400 error.
(decorator with arguments)
`args` is a *list of required GET parameter names.
`kwargs` is a **dict of required GET parameter names
to string explanations of the parameter
"""
required_params = []
required_params += [(arg, None) for arg in args]
required_params += [(key, kwargs[key]) for key in kwargs]
    # required_params = e.g. [('action', 'enroll or unenroll'), ('emails', None)]
def decorator(func): # pylint: disable=missing-docstring
def wrapped(*args, **kwargs): # pylint: disable=missing-docstring
request = args[0]
error_response_data = {
'error': 'Missing required query parameter(s)',
'parameters': [],
'info': {},
}
for (param, extra) in required_params:
default = object()
if request.GET.get(param, default) == default:
error_response_data['parameters'].append(param)
error_response_data['info'][param] = extra
if len(error_response_data['parameters']) > 0:
return JsonResponse(error_response_data, status=400)
else:
return func(*args, **kwargs)
return wrapped
return decorator
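# Illustrative usage of the decorator (a sketch; the view and parameter names below
# are made up -- see the decorated views further down for real examples):
#   @require_query_params('action', emails='stringified list of emails')
#   def my_view(request, course_id):
#       ...
# A request missing any of the named GET parameters receives a 400 JSON response
# listing the missing names under 'parameters'.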
def require_post_params(*args, **kwargs):
"""
Checks for required parameters or renders a 400 error.
(decorator with arguments)
Functions like 'require_query_params', but checks for
POST parameters rather than GET parameters.
"""
required_params = []
required_params += [(arg, None) for arg in args]
required_params += [(key, kwargs[key]) for key in kwargs]
# required_params = e.g. [('action', 'enroll or unenroll'), ['emails', None]]
def decorator(func): # pylint: disable=missing-docstring
def wrapped(*args, **kwargs): # pylint: disable=missing-docstring
request = args[0]
error_response_data = {
'error': 'Missing required query parameter(s)',
'parameters': [],
'info': {},
}
for (param, extra) in required_params:
default = object()
if request.POST.get(param, default) == default:
error_response_data['parameters'].append(param)
error_response_data['info'][param] = extra
if len(error_response_data['parameters']) > 0:
return JsonResponse(error_response_data, status=400)
else:
return func(*args, **kwargs)
return wrapped
return decorator
def require_level(level):
"""
Decorator with argument that requires an access level of the requesting
user. If the requirement is not satisfied, returns an
HttpResponseForbidden (403).
Assumes that request is in args[0].
Assumes that course_id is in kwargs['course_id'].
`level` is in ['instructor', 'staff']
if `level` is 'staff', instructors will also be allowed, even
if they are not in the staff group.
"""
if level not in ['instructor', 'staff']:
raise ValueError("unrecognized level '{}'".format(level))
def decorator(func): # pylint: disable=missing-docstring
def wrapped(*args, **kwargs): # pylint: disable=missing-docstring
request = args[0]
course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))
if has_access(request.user, level, course):
return func(*args, **kwargs)
else:
return HttpResponseForbidden()
return wrapped
return decorator
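# Illustrative usage (a sketch; the view name is made up). Note that 'staff' level
# also admits instructors, as described in the docstring above:
#   @require_level('staff')
#   def my_view(request, course_id):
#       ...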
def require_global_staff(func):
"""View decorator that requires that the user have global staff permissions. """
def wrapped(request, *args, **kwargs): # pylint: disable=missing-docstring
if GlobalStaff().has_user(request.user):
return func(request, *args, **kwargs)
else:
return HttpResponseForbidden(
u"Must be {platform_name} staff to perform this action.".format(
platform_name=settings.PLATFORM_NAME
)
)
return wrapped
def require_sales_admin(func):
"""
Decorator for checking sales administrator access before executing an HTTP endpoint. This decorator
is designed to be used for a request based action on a course. It assumes that there will be a
request object as well as a course_id attribute to leverage to check course level privileges.
If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
"""
def wrapped(request, course_id): # pylint: disable=missing-docstring
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
log.error(u"Unable to find course with course key %s", course_id)
return HttpResponseNotFound()
access = auth.user_has_role(request.user, CourseSalesAdminRole(course_key))
if access:
return func(request, course_id)
else:
return HttpResponseForbidden()
return wrapped
def require_finance_admin(func):
"""
Decorator for checking finance administrator access before executing an HTTP endpoint. This decorator
is designed to be used for a request based action on a course. It assumes that there will be a
request object as well as a course_id attribute to leverage to check course level privileges.
If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
"""
def wrapped(request, course_id): # pylint: disable=missing-docstring
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
log.error(u"Unable to find course with course key %s", course_id)
return HttpResponseNotFound()
access = auth.user_has_role(request.user, CourseFinanceAdminRole(course_key))
if access:
return func(request, course_id)
else:
return HttpResponseForbidden()
return wrapped
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def register_and_enroll_students(request, course_id): # pylint: disable=too-many-statements
"""
Create new account and Enroll students in this course.
Passing a csv file that contains a list of students.
    Order in the csv should be the following: email = 0; username = 1; name = 2; country = 3.
Requires staff access.
-If the email address and username already exists and the user is enrolled in the course,
do nothing (including no email gets sent out)
-If the email address already exists, but the username is different,
match on the email address only and continue to enroll the user in the course using the email address
as the matching criteria. Note the change of username as a warning message (but not a failure). Send a standard enrollment email
which is the same as the existing manual enrollment
-If the username already exists (but not the email), assume it is a different user and fail to create the new account.
The failure will be messaged in a response in the browser.
"""
if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
return HttpResponseForbidden()
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
warnings = []
row_errors = []
general_errors = []
if 'students_list' in request.FILES:
students = []
try:
upload_file = request.FILES.get('students_list')
if upload_file.name.endswith('.csv'):
students = [row for row in csv.reader(upload_file.read().splitlines())]
course = get_course_by_id(course_id)
else:
general_errors.append({
'username': '', 'email': '',
'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
})
except Exception: # pylint: disable=broad-except
general_errors.append({
'username': '', 'email': '', 'response': _('Could not read uploaded file.')
})
finally:
upload_file.close()
generated_passwords = []
row_num = 0
for student in students:
row_num = row_num + 1
# verify that we have exactly four columns in every row but allow for blank lines
if len(student) != 4:
if len(student) > 0:
general_errors.append({
'username': '',
'email': '',
'response': _('Data in row #{row_num} must have exactly four columns: email, username, full name, and country').format(row_num=row_num)
})
continue
# Iterate each student in the uploaded csv file.
email = student[EMAIL_INDEX]
username = student[USERNAME_INDEX]
name = student[NAME_INDEX]
country = student[COUNTRY_INDEX][:2]
email_params = get_email_params(course, True, secure=request.is_secure())
try:
validate_email(email) # Raises ValidationError if invalid
except ValidationError:
row_errors.append({
'username': username, 'email': email, 'response': _('Invalid email {email_address}.').format(email_address=email)})
else:
if User.objects.filter(email=email).exists():
# Email address already exists. assume it is the correct user
# and just register the user in the course and send an enrollment email.
user = User.objects.get(email=email)
# see if it is an exact match with email and username
# if it's not an exact match then just display a warning message, but continue onwards
if not User.objects.filter(email=email, username=username).exists():
warning_message = _(
'An account with email {email} exists but the provided username {username} '
'is different. Enrolling anyway with {email}.'
).format(email=email, username=username)
warnings.append({
'username': username, 'email': email, 'response': warning_message
})
log.warning(u'email %s already exist', email)
else:
log.info(
u"user already exists with username '%s' and email '%s'",
username,
email
)
# make sure user is enrolled in course
if not CourseEnrollment.is_enrolled(user, course_id):
enrollment_obj = CourseEnrollment.enroll(user, course_id)
reason = 'Enrolling via csv upload'
ManualEnrollmentAudit.create_manual_enrollment_audit(
request.user, email, UNENROLLED_TO_ENROLLED, reason, enrollment_obj
)
log.info(
u'user %s enrolled in the course %s',
username,
course.id,
)
enroll_email(course_id=course_id, student_email=email, auto_enroll=True, email_students=True, email_params=email_params)
else:
# This email does not yet exist, so we need to create a new account
# If username already exists in the database, then create_and_enroll_user
# will raise an IntegrityError exception.
password = generate_unique_password(generated_passwords)
try:
enrollment_obj = create_and_enroll_user(email, username, name, country, password, course_id)
reason = 'Enrolling via csv upload'
ManualEnrollmentAudit.create_manual_enrollment_audit(
request.user, email, UNENROLLED_TO_ENROLLED, reason, enrollment_obj
)
except IntegrityError:
row_errors.append({
'username': username, 'email': email, 'response': _('Username {user} already exists.').format(user=username)})
except Exception as ex:
log.exception(type(ex).__name__)
row_errors.append({
'username': username, 'email': email, 'response': type(ex).__name__})
else:
# It's a new user, an email will be sent to each newly created user.
email_params['message'] = 'account_creation_and_enrollment'
email_params['email_address'] = email
email_params['password'] = password
email_params['platform_name'] = microsite.get_value('platform_name', settings.PLATFORM_NAME)
send_mail_to_student(email, email_params)
log.info(u'email sent to new created user at %s', email)
else:
general_errors.append({
'username': '', 'email': '', 'response': _('File is not attached.')
})
results = {
'row_errors': row_errors,
'general_errors': general_errors,
'warnings': warnings
}
return JsonResponse(results)
def generate_random_string(length):
"""
Create a string of random characters of specified length
"""
chars = [
char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
if char not in 'aAeEiIoOuU1l'
]
return string.join((random.choice(chars) for __ in range(length)), '')
def generate_unique_password(generated_passwords, password_length=12):
"""
generate a unique password for each student.
"""
password = generate_random_string(password_length)
while password in generated_passwords:
password = generate_random_string(password_length)
generated_passwords.append(password)
return password
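# Illustrative behaviour (a sketch): the caller keeps one list per upload batch so the
# helper can guarantee uniqueness across that batch; each call appends its result.
#   passwords = []
#   first = generate_unique_password(passwords)    # e.g. a 12-character string
#   second = generate_unique_password(passwords)   # never equal to `first`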
def create_and_enroll_user(email, username, name, country, password, course_id):
""" Creates a user and enroll him/her in the course"""
user = User.objects.create_user(username, email, password)
reg = Registration()
reg.register(user)
profile = UserProfile(user=user)
profile.name = name
profile.country = country
profile.save()
# try to enroll the user in this course
return CourseEnrollment.enroll(user, course_id)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
"""
Enroll or unenroll students by email.
Requires staff access.
Query Parameters:
- action in ['enroll', 'unenroll']
      - identifiers is a string containing a list of emails and/or usernames separated by anything split_input_list can handle.
- auto_enroll is a boolean (defaults to false)
If auto_enroll is false, students will be allowed to enroll.
If auto_enroll is true, students will be enrolled as soon as they register.
- email_students is a boolean (defaults to false)
If email_students is true, students will be sent email notification
If email_students is false, students will not be sent email notification
Returns an analog to this JSON structure: {
"action": "enroll",
"auto_enroll": false,
"results": [
{
"email": "[email protected]",
"before": {
"enrollment": false,
"auto_enroll": false,
"user": true,
"allowed": false
},
"after": {
"enrollment": true,
"auto_enroll": false,
"user": true,
"allowed": false
}
}
]
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
email_students = request.POST.get('email_students') in ['true', 'True', True]
is_white_label = CourseMode.is_white_label(course_id)
reason = request.POST.get('reason')
if is_white_label:
if not reason:
return JsonResponse(
{
'action': action,
'results': [{'error': True}],
'auto_enroll': auto_enroll,
}, status=400)
enrollment_obj = None
state_transition = DEFAULT_TRANSITION_STATE
email_params = {}
if email_students:
course = get_course_by_id(course_id)
email_params = get_email_params(course, auto_enroll, secure=request.is_secure())
results = []
for identifier in identifiers:
        # First try to get a user object from the identifier
user = None
email = None
language = None
try:
user = get_student_from_identifier(identifier)
except User.DoesNotExist:
email = identifier
else:
email = user.email
language = get_user_email_language(user)
try:
# Use django.core.validators.validate_email to check email address
# validity (obviously, cannot check if email actually /exists/,
# simply that it is plausibly valid)
validate_email(email) # Raises ValidationError if invalid
if action == 'enroll':
before, after, enrollment_obj = enroll_email(
course_id, email, auto_enroll, email_students, email_params, language=language
)
before_enrollment = before.to_dict()['enrollment']
before_user_registered = before.to_dict()['user']
before_allowed = before.to_dict()['allowed']
after_enrollment = after.to_dict()['enrollment']
after_allowed = after.to_dict()['allowed']
if before_user_registered:
if after_enrollment:
if before_enrollment:
state_transition = ENROLLED_TO_ENROLLED
else:
if before_allowed:
state_transition = ALLOWEDTOENROLL_TO_ENROLLED
else:
state_transition = UNENROLLED_TO_ENROLLED
else:
if after_allowed:
state_transition = UNENROLLED_TO_ALLOWEDTOENROLL
elif action == 'unenroll':
before, after = unenroll_email(
course_id, email, email_students, email_params, language=language
)
before_enrollment = before.to_dict()['enrollment']
before_allowed = before.to_dict()['allowed']
if before_enrollment:
state_transition = ENROLLED_TO_UNENROLLED
else:
if before_allowed:
state_transition = ALLOWEDTOENROLL_TO_UNENROLLED
else:
state_transition = UNENROLLED_TO_UNENROLLED
else:
return HttpResponseBadRequest(strip_tags(
"Unrecognized action '{}'".format(action)
))
except ValidationError:
# Flag this email as an error if invalid, but continue checking
# the remaining in the list
results.append({
'identifier': identifier,
'invalidIdentifier': True,
})
except Exception as exc: # pylint: disable=broad-except
# catch and log any exceptions
# so that one error doesn't cause a 500.
log.exception(u"Error while #{}ing student")
log.exception(exc)
results.append({
'identifier': identifier,
'error': True,
})
else:
ManualEnrollmentAudit.create_manual_enrollment_audit(
request.user, email, state_transition, reason, enrollment_obj
)
results.append({
'identifier': identifier,
'before': before.to_dict(),
'after': after.to_dict(),
})
response_payload = {
'action': action,
'results': results,
'auto_enroll': auto_enroll,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_post_params(
identifiers="stringified list of emails and/or usernames",
action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
"""
Enroll or unenroll users in beta testing program.
Query parameters:
    - identifiers is a string containing a list of emails and/or usernames separated by
anything split_input_list can handle.
- action is one of ['add', 'remove']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
email_students = request.POST.get('email_students') in ['true', 'True', True]
auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
results = []
rolename = 'beta'
course = get_course_by_id(course_id)
email_params = {}
if email_students:
secure = request.is_secure()
email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)
for identifier in identifiers:
try:
error = False
user_does_not_exist = False
user = get_student_from_identifier(identifier)
if action == 'add':
allow_access(course, user, rolename)
elif action == 'remove':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
"Unrecognized action '{}'".format(action)
))
except User.DoesNotExist:
error = True
user_does_not_exist = True
# catch and log any unexpected exceptions
# so that one error doesn't cause a 500.
except Exception as exc: # pylint: disable=broad-except
log.exception(u"Error while #{}ing student")
log.exception(exc)
error = True
else:
# If no exception thrown, see if we should send an email
if email_students:
send_beta_role_email(action, user, email_params)
# See if we should autoenroll the student
if auto_enroll:
# Check if student is already enrolled
if not CourseEnrollment.is_enrolled(user, course_id):
CourseEnrollment.enroll(user, course_id)
finally:
# Tabulate the action result of this email address
results.append({
'identifier': identifier,
'error': error,
'userDoesNotExist': user_does_not_exist
})
response_payload = {
'action': action,
'results': results,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_query_params(
unique_student_identifier="email or username of user to change access",
rolename="'instructor', 'staff', 'beta', or 'ccx_coach'",
action="'allow' or 'revoke'"
)
def modify_access(request, course_id):
"""
Modify staff/instructor access of other user.
Requires instructor access.
NOTE: instructors cannot remove their own instructor access.
Query parameters:
    unique_student_identifier is the target user's username or email
rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
action is one of ['allow', 'revoke']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
try:
user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
except User.DoesNotExist:
response_payload = {
'unique_student_identifier': request.GET.get('unique_student_identifier'),
'userDoesNotExist': True,
}
return JsonResponse(response_payload)
# Check that user is active, because add_users
# in common/djangoapps/student/roles.py fails
# silently when we try to add an inactive user.
if not user.is_active:
response_payload = {
'unique_student_identifier': user.username,
'inactiveUser': True,
}
return JsonResponse(response_payload)
rolename = request.GET.get('rolename')
action = request.GET.get('action')
if rolename not in ROLES:
error = strip_tags("unknown rolename '{}'".format(rolename))
log.error(error)
return HttpResponseBadRequest(error)
# disallow instructors from removing their own instructor access.
if rolename == 'instructor' and user == request.user and action != 'allow':
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'removingSelfAsInstructor': True,
}
return JsonResponse(response_payload)
if action == 'allow':
allow_access(course, user, rolename)
elif action == 'revoke':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
"unrecognized action '{}'".format(action)
))
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'success': 'yes',
}
return JsonResponse(response_payload)
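# Illustrative sketch (not part of the module): a successful modify_access call
# returns a payload of this shape (the username is hypothetical).
#
#     {
#         "unique_student_identifier": "some_staff_member",
#         "rolename": "staff",
#         "action": "allow",
#         "success": "yes"
#     }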
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
"""
List instructors and staff.
Requires instructor access.
rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
Returns JSON of the form {
"course_id": "some/course/id",
"staff": [
{
"username": "staff1",
"email": "[email protected]",
"first_name": "Joe",
"last_name": "Shmoe",
}
]
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
rolename = request.GET.get('rolename')
if rolename not in ROLES:
return HttpResponseBadRequest()
def extract_user_info(user):
""" convert user into dicts for json view """
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
}
response_payload = {
'course_id': course_id.to_deprecated_string(),
rolename: map(extract_user_info, list_with_level(
course, rolename
)),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_grading_config(request, course_id):
"""
Respond with json which contains a html formatted grade summary.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
grading_config_summary = instructor_analytics.basic.dump_grading_context(course)
response_payload = {
'course_id': course_id.to_deprecated_string(),
'grading_config_summary': grading_config_summary,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_records(request, course_id, csv=False): # pylint: disable=unused-argument, redefined-outer-name
"""
return the summary of all sales records for a particular course
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
query_features = [
'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
'total_amount', 'created_at', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
'internal_reference', 'invoice_number', 'codes', 'course_id'
]
sale_data = instructor_analytics.basic.sale_record_features(course_id, query_features)
if not csv:
for item in sale_data:
item['created_by'] = item['created_by'].username
response_payload = {
'course_id': course_id.to_deprecated_string(),
'sale': sale_data,
'queried_features': query_features
}
return JsonResponse(response_payload)
else:
header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, query_features)
return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", header, datarows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_order_records(request, course_id): # pylint: disable=unused-argument, redefined-outer-name
"""
    Return the summary of all sale order records for a particular course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
query_features = [
('id', 'Order Id'),
('company_name', 'Company Name'),
('company_contact_name', 'Company Contact Name'),
('company_contact_email', 'Company Contact Email'),
('logged_in_username', 'Login Username'),
('logged_in_email', 'Login User Email'),
('purchase_time', 'Date of Sale'),
('customer_reference_number', 'Customer Reference Number'),
('recipient_name', 'Recipient Name'),
('recipient_email', 'Recipient Email'),
('bill_to_street1', 'Street 1'),
('bill_to_street2', 'Street 2'),
('bill_to_city', 'City'),
('bill_to_state', 'State'),
('bill_to_postalcode', 'Postal Code'),
('bill_to_country', 'Country'),
('order_type', 'Order Type'),
('status', 'Order Item Status'),
('coupon_code', 'Coupon Code'),
('list_price', 'List Price'),
('unit_cost', 'Unit Price'),
('quantity', 'Quantity'),
('total_discount', 'Total Discount'),
('total_amount', 'Total Amount Paid'),
]
db_columns = [x[0] for x in query_features]
csv_columns = [x[1] for x in query_features]
sale_data = instructor_analytics.basic.sale_order_record_features(course_id, db_columns)
header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, db_columns) # pylint: disable=unused-variable
return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", csv_columns, datarows)
@require_level('staff')
@require_POST
def sale_validation(request, course_id):
"""
    This method either invalidates or re-validates the sale against the invoice number, depending upon the event type.
"""
try:
invoice_number = request.POST["invoice_number"]
except KeyError:
return HttpResponseBadRequest("Missing required invoice_number parameter")
try:
invoice_number = int(invoice_number)
except ValueError:
return HttpResponseBadRequest(
"invoice_number must be an integer, {value} provided".format(
value=invoice_number
)
)
try:
event_type = request.POST["event_type"]
except KeyError:
return HttpResponseBadRequest("Missing required event_type parameter")
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
obj_invoice = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
invoice_id=invoice_number,
course_id=course_id
)
obj_invoice = obj_invoice.invoice
except CourseRegistrationCodeInvoiceItem.DoesNotExist: # Check for old type invoices
return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=invoice_number))
if event_type == "invalidate":
return invalidate_invoice(obj_invoice)
else:
return re_validate_invoice(obj_invoice)
def invalidate_invoice(obj_invoice):
"""
    This method invalidates the sale against the invoice number.
"""
if not obj_invoice.is_valid:
return HttpResponseBadRequest(_("The sale associated with this invoice has already been invalidated."))
obj_invoice.is_valid = False
obj_invoice.save()
message = _('Invoice number {0} has been invalidated.').format(obj_invoice.id)
return JsonResponse({'message': message})
def re_validate_invoice(obj_invoice):
"""
    This method re-validates the sale against the invoice number.
"""
if obj_invoice.is_valid:
return HttpResponseBadRequest(_("This invoice is already active."))
obj_invoice.is_valid = True
obj_invoice.save()
message = _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id)
return JsonResponse({'message': message})
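# Illustrative flow (a sketch; `invoice` stands for a hypothetical Invoice instance):
#
#     invalidate_invoice(invoice)    # sets invoice.is_valid = False and returns a JSON message
#     invalidate_invoice(invoice)    # a second call returns HTTP 400: already invalidated
#     re_validate_invoice(invoice)   # sets invoice.is_valid = True again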
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_features(request, course_id, csv=False): # pylint: disable=redefined-outer-name
"""
    Respond with json which contains a summary of all enrolled students' profile information.
Responds with JSON
{"students": [{-student-info-}, ...]}
    TODO: accept requests for different attribute sets.
"""
course_key = CourseKey.from_string(course_id)
course = get_course_by_id(course_key)
available_features = instructor_analytics.basic.AVAILABLE_FEATURES
# Allow for microsites to be able to define additional columns (e.g. )
query_features = microsite.get_value('student_profile_download_fields')
if not query_features:
query_features = [
'id', 'username', 'name', 'email', 'language', 'location',
'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
'goals'
]
# Provide human-friendly and translatable names for these features. These names
# will be displayed in the table generated in data_download.coffee. It is not (yet)
# used as the header row in the CSV, but could be in the future.
query_features_names = {
'id': _('User ID'),
'username': _('Username'),
'name': _('Name'),
'email': _('Email'),
'language': _('Language'),
'location': _('Location'),
'year_of_birth': _('Birth Year'),
'gender': _('Gender'),
'level_of_education': _('Level of Education'),
'mailing_address': _('Mailing Address'),
'goals': _('Goals'),
}
if is_course_cohorted(course.id):
# Translators: 'Cohort' refers to a group of students within a course.
query_features.append('cohort')
query_features_names['cohort'] = _('Cohort')
if not csv:
student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
response_payload = {
'course_id': unicode(course_key),
'students': student_data,
'students_count': len(student_data),
'queried_features': query_features,
'feature_names': query_features_names,
'available_features': available_features,
}
return JsonResponse(response_payload)
else:
try:
instructor_task.api.submit_calculate_students_features_csv(request, course_key, query_features)
success_status = _("The enrolled learner profile report is being created."
" To view the status of the report, see Pending Instructor Tasks below.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _(
"This enrollment report is currently being created."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete.")
return JsonResponse({"status": already_running_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_who_may_enroll(request, course_id):
"""
Initiate generation of a CSV file containing information about
students who may enroll in a course.
Responds with JSON
{"status": "... status message ..."}
"""
course_key = CourseKey.from_string(course_id)
query_features = ['email']
try:
instructor_task.api.submit_calculate_may_enroll_csv(request, course_key, query_features)
success_status = _(
"The enrollment report is being created. This report contains"
" information about learners who can enroll in the course."
" To view the status of the report, see Pending Instructor Tasks below."
)
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _(
"This enrollment report is currently being created."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete."
)
return JsonResponse({"status": already_running_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@require_level('staff')
def add_users_to_cohorts(request, course_id):
"""
View method that accepts an uploaded file (using key "uploaded-file")
containing cohort assignments for users. This method spawns a celery task
to do the assignments, and a CSV file with results is provided via data downloads.
"""
course_key = SlashSeparatedCourseKey.from_string(course_id)
try:
def validator(file_storage, file_to_validate):
"""
Verifies that the expected columns are present.
"""
with file_storage.open(file_to_validate) as f:
reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
try:
fieldnames = next(reader)
except StopIteration:
fieldnames = []
msg = None
if "cohort" not in fieldnames:
msg = _("The file must contain a 'cohort' column containing cohort names.")
elif "email" not in fieldnames and "username" not in fieldnames:
msg = _("The file must contain a 'username' column, an 'email' column, or both.")
if msg:
raise FileValidationException(msg)
__, filename = store_uploaded_file(
request, 'uploaded-file', ['.csv'],
course_and_time_based_filename_generator(course_key, "cohorts"),
max_file_size=2000000, # limit to 2 MB
validator=validator
)
# The task will assume the default file storage.
instructor_task.api.submit_cohort_students(request, course_key, filename)
except (FileValidationException, PermissionDenied) as err:
return JsonResponse({"error": unicode(err)}, status=400)
return JsonResponse()
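# Illustrative sketch of an uploaded CSV that passes the validator above
# (rows are hypothetical; only the column names matter -- 'cohort' is required,
# plus 'username' and/or 'email'):
#
#     username,email,cohort
#     learner_1,[email protected],cohort_a
#     learner_2,[email protected],cohort_b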
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_coupon_codes(request, course_id): # pylint: disable=unused-argument
"""
Respond with csv which contains a summary of all Active Coupons.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
coupons = Coupon.objects.filter(course_id=course_id)
query_features = [
('code', _('Coupon Code')),
('course_id', _('Course Id')),
('percentage_discount', _('% Discount')),
('description', _('Description')),
('expiration_date', _('Expiration Date')),
('is_active', _('Is Active')),
('code_redeemed_count', _('Code Redeemed Count')),
('total_discounted_seats', _('Total Discounted Seats')),
('total_discounted_amount', _('Total Discounted Amount')),
]
db_columns = [x[0] for x in query_features]
csv_columns = [x[1] for x in query_features]
coupons_list = instructor_analytics.basic.coupon_codes_features(db_columns, coupons, course_id)
__, data_rows = instructor_analytics.csvs.format_dictlist(coupons_list, db_columns)
return instructor_analytics.csvs.create_csv_response('Coupons.csv', csv_columns, data_rows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_enrollment_report(request, course_id):
"""
get the enrollment report for the particular course.
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
instructor_task.api.submit_detailed_enrollment_features_csv(request, course_key)
success_status = _("The detailed enrollment report is being created."
" To view the status of the report, see Pending Instructor Tasks below.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _("The detailed enrollment report is being created."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete.")
return JsonResponse({
"status": already_running_status
})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_exec_summary_report(request, course_id):
"""
get the executive summary report for the particular course.
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
instructor_task.api.submit_executive_summary_report(request, course_key)
status_response = _("The executive summary report is being created."
" To view the status of the report, see Pending Instructor Tasks below.")
except AlreadyRunningError:
status_response = _(
"The executive summary report is currently being created."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete."
)
return JsonResponse({
"status": status_response
})
def save_registration_code(user, course_id, mode_slug, invoice=None, order=None, invoice_item=None):
"""
    Recursive function that generates a new code each time and saves it in the
    Course Registration table if the validation check passes.
Args:
user (User): The user creating the course registration codes.
course_id (str): The string representation of the course ID.
mode_slug (str): The Course Mode Slug associated with any enrollment made by these codes.
invoice (Invoice): (Optional) The associated invoice for this code.
order (Order): (Optional) The associated order for this code.
invoice_item (CourseRegistrationCodeInvoiceItem) : (Optional) The associated CourseRegistrationCodeInvoiceItem
Returns:
The newly created CourseRegistrationCode.
"""
code = random_code_generator()
# check if the generated code is in the Coupon Table
matching_coupons = Coupon.objects.filter(code=code, is_active=True)
if matching_coupons:
return save_registration_code(
user, course_id, mode_slug, invoice=invoice, order=order, invoice_item=invoice_item
)
course_registration = CourseRegistrationCode(
code=code,
course_id=unicode(course_id),
created_by=user,
invoice=invoice,
order=order,
mode_slug=mode_slug,
invoice_item=invoice_item
)
try:
course_registration.save()
return course_registration
except IntegrityError:
return save_registration_code(
user, course_id, mode_slug, invoice=invoice, order=order, invoice_item=invoice_item
)
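# Illustrative usage sketch (not part of the module): save_registration_code retries
# itself whenever the generated code collides with an active coupon or violates the
# unique constraint, so callers simply receive a saved CourseRegistrationCode.
# The arguments below are hypothetical.
#
#     new_code = save_registration_code(
#         request.user, course_id, 'honor', invoice=sale_invoice, invoice_item=invoice_item
#     )
#     new_code.code   # e.g. u'A1B2C3D4' for the default 8-character length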
def registration_codes_csv(file_name, codes_list, csv_type=None):
"""
    Respond with the csv headers and data rows
    given a list of registration codes
:param file_name:
:param codes_list:
:param csv_type:
"""
# csv headers
query_features = [
'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
]
registration_codes = instructor_analytics.basic.course_registration_features(query_features, codes_list, csv_type)
header, data_rows = instructor_analytics.csvs.format_dictlist(registration_codes, query_features)
return instructor_analytics.csvs.create_csv_response(file_name, header, data_rows)
def random_code_generator():
"""
    Generate a random alphanumeric code of the length defined by the
    REGISTRATION_CODE_LENGTH setting (default 8).
"""
code_length = getattr(settings, 'REGISTRATION_CODE_LENGTH', 8)
return generate_random_string(code_length)
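# Illustrative doctest-style sketch (assuming REGISTRATION_CODE_LENGTH is not set,
# so the default length of 8 applies):
#
#     >>> len(random_code_generator())
#     8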
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def get_registration_codes(request, course_id): # pylint: disable=unused-argument
"""
Respond with csv which contains a summary of all Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # filter all the course registration codes
registration_codes = CourseRegistrationCode.objects.filter(
course_id=course_id
).order_by('invoice_item__invoice__company_name')
company_name = request.POST['download_company_name']
if company_name:
registration_codes = registration_codes.filter(invoice_item__invoice__company_name=company_name)
csv_type = 'download'
return registration_codes_csv("Registration_Codes.csv", registration_codes, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_sales_admin
@require_POST
def generate_registration_codes(request, course_id):
"""
Respond with csv which contains a summary of all Generated Codes.
"""
course_id = CourseKey.from_string(course_id)
invoice_copy = False
    # convert the course registration code number into an integer
try:
course_code_number = int(request.POST['total_registration_codes'])
except ValueError:
course_code_number = int(float(request.POST['total_registration_codes']))
company_name = request.POST['company_name']
company_contact_name = request.POST['company_contact_name']
company_contact_email = request.POST['company_contact_email']
unit_price = request.POST['unit_price']
try:
unit_price = (
decimal.Decimal(unit_price)
).quantize(
decimal.Decimal('.01'),
rounding=decimal.ROUND_DOWN
)
except decimal.InvalidOperation:
return HttpResponse(
status=400,
content=_(u"Could not parse amount as a decimal")
)
recipient_name = request.POST['recipient_name']
recipient_email = request.POST['recipient_email']
address_line_1 = request.POST['address_line_1']
address_line_2 = request.POST['address_line_2']
address_line_3 = request.POST['address_line_3']
city = request.POST['city']
state = request.POST['state']
zip_code = request.POST['zip']
country = request.POST['country']
internal_reference = request.POST['internal_reference']
customer_reference_number = request.POST['customer_reference_number']
recipient_list = [recipient_email]
if request.POST.get('invoice', False):
recipient_list.append(request.user.email)
invoice_copy = True
sale_price = unit_price * course_code_number
set_user_preference(request.user, INVOICE_KEY, invoice_copy)
sale_invoice = Invoice.objects.create(
total_amount=sale_price,
company_name=company_name,
company_contact_email=company_contact_email,
company_contact_name=company_contact_name,
course_id=course_id,
recipient_name=recipient_name,
recipient_email=recipient_email,
address_line_1=address_line_1,
address_line_2=address_line_2,
address_line_3=address_line_3,
city=city,
state=state,
zip=zip_code,
country=country,
internal_reference=internal_reference,
customer_reference_number=customer_reference_number
)
invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
invoice=sale_invoice,
qty=course_code_number,
unit_price=unit_price,
course_id=course_id
)
course = get_course_by_id(course_id, depth=0)
paid_modes = CourseMode.paid_modes_for_course(course_id)
if len(paid_modes) != 1:
msg = (
u"Generating Code Redeem Codes for Course '{course_id}', which must have a single paid course mode. "
u"This is a configuration issue. Current course modes with payment options: {paid_modes}"
).format(course_id=course_id, paid_modes=paid_modes)
log.error(msg)
return HttpResponse(
status=500,
content=_(u"Unable to generate redeem codes because of course misconfiguration.")
)
course_mode = paid_modes[0]
course_price = course_mode.min_price
registration_codes = []
for __ in range(course_code_number): # pylint: disable=redefined-outer-name
generated_registration_code = save_registration_code(
request.user, course_id, course_mode.slug, invoice=sale_invoice, order=None, invoice_item=invoice_item
)
registration_codes.append(generated_registration_code)
site_name = microsite.get_value('SITE_NAME', 'localhost')
quantity = course_code_number
discount = (float(quantity * course_price) - float(sale_price))
course_url = '{base_url}{course_about}'.format(
base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
course_about=reverse('about_course', kwargs={'course_id': course_id.to_deprecated_string()})
)
dashboard_url = '{base_url}{dashboard}'.format(
base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
dashboard=reverse('dashboard')
)
try:
pdf_file = sale_invoice.generate_pdf_invoice(course, course_price, int(quantity), float(sale_price))
except Exception: # pylint: disable=broad-except
log.exception('Exception at creating pdf file.')
pdf_file = None
from_address = microsite.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
context = {
'invoice': sale_invoice,
'site_name': site_name,
'course': course,
'course_price': course_price,
'sub_total': course_price * quantity,
'discount': discount,
'sale_price': sale_price,
'quantity': quantity,
'registration_codes': registration_codes,
'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
'course_url': course_url,
'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
'dashboard_url': dashboard_url,
'contact_email': from_address,
'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings.INVOICE_PAYMENT_INSTRUCTIONS),
'date': time.strftime("%m/%d/%Y")
}
# composes registration codes invoice email
subject = u'Confirmation and Invoice for {course_name}'.format(course_name=course.display_name)
message = render_to_string('emails/registration_codes_sale_email.txt', context)
invoice_attachment = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', context)
#send_mail(subject, message, from_address, recipient_list, fail_silently=False)
csv_file = StringIO.StringIO()
csv_writer = csv.writer(csv_file)
for registration_code in registration_codes:
full_redeem_code_url = 'http://{base_url}{redeem_code_url}'.format(
base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
redeem_code_url=reverse('register_code_redemption', kwargs={'registration_code': registration_code.code})
)
csv_writer.writerow([registration_code.code, full_redeem_code_url])
finance_email = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
if finance_email:
# append the finance email into the recipient_list
recipient_list.append(finance_email)
# send a unique email for each recipient, don't put all email addresses in a single email
for recipient in recipient_list:
email = EmailMessage()
email.subject = subject
email.body = message
email.from_email = from_address
email.to = [recipient]
email.attach(u'RegistrationCodes.csv', csv_file.getvalue(), 'text/csv')
email.attach(u'Invoice.txt', invoice_attachment, 'text/plain')
if pdf_file is not None:
email.attach(u'Invoice.pdf', pdf_file.getvalue(), 'application/pdf')
else:
file_buffer = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
email.attach(u'pdf_unavailable.txt', file_buffer.getvalue(), 'text/plain')
email.send()
return registration_codes_csv("Registration_Codes.csv", registration_codes)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def active_registration_codes(request, course_id): # pylint: disable=unused-argument
"""
Respond with csv which contains a summary of all Active Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# find all the registration codes in this course
registration_codes_list = CourseRegistrationCode.objects.filter(
course_id=course_id
).order_by('invoice_item__invoice__company_name')
company_name = request.POST['active_company_name']
if company_name:
registration_codes_list = registration_codes_list.filter(invoice_item__invoice__company_name=company_name)
# find the redeemed registration codes if any exist in the db
code_redemption_set = RegistrationCodeRedemption.objects.select_related(
'registration_code', 'registration_code__invoice_item__invoice'
).filter(registration_code__course_id=course_id)
if code_redemption_set.exists():
redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
# exclude the redeemed registration codes from the registration codes list and you will get
# all the registration codes that are active
registration_codes_list = registration_codes_list.exclude(code__in=redeemed_registration_codes)
return registration_codes_csv("Active_Registration_Codes.csv", registration_codes_list)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def spent_registration_codes(request, course_id): # pylint: disable=unused-argument
"""
    Respond with csv which contains a summary of all Spent (used) Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# find the redeemed registration codes if any exist in the db
code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
registration_code__course_id=course_id
)
spent_codes_list = []
if code_redemption_set.exists():
redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
# filter the Registration Codes by course id and the redeemed codes and
# you will get a list of all the spent(Redeemed) Registration Codes
spent_codes_list = CourseRegistrationCode.objects.filter(
course_id=course_id, code__in=redeemed_registration_codes
).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')
company_name = request.POST['spent_company_name']
if company_name:
spent_codes_list = spent_codes_list.filter(invoice_item__invoice__company_name=company_name) # pylint: disable=maybe-no-member
csv_type = 'spent'
return registration_codes_csv("Spent_Registration_Codes.csv", spent_codes_list, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_anon_ids(request, course_id): # pylint: disable=unused-argument
"""
    Respond with 3-column CSV output of user-id, anonymized user-id, and course-specific anonymized user-id
"""
# TODO: the User.objects query and CSV generation here could be
# centralized into instructor_analytics. Currently instructor_analytics
# has similar functionality but not quite what's needed.
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
def csv_response(filename, header, rows):
"""Returns a CSV http response for the given header and rows (excel/utf-8)."""
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(filename).encode('utf-8'))
writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
# In practice, there should not be non-ascii data in this query,
# but trying to do the right thing anyway.
encoded = [unicode(s).encode('utf-8') for s in header]
writer.writerow(encoded)
for row in rows:
encoded = [unicode(s).encode('utf-8') for s in row]
writer.writerow(encoded)
return response
students = User.objects.filter(
courseenrollment__course_id=course_id,
).order_by('id')
header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
rows = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_id, save=False)] for s in students]
return csv_response(course_id.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', header, rows)
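# Illustrative sketch of the CSV produced above (the IDs are hypothetical):
#
#     "User ID","Anonymized User ID","Course Specific Anonymized User ID"
#     "42","0123456789abcdef0123456789abcdef","fedcba9876543210fedcba9876543210"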
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@common_exceptions_400
@require_level('staff')
@require_query_params(
unique_student_identifier="email or username of student for whom to get progress url"
)
def get_student_progress_url(request, course_id):
"""
Get the progress url of a student.
Limited to staff access.
    Takes query parameter unique_student_identifier and, if the student exists,
returns e.g. {
'progress_url': '/../...'
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
progress_url = reverse('student_progress', kwargs={'course_id': course_id.to_deprecated_string(), 'student_id': user.id})
response_payload = {
'course_id': course_id.to_deprecated_string(),
'progress_url': progress_url,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
"""
    Resets a student's attempts counter or starts a task to reset all students'
    attempts counters. Optionally deletes student state for a problem. Limited
    to staff access. Some sub-methods limited to instructor access.
    Takes some of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
- delete_module is a boolean
requires instructor access
mutually exclusive with all_students
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students', False) in ['true', 'True', True]
delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]
# parameter combinations
if all_students and student:
return HttpResponseBadRequest(
"all_students and unique_student_identifier are mutually exclusive."
)
if all_students and delete_module:
return HttpResponseBadRequest(
"all_students and delete_module are mutually exclusive."
)
# instructor authorization
if all_students or delete_module:
if not has_access(request.user, 'instructor', course):
return HttpResponseForbidden("Requires instructor access.")
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
except InvalidKeyError:
return HttpResponseBadRequest()
response_payload = {}
response_payload['problem_to_reset'] = problem_to_reset
if student:
try:
enrollment.reset_student_attempts(course_id, student, module_state_key, delete_module=delete_module)
except StudentModule.DoesNotExist:
return HttpResponseBadRequest(_("Module does not exist."))
except sub_api.SubmissionError:
# Trust the submissions API to log the error
error_msg = _("An error occurred while deleting the score.")
return HttpResponse(error_msg, status=500)
response_payload['student'] = student_identifier
elif all_students:
instructor_task.api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
response_payload['task'] = 'created'
response_payload['student'] = 'All Students'
else:
return HttpResponseBadRequest()
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id): # pylint: disable=invalid-name
"""
    Resets a student's attempts counter or starts a task to reset all students'
    attempts counters for the entrance exam. Optionally deletes student state for
    the entrance exam. Limited to staff access. Some sub-methods limited to instructor access.
Following are possible query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
requires instructor access
mutually exclusive with delete_module
- delete_module is a boolean
requires instructor access
mutually exclusive with all_students
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
if not course.entrance_exam_id:
return HttpResponseBadRequest(
_("Course has no entrance exam section.")
)
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students', False) in ['true', 'True', True]
delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]
# parameter combinations
if all_students and student:
return HttpResponseBadRequest(
_("all_students and unique_student_identifier are mutually exclusive.")
)
if all_students and delete_module:
return HttpResponseBadRequest(
_("all_students and delete_module are mutually exclusive.")
)
# instructor authorization
if all_students or delete_module:
if not has_access(request.user, 'instructor', course):
return HttpResponseForbidden(_("Requires instructor access."))
try:
entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
if delete_module:
instructor_task.api.submit_delete_entrance_exam_state_for_student(request, entrance_exam_key, student)
else:
instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(request, entrance_exam_key, student)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
response_payload = {'student': student_identifier or _('All Students'), 'task': 'created'}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
"""
    Starts a background task to rescore a problem for one student or for all students.
    Limited to instructor access.
    Takes some of the following query parameters
- problem_to_reset is a urlname of a problem
- unique_student_identifier is an email or username
- all_students is a boolean
all_students and unique_student_identifier cannot both be present.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students') in ['true', 'True', True]
if not (problem_to_reset and (all_students or student)):
return HttpResponseBadRequest("Missing query parameters.")
if all_students and student:
return HttpResponseBadRequest(
"Cannot rescore with all_students and unique_student_identifier."
)
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
except InvalidKeyError:
return HttpResponseBadRequest("Unable to parse problem id")
response_payload = {}
response_payload['problem_to_reset'] = problem_to_reset
if student:
response_payload['student'] = student_identifier
instructor_task.api.submit_rescore_problem_for_student(request, module_state_key, student)
response_payload['task'] = 'created'
elif all_students:
instructor_task.api.submit_rescore_problem_for_all_students(request, module_state_key)
response_payload['task'] = 'created'
else:
return HttpResponseBadRequest()
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
"""
    Starts a background task to rescore the entrance exam for one student or for all students.
    Limited to instructor access.
    Takes either of the following query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
all_students and unique_student_identifier cannot both be present.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students') in ['true', 'True', True]
if not course.entrance_exam_id:
return HttpResponseBadRequest(
_("Course has no entrance exam section.")
)
if all_students and student:
return HttpResponseBadRequest(
_("Cannot rescore with all_students and unique_student_identifier.")
)
try:
entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
response_payload = {}
if student:
response_payload['student'] = student_identifier
else:
response_payload['student'] = _("All Students")
instructor_task.api.submit_rescore_entrance_exam_for_student(request, entrance_exam_key, student)
response_payload['task'] = 'created'
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_background_email_tasks(request, course_id): # pylint: disable=unused-argument
"""
List background email tasks.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
task_type = 'bulk_course_email'
# Specifying for the history of a single task type
tasks = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type)
response_payload = {
'tasks': map(extract_task_features, tasks),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_email_content(request, course_id): # pylint: disable=unused-argument
"""
List the content of bulk emails sent
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
task_type = 'bulk_course_email'
# First get tasks list of bulk emails sent
emails = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type)
response_payload = {
'emails': map(extract_email_features, emails),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_instructor_tasks(request, course_id):
"""
List instructor tasks.
    Takes optional query parameters.
- With no arguments, lists running tasks.
- `problem_location_str` lists task history for problem
- `problem_location_str` and `unique_student_identifier` lists task
history for problem AND student (intersection)
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
problem_location_str = strip_if_string(request.GET.get('problem_location_str', False))
student = request.GET.get('unique_student_identifier', None)
if student is not None:
student = get_student_from_identifier(student)
if student and not problem_location_str:
return HttpResponseBadRequest(
"unique_student_identifier must accompany problem_location_str"
)
if problem_location_str:
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_location_str)
except InvalidKeyError:
return HttpResponseBadRequest()
if student:
# Specifying for a single student's history on this problem
tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key, student)
else:
# Specifying for single problem's history
tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key)
else:
# If no problem or student, just get currently running tasks
tasks = instructor_task.api.get_running_instructor_tasks(course_id)
response_payload = {
'tasks': map(extract_task_features, tasks),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_entrance_exam_instructor_tasks(request, course_id): # pylint: disable=invalid-name
"""
List entrance exam related instructor tasks.
Takes either of the following query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_by_id(course_id)
student = request.GET.get('unique_student_identifier', None)
if student is not None:
student = get_student_from_identifier(student)
try:
entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
if student:
# Specifying for a single student's entrance exam history
tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id, entrance_exam_key, student)
else:
        # Specifying for all students' entrance exam history
tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id, entrance_exam_key)
response_payload = {
'tasks': map(extract_task_features, tasks),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_report_downloads(_request, course_id):
"""
List grade CSV files that are available for download for this course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
response_payload = {
'downloads': [
dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name))
for name, url in report_store.links_for(course_id)
]
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def list_financial_report_downloads(_request, course_id):
"""
    List financial report CSV files that are available for download for this course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
response_payload = {
'downloads': [
dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name))
for name, url in report_store.links_for(course_id)
]
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def calculate_grades_csv(request, course_id):
"""
    Request a CSV of student grades for this course.
    AlreadyRunningError is raised if the course's grades are already being updated.
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
instructor_task.api.submit_calculate_grades_csv(request, course_key)
success_status = _("The grade report is being created."
" To view the status of the report, see Pending Instructor Tasks below.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _("The grade report is currently being created."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete.")
return JsonResponse({
"status": already_running_status
})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def problem_grade_report(request, course_id):
"""
Request a CSV showing students' grades for all problems in the
course.
AlreadyRunningError is raised if the course's grades are already being
updated.
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
instructor_task.api.submit_problem_grade_report(request, course_key)
success_status = _("The problem grade report is being created."
" To view the status of the report, see Pending Instructor Tasks below.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _("A problem grade report is already being generated."
" To view the status of the report, see Pending Instructor Tasks below."
" You will be able to download the report when it is complete.")
return JsonResponse({
"status": already_running_status
})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('rolename')
def list_forum_members(request, course_id):
"""
Lists forum members of a certain rolename.
Limited to staff access.
The requesting user must be at least staff.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
Takes query parameter `rolename`.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
rolename = request.GET.get('rolename')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
# filter out unsupported for roles
if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
"Unrecognized rolename '{}'.".format(rolename)
))
try:
role = Role.objects.get(name=rolename, course_id=course_id)
users = role.users.all().order_by('username')
except Role.DoesNotExist:
users = []
def extract_user_info(user):
""" Convert user to dict for json rendering. """
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
}
response_payload = {
'course_id': course_id.to_deprecated_string(),
rolename: map(extract_user_info, users),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
def send_email(request, course_id):
"""
Send an email to self, staff, or everyone involved in a course.
Query Parameters:
- 'send_to' specifies what group the email should be sent to
Options are defined by the CourseEmail model in
lms/djangoapps/bulk_email/models.py
- 'subject' specifies email's subject
- 'message' specifies email's content
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
if not bulk_email_is_enabled_for_course(course_id):
return HttpResponseForbidden("Email is not enabled for this course.")
send_to = request.POST.get("send_to")
subject = request.POST.get("subject")
message = request.POST.get("message")
# allow two branding points to come from Microsites: which CourseEmailTemplate should be used
# and what the 'from' field in the email should be
#
# If these are None (because we are not in a Microsite or they are undefined in Microsite config) than
# the system will use normal system defaults
template_name = microsite.get_value('course_email_template_name')
from_addr = microsite.get_value('course_email_from_addr')
# Create the CourseEmail object. This is saved immediately, so that
# any transaction that has been pending up to this point will also be
# committed.
email = CourseEmail.create(
course_id,
request.user,
send_to,
subject, message,
template_name=template_name,
from_addr=from_addr
)
# Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
instructor_task.api.submit_bulk_course_email(request, course_id, email.id) # pylint: disable=no-member
response_payload = {
'course_id': course_id.to_deprecated_string(),
'success': True,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
unique_student_identifier="email or username of user to change access",
rolename="the forum role",
action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
"""
Modify user's forum role.
The requesting user must be at least staff.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
    No one can revoke an instructor's FORUM_ROLE_ADMINISTRATOR status.
    Query parameters:
    - `email` is the target user's email
- `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
- `action` is one of ['allow', 'revoke']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
unique_student_identifier = request.GET.get('unique_student_identifier')
rolename = request.GET.get('rolename')
action = request.GET.get('action')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
"Unrecognized rolename '{}'.".format(rolename)
))
user = get_student_from_identifier(unique_student_identifier)
try:
update_forum_role(course_id, user, rolename, action)
except Role.DoesNotExist:
return HttpResponseBadRequest("Role does not exist.")
response_payload = {
'course_id': course_id.to_deprecated_string(),
'action': action,
}
return JsonResponse(response_payload)
@require_POST
def get_user_invoice_preference(request, course_id): # pylint: disable=unused-argument
"""
Gets invoice copy user's preferences.
"""
invoice_copy_preference = True
invoice_preference_value = get_user_preference(request.user, INVOICE_KEY)
if invoice_preference_value is not None:
invoice_copy_preference = invoice_preference_value == 'True'
return JsonResponse({
'invoice_copy': invoice_copy_preference
})
def _display_unit(unit):
"""
Gets string for displaying unit to user.
"""
name = getattr(unit, 'display_name', None)
if name:
return u'{0} ({1})'.format(name, unit.location.to_deprecated_string())
else:
return unit.location.to_deprecated_string()
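# Illustrative sketch (hypothetical unit): a unit with display_name u'Homework 1' at
# location i4x://Org/Course/vertical/hw1 is shown as
# u'Homework 1 (i4x://Org/Course/vertical/hw1)'; a unit without a display_name falls
# back to the bare location string.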
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
"""
Grants a due date extension to a student for a particular unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
student = require_student_from_identifier(request.GET.get('student'))
unit = find_unit(course, request.GET.get('url'))
due_date = parse_datetime(request.GET.get('due_datetime'))
set_due_date_extension(course, unit, student, due_date)
return JsonResponse(_(
'Successfully changed due date for student {0} for {1} '
'to {2}').format(student.profile.name, _display_unit(unit),
due_date.strftime('%Y-%m-%d %H:%M')))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url')
def reset_due_date(request, course_id):
"""
Rescinds a due date extension for a student on a particular unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
student = require_student_from_identifier(request.GET.get('student'))
unit = find_unit(course, request.GET.get('url'))
set_due_date_extension(course, unit, student, None)
if not getattr(unit, "due", None):
# It's possible the normal due date was deleted after an extension was granted:
return JsonResponse(
_("Successfully removed invalid due date extension (unit has no due date).")
)
original_due_date_str = unit.due.strftime('%Y-%m-%d %H:%M')
return JsonResponse(_(
'Successfully reset due date for student {0} for {1} '
'to {2}').format(student.profile.name, _display_unit(unit),
original_due_date_str))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('url')
def show_unit_extensions(request, course_id):
"""
Shows all of the students which have due date extensions for the given unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
unit = find_unit(course, request.GET.get('url'))
return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student')
def show_student_extensions(request, course_id):
"""
Shows all of the due date extensions granted to a particular student in a
particular course.
"""
student = require_student_from_identifier(request.GET.get('student'))
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
    Separate out individual student emails from a comma- or space-separated string.
e.g.
in: "[email protected], [email protected]\[email protected]\r [email protected]\r, [email protected]"
out: ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
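

# Hedged illustration (not part of the original module): the split above treats
# commas and any whitespace (including newlines) interchangeably, so mixed
# separators all yield one email per entry. The helper name below is an
# assumption added purely for illustration.
def _example_split_input_list():
    raw = "[email protected], [email protected]\[email protected]"
    return _split_input_list(raw)  # ['[email protected]', '[email protected]', '[email protected]']

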
def _instructor_dash_url(course_key, section=None):
"""Return the URL for a section in the instructor dashboard.
Arguments:
course_key (CourseKey)
Keyword Arguments:
section (str): The name of the section to load.
Returns:
unicode: The URL of a section in the instructor dashboard.
"""
url = reverse('instructor_dashboard', kwargs={'course_id': unicode(course_key)})
if section is not None:
url += u'#view-{section}'.format(section=section)
return url
@require_global_staff
@require_POST
def generate_example_certificates(request, course_id=None): # pylint: disable=unused-argument
"""Start generating a set of example certificates.
Example certificates are used to verify that certificates have
been configured correctly for the course.
    Redirects back to the instructor dashboard once certificate
generation has begun.
"""
course_key = CourseKey.from_string(course_id)
certs_api.generate_example_certificates(course_key)
return redirect(_instructor_dash_url(course_key, section='certificates'))
@require_global_staff
@require_POST
def enable_certificate_generation(request, course_id=None):
"""Enable/disable self-generated certificates for a course.
Once self-generated certificates have been enabled, students
who have passed the course will be able to generate certificates.
    Redirects back to the instructor dashboard once the
setting has been updated.
"""
course_key = CourseKey.from_string(course_id)
is_enabled = (request.POST.get('certificates-enabled', 'false') == 'true')
certs_api.set_cert_generation_enabled(course_key, is_enabled)
return redirect(_instructor_dash_url(course_key, section='certificates'))
#---- Gradebook (shown to small courses only) ----
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def spoc_gradebook(request, course_id):
"""
Show the gradebook for this course:
- Only shown for courses with enrollment < settings.FEATURES.get("MAX_ENROLLMENT_INSTR_BUTTONS")
- Only displayed to course staff
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(request.user, 'staff', course_key, depth=None)
enrolled_students = User.objects.filter(
courseenrollment__course_id=course_key,
courseenrollment__is_active=1
).order_by('username').select_related("profile")
# possible extension: implement pagination to show to large courses
student_info = [
{
'username': student.username,
'id': student.id,
'email': student.email,
'grade_summary': student_grades(student, request, course),
'realname': student.profile.name,
}
for student in enrolled_students
]
return render_to_response('courseware/gradebook.html', {
'students': student_info,
'course': course,
'course_id': course_key,
# Checked above
'staff_access': True,
'ordered_grades': sorted(course.grade_cutoffs.items(), key=lambda i: i[1], reverse=True),
})
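

# Hedged sketch (not part of the original dashboard code): the comment above
# mentions pagination as a possible extension for large courses. One way to do
# it with Django's paginator is shown below; the helper name and the page size
# of 100 are assumptions, not values used by this module.
def _paginate_enrolled_students(enrolled_students, page_number, per_page=100):
    """Return a single page of the enrolled-students queryset (illustrative only)."""
    from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
    paginator = Paginator(enrolled_students, per_page)
    try:
        return paginator.page(page_number)
    except PageNotAnInteger:
        return paginator.page(1)
    except EmptyPage:
        return paginator.page(paginator.num_pages)

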
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def mark_student_can_skip_entrance_exam(request, course_id): # pylint: disable=invalid-name
"""
    Mark a student as allowed to skip the entrance exam.
Takes `unique_student_identifier` as required POST parameter.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
student_identifier = request.POST.get('unique_student_identifier')
student = get_student_from_identifier(student_identifier)
__, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_id)
if created:
message = _('This student (%s) will skip the entrance exam.') % student_identifier
else:
message = _('This student (%s) is already allowed to skip the entrance exam.') % student_identifier
response_payload = {
'message': message,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def start_certificate_generation(request, course_id):
"""
Start generating certificates for all students enrolled in given course.
"""
course_key = CourseKey.from_string(course_id)
task = instructor_task.api.generate_certificates_for_all_students(request, course_key)
message = _('Certificate generation task for all students of this course has been started. '
'You can view the status of the generation task in the "Pending Tasks" section.')
response_payload = {
'message': message,
'task_id': task.task_id
}
return JsonResponse(response_payload)
| agpl-3.0 | 3,830,859,681,415,716,400 | 38.324483 | 159 | 0.653421 | false |
ee08b397/LeetCode-4 | 225 Implement Stack using Queues.py | 3 | 1673 | """
Implement the following operations of a stack using queues.
push(x) -- Push element x onto stack.
pop() -- Removes the element on top of the stack.
top() -- Get the top element.
empty() -- Return whether the stack is empty.
Notes:
You must use only standard operations of a queue -- which means only push to back, peek/pop from front, size, and is
empty operations are valid.
Depending on your language, queue may not be supported natively. You may simulate a queue by using a list or deque
(double-ended queue), as long as you use only standard operations of a queue.
You may assume that all operations are valid (for example, no pop or top operations will be called on an empty stack).
"""
__author__ = 'Daniel'
class Stack:
def __init__(self):
"""
initialize your data structure here.
        A single queue cannot mimic the stack this way, so two queues are used.
"""
self.q = [[], []]
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
t = 0
if not self.q[t]:
t ^= 1
self.q[t].append(x)
def pop(self):
"""
:rtype: nothing
"""
t = 0
if not self.q[t]:
t ^= 1
while len(self.q[t]) > 1:
self.q[t^1].append(self.q[t].pop(0))
return self.q[t].pop()
def top(self):
"""
:rtype: int
"""
popped = self.pop()
t = 0
if not self.q[t]:
t ^= 1
self.q[t].append(popped)
return popped
def empty(self):
"""
:rtype: bool
"""
return not self.q[0] and not self.q[1]
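

# Hedged usage sketch (not part of the original solution): a quick check of the
# two-queue stack above; the expected values follow directly from LIFO order.
if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(2)
    assert stack.top() == 2
    stack.pop()
    assert stack.top() == 1
    stack.pop()
    assert stack.empty()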
| mit | -7,114,226,794,162,634,000 | 24.348485 | 118 | 0.54991 | false |
manojgudi/sandhi | modules/gr36/grc/python/extract_docs.py | 11 | 2155 | """
Copyright 2008-2011 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import re
def _extract(key):
"""
Extract the documentation from the python __doc__ strings.
If multiple modules match, combine the docs.
@param key the block key
@return a string with documentation
"""
#extract matches
try:
module_name, constructor_name = key.split('_', 1)
module = __import__('gnuradio.'+module_name)
module = getattr(module, module_name)
except ImportError:
try:
module_name, constructor_name = key.split('_', 1)
module = __import__(module_name)
except: return ''
except:
return ''
pattern = constructor_name.replace('_', '_*').replace('x', '\w')
pattern_matcher = re.compile('^%s\w*$'%pattern)
matches = filter(lambda x: pattern_matcher.match(x), dir(module))
#combine all matches
doc_strs = list()
for match in matches:
try:
title = ' --- ' + match + ' --- '
doc_strs.append('\n\n'.join([title, getattr(module, match).__doc__]).strip())
except: pass
return '\n\n'.join(doc_strs)
_docs_cache = dict()
def extract(key):
"""
Call the private extract and cache the result.
@param key the block key
@return a string with documentation
"""
if not _docs_cache.has_key(key):
_docs_cache[key] = _extract(key)
return _docs_cache[key]
if __name__ == '__main__':
import sys
print extract(sys.argv[1])
| gpl-3.0 | 9,208,080,398,036,305,000 | 31.651515 | 80 | 0.67471 | false |
wuzheng-sjtu/FastFPN | libs/memory_util.py | 2 | 13633 | import os
import re
import sys
import tempfile
import tensorflow as tf
debug_messages = False
def vlog(level):
os.environ['TF_CPP_MIN_VLOG_LEVEL'] = str(level)
# this helper is here in case we later want to capture huge stderr that doesn't fit in RAM
class TemporaryFileHelper:
"""Provides a way to fetch contents of temporary file."""
def __init__(self, temporary_file):
self.temporary_file = temporary_file
def getvalue(self):
return open(self.temporary_file.name).read()
STDOUT=1
STDERR=2
class capture_stderr:
"""Utility to capture output, use as follows
with util.capture_stderr() as stderr:
sess = tf.Session()
print("Captured:", stderr.getvalue()).
"""
def __init__(self, fd=STDERR):
self.fd = fd
self.prevfd = None
def __enter__(self):
t = tempfile.NamedTemporaryFile()
self.prevfd = os.dup(self.fd)
os.dup2(t.fileno(), self.fd)
return TemporaryFileHelper(t)
def __exit__(self, exc_type, exc_value, traceback):
os.dup2(self.prevfd, self.fd)
################################################################################
# LOG_MEMORY_PARSING
################################################################################
# Until https://github.com/tensorflow/tensorflow/issues/6716 is resolved, the
# reliable way to get access to tensor deallocation information is to parse
# __LOG_MEMORY__ from VLOG print statements. This is sensitive to print order
# run unbuffered to prevent interleaving:
# python -u script.py
# Regex'es to parse __LOG_MEMORY__ statements
# Each regex is preceded by an example of line it's meant to pass
# I 5143420588.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogTensorAllocation { step_id: -6 kernel_name: "Unknown (from Proto)" tensor { dtype: DT_INT32 shape { dim { size: 3 } } allocation_description { requested_bytes: 12 allocated_bytes: 12 allocator_name: "cpu" allocation_id: 3 has_single_reference: true ptr: 29496256 } } }
tensor_allocation_regex = re.compile("""MemoryLogTensorAllocation.*?step_id: (?P<step_id>[-0123456789]+).*kernel_name: \"(?P<kernel_name>[^"]+)\".*?allocated_bytes: (?P<allocated_bytes>\d+).*allocator_name: \"(?P<allocator_name>[^"]+)\".*allocation_id: (?P<allocation_id>\d+).*""")
# I 6795349363.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogRawAllocation { step_id: -3 operation: "TF_AllocateTensor" num_bytes: 1000000 ptr: 80910752 allocation_id: 99 allocator_name: "cpu" }
raw_allocation_regex = re.compile("""MemoryLogRawAllocation.*?step_id: (?P<step_id>[-0123456789]+).*operation: \"(?P<kernel_name>[^"]+)\".*?num_bytes: (?P<allocated_bytes>\d+).*allocation_id: (?P<allocation_id>\d+).*allocator_name: "(?P<allocator_name>[^"]+)".*""")
# I 5143420588.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogTensorOutput { step_id: 1 kernel_name: "Const" tensor { dtype: DT_INT32 shape { dim { size: 3 } } allocation_description { requested_bytes: 12 allocated_bytes: 12 allocator_name: "cpu" allocation_id: 3 ptr: 29496256 } } }
# 2017-01-26 10:13:30: I tensorflow/core/framework/log_memory.cc:35] __LOG_MEMORY__ MemoryLogTensorOutput { step_id: 2 kernel_name: "a0" tensor { dtype: DT_FLOAT shape { dim { size: 250000 } } allocation_description { requested_bytes: 1000000 allocated_bytes: 1000192 allocator_name: "gpu_bfc" allocation_id: 3 ptr: 30076651520 } } }
#tensor_output_regex = re.compile("""MemoryLogTensorOutput.* step_id: (?P<step_id>[-0123456789]+) kernel_name: \"(?P<kernel_name>[^"]+).*allocated_bytes: (?P<allocated_bytes>\d+).*allocation_id: (?P<allocation_id>\d+).*""")
tensor_output_regex = re.compile("""MemoryLogTensorOutput.* step_id: (?P<step_id>[-0123456789]+) kernel_name: \"(?P<kernel_name>[^"]+).*allocated_bytes: (?P<allocated_bytes>\d+).*allocator_name: \"(?P<allocator_name>[^"]+)\".*allocation_id: (?P<allocation_id>\d+).*""")
# some Shape lines are missing bytes info so have separate regex for them
# I 5162643141.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogTensorOutput { step_id: 5 kernel_name: "gradients/Shape" tensor { dtype: DT_INT32 shape { dim { } } } }
tensor_output_regex_no_bytes = re.compile("""MemoryLogTensorOutput.* step_id: (?P<step_id>[-0123456789]+) kernel_name: \"(?P<kernel_name>[^"]+).*""")
# 5143420588.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogTensorDeallocation { allocation_id: 2 allocator_name: "cpu" }
tensor_deallocation_regex = re.compile("""allocation_id: (?P<allocation_id>\d+).*allocator_name: \"(?P<allocator_name>[^"]+)\".*""")
# I 6796000229.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogRawDeallocation { step_id: -3 operation: "TensorFlow C Api" allocation_id: 177 allocator_name: "cpu" }
raw_deallocation_regex = re.compile("""allocation_id: (?P<allocation_id>\d+).*allocator_name: \"(?P<allocator_name>[^"]+)\".*""")
# I 5143420588.000000 file tensorflow/core/framework/log_memory.cc:41] __LOG_MEMORY__ MemoryLogStep { step_id: 1 handle: "->Print:0//0/;0" }
tensor_logstep_regex = re.compile("""MemoryLogStep.*?step_id: (?P<step_id>[-0123456789]+).*""")
def _parse_logline(l):
if 'MemoryLogTensorOutput' in l:
m = tensor_output_regex.search(l)
if not m:
m = tensor_output_regex_no_bytes.search(l)
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogTensorOutput"
elif 'MemoryLogTensorAllocation' in l:
m = tensor_allocation_regex.search(l)
# Broadcast args give weird allocation messages without size, ignore
# I tensorflow/core/framework/log_memory.cc:35] __LOG_MEMORY__ MemoryLogTensorAllocation { step_id: 2 kernel_name: "gradients/node_5_grad/BroadcastGradientArgs" tensor { dtype: DT_INT32 shape { dim { } } } }
if not m:
return {"type": "MemoryLogTensorAllocation", "line": l,
"allocation_id": "-1"}
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogTensorAllocation"
if debug_messages:
print("Got allocation for %s, %s"%(d["allocation_id"], d["kernel_name"]))
elif 'MemoryLogTensorDeallocation' in l:
m = tensor_deallocation_regex.search(l)
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogTensorDeallocation"
if debug_messages:
print("Got deallocation for %s"%(d["allocation_id"]))
elif 'MemoryLogStep' in l:
m = tensor_logstep_regex.search(l)
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogStep"
elif 'MemoryLogRawAllocation' in l:
m = raw_allocation_regex.search(l)
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogRawAllocation"
elif 'MemoryLogRawDeallocation' in l:
m = raw_deallocation_regex.search(l)
assert m, l
d = m.groupdict()
d["type"] = "MemoryLogRawDeallocation"
else:
assert False, "Unknown log line: "+l
if not "allocation_id" in d:
d["allocation_id"] = "-1"
d["line"] = l
return d
def memory_timeline(log):
if hasattr(log, 'getvalue'):
log = log.getvalue()
def unique_alloc_id(line):
if line["allocation_id"] == "-1":
return "-1"
return line["allocation_id"]+"-"+line["allocator_name"]
def get_alloc_names(line):
alloc_id = unique_alloc_id(line)
for entry in reversed(allocation_map.get(alloc_id, [])):
kernel_name = entry.get("kernel_name", "unknown")
if not "unknown" in kernel_name:
return kernel_name+"("+unique_alloc_id(line)+")"
# couldn't find an allocation message with name of kernel
return "("+alloc_id+")"
def get_alloc_bytes(line):
for entry in allocation_map.get(unique_alloc_id(line), []):
if "allocated_bytes" in entry:
return entry["allocated_bytes"]
return "0"
def get_alloc_type(line):
for entry in allocation_map.get(unique_alloc_id(line), []):
if "allocator_name" in entry:
return entry["allocator_name"]
return "0"
parsed_lines = []
for l in log.split("\n"):
if 'LOG_MEMORY' in l: # and not 'step_id: -6' in l:
parsed_lines.append(_parse_logline(l))
allocation_map = {} # map of <allocation_id>-<allocator_name>->parsed_logline of allocation
for line in parsed_lines:
if (line["type"] == "MemoryLogTensorAllocation" or line["type"] == "MemoryLogRawAllocation" or
line["type"] == "MemoryLogTensorOutput"):
allocation_map.setdefault(unique_alloc_id(line), []).append(line)
if debug_messages:
print(allocation_map)
result = []
for i, line in enumerate(parsed_lines):
# skip lines without allocation_id, ie lines like
# I tensorflow/core/framework/log_memory.cc:35] __LOG_MEMORY__ MemoryLogStep { step_id: 2 handle: "->/gradients/a1_grad/TanhGrad/0/;1" }
if int(line["allocation_id"]) == -1:
continue
alloc_names = get_alloc_names(line)
# if line doesn't specify bytes, look in history if there was corresponding TensorOutput or TensorAllocation msg
if int(line.get('allocated_bytes', -1)) < 0:
alloc_bytes = get_alloc_bytes(line)
else:
alloc_bytes = line.get('allocated_bytes', -1)
alloc_type = get_alloc_type(line)
if line["type"] == "MemoryLogTensorOutput":
continue
if line["type"] == "MemoryLogTensorDeallocation" or line["type"]=="MemoryLogRawDeallocation":
alloc_bytes = "-" + alloc_bytes
result.append((i, alloc_names, alloc_bytes, alloc_type))
return result
def peak_memory(log, gpu_only=False):
"""Peak memory used across all devices."""
peak_memory = -123456789 # to catch bugs
total_memory = 0
for record in memory_timeline(log):
i, kernel_name, allocated_bytes, allocator_type = record
allocated_bytes = int(allocated_bytes)
if gpu_only:
if not allocator_type.startswith("gpu"):
continue
total_memory += allocated_bytes
peak_memory = max(total_memory, peak_memory)
return peak_memory
def print_memory_timeline(log, gpu_only=False, ignore_less_than_bytes=0):
total_memory = 0
for record in memory_timeline(log):
i, kernel_name, allocated_bytes, allocator_type = record
allocated_bytes = int(allocated_bytes)
if gpu_only:
if not allocator_type.startswith("gpu"):
continue
if abs(allocated_bytes)<ignore_less_than_bytes:
continue # ignore small allocations
total_memory += allocated_bytes
print("%9d %42s %11d %11d %s"%(i, kernel_name, allocated_bytes, total_memory, allocator_type))
import matplotlib.pyplot as plt
def plot_memory_timeline(log, gpu_only=False, ignore_less_than_bytes=1000):
total_memory = 0
timestamps = []
data = []
current_time = 0
for record in memory_timeline(log):
timestamp, kernel_name, allocated_bytes, allocator_type = record
allocated_bytes = int(allocated_bytes)
if abs(allocated_bytes)<ignore_less_than_bytes:
continue # ignore small allocations
if gpu_only:
if not record[3].startswith("gpu"):
continue
timestamps.append(current_time-.00000001)
data.append(total_memory)
total_memory += int(record[2])
timestamps.append(current_time)
data.append(total_memory)
current_time+=1
plt.plot(timestamps, data)
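

# Hedged usage sketch (not part of the original module). It assumes a
# TensorFlow 1.x runtime and that verbose memory logging is active before the
# session runs; the toy graph below is purely illustrative.
def _example_peak_memory():
    vlog(2)  # request __LOG_MEMORY__ output on stderr
    with capture_stderr() as stderr:
        a = tf.random_normal([1000, 1000])
        b = tf.matmul(a, a)
        with tf.Session() as sess:
            sess.run(b)
    log = stderr.getvalue()
    print_memory_timeline(log, ignore_less_than_bytes=1024)
    return peak_memory(log)

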
################################################################################
# smart initialize
################################################################################
def smart_initialize(variables=None, sess=None):
"""Initializes all uninitialized variables in correct order. Initializers
are only run for uninitialized variables, so it's safe to run this multiple
times.
Args:
sess: session to use. Use default session if None.
"""
from tensorflow.contrib import graph_editor as ge
def make_initializer(var):
def f():
return tf.assign(var, var.initial_value).op
return f
def make_noop(): return tf.no_op()
def make_safe_initializer(var):
"""Returns initializer op that only runs for uninitialized ops."""
return tf.cond(tf.is_variable_initialized(var), make_noop,
make_initializer(var), name="safe_init_"+var.op.name).op
if not sess:
sess = tf.get_default_session()
g = tf.get_default_graph()
if not variables:
variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
safe_initializers = {}
for v in variables:
safe_initializers[v.op.name] = make_safe_initializer(v)
# initializers access variable vaue through read-only value cached in
# <varname>/read, so add control dependency to trigger safe_initializer
# on read access
for v in variables:
var_name = v.op.name
var_cache = g.get_operation_by_name(var_name+"/read")
ge.reroute.add_control_inputs(var_cache, [safe_initializers[var_name]])
sess.run(tf.group(*safe_initializers.values()))
# remove initializer dependencies to avoid slowing down future variable reads
for v in variables:
var_name = v.op.name
var_cache = g.get_operation_by_name(var_name+"/read")
ge.reroute.remove_control_inputs(var_cache, [safe_initializers[var_name]])
| apache-2.0 | -6,577,896,874,996,883,000 | 44.142384 | 363 | 0.633463 | false |
diorcety/translate | translate/storage/bundleprojstore.py | 3 | 10529 | # -*- coding: utf-8 -*-
#
# Copyright 2010 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
import shutil
import tempfile
from zipfile import ZipFile
import six
from translate.storage.projstore import *
__all__ = ('BundleProjectStore', 'InvalidBundleError')
class InvalidBundleError(Exception):
pass
class BundleProjectStore(ProjectStore):
"""Represents a translate project bundle (zip archive)."""
# INITIALIZERS #
def __init__(self, fname):
super(BundleProjectStore, self).__init__()
self._tempfiles = {}
if fname and os.path.isfile(fname):
self.load(fname)
else:
self.zip = ZipFile(fname, 'w')
self.save()
self.zip.close()
self.zip = ZipFile(fname, 'a')
# CLASS METHODS #
@classmethod
def from_project(cls, proj, fname=None):
if fname is None:
fname = 'bundle.zip'
bundle = BundleProjectStore(fname)
for fn in proj.sourcefiles:
bundle.append_sourcefile(proj.get_file(fn))
for fn in proj.transfiles:
bundle.append_transfile(proj.get_file(fn))
for fn in proj.targetfiles:
bundle.append_targetfile(proj.get_file(fn))
bundle.settings = proj.settings.copy()
bundle.save()
return bundle
# METHODS #
def append_file(self, afile, fname, ftype='trans', delete_orig=False):
"""Append the given file to the project with the given filename, marked
to be of type ``ftype`` ('src', 'trans', 'tgt').
:param delete_orig: If ``True``, as set by
:meth:`~translate.storage.Project.convert_forward`,
``afile`` is deleted after appending, if
possible.
.. note:: For this implementation, the appended file will be deleted
from disk if ``delete_orig`` is ``True``.
"""
if fname and fname in self.zip.namelist():
raise ValueError("File already in bundle archive: %s" % (fname))
if not fname and isinstance(afile, six.string_types) and afile in self.zip.namelist():
raise ValueError("File already in bundle archive: %s" % (afile))
afile, fname = super(BundleProjectStore, self).append_file(afile, fname, ftype)
self._zip_add(fname, afile)
if delete_orig and hasattr(afile, 'name') and afile.name not in self._tempfiles:
try:
os.unlink(afile.name)
except Exception:
pass
return self.get_file(fname), fname
def remove_file(self, fname, ftype=None):
"""Remove the file with the given project name from the project."""
super(BundleProjectStore, self).remove_file(fname, ftype)
self._zip_delete([fname])
tempfiles = [tmpf for tmpf, prjf in six.iteritems(self._tempfiles) if prjf == fname]
if tempfiles:
for tmpf in tempfiles:
try:
os.unlink(tmpf)
except Exception:
pass
del self._tempfiles[tmpf]
def close(self):
super(BundleProjectStore, self).close()
self.cleanup()
self.zip.close()
def cleanup(self):
"""Clean up our mess: remove temporary files."""
for tempfname in self._tempfiles:
if os.path.isfile(tempfname):
os.unlink(tempfname)
self._tempfiles = {}
def get_file(self, fname):
"""Retrieve a project file (source, translation or target file) from
the project archive.
"""
retfile = None
if fname in self._files or fname in self.zip.namelist():
# Check if the file has not already been extracted to a temp file
tempfname = [tfn for tfn in self._tempfiles if self._tempfiles[tfn] == fname]
if tempfname and os.path.isfile(tempfname[0]):
tempfname = tempfname[0]
else:
tempfname = ''
if not tempfname:
# Extract the file to a temporary file
zfile = self.zip.open(fname)
tempfname = os.path.split(fname)[-1]
tempfd, tempfname = tempfile.mkstemp(suffix='_' + tempfname)
os.close(tempfd)
open(tempfname, 'w').write(zfile.read())
retfile = open(tempfname)
self._tempfiles[tempfname] = fname
if not retfile:
raise FileNotInProjectError(fname)
return retfile
def get_proj_filename(self, realfname):
"""Try and find a project file name for the given real file name."""
try:
fname = super(BundleProjectStore, self).get_proj_filename(realfname)
except ValueError as ve:
fname = None
if fname:
return fname
if realfname in self._tempfiles:
return self._tempfiles[realfname]
raise ValueError('Real file not in project store: %s' % (realfname))
def load(self, zipname):
"""Load the bundle project from the zip file of the given name."""
self.zip = ZipFile(zipname, mode='a')
self._load_settings()
append_section = {
'sources': self._sourcefiles.append,
'targets': self._targetfiles.append,
'transfiles': self._transfiles.append,
}
for section in ('sources', 'targets', 'transfiles'):
if section in self.settings:
for fname in self.settings[section]:
append_section[section](fname)
self._files[fname] = None
def save(self, filename=None):
"""Save all project files to the bundle zip file."""
self._update_from_tempfiles()
if filename:
newzip = ZipFile(filename, 'w')
else:
newzip = self._create_temp_zipfile()
# Write project file for the new zip bundle
newzip.writestr('project.xtp', self._generate_settings())
# Copy project files from project to the new zip file
project_files = self._sourcefiles + self._transfiles + self._targetfiles
for fname in project_files:
newzip.writestr(fname, self.get_file(fname).read())
# Copy any extra (non-project) files from the current zip
for fname in self.zip.namelist():
if fname in project_files or fname == 'project.xtp':
continue
newzip.writestr(fname, self.zip.read(fname))
self._replace_project_zip(newzip)
def update_file(self, pfname, infile):
"""Updates the file with the given project file name with the contents
of ``infile``.
:returns: the results from :meth:`BundleProjStore.append_file`.
"""
if pfname not in self._files:
raise FileNotInProjectError(pfname)
if pfname not in self.zip.namelist():
return super(BundleProjectStore, self).update_file(pfname, infile)
self._zip_delete([pfname])
self._zip_add(pfname, infile)
def _load_settings(self):
"""Grab the project.xtp file from the zip file and load it."""
if 'project.xtp' not in self.zip.namelist():
raise InvalidBundleError('Not a translate project bundle')
super(BundleProjectStore, self)._load_settings(self.zip.open('project.xtp').read())
def _create_temp_zipfile(self):
"""Create a new zip file with a temporary file name (with mode 'w')."""
newzipfd, newzipfname = tempfile.mkstemp(prefix='translate_bundle', suffix='.zip')
os.close(newzipfd)
return ZipFile(newzipfname, 'w')
def _replace_project_zip(self, zfile):
"""Replace the currently used zip file (``self.zip``) with the given
zip file. Basically, ``os.rename(zfile.filename,
self.zip.filename)``.
"""
if not zfile.fp.closed:
zfile.close()
if not self.zip.fp.closed:
self.zip.close()
shutil.move(zfile.filename, self.zip.filename)
self.zip = ZipFile(self.zip.filename, mode='a')
def _update_from_tempfiles(self):
"""Update project files from temporary files."""
for tempfname in self._tempfiles:
tmp = open(tempfname)
self.update_file(self._tempfiles[tempfname], tmp)
if not tmp.closed:
tmp.close()
def _zip_add(self, pfname, infile):
"""Add the contents of ``infile`` to the zip with file name ``pfname``."""
if hasattr(infile, 'seek'):
infile.seek(0)
self.zip.writestr(pfname, infile.read())
# Clear the cached file object to force the file to be read from the
# zip file.
self._files[pfname] = None
def _zip_delete(self, fnames):
"""Delete the files with the given names from the zip file (``self.zip``)."""
# Sanity checking
if not isinstance(fnames, (list, tuple)):
raise ValueError("fnames must be list or tuple: %s" % (fnames))
if not self.zip:
raise ValueError("No zip file to work on")
zippedfiles = self.zip.namelist()
for fn in fnames:
if fn not in zippedfiles:
raise KeyError("File not in zip archive: %s" % (fn))
newzip = self._create_temp_zipfile()
newzip.writestr('project.xtp', self._generate_settings())
for fname in zippedfiles:
# Copy all files from self.zip that are not project.xtp (already
# in the new zip file) or in fnames (they are to be removed, after
            # all).
if fname in fnames or fname == 'project.xtp':
continue
newzip.writestr(fname, self.zip.read(fname))
self._replace_project_zip(newzip)
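

# Hedged usage sketch (not part of the original module): round-trip a single
# translation file through a bundle. The bundle and file names used here are
# purely illustrative assumptions.
def _example_bundle_roundtrip(bundle_path='example_bundle.zip',
                              po_path='example.po'):
    store = BundleProjectStore(bundle_path)
    with open(po_path) as po_file:
        store.append_file(po_file, 'trans/example.po', ftype='trans')
    store.save()
    content = store.get_file('trans/example.po').read()
    store.close()
    return content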
| gpl-2.0 | -6,607,760,367,712,483,000 | 36.738351 | 94 | 0.595403 | false |
Cadasta/cadasta-platform | cadasta/spatial/models.py | 1 | 11284 | from core.models import RandomIDModel
from django.utils.functional import cached_property
from django.core.urlresolvers import reverse
from django.contrib.gis.db.models import GeometryField
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.utils.translation import ugettext as _
from django.utils.encoding import iri_to_uri
from django.dispatch import receiver
from django.utils.translation import get_language
from organization.models import Project
from tutelary.decorators import permissioned_model
from simple_history.models import HistoricalRecords
from shapely.geometry import Point, Polygon, LineString
from shapely.wkt import dumps
from . import messages, managers
from .choices import TYPE_CHOICES
from resources.mixins import ResourceModelMixin
from jsonattrs.fields import JSONAttributeField
from jsonattrs.decorators import fix_model_for_attributes
from questionnaires.models import Questionnaire, QuestionOption
@fix_model_for_attributes
@permissioned_model
class SpatialUnit(ResourceModelMixin, RandomIDModel):
"""A single spatial unit: has a type, an optional geometry, a
type-dependent set of attributes, and a set of relationships to
other spatial units.
"""
# All spatial units are associated with a single project.
project = models.ForeignKey(Project, on_delete=models.CASCADE,
related_name='spatial_units')
# Spatial unit type: used to manage range of allowed attributes.
type = models.CharField(max_length=100)
# Spatial unit geometry is optional: some spatial units may only
# have a textual description of their location.
geometry = GeometryField(null=True, geography=True)
# Area, auto-calculated via trigger (see spatial/migrations/#0005)
area = models.FloatField(default=0)
# JSON attributes field with management of allowed members.
attributes = JSONAttributeField(default={})
# Spatial unit-spatial unit relationships: includes spatial
# containment and split/merge relationships.
relationships = models.ManyToManyField(
'self',
through='SpatialRelationship',
through_fields=('su1', 'su2'),
symmetrical=False,
related_name='relationships_set',
)
# Denormalized duplication of label from the QuestionOption related to the
# SpatialUnit
label = JSONField(null=True)
# Audit history
created_date = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
history = HistoricalRecords()
class Meta:
ordering = ('type',)
class TutelaryMeta:
perm_type = 'spatial'
path_fields = ('project', 'id')
actions = (
('spatial.list',
{'description': _("List existing spatial units of a project"),
'error_message': messages.SPATIAL_LIST,
'permissions_object': 'project'}),
('spatial.create',
{'description': _("Add a spatial unit to a project"),
'error_message': messages.SPATIAL_CREATE,
'permissions_object': 'project'}),
('spatial.view',
{'description': _("View an existing spatial unit"),
'error_message': messages.SPATIAL_VIEW}),
('spatial.update',
{'description': _("Update an existing spatial unit"),
'error_message': messages.SPATIAL_UPDATE}),
('spatial.delete',
{'description': _("Delete an existing spatial unit"),
'error_message': messages.SPATIAL_DELETE}),
('spatial.resources.add',
{'description': _("Add resources to this spatial unit"),
'error_message': messages.SPATIAL_ADD_RESOURCE})
)
def __str__(self):
return "<SpatialUnit: {}>".format(self.name)
def __repr__(self):
repr_string = ('<SpatialUnit id={obj.id}'
' project={obj.project.slug}'
' type={obj.type}>')
return repr_string.format(obj=self)
@property
def name(self):
return self.location_type_label
@property
def ui_class_name(self):
return _("Location")
def get_absolute_url(self):
return iri_to_uri(reverse(
'locations:detail',
kwargs={
'organization': self.project.organization.slug,
'project': self.project.slug,
'location': self.id,
},
))
@cached_property
def location_type_label(self):
# Handle no questionnaire
if (not self.project.current_questionnaire) or (self.label is None):
return dict(TYPE_CHOICES)[self.type]
# Handle non-translatable label
if isinstance(self.label, str):
return self.label
# Handle translated label
translated_label = self.label.get(get_language())
if translated_label:
return translated_label
# If label failed to translate, fallback to default language
rel_questionnaire = Questionnaire.objects.get(
id=self.project.current_questionnaire)
return self.label.get(rel_questionnaire.default_language)
def reassign_spatial_geometry(instance):
coords = list(instance.geometry.coords)
if type(coords[0]) == float:
coords = [coords]
else:
while (type(coords[0][0]) != float):
coords = coords[0]
coords = [list(x) for x in coords]
for point in coords:
if point[0] >= -180 and point[0] <= 180:
return
while coords[0][0] < -180:
for point in coords:
point[0] += 360
while coords[0][0] > 180:
for point in coords:
point[0] -= 360
geometry = []
for point in coords:
latlng = [point[0], point[1]]
geometry.append(tuple(latlng))
if len(geometry) > 1:
if geometry[0] == geometry[-1]:
instance.geometry = dumps(Polygon(geometry))
else:
instance.geometry = dumps(LineString(geometry))
else:
instance.geometry = dumps(Point(geometry))
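

# Hedged illustration (not part of the original module): the function above
# shifts out-of-range geometries back into the [-180, 180] longitude band in
# whole multiples of 360 degrees. A standalone, simplified sketch of the same
# wrapping idea for a single longitude value:
def _wrap_longitude(lon):
    while lon < -180:
        lon += 360
    while lon > 180:
        lon -= 360
    return lon

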
@receiver(models.signals.pre_save, sender=SpatialUnit)
def check_extent(sender, instance, **kwargs):
geom = instance.geometry
# Store 'POLYGON EMPTY' data as null to avoid libgeos bug
# (https://trac.osgeo.org/geos/ticket/680)
# TODO: Rm this check when we're using Django 1.11+ or libgeos 3.6.1+
# https://github.com/django/django/commit/b90d72facf1e4294df1c2e6b51b26f6879bf2992#diff-181a3ea304dfaf57f1e1d680b32d2b76R248
from django.contrib.gis.geos import Polygon
if isinstance(geom, Polygon) and geom.empty:
instance.geometry = None
if geom and not geom.empty:
reassign_spatial_geometry(instance)
@receiver(models.signals.post_save, sender=SpatialUnit)
def refresh_area(sender, instance, **kwargs):
""" Ensure DB-generated area is set on instance """
from django.contrib.gis.geos import MultiPolygon, Polygon
geom = instance.geometry
if not isinstance(geom, (MultiPolygon, Polygon)):
return
qs = type(instance)._default_manager.filter(id=instance.id)
instance.area = qs.values_list('area', flat=True)[0]
@fix_model_for_attributes
@permissioned_model
class SpatialRelationship(RandomIDModel):
"""A relationship between spatial units: encodes simple logical terms
like ``su1 is-contained-in su2`` or ``su1 is-split-of su2``. May
have additional requirements.
"""
# Possible spatial unit relationships types: TYPE_CHOICES is the
# well-known name used by the JSONAttributesField field type to
# manage the range of allowed attribute fields.
TYPE_CHOICES = (('C', 'is-contained-in'),
('S', 'is-split-of'),
('M', 'is-merge-of'))
# All spatial unit relationships are associated with a single project.
project = models.ForeignKey(Project, on_delete=models.CASCADE,
related_name='spatial_relationships')
# Spatial units are in the relationships.
su1 = models.ForeignKey(SpatialUnit, on_delete=models.CASCADE,
related_name='spatial_unit_one')
su2 = models.ForeignKey(SpatialUnit, on_delete=models.CASCADE,
related_name='spatial_unit_two')
# Spatial unit relationship type: used to manage range of allowed
# attributes
type = models.CharField(max_length=1, choices=TYPE_CHOICES)
# JSON attributes field with management of allowed members.
attributes = JSONAttributeField(default={})
objects = managers.SpatialRelationshipManager()
# Audit history
created_date = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
history = HistoricalRecords()
class TutelaryMeta:
perm_type = 'spatial_rel'
path_fields = ('project', 'id')
actions = (
('spatial_rel.list',
{'description': _("List existing spatial relationships"
" of a project"),
'error_message': messages.SPATIAL_REL_LIST,
'permissions_object': 'project'}),
('spatial_rel.create',
{'description': _("Add a spatial relationship to a project"),
'error_message': messages.SPATIAL_REL_CREATE,
'permissions_object': 'project'}),
('spatial_rel.view',
{'description': _("View an existing spatial relationship"),
'error_message': messages.SPATIAL_REL_VIEW}),
('spatial_rel.update',
{'description': _("Update an existing spatial relationship"),
'error_message': messages.SPATIAL_REL_UPDATE}),
('spatial_rel.delete',
{'description': _("Delete an existing spatial relationship"),
'error_message': messages.SPATIAL_REL_DELETE}),
)
def __str__(self):
return "<SpatialRelationship: <{su1}> {type} <{su2}>>".format(
su1=self.su1.name, su2=self.su2.name,
type=dict(self.TYPE_CHOICES).get(self.type))
def __repr__(self):
repr_string = ('<SpatialRelationship id={obj.id}'
' project={obj.project.slug}'
' su1={obj.su1_id}'
' su2={obj.su2_id}'
' type={obj.type}>')
return repr_string.format(obj=self)
@receiver(models.signals.pre_save, sender=SpatialUnit)
def set_label_on_spatialunits(sender, instance, **kwargs):
"""
Set label of related QuestionOption onto new SpatialUnit instances
"""
# Ignore if instance is not new and has not changed its 'type' property
if not instance._state.adding:
unchanged = SpatialUnit.objects.filter(
id=instance.id, type=instance.type).exists()
if unchanged:
return
try:
rel_questionoption = QuestionOption.objects.get(
name=instance.type,
question__name='location_type',
question__questionnaire__id=instance.project.current_questionnaire)
except QuestionOption.DoesNotExist:
return
instance.label = rel_questionoption.label_xlat
| agpl-3.0 | 2,044,181,148,656,346,400 | 36.613333 | 128 | 0.635147 | false |
Anaatti/Follow-the-leader-simulation | src/follower.py | 1 | 2059 | '''
Created on 16.3.2012
@author: Antti Vainio
'''
from thinker import thinker
class follower(thinker):
'''
This class is a very simple derivation of the thinker class.
The class implements the required think() function where it tries to follow its leader.
'''
def __init__(self, leader, x, y, max_speed = -1, max_force = -1, size = -1, random_position = True):
super(follower, self).__init__(x, y, max_speed, max_force, size, random_position)
self.leader = leader
self.is_leader = False
def think(self, others):
'''
The follower tries to follow its leader while also trying to avoid other followers.
If there is no leader to be followed the follower will still avoid others while also seeking to its own position.
        The result in that situation is that the followers scatter while gradually slowing down.
'''
if not self.leader:
self.seekTraget(self.pos, 0.2)
else:
#leader seeking
away = self.pos - self.leader.pos
away.normalize()
#position to seek
seek_pos = self.leader.pos + away * (self.size + self.leader.size) * 2.0
#for distance to seek position
distance = seek_pos - self.pos
seek_pos+= self.leader.speed * distance.lenght / self.max_speed * 0.1
#for arrival
arrival_max_dist = (self.max_speed ** 2.0) / 2.0 / self.max_force
if distance.lenght <= arrival_max_dist:
seek_pos+= away * distance.lenght
#go, seek!
self.seekTraget(seek_pos, 1.0)
#follower avoidance
for i in others:
if i.pos != self.pos:
distance = i.pos - self.pos
#this is 1/x^2 where x is distance
#x is 1 when distance is (self.radius + other.radius) * 1.2
distance = 1.0 / (distance.lenght / (self.size + i.size) / 1.2) ** 2.0
self.fleeTraget(i.pos, distance)
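

# Hedged usage sketch (not part of the original module): a follower with no
# leader damps toward its own position, while one with a leader seeks a point
# just behind it. The coordinates and step count below are arbitrary.
if __name__ == '__main__':
    lead = follower(None, 100.0, 100.0, random_position=False)
    tail = follower(lead, 140.0, 120.0, random_position=False)
    for _ in range(10):
        lead.think([tail])
        tail.think([lead])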
| unlicense | 3,078,281,819,314,505,000 | 38.596154 | 121 | 0.569694 | false |
mlhim/tb | src/data_RDF_gen.py | 1 | 5580 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
data_RDF_gen.py
Extracts MLHIM 2.5.0 data and creates RDF triples in RDF/XML based on the CCDs.
This script must be executed after the CCD RDF/XML files are created.
Copyright (C) 2015 Timothy W. Cook [email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import re
from random import randint
from xml.sax.saxutils import escape
from lxml import etree
nsDict = {'rdf':'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'owl':'http://www.w3.org/2002/07/owl#',
'dc':'http://purl.org/dc/elements/1.1/',
'sawsdl':'http://www.w3.org/ns/sawsdl',
'sawsdlrdf':'http://www.w3.org/ns/sawsdl#',
'rdfs':'http://www.w3.org/2000/01/rdf-schema#'}
dest = None
filename = None
tree = None
def parse_el(element):
global dest
global filename
global tree
for child in element.getchildren():
if child.tag is not etree.Comment:
if 'pcs-' not in child.tag and child.text is not None:
c_name = child.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','mlhim2:')
dest.write("<rdf:Description rdf:about='xmldata/"+filename+tree.getpath(child)+"'>\n")
dest.write(" <rdfs:domain rdf:resource='xmldata/"+filename+"'/>\n")
dest.write(" <rdf:subPropertyOf rdf:resource='"+tree.getpath(element)+"'/>\n")
dest.write(" <rdf:value>"+escape(child.text)+"</rdf:value>\n")
dest.write("</rdf:Description>\n\n")
else:
c_name = child.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','mlhim2:')
dest.write("<rdf:Description rdf:about='data/"+filename+tree.getpath(child)+"'>\n")
dest.write(" <rdfs:domain rdf:resource='data/"+filename+"'/>\n")
dest.write(" <rdf:type rdf:resource='"+c_name.replace('pcs-','pcm-')+"'/>\n")
dest.write("</rdf:Description>\n\n")
parse_el(child)
def main():
global dest
global filename
global tree
header = """<?xml version="1.0" encoding="UTF-8"?>
<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'
xmlns:rdfs='http://www.w3.org/2000/01/rdf-schema#'
xmlns:owl="http://www.w3.org/2002/07/owl#"
xmlns:dc='http://purl.org/dc/elements/1.1/'
xmlns:ehr='http://www.mlhim.org/xmlns/ehr'
xmlns:mlhim2='http://www.mlhim.org/ns/mlhim2/'>
\n"""
nsDict={'xs':'http://www.w3.org/2001/XMLSchema',
'rdf':'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'rdfs':'http://www.w3.org/2000/01/rdf-schema#',
'dc':'http://purl.org/dc/elements/1.1/'}
parser = etree.XMLParser(ns_clean=True, recover=True)
files = os.listdir('xmldata')
for filename in files:
if filename[-4:] == '.xml':
dest = open(os.path.join('rdfxml', filename.replace('.xml', '.rdf')), 'w')
dest.write(header)
print('\n\nProcessing: ', os.path.join('xmldata', filename))
src = open(os.path.join('xmldata', filename), 'r')
tree = etree.parse(src, parser)
root = tree.getroot()
ccdid = root.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','')
# create triple for the file link to CCD
dest.write("\n<rdf:Description rdf:about='xmldata/"+filename+"'> <!-- The document unique path/filename -->\n")
dest.write(" <rdf:domain rdf:resource='http://www.ccdgen.com/ccdlib/"+ccdid+".xsd'/>\n")
dest.write("</rdf:Description>\n\n")
entry = root.getchildren()[0]
# create triple for Entry
entry_el = entry.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','mlhim2:')
dest.write("<rdf:Description rdf:about='xmldata/"+filename+"/"+ccdid+"/"+entry_el+"'>\n")
dest.write(" <rdfs:domain rdf:resource='xmldata/"+filename+"'/>\n")
dest.write(" <rdf:type rdf:resource='"+entry_el.replace('pcs-','pcm-')+"'/>\n")
dest.write('</rdf:Description>\n\n')
parse_el(entry)
dest.write('\n</rdf:RDF>\n')
dest.close()
def genEntry(tree, entry, filename, ccdid, dest):
entry_el = entry.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','mlhim2:')
dest.write("<rdf:Description rdf:about='xmldata/"+filename+"/"+ccdid+"/"+entry_el+"'>\n")
children = entry.getchildren()
for child in children:
if child.tag is etree.Comment:
pass
else:
el_name = child.tag.replace('{http://www.mlhim.org/ns/mlhim2/}','mlhim2:')
print("<rdf:Description rdf:about='xmldata/"+filename+tree.getpath(child)+"'>\n")
print("<rdf:type rdf:resource='"+el_name.replace('pcs-','pcm-')+"'/>\n")
dest.write("</rdf:Description>\n")
if __name__ == '__main__':
main()
print("\n\nDone! \nCreated RDF/XML files in the rdfxml directory.\n\n")
sys.exit(0)
| gpl-3.0 | 1,293,129,447,734,749,700 | 39.434783 | 123 | 0.596237 | false |
indradhanush/U1DB-ZeroMQ-Transport | zmq_transport/u1db/zmq_target.py | 1 | 14544 | """
SyncTarget API implementation to a remote ZMQ server.
"""
# Local Imports
from zmq_transport.client.zmq_client import ZMQClientBase
from zmq_transport.common.errors import UserIDNotSet
from zmq_transport.common import message_pb2 as proto
from zmq_transport.common.utils import (
serialize_msg,
create_get_sync_info_request_msg,
create_put_sync_info_request_msg,
create_send_document_request_msg,
create_get_document_request_msg,
create_all_sent_request_msg,
parse_response,
get_sync_id,
)
from u1db import (
SyncTarget,
Document
)
class ZMQSyncTarget(ZMQClientBase, SyncTarget):
"""
Implements the SyncTarget API to a remote ZMQ server.
"""
def __init__(self, endpoints):
"""
Initializes ZMQSyncTarget instance.
:param endpoints: list of endpoints. endpoints[0] is
endpoint_client_handler and endpoints[1] is
endpoint_publisher.
:type endpoints: list
"""
if isinstance(endpoints, list):
if len(endpoints) != 2:
raise ValueError("Length of endpoints must be 2.")
ZMQClientBase.__init__(self, endpoints[0], endpoints[1])
self.endpoints = endpoints
self.sync_required = False
self.sync_info = None
self.user_id = None
self.sync_id = None
self.target_last_known_generation = None
self.target_last_known_trans_id = None
self.source_current_gen = None
self.source_current_trans_id = None
self.source_last_known_generation = None
self.source_last_known_trans_id = None
else:
raise TypeError("Expected type(endpoints) list. Got %s" %
(type(endpoints)))
def _prepare_reactor(self):
"""
Overridden from zmq_transport.client.zmq_client.ZMQClientBase
Raises NotImplementedError because ZMQSyncTarget is using poller for
now.
"""
raise NotImplementedError("Target uses zmq.Poller()")
def set_user_id(self, user_id):
"""
Helper method to set the user_id.
:param user_id: The user_id of the current user.
:type user_id: str
"""
self.user_id = user_id
def release_user_id(self):
"""
Helper method to reset the user_id.
:param user_id: The user_id of the current user.
:type user_id: str
"""
self.user_id = None
def check_user_id(self):
"""
Checks if the user_id is set.
Raises zmq_transport.common.errors.UserIDNotSet exception.
"""
if not self.user_id:
raise UserIDNotSet("User ID is None.")
def start(self):
"""
Overridden from zmq_transport.client.zmq_client.ZMQClientBase
"""
self.check_user_id()
self.speaker.run()
@staticmethod
def connect(endpoints):
"""
Returns ZMQSyncTarget instance
:param endpoints: list of endpoints. endpoints[0] is
endpoint_client_handler and endpoints[1] is
endpoint_publisher
:type endpoints: list
"""
return ZMQSyncTarget(endpoints)
def get_sync_info(self, source_replica_uid):
"""
Returns the sync state information.
        :return: The target's last-known sync state with respect to the source.
:rtype: tuple
"""
# Create GetSyncInfoRequest message.
self.sync_id = get_sync_id()
get_sync_info_struct = create_get_sync_info_request_msg(
user_id=self.user_id, source_replica_uid=source_replica_uid,
sync_id=self.sync_id)
iden_get_sync_info_struct = proto.Identifier(
type=proto.Identifier.GET_SYNC_INFO_REQUEST,
get_sync_info_request=get_sync_info_struct)
str_iden_get_sync_info = serialize_msg(iden_get_sync_info_struct)
# # TODO: Wrapping sync_type and zmq_verb messages in Identifier
# # message now. Might remove. Probably not required.
# # Create SyncType message.
# sync_type_struct = create_sync_type_msg(sync_type="sync-from")
# iden_sync_type_struct = proto.Identifier(
# type=proto.Identifier.SYNC_TYPE, sync_type=sync_type_struct)
# str_iden_sync_type = serialize_msg(iden_sync_type_struct)
# # Create ZMQVerb message.
# zmq_verb_struct = create_zmq_verb_msg(verb=proto.ZMQVerb.GET)
# iden_zmq_verb_struct = proto.Identifier(type=proto.Identifier.ZMQ_VERB,
# zmq_verb=zmq_verb_struct)
# str_iden_zmq_verb = serialize_msg(iden_zmq_verb_struct)
# # Frame 1: ZMQVerb; Frame 2: SyncType; Frame 3:GetSyncInfoRequest
# to_send = [str_iden_zmq_verb, str_iden_sync_type,
# str_iden_get_sync_info]
to_send = [str_iden_get_sync_info]
self.speaker.send(to_send)
# Frame 1: GetSyncInfoResponse;
response = self.speaker.recv()[0]
response = parse_response(response, "get_sync_info_response")
return (response.target_replica_uid, response.target_replica_generation,
response.target_replica_trans_id,
response.source_last_known_generation,
response.source_last_known_trans_id)
def record_sync_info(self, source_replica_uid, source_replica_generation,
source_transaction_id):
"""
        Informs the target about the source's latest state after completion of the
sync_exchange.
:return: source_transaction_id and inserted status.
:rtype: tuple
"""
# Create PutSyncInfoRequest message.
put_sync_info_struct = create_put_sync_info_request_msg(
user_id=self.user_id,
sync_id=self.sync_id, source_replica_uid=source_replica_uid,
source_replica_generation=source_replica_generation,
source_transaction_id=source_transaction_id,
target_last_known_generation=self.target_last_known_generation,
target_last_known_trans_id=self.target_last_known_trans_id)
iden_put_sync_info_struct = proto.Identifier(
type=proto.Identifier.PUT_SYNC_INFO_REQUEST,
put_sync_info_request=put_sync_info_struct)
str_iden_put_sync_info = serialize_msg(iden_put_sync_info_struct)
to_send = [str_iden_put_sync_info]
self.speaker.send(to_send)
# Frame 1: PutSyncInfoResponse;
response = self.speaker.recv()[0]
response = parse_response(response, "put_sync_info_response")
return (response.source_transaction_id, response.inserted)
def send_doc_info(self, source_replica_uid, doc_id, doc_rev,
doc_generation, doc_content, source_generation,
source_transaction_id):
"""
After "GetSyncInfoRequest" message has been sent and
"GetSyncInfoResponse" is received, the source will now know which
documents have changed at source since the last sync. This method is
        used to send those documents one at a time to the target.
:param source_replica_uid: The uid that identifies the source db.
:type source_replica_uid: str
:param doc_id: The uid that identifies a particular document.
:type doc_id: str
:param doc_generation: Generation of the document.
:type doc_generation: int
:param doc_content: Contents of the document.
:type doc_content: str
:param source_replica_uid: The uid that identifies the source db.
:type source_replica_uid: str
:param source_generation: Generation at source.
:type source_generation: int
:param source_transaction_id: The current transaction id at source.
:type source_transaction_id: str
:return: SendDocumentResponse message.
:rtype: zmq_transport.common.message_pb2.SendDocumentResponse
"""
# Create SendDocumentRequest message.
send_doc_req_struct = create_send_document_request_msg(
user_id=self.user_id, source_replica_uid=source_replica_uid,
sync_id=self.sync_id, doc_id=doc_id, doc_rev=doc_rev,
doc_generation=doc_generation, doc_content=doc_content,
source_generation=source_generation,
source_transaction_id=str(source_transaction_id),
target_last_known_generation=self.target_last_known_generation,
target_last_known_trans_id=(self.target_last_known_trans_id))
iden_send_doc_req = proto.Identifier(
type=proto.Identifier.SEND_DOCUMENT_REQUEST,
send_document_request=send_doc_req_struct)
str_iden_send_doc_req = serialize_msg(iden_send_doc_req)
to_send = [str_iden_send_doc_req]
self.speaker.send(to_send)
# Frame 1: SendDocumentResponse;
response = self.speaker.recv()[0]
return parse_response(response, "send_document_response")
def get_doc_at_target(self, source_replica_uid, doc_info, docs_received_count):
"""
Sends a GetDocumentRequest to target to receive documents that
were changed at the target replica.
:param source_replica_uid: The identifier of the source replica.
:type source_replica_uid: str
:param doc_info: Doc ID, generation and transaction ID of the
document to be requested.
:type doc_info: tuple
:param docs_received_count: Total count of docs received.
:type docs_received_count: int
:return: A document from the target.
:rtype: dict
"""
# Create GetDocumentRequest message.
get_doc_req_struct = create_get_document_request_msg(
user_id=self.user_id, source_replica_uid=source_replica_uid,
sync_id=self.sync_id, doc_id=doc_info[0], doc_generation=doc_info[1],
trans_id=doc_info[2], docs_received_count=docs_received_count,
target_last_known_generation=self.target_last_known_generation,
target_last_known_trans_id=self.target_last_known_trans_id)
iden_get_doc_req = proto.Identifier(
type=proto.Identifier.GET_DOCUMENT_REQUEST,
get_document_request=get_doc_req_struct)
str_iden_get_doc_req = serialize_msg(iden_get_doc_req)
# Frame 1: GetDocumentRequest
to_send = [str_iden_get_doc_req]
self.speaker.send(to_send)
# Frame 1: GetDocumentResponse
response = self.speaker.recv()[0]
response = parse_response(response, "get_document_response")
return {"doc_id": response.doc_id,
"doc_rev": response.doc_rev,
"doc_generation":response.doc_generation,
"doc_content": response.doc_content,
"target_generation": response.target_generation,
"target_trans_id": response.target_trans_id}
def sync_exchange(self, docs_by_generation, source_replica_uid,
return_doc_cb, ensure_callback=None):
"""
Send docs changed at source one at a time, and then incorporate docs
changed at target.
:param docs_by_generation: A list of documents sorted by generation to
send to target.
:type docs_by_generation: list
:param source_replica_uid: Unique identifier of replica at source.
:type source_replica_uid: str
:param return_doc_cb: Callback method to invoke when a document is
received.
:type return_doc_cb: method
:param ensure_callback: Callback for tests.
:type ensure_callback: method.
:return: Latest transaction generation and id of target.
:rtype: tuple
"""
# Send docs changed at source.
for doc, gen, trans_id in docs_by_generation:
send_doc_resp_struct = self.send_doc_info(
source_replica_uid, doc.doc_id, doc.rev, gen,
doc.get_json(), self.source_current_gen,
trans_id)
if not send_doc_resp_struct.inserted:
# TODO: Maybe retry? or Report?
pass
# Intermediate PING-ACK. Also gets notified about incoming
# docs beforehand.
all_sent_req_struct = create_all_sent_request_msg(
user_id=self.user_id, sync_id=self.sync_id,
source_replica_uid=source_replica_uid,
total_docs_sent=len(docs_by_generation), all_sent=True,
target_last_known_generation=self.target_last_known_generation,
target_last_known_trans_id=self.target_last_known_trans_id)
iden_all_sent_req = proto.Identifier(
type=proto.Identifier.ALL_SENT_REQUEST,
all_sent_request=all_sent_req_struct)
str_iden_all_sent_req = serialize_msg(iden_all_sent_req)
# Frame 1: AllSentRequest
self.speaker.send([str_iden_all_sent_req])
# Frame 1: AllSentResponse
response = self.speaker.recv()[0]
all_sent_resp_struct = parse_response(response, "all_sent_response")
# TODO: What to do with all_sent_resp_struct.doc_info[] ; Maybe request
# for each doc by id?
# List of DocInfo messsage structs.
docs_list = all_sent_resp_struct.doc_info[:]
docs_to_receive = len(docs_list)
docs_received = 0
for doc_info_struct in docs_list:
doc_info = (doc_info_struct.doc_id, doc_info_struct.doc_generation,
doc_info_struct.trans_id)
doc_recvd = self.get_doc_at_target(
source_replica_uid, doc_info, docs_received)
if doc_recvd.get("doc_id"):
docs_received += 1
docs_to_receive -= 1
doc = Document(doc_recvd["doc_id"], doc_recvd["doc_rev"],
doc_recvd["doc_content"])
return_doc_cb(doc, doc_recvd["doc_generation"],
doc_recvd["target_generation"])
self.target_last_known_generation =\
all_sent_resp_struct.target_generation
self.target_last_known_trans_id = all_sent_resp_struct.target_trans_id
return all_sent_resp_struct.target_generation,\
all_sent_resp_struct.target_trans_id
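    # Usage sketch for sync_exchange() above, illustrative only (the names
    # `changed_docs`, `target` and `insert_doc_from_target` are hypothetical,
    # not defined in this module):
    #
    #   new_gen, new_trans = target.sync_exchange(
    #       changed_docs, source_replica_uid,
    #       return_doc_cb=insert_doc_from_target)
    #
    # where the callback stores each document received from the target locally.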
def close(self):
pass
| gpl-2.0 | -3,582,001,112,336,405,500 | 40.084746 | 83 | 0.611386 | false |
Tiglas/pickup-planner | request_handler/request_handler.py | 1 | 2210 | #!flask/bin/python
from flask import Flask, jsonify, abort
from flask import make_response
from flask import request
from flask import url_for
import psycopg2 as pg
app = Flask(__name__)
def make_public_request(request):
new_request = {}
new_request['uri'] = url_for('get_requests', request_id=request[0], _external=True)
new_request['source'] = request[1]
new_request['destination'] = request[2]
return new_request
@app.route('/clientapp/requests', methods=['GET'])
def get_requests():
''' Get requests from the database
'''
conn = pg.connect(database="ngot", host="127.0.0.1", port="5432")
cursor = conn.cursor()
cursor.execute("SELECT request_id, source, destination FROM requests")
rows = list(cursor.fetchall())
cursor.close()
conn.close()
return jsonify({'requests': [make_public_request(req) for req in rows]})
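# Example call against this endpoint (assuming the Flask development server
# started by the __main__ block below, listening on the default port 5000):
#   curl http://localhost:5000/clientapp/requests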
@app.route('/clientapp/vehicle_trips', methods=['GET'])
def get_vehicle_trips():
''' Query the database and return generated vehicle trips
'''
conn = pg.connect(database="ngot", host="127.0.0.1", port="5432")
cursor = conn.cursor()
pg.extensions.register_type(
pg.extensions.new_array_type(
(1017,), 'PICKUP_POINTS[]', pg.STRING))
cursor.execute("SELECT pickup_points FROM vehicletrips")
rows = cursor.fetchone()
cursor.close()
conn.close()
return jsonify({'vehicle_trips': rows})
@app.route('/clientapp/requests', methods=['POST'])
def create_request():
#if not request.json in request.json:
#abort(404)
conn = pg.connect(database="ngot", host="127.0.0.1", port="5432")
cursor = conn.cursor()
#request_id = request.json['request_id']
source = request.json['source']
destination = request.json['destination']
cursor.execute("INSERT INTO requests (source, destination) VALUES (%s, %s)", (source, destination))
rows = cursor.rowcount
conn.commit()
cursor.close()
conn.close()
return jsonify({'rows': rows}), 201
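# Example call (same assumed development server on port 5000; the JSON keys
# must match the fields read above):
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"source": "A", "destination": "B"}' \
#        http://localhost:5000/clientapp/requests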
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}), 404)
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
#app.run(debug=True)
| mit | 5,616,394,711,720,372,000 | 29.273973 | 103 | 0.654299 | false |
pointhi/searx | tests/unit/engines/test_yahoo.py | 1 | 5765 | # -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import yahoo
from searx.testing import SearxTestCase
class TestYahooEngine(SearxTestCase):
def test_parse_url(self):
test_url = 'http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
'2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=' +\
'dtcJsfP4mEeBOjnVfUQ-'
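        # The real result URL travels in the /RU=<percent-encoded url>/ segment
        # of Yahoo's redirect URL; parse_url() is expected to extract and
        # unquote it, which is what the assertions below verify.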
url = yahoo.parse_url(test_url)
self.assertEqual('https://this.is.the.url/', url)
test_url = 'http://r.search.yahoo.com/_ylt=A0LElb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
'2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RS=' +\
'dtcJsfP4mEeBOjnVfUQ-'
url = yahoo.parse_url(test_url)
self.assertEqual('https://this.is.the.url/', url)
test_url = 'https://this.is.the.url/'
url = yahoo.parse_url(test_url)
self.assertEqual('https://this.is.the.url/', url)
def test_request(self):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 1
dicto['time_range'] = ''
dicto['language'] = 'fr_FR'
params = yahoo.request(query, dicto)
self.assertIn('url', params)
self.assertIn(query, params['url'])
self.assertIn('search.yahoo.com', params['url'])
self.assertIn('fr', params['url'])
self.assertIn('cookies', params)
self.assertIn('sB', params['cookies'])
self.assertIn('fr', params['cookies']['sB'])
dicto['language'] = 'all'
params = yahoo.request(query, dicto)
self.assertIn('cookies', params)
self.assertIn('sB', params['cookies'])
self.assertIn('en', params['cookies']['sB'])
self.assertIn('en', params['url'])
def test_response(self):
self.assertRaises(AttributeError, yahoo.response, None)
self.assertRaises(AttributeError, yahoo.response, [])
self.assertRaises(AttributeError, yahoo.response, '')
self.assertRaises(AttributeError, yahoo.response, '[]')
response = mock.Mock(text='<html></html>')
self.assertEqual(yahoo.response(response), [])
html = """
<ol class="reg mb-15 searchCenterMiddle">
<li class="first">
<div class="dd algo fst Sr">
<div class="compTitle">
<h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
target="_blank" data-bid="54e712e13671c">
<b><b>This is the title</b></b></a>
</h3>
</div>
<div class="compText aAbs">
<p class="lh-18"><b><b>This is the </b>content</b>
</p>
</div>
</div>
</li>
<li>
<div class="dd algo lst Sr">
<div class="compTitle">
</div>
<div class="compText aAbs">
<p class="lh-18">This is the second content</p>
</div>
</div>
</li>
</ol>
<div class="dd assist fst lst AlsoTry" data-bid="54e712e138d04">
<div class="compTitle mb-4 h-17">
<h3 class="title">Also Try</h3> </div>
<table class="compTable m-0 ac-1st td-u fz-ms">
<tbody>
<tr>
<td class="w-50p pr-28"><a href="https://search.yahoo.com/"><B>This is the </B>suggestion<B></B></a>
</td>
</tr>
</table>
</div>
"""
response = mock.Mock(text=html)
results = yahoo.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 2)
self.assertEqual(results[0]['title'], 'This is the title')
self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
self.assertEqual(results[0]['content'], 'This is the content')
self.assertEqual(results[1]['suggestion'], 'This is the suggestion')
html = """
<ol class="reg mb-15 searchCenterMiddle">
<li class="first">
<div class="dd algo fst Sr">
<div class="compTitle">
<h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
target="_blank" data-bid="54e712e13671c">
<b><b>This is the title</b></b></a>
</h3>
</div>
<div class="compText aAbs">
<p class="lh-18"><b><b>This is the </b>content</b>
</p>
</div>
</div>
</li>
</ol>
"""
response = mock.Mock(text=html)
results = yahoo.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['title'], 'This is the title')
self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
self.assertEqual(results[0]['content'], 'This is the content')
html = """
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
</li>
"""
response = mock.Mock(text=html)
results = yahoo.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 0)
| agpl-3.0 | 6,518,948,343,053,568,000 | 39.598592 | 119 | 0.566522 | false |
ppjk1/conference-central | models.py | 1 | 6153 | #!/usr/bin/env python
"""models.py
Udacity conference server-side Python App Engine data & ProtoRPC models
$Id: models.py,v 1.1 2014/05/24 22:01:10 wesc Exp $
created/forked from conferences.py by wesc on 2014 may 24
"""
__author__ = '[email protected] (Wesley Chun)'
import httplib
import endpoints
from protorpc import messages
from google.appengine.ext import ndb
class ConflictException(endpoints.ServiceException):
"""ConflictException -- exception mapped to HTTP 409 response"""
http_status = httplib.CONFLICT
class StringMessage(messages.Message):
"""StringMessage -- outbound (single) string message"""
data = messages.StringField(1, required=True)
class BooleanMessage(messages.Message):
"""BooleanMessage -- outbound Boolean value message"""
data = messages.BooleanField(1)
class Profile(ndb.Model):
"""Profile -- User profile object"""
displayName = ndb.StringProperty()
mainEmail = ndb.StringProperty()
teeShirtSize = ndb.StringProperty(default='NOT_SPECIFIED')
conferenceKeysToAttend = ndb.StringProperty(repeated=True)
sessionWishlistKeys = ndb.StringProperty(repeated=True)
class ProfileMiniForm(messages.Message):
"""ProfileMiniForm -- update Profile form message"""
displayName = messages.StringField(1)
teeShirtSize = messages.EnumField('TeeShirtSize', 2)
class ProfileForm(messages.Message):
"""ProfileForm -- Profile outbound form message"""
displayName = messages.StringField(1)
mainEmail = messages.StringField(2)
teeShirtSize = messages.EnumField('TeeShirtSize', 3)
conferenceKeysToAttend = messages.StringField(4, repeated=True)
sessionWishlistKeys = messages.StringField(5, repeated=True)
class TeeShirtSize(messages.Enum):
"""TeeShirtSize -- t-shirt size enumeration value"""
NOT_SPECIFIED = 1
XS_M = 2
XS_W = 3
S_M = 4
S_W = 5
M_M = 6
M_W = 7
L_M = 8
L_W = 9
XL_M = 10
XL_W = 11
XXL_M = 12
XXL_W = 13
XXXL_M = 14
XXXL_W = 15
class Conference(ndb.Model):
"""Conference -- Conference object"""
name = ndb.StringProperty(required=True)
description = ndb.StringProperty()
organizerUserId = ndb.StringProperty()
topics = ndb.StringProperty(repeated=True)
city = ndb.StringProperty()
startDate = ndb.DateProperty()
month = ndb.IntegerProperty()
endDate = ndb.DateProperty()
maxAttendees = ndb.IntegerProperty()
seatsAvailable = ndb.IntegerProperty()
class ConferenceForm(messages.Message):
"""ConferenceForm -- Conference outbound form message"""
name = messages.StringField(1)
description = messages.StringField(2)
organizerUserId = messages.StringField(3)
topics = messages.StringField(4, repeated=True)
city = messages.StringField(5)
startDate = messages.StringField(6) #DateTimeField()
month = messages.IntegerField(7)
maxAttendees = messages.IntegerField(8)
seatsAvailable = messages.IntegerField(9)
endDate = messages.StringField(10) #DateTimeField()
websafeKey = messages.StringField(11)
organizerDisplayName = messages.StringField(12)
class ConferenceForms(messages.Message):
"""ConferenceForms -- multiple Conference outbound form message"""
items = messages.MessageField(ConferenceForm, 1, repeated=True)
class ConferenceQueryForm(messages.Message):
"""ConferenceQueryForm -- Conference query inbound form message"""
field = messages.StringField(1)
operator = messages.StringField(2)
value = messages.StringField(3)
class ConferenceQueryForms(messages.Message):
"""ConferenceQueryForms -- multiple ConferenceQueryForm inbound form message"""
filters = messages.MessageField(ConferenceQueryForm, 1, repeated=True)
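# Illustrative only: serialised as JSON, a ConferenceQueryForms payload could
# look like {"filters": [{"field": "CITY", "operator": "EQ", "value": "Paris"}]};
# the accepted field/operator spellings are defined by the API code that
# consumes these messages, not by this module.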
class Speaker(ndb.Model):
"""Speaker -- Speaker object"""
name = ndb.StringProperty(required=True)
bio = ndb.TextProperty()
organization = ndb.StringProperty()
class SpeakerForm(messages.Message):
"""SpeakerForm -- Speaker outbound form message"""
name = messages.StringField(1)
bio = messages.StringField(2)
organization = messages.StringField(3)
websafeKey = messages.StringField(5)
class SpeakerForms(messages.Message):
"""SpeakerForms -- multiple Speaker outbound form message"""
speakers = messages.MessageField(SpeakerForm, 1, repeated=True)
class Session(ndb.Model):
"""Session -- Session object"""
name = ndb.StringProperty(required=True)
highlights = ndb.TextProperty()
speakerKeys = ndb.StringProperty(repeated=True)
duration = ndb.StringProperty()
typeOfSession = ndb.StringProperty(default='NOT_SPECIFIED')
date = ndb.DateProperty()
startTime = ndb.IntegerProperty()
websafeConferenceKey = ndb.StringProperty(required=True)
class SessionForm(messages.Message):
"""SessionForm -- Session outbound form message"""
name = messages.StringField(1)
highlights = messages.StringField(2)
speakerKeys = messages.StringField(3, repeated=True)
duration = messages.StringField(4)
typeOfSession = messages.EnumField('TypeOfSession', 5)
date = messages.StringField(7)
startTime = messages.StringField(8)
websafeKey = messages.StringField(9)
websafeConferenceKey = messages.StringField(10)
class SessionForms(messages.Message):
"""SessionForms -- multiple Session outbound form message"""
sessions = messages.MessageField(SessionForm, 1, repeated=True)
class SessionMiniHardForm(messages.Message):
"""SessionMiniHardForm -- Session query inbound form message"""
notTypeOfSession = messages.StringField(1)
beforeTime = messages.StringField(2)
class TypeOfSession(messages.Enum):
"""TypeOfSession -- session type enumeration value"""
NOT_SPECIFIED = 1
KEYNOTE = 2
LECTURE = 3
DEMONSTRATION = 4
PANEL = 5
WORKSHOP = 6
ROUNDTABLE = 7
| apache-2.0 | -4,351,080,315,380,698,000 | 31.384211 | 83 | 0.685357 | false |
asmundg/coherence | misc/Rhythmbox-Plugin/upnp_coherence/__init__.py | 5 | 18399 | # -*- Mode: python; coding: utf-8; tab-width: 8; indent-tabs-mode: t; -*-
#
# Copyright 2011, Caleb Callaway <[email protected]>
# Copyright 2008-2010, Frank Scholz <[email protected]>
# Copyright 2008, James Livingston <[email protected]>
#
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
import rhythmdb, rb
import gobject
gobject.threads_init()
import gconf
import coherence.extern.louie as louie
from coherence import log
from CoherenceDBEntryType import CoherenceDBEntryType
# for the icon
import os.path, urllib, gio, gtk.gdk
# the gconf configuration
gconf_keys = {
'port': "/apps/rhythmbox/plugins/coherence/port",
'interface': "/apps/rhythmbox/plugins/coherence/interface",
# DMS
'dms_uuid': "/apps/rhythmbox/plugins/coherence/dms/uuid",
'dms_active': "/apps/rhythmbox/plugins/coherence/dms/active",
'dms_version': "/apps/rhythmbox/plugins/coherence/dms/version",
'dms_name': "/apps/rhythmbox/plugins/coherence/dms/name",
# DMR
'dmr_uuid': "/apps/rhythmbox/plugins/coherence/dmr/uuid",
'dmr_active': "/apps/rhythmbox/plugins/coherence/dmr/active",
'dmr_version': "/apps/rhythmbox/plugins/coherence/dmr/version",
'dmr_name': "/apps/rhythmbox/plugins/coherence/dmr/name",
# DMC
'dmc_active': "/apps/rhythmbox/plugins/coherence/dmc/active",
}
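# Illustrative only: these keys could also be seeded outside Rhythmbox, e.g.
#   gconftool-2 --set /apps/rhythmbox/plugins/coherence/port --type int 30020
# (assumed invocation; any gconf editor works as well).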
class CoherencePlugin(rb.Plugin, log.Loggable):
logCategory = 'rb_coherence_plugin'
def __init__(self):
rb.Plugin.__init__(self)
self.coherence = None
self.config = gconf.client_get_default()
if self.config.get(gconf_keys['dmc_active']) is None:
# key not yet found represented by "None"
self._set_defaults()
def _set_defaults(self):
for a in ('r', 's'):
self.config.set_bool(gconf_keys['dm%s_active' % a], True)
self.config.set_int(gconf_keys['dm%s_version' % a], 2)
self.config.set_string(gconf_keys['dmr_name'], "Rhythmbox UPnP MediaRenderer on {host}")
self.config.set_string(gconf_keys['dms_name'], "Rhythmbox UPnP MediaServer on {host}")
self.config.set_bool(gconf_keys['dmc_active'], True)
def activate(self, shell):
from twisted.internet import gtk2reactor
try:
gtk2reactor.install()
except AssertionError, e:
# sometimes it's already installed
self.warning("gtk2reactor already installed %r" % e)
self.coherence = self.get_coherence()
if self.coherence is None:
self.warning("Coherence is not installed or too old, aborting")
return
self.warning("Coherence UPnP plugin activated")
self.shell = shell
self.sources = {}
# Set up our icon
the_icon = None
face_path = os.path.join(os.path.expanduser('~'), ".face")
if os.path.exists(face_path):
file = gio.File(path=face_path);
url = file.get_uri();
info = file.query_info("standard::fast-content-type");
mimetype = info.get_attribute_as_string("standard::fast-content-type");
pixbuf = gtk.gdk.pixbuf_new_from_file(face_path)
width = "%s" % pixbuf.get_width()
height = "%s" % pixbuf.get_height()
depth = '24'
the_icon = {
'url': url,
'mimetype': mimetype,
'width': width,
'height': height,
'depth': depth
}
if self.config.get_bool(gconf_keys['dms_active']):
# create our own media server
from coherence.upnp.devices.media_server import MediaServer
from MediaStore import MediaStore
kwargs = {
'version': self.config.get_int(gconf_keys['dms_version']),
'no_thread_needed': True,
'db': self.shell.props.db,
'plugin': self}
if the_icon:
kwargs['icon'] = the_icon
dms_uuid = self.config.get_string(gconf_keys['dms_uuid'])
if dms_uuid:
kwargs['uuid'] = dms_uuid
name = self.config.get_string(gconf_keys['dms_name'])
if name:
name = name.replace('{host}',self.coherence.hostname)
kwargs['name'] = name
self.server = MediaServer(self.coherence, MediaStore, **kwargs)
if dms_uuid is None:
self.config.set_string(gconf_keys['dms_uuid'], str(self.server.uuid))
self.warning("Media Store available with UUID %s" % str(self.server.uuid))
if self.config.get_bool(gconf_keys['dmr_active']):
# create our own media renderer
# but only if we have a matching Coherence package installed
if self.coherence_version < (0, 5, 2):
print "activation failed. Coherence is older than version 0.5.2"
else:
from coherence.upnp.devices.media_renderer import MediaRenderer
from MediaPlayer import RhythmboxPlayer
kwargs = {
"version": self.config.get_int(gconf_keys['dmr_version']),
"no_thread_needed": True,
"shell": self.shell,
'dmr_uuid': gconf_keys['dmr_uuid']
}
if the_icon:
kwargs['icon'] = the_icon
dmr_uuid = self.config.get_string(gconf_keys['dmr_uuid'])
if dmr_uuid:
kwargs['uuid'] = dmr_uuid
name = self.config.get_string(gconf_keys['dmr_name'])
if name:
name = name.replace('{host}',self.coherence.hostname)
kwargs['name'] = name
self.renderer = MediaRenderer(self.coherence,
RhythmboxPlayer, **kwargs)
if dmr_uuid is None:
self.config.set_string(gconf_keys['dmr_uuid'], str(self.renderer.uuid))
self.warning("Media Renderer available with UUID %s" % str(self.renderer.uuid))
if self.config.get_bool(gconf_keys['dmc_active']):
self.warning("start looking for media servers")
# watch for media servers
louie.connect(self.detected_media_server,
'Coherence.UPnP.ControlPoint.MediaServer.detected',
louie.Any)
louie.connect(self.removed_media_server,
'Coherence.UPnP.ControlPoint.MediaServer.removed',
louie.Any)
def deactivate(self, shell):
self.info("Coherence UPnP plugin deactivated")
if self.coherence is None:
return
self.coherence.shutdown()
try:
louie.disconnect(self.detected_media_server,
'Coherence.UPnP.ControlPoint.MediaServer.detected',
louie.Any)
except louie.error.DispatcherKeyError:
pass
try:
louie.disconnect(self.removed_media_server,
'Coherence.UPnP.ControlPoint.MediaServer.removed',
louie.Any)
except louie.error.DispatcherKeyError:
pass
del self.shell
del self.coherence
for usn, source in self.sources.iteritems():
source.delete_thyself()
del self.sources
# uninstall twisted reactor? probably not, since other things may have used it
def get_coherence (self):
coherence_instance = None
required_version = (0, 5, 7)
try:
from coherence.base import Coherence
from coherence import __version_info__
except ImportError, e:
print "Coherence not found"
return None
if __version_info__ < required_version:
required = '.'.join([str(i) for i in required_version])
found = '.'.join([str(i) for i in __version_info__])
print "Coherence %s required. %s found. Please upgrade" % (required, found)
return None
self.coherence_version = __version_info__
coherence_config = {
#'logmode': 'info',
'controlpoint': 'yes',
'plugins': {},
}
serverport = self.config.get_int(gconf_keys['port'])
if serverport:
coherence_config['serverport'] = serverport
interface = self.config.get_string(gconf_keys['interface'])
if interface:
coherence_config['interface'] = interface
coherence_instance = Coherence(coherence_config)
return coherence_instance
def removed_media_server(self, udn):
self.info("upnp server went away %s" % udn)
if self.sources.has_key(udn):
self.sources[udn].delete_thyself()
del self.sources[udn]
def detected_media_server(self, client, udn):
self.info("found upnp server %s (%s)" % (client.device.get_friendly_name(), udn))
""" don't react on our own MediaServer"""
if hasattr(self, 'server') and client.device.get_id() == str(self.server.uuid):
return
db = self.shell.props.db
group = rb.rb_display_page_group_get_by_id ("shared")
entry_type = CoherenceDBEntryType(client.device.get_id()[5:])
db.register_entry_type(entry_type)
from UpnpSource import UpnpSource
source = gobject.new (UpnpSource,
shell=self.shell,
entry_type=entry_type,
plugin=self,
client=client,
udn=udn)
self.sources[udn] = source
self.shell.append_display_page (source, group)
def create_configure_dialog(self, dialog=None):
if dialog is None:
def store_config(dialog,port_spinner,interface_entry,
dms_check,dms_name_entry,dms_version_entry,dms_uuid_entry,
dmr_check,dmr_name_entry,dmr_version_entry,dmr_uuid_entry,
dmc_check):
port = port_spinner.get_value_as_int()
self.config.set_int(gconf_keys['port'],port)
interface = interface_entry.get_text()
if len(interface) != 0:
self.config.set_string(gconf_keys['interface'],interface)
self.config.set_bool(gconf_keys['dms_active'],dms_check.get_active())
self.config.set_string(gconf_keys['dms_name'],dms_name_entry.get_text())
self.config.set_int(gconf_keys['dms_version'],int(dms_version_entry.get_active_text()))
self.config.set_string(gconf_keys['dms_uuid'],dms_uuid_entry.get_text())
self.config.set_bool(gconf_keys['dmr_active'],dmr_check.get_active())
self.config.set_string(gconf_keys['dmr_name'],dmr_name_entry.get_text())
self.config.set_int(gconf_keys['dmr_version'],int(dmr_version_entry.get_active_text()))
self.config.set_string(gconf_keys['dmr_uuid'],dmr_uuid_entry.get_text())
self.config.set_bool(gconf_keys['dmc_active'],dmc_check.get_active())
dialog.hide()
dialog = gtk.Dialog(title='DLNA/UPnP Configuration',
parent=None,flags=0,buttons=None)
dialog.set_default_size(500,350)
table = gtk.Table(rows=2, columns=2, homogeneous=False)
dialog.vbox.pack_start(table, False, False, 0)
label = gtk.Label("Network Interface:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 0, 1)
interface_entry = gtk.Entry()
interface_entry.set_max_length(16)
if self.config.get_string(gconf_keys['interface']) != None:
interface_entry.set_text(self.config.get_string(gconf_keys['interface']))
else:
interface_entry.set_text('')
table.attach(interface_entry, 1, 2, 0, 1,
xoptions=gtk.FILL|gtk.EXPAND,yoptions=gtk.FILL|gtk.EXPAND,xpadding=5,ypadding=5)
label = gtk.Label("Port:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 1, 2)
value = 0
if self.config.get_int(gconf_keys['port']) != None:
value = self.config.get_int(gconf_keys['port'])
adj = gtk.Adjustment(value, 0, 65535, 1, 100, 0)
port_spinner = gtk.SpinButton(adj, 0, 0)
port_spinner.set_wrap(True)
port_spinner.set_numeric(True)
table.attach(port_spinner, 1, 2, 1, 2,
xoptions=gtk.FILL|gtk.EXPAND,yoptions=gtk.FILL|gtk.EXPAND,xpadding=5,ypadding=5)
frame = gtk.Frame('MediaServer')
dialog.vbox.add(frame)
vbox = gtk.VBox(False, 0)
vbox.set_border_width(5)
frame.add(vbox)
table = gtk.Table(rows=4, columns=2, homogeneous=True)
vbox.pack_start(table, False, False, 0)
label = gtk.Label("enabled:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 0, 1)
dms_check = gtk.CheckButton()
dms_check.set_active(self.config.get_bool(gconf_keys['dms_active']))
table.attach(dms_check, 1, 2, 0, 1)
label = gtk.Label("Name:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 1, 2)
dms_name_entry = gtk.Entry()
if self.config.get_string(gconf_keys['dms_name']) != None:
dms_name_entry.set_text(self.config.get_string(gconf_keys['dms_name']))
else:
dms_name_entry.set_text('')
table.attach(dms_name_entry, 1, 2, 1, 2)
label = gtk.Label("UPnP version:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 3, 4)
dms_version_entry = gtk.combo_box_new_text()
dms_version_entry.insert_text(0,'2')
dms_version_entry.insert_text(1,'1')
dms_version_entry.set_active(0)
if self.config.get_int(gconf_keys['dms_version']) != None:
if self.config.get_int(gconf_keys['dms_version']) == 1:
dms_version_entry.set_active(1)
table.attach(dms_version_entry, 1, 2, 3, 4)
label = gtk.Label("UUID:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 2, 3)
dms_uuid_entry = gtk.Entry()
if self.config.get_string(gconf_keys['dms_uuid']) != None:
dms_uuid_entry.set_text(self.config.get_string(gconf_keys['dms_uuid']))
else:
dms_uuid_entry.set_text('')
table.attach(dms_uuid_entry, 1, 2, 2, 3)
frame = gtk.Frame('MediaRenderer')
dialog.vbox.add(frame)
vbox = gtk.VBox(False, 0)
vbox.set_border_width(5)
frame.add(vbox)
table = gtk.Table(rows=4, columns=2, homogeneous=True)
vbox.pack_start(table, False, False, 0)
label = gtk.Label("enabled:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 0, 1)
dmr_check = gtk.CheckButton()
dmr_check.set_active(self.config.get_bool(gconf_keys['dmr_active']))
table.attach(dmr_check, 1, 2, 0, 1)
label = gtk.Label("Name:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 1, 2)
dmr_name_entry = gtk.Entry()
if self.config.get_string(gconf_keys['dmr_name']) != None:
dmr_name_entry.set_text(self.config.get_string(gconf_keys['dmr_name']))
else:
dmr_name_entry.set_text('')
table.attach(dmr_name_entry, 1, 2, 1, 2)
label = gtk.Label("UPnP version:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 3, 4)
dmr_version_entry = gtk.combo_box_new_text()
dmr_version_entry.insert_text(0,'2')
dmr_version_entry.insert_text(1,'1')
dmr_version_entry.set_active(0)
if self.config.get_int(gconf_keys['dmr_version']) != None:
if self.config.get_int(gconf_keys['dmr_version']) == 1:
dmr_version_entry.set_active(1)
table.attach(dmr_version_entry, 1, 2, 3, 4)
label = gtk.Label("UUID:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 2, 3)
dmr_uuid_entry = gtk.Entry()
if self.config.get_string(gconf_keys['dmr_uuid']) != None:
dmr_uuid_entry.set_text(self.config.get_string(gconf_keys['dmr_uuid']))
else:
dmr_uuid_entry.set_text('')
table.attach(dmr_uuid_entry, 1, 2, 2, 3)
frame = gtk.Frame('MediaClient')
dialog.vbox.add(frame)
vbox = gtk.VBox(False, 0)
vbox.set_border_width(5)
frame.add(vbox)
table = gtk.Table(rows=1, columns=2, homogeneous=True)
vbox.pack_start(table, False, False, 0)
label = gtk.Label("enabled:")
label.set_alignment(0,0.5)
table.attach(label, 0, 1, 0, 1)
dmc_check = gtk.CheckButton()
dmc_check.set_active(self.config.get_bool(gconf_keys['dmc_active']))
table.attach(dmc_check, 1, 2, 0, 1)
button = gtk.Button(stock=gtk.STOCK_CANCEL)
dialog.action_area.pack_start(button, True, True, 5)
button.connect("clicked", lambda w: dialog.hide())
button = gtk.Button(stock=gtk.STOCK_OK)
button.connect("clicked", lambda w: store_config(dialog,port_spinner,interface_entry,
dms_check,dms_name_entry,dms_version_entry,dms_uuid_entry,
dmr_check,dmr_name_entry,dmr_version_entry,dmr_uuid_entry,
dmc_check))
dialog.action_area.pack_start(button, True, True, 5)
dialog.show_all()
dialog.present()
return dialog
| mit | -5,631,350,750,587,948,000 | 38.911063 | 119 | 0.552856 | false |
bbc/kamaelia | Sketches/MH/Layout/Visualisation/Axon/PComponent.py | 3 | 3060 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
import pygame
from Visualisation.Graph import BaseParticle
from pygame.locals import *
_COMPONENT_RADIUS = 32
def abbreviate(string):
"""Abbreviates strings to capitals, word starts and numerics and underscores"""
out = ""
prev = ""
for c in string:
if c.isupper() or c.isdigit() or c == "_" or c == "." or (c.isalpha() and not prev.isalpha()):
out += c.upper()
prev = c
return out
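# For example, abbreviate("PassThroughComponent_1") yields "PTC_1": capitals,
# digits, underscores, dots and word-starting letters are kept (uppercased).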
class PComponent(BaseParticle):
def __init__(self, ID, position, name):
super(PComponent,self).__init__(position=position, ID = ID )
self.name = name
self.ptype = "component"
self.shortname = abbreviate(name)
self.left = 0
self.top = 0
self.selected = False
font = pygame.font.Font(None, 20)
self.slabel = font.render(self.shortname, True, (0,0,0))
self.slabelxo = - self.slabel.get_width()/2
self.slabelyo = - self.slabel.get_height()/2
self.radius = _COMPONENT_RADIUS
self.desclabel = font.render("Component "+self.shortname+" : "+self.name, True, (0,0,0), (255,255,255))
def render(self, surface):
x = int(self.pos[0] - self.left)
y = int(self.pos[1] - self.top )
yield 1
for p in self.bondedTo:
px = int(p.pos[0] - self.left)
py = int(p.pos[1] - self.top )
pygame.draw.line(surface, (192,192,192), (x,y), (px,py))
yield 2
colour = (192,192,192)
if self.selected:
colour = (160,160,255)
pygame.draw.circle(surface, colour, (x,y), self.radius)
surface.blit(self.slabel, ( x+self.slabelxo, y+self.slabelyo ) )
if self.selected:
yield 10
surface.blit(self.desclabel, (72,16) )
def setOffset( self, (x,y) ):
self.left = x
self.top = y
def select( self ):
"""Tell this particle it is selected"""
self.selected = True
def deselect( self ):
"""Tell this particle it is selected"""
self.selected = False
| apache-2.0 | 7,623,455,718,069,045,000 | 33 | 111 | 0.583333 | false |
tanglei528/nova | nova/tests/api/openstack/compute/contrib/test_hosts.py | 13 | 17166 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import testtools
import webob.exc
from nova.api.openstack.compute.contrib import hosts as os_hosts
from nova.compute import power_state
from nova.compute import vm_states
from nova import context as context_maker
from nova import db
from nova import exception
from nova import test
from nova.tests import fake_hosts
from nova.tests import utils
def stub_service_get_all(context, disabled=None):
return fake_hosts.SERVICES_LIST
def stub_service_get_by_host_and_topic(context, host_name, topic):
for service in stub_service_get_all(context):
if service['host'] == host_name and service['topic'] == topic:
return service
def stub_set_host_enabled(context, host_name, enabled):
"""Simulates three possible behaviours for VM drivers or compute
drivers when enabling or disabling a host.
'enabled' means new instances can go to this host
'disabled' means they can't
"""
results = {True: "enabled", False: "disabled"}
if host_name == "notimplemented":
# The vm driver for this host doesn't support this feature
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
elif host_name == "host_c2":
# Simulate a failure
return results[not enabled]
else:
# Do the right thing
return results[enabled]
def stub_set_host_maintenance(context, host_name, mode):
# We'll simulate success and failure by assuming
# that 'host_c1' always succeeds, and 'host_c2'
# always fails
results = {True: "on_maintenance", False: "off_maintenance"}
if host_name == "notimplemented":
# The vm driver for this host doesn't support this feature
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
elif host_name == "host_c2":
# Simulate a failure
return results[not mode]
else:
# Do the right thing
return results[mode]
def stub_host_power_action(context, host_name, action):
if host_name == "notimplemented":
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
return action
def _create_instance(**kwargs):
"""Create a test instance."""
ctxt = context_maker.get_admin_context()
return db.instance_create(ctxt, _create_instance_dict(**kwargs))
def _create_instance_dict(**kwargs):
"""Create a dictionary for a test instance."""
inst = {}
inst['image_ref'] = 'cedef40a-ed67-4d10-800e-17455edce175'
inst['reservation_id'] = 'r-fakeres'
inst['user_id'] = kwargs.get('user_id', 'admin')
inst['project_id'] = kwargs.get('project_id', 'fake')
inst['instance_type_id'] = '1'
if 'host' in kwargs:
inst['host'] = kwargs.get('host')
inst['vcpus'] = kwargs.get('vcpus', 1)
inst['memory_mb'] = kwargs.get('memory_mb', 20)
inst['root_gb'] = kwargs.get('root_gb', 30)
inst['ephemeral_gb'] = kwargs.get('ephemeral_gb', 30)
inst['vm_state'] = kwargs.get('vm_state', vm_states.ACTIVE)
inst['power_state'] = kwargs.get('power_state', power_state.RUNNING)
inst['task_state'] = kwargs.get('task_state', None)
inst['availability_zone'] = kwargs.get('availability_zone', None)
inst['ami_launch_index'] = 0
inst['launched_on'] = kwargs.get('launched_on', 'dummy')
return inst
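# For example, _create_instance(host='host_c1', vcpus=2) writes a test
# instance row bound to 'host_c1', using the defaults above for all other
# fields.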
class FakeRequest(object):
environ = {"nova.context": context_maker.get_admin_context()}
GET = {}
class FakeRequestWithNovaZone(object):
environ = {"nova.context": context_maker.get_admin_context()}
GET = {"zone": "nova"}
class HostTestCase(test.TestCase):
"""Test Case for hosts."""
def setUp(self):
super(HostTestCase, self).setUp()
self.controller = os_hosts.HostController()
self.hosts_api = self.controller.api
self.req = FakeRequest()
# Pretend we have fake_hosts.HOST_LIST in the DB
self.stubs.Set(db, 'service_get_all',
stub_service_get_all)
# Only hosts in our fake DB exist
self.stubs.Set(db, 'service_get_by_host_and_topic',
stub_service_get_by_host_and_topic)
        # 'host_c1' always succeeds, and 'host_c2' always fails
self.stubs.Set(self.hosts_api, 'set_host_enabled',
stub_set_host_enabled)
        # 'host_c1' always succeeds, and 'host_c2' always fails
self.stubs.Set(self.hosts_api, 'set_host_maintenance',
stub_set_host_maintenance)
self.stubs.Set(self.hosts_api, 'host_power_action',
stub_host_power_action)
def _test_host_update(self, host, key, val, expected_value):
body = {key: val}
result = self.controller.update(self.req, host, body)
self.assertEqual(result[key], expected_value)
def test_list_hosts(self):
"""Verify that the compute hosts are returned."""
result = self.controller.index(self.req)
self.assertIn('hosts', result)
hosts = result['hosts']
self.assertEqual(fake_hosts.HOST_LIST, hosts)
def test_list_hosts_with_zone(self):
result = self.controller.index(FakeRequestWithNovaZone())
self.assertIn('hosts', result)
hosts = result['hosts']
self.assertEqual(fake_hosts.HOST_LIST_NOVA_ZONE, hosts)
def test_disable_host(self):
self._test_host_update('host_c1', 'status', 'disable', 'disabled')
self._test_host_update('host_c2', 'status', 'disable', 'enabled')
def test_enable_host(self):
self._test_host_update('host_c1', 'status', 'enable', 'enabled')
self._test_host_update('host_c2', 'status', 'enable', 'disabled')
def test_enable_maintenance(self):
self._test_host_update('host_c1', 'maintenance_mode',
'enable', 'on_maintenance')
def test_disable_maintenance(self):
self._test_host_update('host_c1', 'maintenance_mode',
'disable', 'off_maintenance')
def _test_host_update_notimpl(self, key, val):
def stub_service_get_all_notimpl(self, req):
return [{'host': 'notimplemented', 'topic': None,
'availability_zone': None}]
self.stubs.Set(db, 'service_get_all',
stub_service_get_all_notimpl)
body = {key: val}
self.assertRaises(webob.exc.HTTPNotImplemented,
self.controller.update,
self.req, 'notimplemented', body=body)
def test_disable_host_notimpl(self):
self._test_host_update_notimpl('status', 'disable')
def test_enable_maintenance_notimpl(self):
self._test_host_update_notimpl('maintenance_mode', 'enable')
def test_host_startup(self):
result = self.controller.startup(self.req, "host_c1")
self.assertEqual(result["power_action"], "startup")
def test_host_shutdown(self):
result = self.controller.shutdown(self.req, "host_c1")
self.assertEqual(result["power_action"], "shutdown")
def test_host_reboot(self):
result = self.controller.reboot(self.req, "host_c1")
self.assertEqual(result["power_action"], "reboot")
def _test_host_power_action_notimpl(self, method):
self.assertRaises(webob.exc.HTTPNotImplemented,
method, self.req, "notimplemented")
def test_host_startup_notimpl(self):
self._test_host_power_action_notimpl(self.controller.startup)
def test_host_shutdown_notimpl(self):
self._test_host_power_action_notimpl(self.controller.shutdown)
def test_host_reboot_notimpl(self):
self._test_host_power_action_notimpl(self.controller.reboot)
def test_host_status_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.update(self.req, dest, body={'status': 'enable'})
def test_host_maintenance_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.update(self.req, dest,
body={'maintenance_mode': 'enable'})
def test_host_power_action_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.reboot(self.req, dest)
def test_bad_status_value(self):
bad_body = {"status": "bad"}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, "host_c1", bad_body)
bad_body2 = {"status": "disablabc"}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, "host_c1", bad_body2)
def test_bad_update_key(self):
bad_body = {"crazy": "bad"}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, "host_c1", bad_body)
def test_bad_update_key_and_correct_update_key(self):
bad_body = {"status": "disable", "crazy": "bad"}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, "host_c1", bad_body)
def test_good_update_keys(self):
body = {"status": "disable", "maintenance_mode": "enable"}
result = self.controller.update(self.req, 'host_c1', body)
self.assertEqual(result["host"], "host_c1")
self.assertEqual(result["status"], "disabled")
self.assertEqual(result["maintenance_mode"], "on_maintenance")
def test_show_forbidden(self):
self.req.environ["nova.context"].is_admin = False
dest = 'dummydest'
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.show,
self.req, dest)
self.req.environ["nova.context"].is_admin = True
def test_show_host_not_exist(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.show(self.req, dest)
def _create_compute_service(self):
"""Create compute-manager(ComputeNode and Service record)."""
ctxt = self.req.environ["nova.context"]
dic = {'host': 'dummy', 'binary': 'nova-compute', 'topic': 'compute',
'report_count': 0}
s_ref = db.service_create(ctxt, dic)
dic = {'service_id': s_ref['id'],
'vcpus': 16, 'memory_mb': 32, 'local_gb': 100,
'vcpus_used': 16, 'memory_mb_used': 32, 'local_gb_used': 10,
'hypervisor_type': 'qemu', 'hypervisor_version': 12003,
'cpu_info': '', 'stats': ''}
db.compute_node_create(ctxt, dic)
return db.service_get(ctxt, s_ref['id'])
def test_show_no_project(self):
"""No instances are running on the given host."""
ctxt = context_maker.get_admin_context()
s_ref = self._create_compute_service()
result = self.controller.show(self.req, s_ref['host'])
proj = ['(total)', '(used_now)', '(used_max)']
column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
self.assertEqual(len(result['host']), 3)
for resource in result['host']:
self.assertIn(resource['resource']['project'], proj)
self.assertEqual(len(resource['resource']), 5)
self.assertTrue(set(resource['resource'].keys()) == set(column))
db.service_destroy(ctxt, s_ref['id'])
def test_show_works_correctly(self):
"""show() works correctly as expected."""
ctxt = context_maker.get_admin_context()
s_ref = self._create_compute_service()
i_ref1 = _create_instance(project_id='p-01', host=s_ref['host'])
i_ref2 = _create_instance(project_id='p-02', vcpus=3,
host=s_ref['host'])
result = self.controller.show(self.req, s_ref['host'])
proj = ['(total)', '(used_now)', '(used_max)', 'p-01', 'p-02']
column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
self.assertEqual(len(result['host']), 5)
for resource in result['host']:
self.assertIn(resource['resource']['project'], proj)
self.assertEqual(len(resource['resource']), 5)
self.assertTrue(set(resource['resource'].keys()) == set(column))
db.service_destroy(ctxt, s_ref['id'])
db.instance_destroy(ctxt, i_ref1['uuid'])
db.instance_destroy(ctxt, i_ref2['uuid'])
class HostSerializerTest(test.TestCase):
def setUp(self):
super(HostSerializerTest, self).setUp()
self.deserializer = os_hosts.HostUpdateDeserializer()
def test_index_serializer(self):
serializer = os_hosts.HostIndexTemplate()
text = serializer.serialize(fake_hosts.OS_API_HOST_LIST)
tree = etree.fromstring(text)
self.assertEqual('hosts', tree.tag)
self.assertEqual(len(fake_hosts.HOST_LIST), len(tree))
for i in range(len(fake_hosts.HOST_LIST)):
self.assertEqual('host', tree[i].tag)
self.assertEqual(fake_hosts.HOST_LIST[i]['host_name'],
tree[i].get('host_name'))
self.assertEqual(fake_hosts.HOST_LIST[i]['service'],
tree[i].get('service'))
self.assertEqual(fake_hosts.HOST_LIST[i]['zone'],
tree[i].get('zone'))
def test_update_serializer_with_status(self):
exemplar = dict(host='host_c1', status='enabled')
serializer = os_hosts.HostUpdateTemplate()
text = serializer.serialize(exemplar)
tree = etree.fromstring(text)
self.assertEqual('host', tree.tag)
for key, value in exemplar.items():
self.assertEqual(value, tree.get(key))
def test_update_serializer_with_maintenance_mode(self):
exemplar = dict(host='host_c1', maintenance_mode='enabled')
serializer = os_hosts.HostUpdateTemplate()
text = serializer.serialize(exemplar)
tree = etree.fromstring(text)
self.assertEqual('host', tree.tag)
for key, value in exemplar.items():
self.assertEqual(value, tree.get(key))
def test_update_serializer_with_maintenance_mode_and_status(self):
exemplar = dict(host='host_c1',
maintenance_mode='enabled',
status='enabled')
serializer = os_hosts.HostUpdateTemplate()
text = serializer.serialize(exemplar)
tree = etree.fromstring(text)
self.assertEqual('host', tree.tag)
for key, value in exemplar.items():
self.assertEqual(value, tree.get(key))
def test_action_serializer(self):
exemplar = dict(host='host_c1', power_action='reboot')
serializer = os_hosts.HostActionTemplate()
text = serializer.serialize(exemplar)
tree = etree.fromstring(text)
self.assertEqual('host', tree.tag)
for key, value in exemplar.items():
self.assertEqual(value, tree.get(key))
def test_update_deserializer(self):
exemplar = dict(status='enabled', maintenance_mode='disable')
intext = """<?xml version='1.0' encoding='UTF-8'?>
<updates>
<status>enabled</status>
<maintenance_mode>disable</maintenance_mode>
</updates>"""
result = self.deserializer.deserialize(intext)
self.assertEqual(dict(body=exemplar), result)
def test_corrupt_xml(self):
self.assertRaises(
exception.MalformedRequestBody,
self.deserializer.deserialize,
utils.killer_xml_body())
| apache-2.0 | 98,876,251,007,403,070 | 38.644342 | 78 | 0.611092 | false |
michaelarnauts/home-assistant | tests/components/test_device_sun_light_trigger.py | 5 | 3506 | """
tests.test_component_device_sun_light_trigger
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests device sun light trigger component.
"""
# pylint: disable=too-many-public-methods,protected-access
import os
import unittest
import homeassistant.loader as loader
from homeassistant.const import CONF_PLATFORM
from homeassistant.components import (
device_tracker, light, sun, device_sun_light_trigger)
from tests.common import (
get_test_home_assistant, ensure_sun_risen, ensure_sun_set,
trigger_device_tracker_scan)
KNOWN_DEV_PATH = None
def setUpModule(): # pylint: disable=invalid-name
""" Initalizes a Home Assistant server. """
global KNOWN_DEV_PATH
hass = get_test_home_assistant()
loader.prepare(hass)
KNOWN_DEV_PATH = hass.config.path(
device_tracker.KNOWN_DEVICES_FILE)
hass.stop()
with open(KNOWN_DEV_PATH, 'w') as fil:
fil.write('device,name,track,picture\n')
fil.write('DEV1,device 1,1,http://example.com/dev1.jpg\n')
fil.write('DEV2,device 2,1,http://example.com/dev2.jpg\n')
def tearDownModule(): # pylint: disable=invalid-name
""" Stops the Home Assistant server. """
os.remove(KNOWN_DEV_PATH)
class TestDeviceSunLightTrigger(unittest.TestCase):
""" Test the device sun light trigger module. """
def setUp(self): # pylint: disable=invalid-name
self.hass = get_test_home_assistant()
self.scanner = loader.get_component(
'device_tracker.test').get_scanner(None, None)
self.scanner.reset()
self.scanner.come_home('DEV1')
loader.get_component('light.test').init()
device_tracker.setup(self.hass, {
device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}
})
light.setup(self.hass, {
light.DOMAIN: {CONF_PLATFORM: 'test'}
})
sun.setup(self.hass, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_lights_on_when_sun_sets(self):
""" Test lights go on when there is someone home and the sun sets. """
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
ensure_sun_risen(self.hass)
light.turn_off(self.hass)
self.hass.pool.block_till_done()
ensure_sun_set(self.hass)
self.hass.pool.block_till_done()
self.assertTrue(light.is_on(self.hass))
def test_lights_turn_off_when_everyone_leaves(self):
""" Test lights turn off when everyone leaves the house. """
light.turn_on(self.hass)
self.hass.pool.block_till_done()
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
self.scanner.leave_home('DEV1')
trigger_device_tracker_scan(self.hass)
self.hass.pool.block_till_done()
self.assertFalse(light.is_on(self.hass))
def test_lights_turn_on_when_coming_home_after_sun_set(self):
""" Test lights turn on when coming home after sun set. """
light.turn_off(self.hass)
ensure_sun_set(self.hass)
self.hass.pool.block_till_done()
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
self.scanner.come_home('DEV2')
trigger_device_tracker_scan(self.hass)
self.hass.pool.block_till_done()
self.assertTrue(light.is_on(self.hass))
| mit | 6,791,135,812,417,860,000 | 26.606299 | 78 | 0.635767 | false |
bjoernricks/kommons | docs/source/conf.py | 1 | 7934 | # -*- coding: utf-8 -*-
#
# kommons documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 10 20:29:23 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
sys.path.append(os.path.abspath(os.path.join(os.pardir, os.pardir)))
import kommons
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'kommons'
copyright = u'2013, ' + kommons.__author__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = kommons.__version__
# The full version, including alpha/beta/rc tags.
release = kommons.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = "python"
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'kommonsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'kommons.tex', u'Kommons Documentation',
kommons.__author__, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'kommons', u'Kommons Documentation',
[kommons.__author__], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'kommons', u'Kommons Documentation',
kommons.__author__, 'kommons', kommons.__description__,
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
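# Illustrative build command for this configuration (assuming Sphinx is
# installed and this file lives in docs/source/):
#   sphinx-build -b html docs/source docs/build/html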
| lgpl-2.1 | -3,670,814,229,177,733,000 | 30.863454 | 80 | 0.704311 | false |
tensorflow/quantum | tensorflow_quantum/core/ops/math_ops/inner_product_op.py | 1 | 6703 | # Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module to register python op gradient."""
import os
import tensorflow as tf
from tensorflow_quantum.core.ops.load_module import load_module
MATH_OP_MODULE = load_module(os.path.join("math_ops", "_tfq_math_ops.so"))
def _inner_product_grad(programs, symbol_names, symbol_values, other_programs,
prev_grad):
"""Calculate the adjoint gradients of the inner product between circuits.
Compute the gradients of the (potentially many) inner products between
the given circuits and the symbol free comparison circuits.
Calculates out[i][j][k] = $ \frac{\langle \psi_{\text{programs[i]}} \\
(\text{symbol_values[i]})}{\partial \text{symbol_names[k]}} | \\
\psi_{\text{other_programs[j]}} \rangle $
Note: `other_programs` must not contain any free symbols. These can
be resolved beforehand with `tfq.resolve_parameters`.
Note: len(symbol_names) (=n_params) should be a positive integer.
Args:
programs: `tf.Tensor` of strings with shape [batch_size] containing
the string representations of the circuits
symbol_names: `tf.Tensor` of strings with shape [n_params], which
is used to specify the order in which the values in
`symbol_values` should be placed inside of the circuits in
`programs`.
symbol_values: `tf.Tensor` of real numbers with shape
[batch_size, n_params] specifying parameter values to resolve
            into the circuits specified by programs, following the ordering
dictated by `symbol_names`.
other_programs: `tf.Tensor` of strings with shape [batch_size, n_others]
containing the string representations of the circuits with which to
compute the overlap on `programs` with. Must not contain any free
symbols.
prev_grad: `tf.Tensor` of real numbers with shape [batch_size, n_ops]
backprop of values from downstream in the compute graph.
Returns:
        `tf.Tensor` with shape [batch_size, n_symbols] where `out[i][j]` is equal
to the gradient of the inner product between programs[i] and all
other_programs[i] w.r.t. `symbol_names[j]` and `programs[i]` is resolved
with `symbol_values[i]`.
"""
# Due to TF gradient scheme, we return complex conjugate derivative.
return tf.math.conj(
MATH_OP_MODULE.tfq_inner_product_grad(
programs, symbol_names, tf.cast(symbol_values, tf.float32),
other_programs, tf.cast(prev_grad, tf.float32)))
@tf.custom_gradient
def inner_product(programs, symbol_names, symbol_values, other_programs):
"""Calculate the inner product between circuits.
Compute (potentially many) inner products between the given circuits and
the symbol free comparison circuits.
Calculates out[i][j] = $ \langle \psi_{\text{programs[i]}} \\
(\text{symbol\_values[i]}) | \psi_{\text{other\_programs[j]}} \rangle $
>>> symbols = sympy.symbols('alpha beta')
>>> qubits = cirq.GridQubit.rect(1, 2)
>>> reference_circuits = [
... cirq.Circuit((cirq.H**symbols[0]).on_each(qubits)),
... cirq.Circuit(
... cirq.X(qubits[0]) ** symbols[0],
... cirq.Y(qubits[1]) ** symbols[1])
... ]
>>> other_circuits = [
... cirq.Circuit(cirq.X.on_each(qubits)),
... cirq.Circuit((cirq.Y**0.125).on_each(qubits)),
... cirq.Circuit((cirq.X**0.5).on_each(qubits))
... ]
>>> reference_tensor = tfq.convert_to_tensor(reference_circuits)
>>> symbol_tensor = tf.convert_to_tensor([s.name for s in symbols])
>>> values_tensor = tf.convert_to_tensor(np.arange(4).reshape(2, 2))
>>> other_tensor = tfq.convert_to_tensor([other_circuits, other_circuits])
>>> ip = tfq.math.inner_product(reference_tensor, symbol_tensor,
... values_tensor, other_tensor)
>>> ip
tf.Tensor(
[[ 0+0.j, 8.8871640e-01+0.3681184j,
0+0.5j],
[ 0+0.j, 7.3223300e-02-0.17677669j,
0-0.5j]],shape=(2, 3), dtype=complex64)
Note: `other_programs` must not contain any free symbols. These can
be resolved beforehand with `tfq.resolve_parameters`.
Args:
programs: `tf.Tensor` of strings with shape [batch_size] containing
the string representations of the circuits
symbol_names: `tf.Tensor` of strings with shape [n_params], which
is used to specify the order in which the values in
`symbol_values` should be placed inside of the circuits in
`programs`.
symbol_values: `tf.Tensor` of real numbers with shape
[batch_size, n_params] specifying parameter values to resolve
            into the circuits specified by programs, following the ordering
dictated by `symbol_names`.
other_programs: `tf.Tensor` of strings with shape [batch_size, n_others]
containing the string representations of the circuits with which to
compute the overlap on `programs` with. Must not contain any free
symbols.
Returns:
`tf.Tensor` with shape [batch_size, n_others] where `out[i][j]` is equal
to the inner product of `programs[i]` with `symbol_values[i]`
resolved in and `other_programs[i][j]`.
"""
def grad(dy):
def _true_grad():
return _inner_product_grad(programs, symbol_names, symbol_values,
other_programs, dy)
ret_zero = tf.equal(tf.size(symbol_names), 0)
inner_prod_grad = tf.cond(
ret_zero, lambda: tf.zeros_like(symbol_values, dtype=tf.complex64),
_true_grad)
return [None, None, inner_prod_grad, None]
return MATH_OP_MODULE.tfq_inner_product(programs, symbol_names,
tf.cast(symbol_values, tf.float32),
other_programs), grad
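# Editor's addition: a hypothetical usage sketch (not part of the upstream
# TensorFlow Quantum API) showing how the custom gradient registered above is
# consumed. The helper name and the reduction to a real scalar are
# illustrative assumptions.
def _example_inner_product_gradient(programs, symbol_names, symbol_values,
                                    other_programs):
    """Differentiate `inner_product` w.r.t. `symbol_values` with a tape.
    The argument tensors are assumed to be built exactly as in the
    `inner_product` docstring example above.
    """
    with tf.GradientTape() as tape:
        tape.watch(symbol_values)
        ip = inner_product(programs, symbol_names, symbol_values,
                           other_programs)
        # Reduce the complex overlaps to a real scalar so the gradient is
        # well defined; any real-valued reduction would do.
        loss = tf.reduce_sum(tf.math.real(ip))
    # Shape [batch_size, n_params]; the complex-conjugate convention is
    # handled inside the registered grad function.
    return tape.gradient(loss, symbol_values)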
| apache-2.0 | -5,318,145,168,956,347,000 | 44.290541 | 80 | 0.627629 | false |
bobbyrward/fr0st | fr0stlib/gui/preview.py | 1 | 7462 | import wx, sys, numpy as N
from fr0stlib.decorators import *
from config import config
from _events import InMain
class PreviewFrame(wx.Frame):
@BindEvents
def __init__(self,parent):
self.title = "Flame Preview"
self.parent = parent
wx.Frame.__init__(self,parent,wx.ID_ANY, self.title)
wx.GetApp().LoadIconsInto(self)
self.CreateStatusBar()
self.image = PreviewPanel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.image, 1, wx.EXPAND)
self.SetSizer(sizer)
self.SetDoubleBuffered(True)
# This must be 0,0 so OnIdle doesn't render anything on startup.
self._lastsize = 0,0
self.SetSize((520,413))
self.SetMinSize((128,119)) # This makes for a 120x90 bitmap
def GetPanelSize(self):
"""This method corrects platform dependency issues."""
## if "linux" in sys.platform:
## return self.GetSize()
return self.GetSizer().GetSize()
@Bind(wx.EVT_CLOSE)
def OnExit(self,e):
self.Show(False)
self.Parent.Raise()
@Bind(wx.EVT_SIZE)
def OnResize(self, e):
if not self.image.oldbmp:
self.image.oldbmp = self.image.bmp
image = wx.ImageFromBitmap(self.image.oldbmp)
# TODO: This was here for windows. Need to find a clean way to make
# resize work nice and consistent cross-platform.
## if self._lastsize == (0,0):
## return
pw, ph = map(float, self.GetPanelSize())
fw, fh = map(float, self.parent.flame.size)
ratio = min(pw/fw, ph/fh)
image.Rescale(int(fw * ratio), int(fh * ratio))
self.image.bmp = wx.BitmapFromImage(image)
self.Refresh()
e.Skip()
@Bind(wx.EVT_IDLE)
def OnIdle(self, e):
size = self.GetPanelSize()
if size == self._lastsize:
return
self._lastsize = size
self.RenderPreview()
def RenderPreview(self, flame=None):
flame = flame or self.parent.flame
pw, ph = map(float, self.GetPanelSize())
fw, fh = map(float, self.parent.flame.size)
ratio = min(pw/fw, ph/fh)
size = int(fw * ratio), int(fh * ratio)
req = self.parent.renderer.LargePreviewRequest
req(self.RenderCallback, flame, size, progress_func=self.prog_func,
**config["Large-Preview-Settings"])
def RenderCallback(self, bmp):
self.image.UpdateBitmap(bmp)
self.SetTitle("%s - Flame Preview" % self.parent.flame.name)
self.SetStatusText("rendering: 100.00 %")
RenderCallback._can_cancel = True
@InMain
def prog_func(self, py_object, fraction, stage, eta):
self.SetStatusText("rendering: %.2f %%" %fraction)
class PreviewBase(wx.Panel):
HasChanged = False
StartMove = None
EndMove = None
_move = None
_zoom = 1
@BindEvents
def __init__(self, parent):
self.bmp = wx.EmptyBitmap(400,300, 32)
wx.Panel.__init__(self, parent, -1)
@Bind(wx.EVT_IDLE)
def OnIdle(self, e):
if self._move is not None:
diff = self._move
self._move = None
self.StartMove = self.EndMove
self.Move(diff)
elif self._zoom != 1:
diff = self._zoom
self._zoom = 1
self.Zoom(diff)
def Move(self, diff):
flame = self.parent.flame
fw,fh = self.bmp.GetSize()
pixel_per_unit = fw * flame.scale / 100.
flame.move_center([i / pixel_per_unit for i in diff])
self.parent.image.RenderPreview()
self.parent.adjust.UpdateView()
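    # Editor's note (worked example, not in the original source): with a
    # 400-pixel-wide rendered flame whose scale is 25, pixel_per_unit is
    # 400 * 25 / 100 = 100, so a 50-pixel drag moves the flame centre by
    # 0.5 flame-space units.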
def Zoom(self, diff):
self.parent.flame.scale *= diff
self.parent.image.RenderPreview()
self.parent.adjust.UpdateView()
self.HasChanged = True
@Bind(wx.EVT_LEFT_DOWN)
def OnLeftDown(self, e):
self.SetFocus()
self.StartMove = N.array(e.GetPosition())
@Bind(wx.EVT_LEFT_UP)
def OnLeftUp(self, e):
self.StartMove = None
if self.EndMove is not None:
self.EndMove = None
self.parent.TreePanel.TempSave()
@Bind(wx.EVT_MOUSE_EVENTS)
def OnMove(self, e):
if self.StartMove is not None:
self.EndMove = N.array(e.GetPosition())
self._move = self.StartMove - self.EndMove
@Bind(wx.EVT_MOUSEWHEEL)
def OnWheel(self, e):
if e.ControlDown():
if e.AltDown():
diff = 0.01
else:
diff = 0.1
elif e.AltDown():
diff = 0.001
else:
e.Skip()
return
self.SetFocus() # Makes sure OnKeyUp gets called.
self._zoom *= 1 + (diff if e.GetWheelRotation() > 0 else -diff)
@Bind(wx.EVT_KEY_UP)
def OnKeyUp(self, e):
key = e.GetKeyCode()
if (key == wx.WXK_CONTROL and not e.AltDown()) or (
key == wx.WXK_ALT and not e.ControlDown()):
if self.HasChanged:
self.parent.TreePanel.TempSave()
self.HasChanged = False
class PreviewPanel(PreviewBase):
_offset = N.array([0,0])
_zoomfactor = 1.0
oldbmp = None
@BindEvents
def __init__(self, parent):
self.__class__ = PreviewBase
PreviewBase.__init__(self, parent)
self.__class__ = PreviewPanel
self.parent = parent.parent
self.GetPanelSize = parent.GetPanelSize
def UpdateBitmap(self, bmp):
self.bmp = bmp
self.oldbmp = bmp
self._offset = N.array([0,0])
self._zoomfactor = 1.0
self.Refresh()
@Bind(wx.EVT_PAINT)
def OnPaint(self, evt):
fw,fh = self.bmp.GetSize()
pw,ph = self.GetPanelSize()
dc = wx.PaintDC(self)
dc.DrawBitmap(self.bmp, (pw-fw)/2, (ph-fh)/2, True)
def Move(self, diff):
PreviewBase.Move(self, diff)
self._offset += diff
self.MoveAndZoom()
def Zoom(self, val):
PreviewBase.Zoom(self, val)
self._zoomfactor *= val
self._offset *= val
self.MoveAndZoom()
def MoveAndZoom(self):
fw,fh = self.bmp.GetSize()
ow, oh = self._offset
image = wx.ImageFromBitmap(self.oldbmp)
# Use fastest order of operations in each case (i.e. the order that
# avoids huge images that will just be shrinked or cropped).
# Both paths yield equivalent results.
zoom = self._zoomfactor
if zoom > 1:
iw, ih = int(fw/zoom), int(fh/zoom)
newimg = wx.EmptyImage(iw, ih, 32)
newimg.Paste(image, (iw-fw)/2 - ow/zoom,
(ih-fh)/2 - oh/zoom)
newimg.Rescale(fw,fh)
else:
iw, ih = int(fw*zoom), int(fh*zoom)
image.Rescale(iw, ih)
newimg = wx.EmptyImage(fw, fh, 32)
newimg.Paste(image, (fw-iw)/2 - ow, (fh-ih)/2 - oh)
self.bmp = wx.BitmapFromImage(newimg)
self.Refresh()
| gpl-3.0 | -5,828,525,901,854,154,000 | 26.158491 | 75 | 0.531761 | false |
stonier/ecto | python/ecto/cell.py | 3 | 2815 | #
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ecto import _cell_base
class Cell(_cell_base):
"""
When creating a cell from Python, just inherit from that class and define
the same functions as in C++ if you want (i.e. declare_params(self, p),
declare_io(self, p, i, o), configure(self, p, i, o) and process(self, i, o)
"""
__looks_like_a_cell__ = True
def __getattr__(self, name):
if name == '__impl':
return self
else:
if name in self.__dict__:
return self.__dict__[name]
else:
raise AttributeError(self, name)
def __init__(self, *args, **kwargs):
_cell_base.__init__(self)
if args:
_cell_base.name(self, args[0])
_cell_base.declare_params(self)
for k, v in kwargs.iteritems():
self.params.at(k).set(v)
self.params.notify()
_cell_base.declare_io(self)
if self.__doc__ is None:
self.__doc__ = "TODO docstr me."
self._short_doc = self.__doc__
self.__doc__ = self.gen_doc(self.__doc__)
def short_doc(self):
return self._short_doc
@classmethod
def inspect(cls, *args, **kwargs):
m = cls(*args, **kwargs)
return m
| bsd-3-clause | -2,987,144,213,507,633,000 | 40.397059 | 79 | 0.662167 | false |
vrenkens/Nabu-asr | nabu/scripts/prepare_data.py | 2 | 2975 | '''dataprep.py
does the data preparation for a single database'''
import sys
import os
sys.path.append(os.getcwd())
import subprocess
import shutil
from six.moves import configparser
import tensorflow as tf
import data
def main(expdir, recipe, computing):
'''main method'''
if recipe is None:
raise Exception('no recipe specified. Command usage: '
'nabu data --recipe=/path/to/recipe')
if not os.path.isdir(recipe):
raise Exception('cannot find recipe %s' % recipe)
if expdir is None:
raise Exception('no expdir specified. Command usage: '
'nabu data --expdir=/path/to/recipe '
'--recipe=/path/to/recipe')
if computing not in ['standard', 'condor']:
raise Exception('unknown computing mode: %s' % computing)
#read the data conf file
parsed_cfg = configparser.ConfigParser()
parsed_cfg.read(os.path.join(recipe, 'database.conf'))
#loop over the sections in the data config
for name in parsed_cfg.sections():
print 'processing %s' % name
#read the section
conf = dict(parsed_cfg.items(name))
if not os.path.exists(conf['dir']):
os.makedirs(conf['dir'])
else:
print '%s already exists, skipping this section' % conf['dir']
continue
#create the expdir for this section
if not os.path.isdir(os.path.join(expdir, name)):
os.makedirs(os.path.join(expdir, name))
#create the database configuration
dataconf = configparser.ConfigParser()
dataconf.add_section(name)
for item in conf:
dataconf.set(name, item, conf[item])
with open(os.path.join(expdir, name, 'database.conf'), 'w') as fid:
dataconf.write(fid)
#copy the processor config
shutil.copyfile(
conf['processor_config'],
os.path.join(expdir, name, 'processor.cfg'))
if computing == 'condor':
if not os.path.isdir(os.path.join(expdir, name, 'outputs')):
os.makedirs(os.path.join(expdir, name, 'outputs'))
subprocess.call(['condor_submit',
'expdir=%s' % os.path.join(expdir, name),
'nabu/computing/condor/dataprep.job'])
else:
data.main(os.path.join(expdir, name))
if __name__ == '__main__':
tf.app.flags.DEFINE_string('expdir', None,
'the exeriments directory'
)
tf.app.flags.DEFINE_string('recipe', None,
'The directory containing the recipe')
tf.app.flags.DEFINE_string('computing', 'standard',
'the distributed computing system one of'
' condor'
)
FLAGS = tf.app.flags.FLAGS
main(FLAGS.expdir, FLAGS.recipe, FLAGS.computing)
| mit | -3,562,746,009,994,248,700 | 32.806818 | 75 | 0.567395 | false |
nickhand/pyRSD | pyRSD/rsdfit/solvers/emcee_solver.py | 1 | 15344 | from ... import numpy as np, os
from .. import logging
from ..results import EmceeResults
from . import tools, objectives
import time
import signal
import traceback
import functools
logger = logging.getLogger('rsdfit.emcee_fitter')
logger.addHandler(logging.NullHandler())
#------------------------------------------------------------------------------
# context manager for running multiple chains
#------------------------------------------------------------------------------
class ChainManager(object):
"""
Class to serve as context manager for running multiple chains, which
will handle exceptions (user-supplied or otherwise) and convergence
criteria from multiple chains
"""
def __init__(self, sampler, niters, nwalkers, free_names, comm):
"""
Parameters
----------
sampler : emcee.EnsembleSampler
the emcee sampler object
niter : int
the number of iterations to run
nwalkers : int
the number of walkers we are using
free_names : list of str
the names of the free parameters
comm : MPI.Communicator
the communicator for the the multiple chains
"""
self.sampler = sampler
self.niters = niters
self.nwalkers = nwalkers
self.free_names = free_names
self.comm = comm
self.exception = None
# register the signal handlers and tags
signal.signal(signal.SIGUSR1, initiate_exit)
signal.signal(signal.SIGUSR2, initiate_exit)
signal.signal(signal.SIGQUIT, initiate_exit)
self.tags = enum('CONVERGED', 'EXIT', 'CTRL_C')
# remember the start time
self.start = time.time()
def __enter__(self):
return self
def update_progress(self, niter):
conditions = [niter < 10, niter < 50 and niter % 2 == 0, niter < 500 and niter % 10 == 0, niter % 100 == 0]
if any(conditions):
update_progress(self.free_names, self.sampler, self.niters, self.nwalkers)
def check_status(self):
from mpi4py import MPI
if self.comm is not None:
if self.comm.Iprobe(source=MPI.ANY_SOURCE, tag=self.tags.EXIT):
raise ExitingException
if self.comm.Iprobe(source=MPI.ANY_SOURCE, tag=self.tags.CONVERGED):
raise ConvergenceException
if self.comm.Iprobe(source=MPI.ANY_SOURCE, tag=self.tags.CTRL_C):
raise KeyboardInterrupt
def do_convergence(self, niter):
if niter < 500:
return False
elif niter < 1500 and niter % 200 == 0:
return True
elif niter >= 1500 and niter % 100 == 0:
return True
return False
def check_convergence(self, niter, epsilon, start_iter, start_chain):
if self.comm is not None and self.do_convergence(start_iter+niter+1):
chain = self.sampler.chain if start_chain is None else np.concatenate([start_chain, self.sampler.chain],axis=1)
self.comm.Barrier() # sync each chain to same number of iterations
chains = self.comm.gather(chain, root=0)
if self.comm.rank == 0:
converged = test_convergence(chains, start_iter+niter+1, epsilon)
if converged: raise ConvergenceException
def sample(self, p0, lnprob0):
kwargs = {}
kwargs['lnprob0'] = lnprob0
kwargs['iterations'] = self.niters
kwargs['storechain'] = True
return enumerate(self.sampler.sample(p0, **kwargs))
def __exit__(self, exc_type, exc_value, exc_traceback):
# emcee raises a RuntimeError -- check if it was actually a KeyboardInterrupt
# if isinstance(exc_value, RuntimeError):
# tb = traceback.format_exc()
# if 'KeyboardInterrupt' in tb:
# exc_type = KeyboardInterrupt
# exc_value = exc_type()
if isinstance(exc_value, KeyboardInterrupt):
logger.warning("EMCEE: ctrl+c pressed - saving current state of chain")
tag = self.tags.CTRL_C
elif isinstance(exc_value, ConvergenceException):
logger.warning("EMCEE: convergence criteria satisfied -- exiting")
tag = self.tags.CONVERGED
elif exc_value is not None:
logger.warning("EMCEE: exception occurred - trying to save current state of chain")
trace = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback, limit=5))
logger.warning(" traceback:\n%s" %trace)
tag = self.tags.EXIT
exceptions = (ConvergenceException, ExitingException)
if exc_value is not None and not isinstance(exc_value, exceptions):
logger.warning("EMCEE: setting exception to true before exiting")
self.exception = exc_value
# convergence exception
if exc_value is not None:
if self.comm is not None:
for r in range(0, self.comm.size):
if r != self.comm.rank:
self.comm.send(None, dest=r, tag=tag)
# print out some info and exit
stop = time.time()
logger.warning("EMCEE: ...iterations finished. Time elapsed: {}".format(tools.hms_string(stop-self.start)))
logger.warning("EMCEE: mean acceptance fraction: {0:.3f}".format(np.mean(self.sampler.acceptance_fraction)))
try:
logger.warning("EMCEE: autocorrelation time: {}".format(self.sampler.get_autocorr_time()))
except:
pass
return True
#------------------------------------------------------------------------------
# tools setup
#------------------------------------------------------------------------------
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
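# Editor's sketch (not in the original module): what ``enum`` builds -- the
# same call ChainManager makes for its MPI message tags; sequential names map
# to 0, 1, 2, ...
def _enum_example():
    tags = enum('CONVERGED', 'EXIT', 'CTRL_C')
    assert (tags.CONVERGED, tags.EXIT, tags.CTRL_C) == (0, 1, 2)
    return tags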
class ConvergenceException(Exception):
pass
class ExitingException(Exception):
pass
def initiate_exit(signum, stack):
raise ExitingException
def update_progress(free_names, sampler, niters, nwalkers, last=10):
"""
Report the current status of the sampler.
"""
k = sum(sampler.lnprobability[0] < 0)
if not k:
logger.warning("No iterations with valid parameters (chain shape = {})".format(sampler.chain.shape))
return None
chain = sampler.chain[:,:k,:]
logprobs = sampler.lnprobability[:,:k]
best_iter = np.argmax(logprobs.max(axis=0))
best_walker = np.argmax(logprobs[:,best_iter])
text = ["EMCEE Iteration {:>6d}/{:<6d}: {} walkers, {} parameters".format(k-1, niters, nwalkers, chain.shape[-1])]
text += [" best logp = {:.6g} (reached at iter {}, walker {})".format(logprobs.max(axis=0)[best_iter], best_iter, best_walker)]
try: acc_frac = sampler.acceptance_fraction
except: acc_frac = np.array([np.nan])
try: acor = sampler.acor
except: acor = np.ones(len(free_names))*np.nan
text += [" acceptance_fraction ({}->{} (median {}))".format(acc_frac.min(), acc_frac.max(), np.median(acc_frac))]
for i, name in enumerate(free_names):
pos = chain[:,-last:,i].ravel()
msg = " {:15s} = {:.6g} +/- {:<12.6g} (best={:.6g})".format(name,
np.median(pos), np.std(pos), chain[best_walker, best_iter, i])
if not np.isnan(acor[i]):
msg += " (autocorr: {:.3g})".format(acor[i])
text.append(msg)
text = "\n".join(text) +'\n'
logger.warning(text)
def test_convergence(chains0, niter, epsilon):
"""
Test convergence using the Gelman-Rubin diagnostic
# Calculate Gelman & Rubin diagnostic
# 1. Remove the first half of the current chains
# 2. Calculate the within chain and between chain variances
# 3. estimate your variance from the within chain and between chain variance
# 4. Calculate the potential scale reduction parameter
"""
(walkers, _, ndim) = chains0[0].shape
Nchains = len(chains0)
    n = max(niter // 2, 1)  # integer division so the slice below also works on Python 3
chains, withinchainvar, meanchain = [],[],[]
for chain in chains0:
inds = np.nonzero(chain)
chain = chain[inds].reshape((walkers, -1, ndim))
chain = chain[:, n:, :].reshape((-1, ndim))
chains.append(chain)
withinchainvar.append(np.var(chain, axis=0))
meanchain.append(np.mean(chain, axis=0))
meanall = np.mean(meanchain, axis=0)
W = np.mean(withinchainvar, axis=0)
B = np.zeros(ndim)
for jj in range(0, Nchains):
B += n*(meanall - meanchain[jj])**2 / (Nchains-1.)
estvar = (1. - 1./n)*W + B/n
with np.errstate(invalid='ignore'):
scalereduction = np.sqrt(estvar/W)
converged = abs(1.-scalereduction) <= epsilon
logger.warning("EMCEE: testing convergence with epsilon = %.4f" %epsilon)
logger.warning(" %d/%d parameters have converged" %(converged.sum(), ndim))
logger.warning(" scale-reduction = %s" %str(scalereduction))
return np.all(converged)
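# Editor's sketch (not in the original source): a tiny synthetic sanity check
# of the Gelman-Rubin criterion above. Chain shapes follow the convention used
# by ``test_convergence``: one (nwalkers, niter, ndim) array per chain; two
# chains drawn from the same distribution should typically pass for a loose
# epsilon.
def _example_convergence_check(epsilon=0.05):
    np.random.seed(42)
    nwalkers, niter, ndim = 4, 200, 2
    chains = [np.random.normal(0., 1., size=(nwalkers, niter, ndim))
              for _ in range(2)]
    return test_convergence(chains, niter, epsilon)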
#------------------------------------------------------------------------------
# the main function to runs
#------------------------------------------------------------------------------
def run(params, fit_params, pool=None, chains_comm=None, init_values=None):
"""
Perform MCMC sampling of the parameter space of a system using `emcee`
Parameters
----------
params : ParameterSet
This holds the parameters needed to run the `emcee` fitter
fit_params : ParameterSet
the theoretical parameters
pool : emcee.MPIPool, optional
Pool object if we are using MPI to run emcee
init_values : array_like, `EmceeResults`
Initial positions; if not `None`, initialize the emcee walkers
in a small, random ball around these positions
Notes
-----
    * Have a look at the `acortime`; it is good practice to let the sampler
run at least 10 times the `acortime`. If ``acortime = np.nan``, you
probably need to take more iterations!
* Use as many walkers as possible (hundreds for a handful of parameters)
Number of walkers must be even and at least twice the number of
parameters
* Beware of a burn-in period. The most conservative you can get is making
a histogram of only the final states of all walkers.
"""
import emcee
# get params and/or defaults
nwalkers = params.get('walkers', 20)
niters = params.get('iterations', 500)
label = params.get('label')
ndim = len(fit_params.free_names)
init_from = params.get('init_from', 'prior')
epsilon = params.get('epsilon', 0.02)
test_conv = params.get('test_convergence', False)
#---------------------------------------------------------------------------
# let's check a few things so we dont mess up too badly
#---------------------------------------------------------------------------
# now, if the number of walkers is smaller then twice the number of
# parameters, adjust that number to the required minimum and raise a warning
if 2*ndim > nwalkers:
msg = "EMCEE: number of walkers ({}) cannot be smaller ".format(nwalkers)
msg += "than 2 x npars: set to {}".format(2*ndim)
logger.warning(msg)
nwalkers = 2*ndim
if nwalkers % 2 != 0:
nwalkers += 1
logger.warning("EMCEE: number of walkers must be even: set to {}".format(nwalkers))
#---------------------------------------------------------------------------
# initialize the parameters
#---------------------------------------------------------------------------
old_results = None
start_iter = 0
lnprob0 = None
start_chain = None
# 1) initialixe from initial provided values
if init_from in ['nlopt', 'fiducial', 'result']:
if init_values is None:
raise ValueError("EMCEE: cannot initialize around best guess -- none provided")
labels = {'nlopt' : 'maximum probability', 'fiducial': 'fiducial', 'result': "previous result best-fit"}
lab = labels[init_from]
# initialize in random ball
# shape is (nwalkers, ndim)
p0 = np.array([init_values + 1e-3*np.random.randn(ndim) for i in range(nwalkers)])
logger.warning("EMCEE: initializing walkers in random ball around %s parameters" %lab)
# 2) initialize and restart from previous run
elif isinstance(init_values, EmceeResults):
# copy the results object
old_results = init_values.copy()
# get the attributes
start_chain = old_results.chain
lnprob0 = old_results.lnprobs[:,-1]
start_iter = start_chain.shape[1]
p0 = np.array(start_chain[:, -1, :])
logger.warning("EMCEE: continuing previous run (starting at iteration {})".format(start_iter))
# 3) start from scratch
else:
if init_from == 'previous_run':
raise ValueError('trying to init from previous run, but old chain failed')
# Initialize a set of parameters
try:
logger.warning("Attempting multivariate initialization from {}".format(init_from))
p0, drew_from = tools.multivariate_init(fit_params, nwalkers, draw_from=init_from, logger=logger)
logger.warning("Initialized walkers from {} with multivariate normals".format(drew_from))
except ValueError:
logger.warning("Attempting univariate initialization")
p0, drew_from = tools.univariate_init(fit_params, nwalkers, draw_from=init_from, logger=logger)
logger.warning("Initialized walkers from {} with univariate distributions".format(drew_from))
# initialize the sampler
logger.warning("EMCEE: initializing sampler with {} walkers".format(nwalkers))
objective = functools.partial(objectives.lnprob)
sampler = emcee.EnsembleSampler(nwalkers, ndim, objective, pool=pool)
# iterator interface allows us to tap ctrl+c and know where we are
niters -= start_iter
burnin = 0 if start_iter > 0 else params.get('burnin', 100)
logger.warning("EMCEE: running {} iterations with {} free parameters...".format(niters, ndim))
#---------------------------------------------------------------------------
# do the sampling
#---------------------------------------------------------------------------
with ChainManager(sampler, niters, nwalkers, fit_params.free_names, chains_comm) as manager:
for niter, result in manager.sample(p0, lnprob0):
# check if we need to exit due to exception/convergence
manager.check_status()
# update progress and test convergence
manager.update_progress(niter)
if test_conv:
manager.check_convergence(niter, epsilon, start_iter, start_chain)
# make the results and return
new_results = EmceeResults(sampler, fit_params, burnin)
if old_results is not None:
new_results = old_results + new_results
exception_raised = False
if manager.exception is not None and not isinstance(manager.exception, KeyboardInterrupt):
exception_raised = True
logger.warning("EMCEE: exiting EMCEE fitter with exception = %s" %str(exception_raised))
return new_results, manager.exception
| gpl-3.0 | -4,063,767,412,536,699,400 | 40.358491 | 136 | 0.590394 | false |
kYc0o/RIOT | tests/posix_time/tests/01-run.py | 21 | 1343 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
# Copyright (C) 2017 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
import time
from testrunner import run
US_PER_SEC = 1000000
EXTERNAL_JITTER = 0.15
class InvalidTimeout(Exception):
pass
def testfunc(child):
try:
start_test = time.time()
child.expect_exact("5 x usleep(i++ * 500000)")
for i in range(5):
child.expect_exact("wake up")
child.expect_exact("5 x sleep(i++)")
for i in range(5):
child.expect_exact("wake up")
child.expect_exact("DONE")
testtime = (time.time() - start_test) * US_PER_SEC
exp = sum(i * 500000 for i in range(5)) + \
sum(i * US_PER_SEC for i in range(5))
lower_bound = exp - (exp * EXTERNAL_JITTER)
upper_bound = exp + (exp * EXTERNAL_JITTER)
if not (lower_bound < testtime < upper_bound):
raise InvalidTimeout("Host timer measured %d us (client measured %d us)" %
(testtime, exp))
except InvalidTimeout as e:
print(e)
sys.exit(1)
if __name__ == "__main__":
sys.exit(run(testfunc))
| lgpl-2.1 | -2,776,041,272,215,573,000 | 27.553191 | 86 | 0.592399 | false |
breunigs/livestreamer-debian | src/livestreamer/plugins/nhkworld.py | 34 | 1262 | """Plugin for NHK World, NHK Japan's english TV channel."""
import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import HDSStream
API_URL = "http://api.sh.nhk.fivecool.tv/api/cdn/?publicId=3bz2huey&playerId=7Dy"
_url_re = re.compile("http(s)?://(\w+\.)?nhk.or.jp/nhkworld")
_schema = validate.Schema({
"live-streams": [{
"streams": validate.all(
[{
"protocol": validate.text,
"streamUrl": validate.text
}],
validate.filter(lambda s: s["protocol"] in ("http-flash", "http-hds"))
)
}]
})
class NHKWorld(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
res = http.get(API_URL)
data = http.json(res, schema=_schema)
streams = {}
for livestreams in data["live-streams"]:
for stream in livestreams["streams"]:
url = stream["streamUrl"]
for name, stream in HDSStream.parse_manifest(self.session, url).items():
if name.endswith("k"):
streams[name] = stream
return streams
__plugin__ = NHKWorld
| bsd-2-clause | -3,643,217,686,225,475,000 | 27.044444 | 88 | 0.576862 | false |
hajicj/FEL-NLP-IR_2016 | npfl103/io/collection.py | 1 | 5340 | # -*- coding: utf-8 -*-
"""This module implements a class that..."""
from __future__ import print_function, unicode_literals
import logging
import os
import xml.etree.ElementTree as ET
import time
from npfl103.io.document import Document
from npfl103.io.topic import Topic
__version__ = "0.0.1"
__author__ = "Jan Hajic jr."
class Collection:
"""This class represents the entire collection of documents
in the assignment.
The Collection is initialized with the ``document.list`` file that
contains paths to the individual documents. It is assumed the
paths in the document list file are relative to the location of
that file -- if not, you can use the ``docpath`` parameter
to set the prefix to the document paths in the list file.
(For instance, if you have absolute paths in ``documents.list``,
you would use ``docpath=''``.)
>>> corpus = Collection('../test_data/test-documents-tiny.list')
>>> len(corpus)
4
>>> print(corpus[0].texts[0].to_text(sentence_idxs=[1, 3]))
leden provázely v celé Evropě bouřlivé oslavy a ohňostroje .
" Nevypadají jako opravdové .
The Collection supports iteration over documents:
>>> for doc in corpus:
... print(len(list(doc.tokens(zones=['TEXT']))))
354
1290
364
393
The documents are loaded lazily (only parsed when requested)
and cached.
You can supply any class that takes a filename as an initialization
argument. For instance, the Topics can also be loaded as a Collection:
>>> tcorpus = Collection('../test_data/test-topics.list', document_cls=Topic)
>>> len(tcorpus)
2
>>> for topic in tcorpus:
... print(topic.tid)
10.2452/401-AH
10.2452/402-AH
"""
def __init__(self, document_list, docpath=None, cache_disabled=False,
document_cls=Document, name='collection'):
"""Initialize the corpus.
:param document_list: A file with one document path per line.
:param docpath: A prefix for the document paths. If ``None``
(default), will assume the paths are relative to the location
of the ``document_list``.
        :param cache_disabled: If True, will never cache the loaded documents.
This decreases memory requirements, but slows down loading
times when a document is accessed more than once.
:param document_cls: What kind of documents are the content of this
collection? (Mostly: Document, or Topic)
:param name: What the Collection should be called in logging.
"""
if not os.path.isfile(document_list):
raise ValueError('Document list file {0} not found!'
''.format(document_list))
self.document_list_fname = document_list
with open(self.document_list_fname) as instream:
self.document_list = [l.strip() for l in instream]
if docpath is None:
docpath = os.path.dirname(self.document_list_fname)
self.docpath = docpath
self._document_cls = document_cls
self._cache_disabled = cache_disabled
self._cache = {}
        self.__uid_cache = {}  # This one is persistent.
self.name = name
def __getitem__(self, item):
if item in self._cache:
return self._cache[item]
else:
return self.load_document(item)
def __iter__(self):
_time_start = time.clock()
for i in range(len(self.document_list)):
if i % 1000 == 0 and i != 0:
_now = time.clock()
print('Loaded {0} documents in {1:.2f} s, {2:.5f} s'
' average per document.'.format(i, _now - _time_start,
(_now - _time_start) / i))
try:
yield self[i]
except (ET.ParseError, TypeError):
logging.error('Could not parse document no. {0}, fname {1}'
''.format(i, self.document_list[i]))
raise
def __len__(self):
return len(self.document_list)
def load_document(self, index):
"""Handles loading the document itself, when needed."""
if index >= len(self.document_list):
raise ValueError('Document with index {0} not in corpus ({1} documents'
'available)'.format(index, len(self.document_list)))
fname = os.path.join(self.docpath, self.document_list[index])
if not os.path.isfile(fname):
# Try a gzipped version?
fname += '.gz'
if not os.path.isfile(fname):
raise ValueError('Document no. {0} not found! (Fname: {1}, '
'docpath: {2})'
''.format(index, fname, self.docpath))
document = self._document_cls(fname)
if not self._cache_disabled:
self._cache[index] = document
self.__uid_cache[index] = document.uid
return document
@property
def _loaded_idxs(self):
return self._cache.keys()
def get_uid(self, idx):
"""Return the UID of the i-th item in the collection."""
try:
return self.__uid_cache[idx]
except KeyError:
return self[idx].uid
| apache-2.0 | -4,777,920,385,166,660,000 | 33.179487 | 83 | 0.586272 | false |
paolomonella/ursus | xmlToolBox/lxmlToolBox.py | 1 | 11692 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This script simplifies the encoding of casanatensis.xml
# and creates a file ALIM2_publication/casanatensis_AL.xml
# with the Alphabetic Layer only.
#
# It's written in Python 3.4, but also works with Python 2.7.
# It uses the Python lxml library.
from __future__ import print_function
import os
from lxml import etree
# Clear screen
os.system('clear')
# Namespaces
n = '{http://www.tei-c.org/ns/1.0}' # for XML/TEI
xml = '{http://www.w3.org/XML/1998/namespace}' # for attributes like xml:id
#ET.register_namespace('', 'http://www.tei-c.org/ns/1.0') # This used to work when I used ElementTree
ns = {'tei': 'http://www.tei-c.org/ns/1.0', # for TEI XML
'xml': 'http://www.w3.org/XML/1998/namespace'} # for attributes like xml:id
# Parse the tree of casanatensis.xml
casanaTree = etree.parse('../casanatensis.xml')
tree = casanaTree  # alias used by the preliminary analysis sections below
# Corr with cert=medium
print('\n################################\n# CORRECTIONS WITH CERT=MEDIUM #\n################################\n\n')
L = []
c = 0
d = 0
for x in tree.findall('.//' + n + 'corr'):
if x.get('cert') == 'medium':
choice = x.getparent()
sic = choice.find(n + 'sic')
if sic.find(n + 'w') is not None:
sicw = sic.find(n + 'w') # 'wrong' word
sicwtxt = sicw.xpath('normalize-space()').encode('utf8')
else:
sicwtxt = ''
if x.find(n + 'w') is not None:
corrw = x.find(n + 'w') # 'right' word
corrwid = corrw.get(xml + 'id')
print(corrwid, end = '\t')
corrwtxt = corrw.xpath('normalize-space()').encode('utf8')
#wtxt = corrw.text.strip()
else:
corrwtxt = ''
print('"' + str(sicwtxt) + '" → "' + str(corrwtxt) + '"')
#print(x.get('subtype'))
if x.find(n + 'note') is not None:
y = x.find(n + 'note')
print(y.text.encode('utf-8'))
c = c + 1
else:
z = x.get('type')
print('Error type: "' + z + '"')
d = d + 1
print()
for l in set(L):
print(l)
# Notes with subtype='crux'
print('\n\n#############################\n# NOTES WITH SUBTYPE="CRUX" #\n#############################\n\n')
cc = 0
for x in tree.findall('.//' + n + 'note'):
if x.get('type') == 'emendation' and x.get('subtype') == 'crux':
prevWord = x.getprevious() # The previous word
print('\n' + prevWord.get(xml + 'id') + '\t"' + prevWord.xpath(".//text()")[0].strip() + '"')
print(x.text.encode('utf-8'))
cc = cc + 1
# Summary
print('\n\n###########\n# SUMMARY #\n###########\n\n')
print(str(c) + ' notes on general matters\n' \
+ str(d) + ' notes with a <correction> type\n' \
+ str(cc) + ' cruces desperationis\n')
# Parse the tree of the ALIM2 template: it will be the base for the output tree
no_blank = False
if no_blank:
parser = etree.XMLParser(remove_blank_text=True)
tree = etree.parse('ALIM2_publication/teiHeader_template.xml', parser)
else:
tree = etree.parse('ALIM2_publication/teiHeader_template.xml')
root = tree.getroot()
#root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
# Append the <body> of casanatensis.xml into <text> of the output xml file
myBody = casanaTree.getroot().find(n + 'text').find(n + 'body')
myText = root.find(n + 'text') # <text> of output xml file
myText.append(myBody)
#######################################################
# Temporary section to check notes @type="emendation" #
#######################################################
f = open('/home/ilbuonme/Scrivania/foo.txt', 'w')
cc = 0
cww = 0
crr = 0
for x in tree.findall('.//' + n + 'note[@type="emendation"]'):
cc = cc + 1
daddy = x.getparent()
dtag = etree.QName(daddy).localname
if dtag == 'w':
#print('Daddy is w')
cww = cww + 1
elif dtag == 'ref':
print('"' + x.text + '"', file=f)
print('---', file=f)
crr = crr + 1
x.set('subtype', 'emendation_to_whole_section')
else:
print(x.text)
print('Total: %s. Notes to word: %s. Notes to section: %s' % (cc, cww, crr))
f.close()
#############
# Functions #
#############
def deleteAllElements(myElemName, myNameSpace):
search = ('.//{0}' + myElemName).format(myNameSpace)
my_elem_parents = tree.findall(search + '/..')
for x in my_elem_parents:
for y in x.findall(myNameSpace + myElemName):
x.remove(y)
def substituteAllElements(oldName, newName, myNameSpace):
for x in tree.findall('.//' + myNameSpace + oldName):
x.tag = myNameSpace + newName
x.set('type', oldName)
def manageWord(wordElem):
# Easy solution (only backdraw: it moves all elements children of <w> after the text). This is
# OK (it's actually better) for 'anchor/pb/cb/lb', but it creates a slight inaccuracy with 'gap':
tempText = wordElem.xpath('normalize-space()').replace(' ', '').replace('·', '') # This is the unified text of the word
if wordElem.get('type') in ['alphabemes', 'foreign']:
tempText = '"' + tempText + '"'
for y in wordElem:
yt = etree.QName(y).localname
if yt in ['choice', 'add', 'pc', 'hi']: # I'm removing them b/c they include text, or b/c it's <pc n="space">
y.getparent().remove(y)
y.tail = None
wordElem.text = tempText
def managePunctuation(punctElem):
v = punctElem.get('n').replace('question', '?')
punctElem.text = v
if v in ['0', 'quote', 'space']: # Delete <pc> altogether
punctElem.getparent().remove(punctElem)
elif v in ['.', '?', ',']: # Append to the text content of previous <w>
# I'm using '?' instead of 'question' because of the line that replaced 'question' with '?'
if punctElem.getprevious() is None:
# Just give up and leave the <pc> element as it is
pass
elif punctElem.getprevious().tag in [n + 'lb', n + 'milestone', n + 'gap']:
# Just give up and leave the <pc> element as it is
if punctElem.getprevious().tag != n + 'gap':
print('Alas! Punctuation sign not coming immediately after <w> or <gap>')
punctElem.set('type', 'trouble')
elif punctElem.getprevious().tag == n + 'w': # If previous sibling is <w>, append punctuation to its textual content
punctElem.getprevious().text = punctElem.getprevious().text + v
#punctElem.getprevious().tail = v + '\n' # Nope: this generates code like
# <w n="dicam" xml:id="w564">dicam<lb n="1r.a.23" break="no"/></w>,
punctElem.getparent().remove(punctElem)
elif punctElem.getprevious().tag in [n + 'add', n + 'unclear']:
if punctElem.getprevious().find(n + 'w') is not None and len(punctElem.getprevious().find(n + 'w')) == 0:
# If <add> or <unclear> have a <w> child and this <w> has no children (<lb> or <choice>)
punctElem.getprevious().find(n + 'w').text = punctElem.getprevious().find(n + 'w').text + v
punctElem.getprevious().find(n + 'w').text = punctElem.getprevious().find(n + 'w').text.replace('\n', '')
punctElem.getprevious().find(n + 'w').text = punctElem.getprevious().find(n + 'w').text.replace('\t', '')
punctElem.getparent().remove(punctElem)
elif punctElem.getprevious().find(n + 'w') is not None and len(punctElem.getprevious().find(n + 'w')) > 0:
# If the previous <w> has children (<lb> or <choice>, it's best to leave the <pc> as it is)
pass
else:
print('Alas! Childless element <' + punctElem.getprevious().tag + '>')
punctElem.getprevious().set('type', 'trouble')
##################################
# Take care of specific elements #
##################################
for i in ['note', 'abbr', 'pb', 'milestone']:
deleteAllElements(i, n)
for cb in tree.findall('.//' + n + 'cb'):
ocn = cb.get('n') # Old Column @n
ncn = 'Column_' + ocn # New Column @n
cb.set('n', ncn)
substituteAllElements('cb', 'pb', n) # § to-do: if <anchor> generates an empty space, change this to <span>
#######################
# Manage <w> and <pc> #
#######################
for ab in root.findall(n + 'text/' + n + 'body/' + n + 'ab'): # All 'ab' elements (children of <body>)
# Insert an <ab type="added_heading"> with the title of the section (that I made up)
newHead = etree.Element('ab')
newHead.text = '[' + ab.get('n') + ']'
newHead.tail = '\n'
newHead.set('type', 'added_heading')
newHead.set('rend', 'bold')
previousPosition = ab.getparent().index(ab) # This is an integer representing the previous position in <body>
ab.getparent().insert(previousPosition, newHead)
for ref in ab: # Iterate over the <ref> children of <ab>
for w in ref: # Iterate over word-like elements (such as <w>, <gap>, <pc> etc.)
wt = etree.QName(w).localname # The tag name w/o namespace (e.g.: 'w', 'pc' etc.)
if wt == 'w':
manageWord(w)
elif wt == 'add':
# Possible children of 'add' are: w, pc, gap (it may have more than one child)
for c in w:
if c.tag == n + 'w':
manageWord(c)
elif c.tag == n + 'pc':
#pass
managePunctuation(c)
elif c.tag == n + 'milestone':
pass
elif wt == 'unclear':
# Possible children of 'unclear' are: only 'w'
unWord = w.find(n + 'w')
manageWord(unWord)
elif wt == 'anchor':
print('I found an <anchor>')
elif wt == 'pc':
managePunctuation(w)
else:
pass
"""Developer notes on the element inventory.
These are the possible @n <pc>s children of <ref>:
    0
    .
    question
    space
    ,
    quote
Possible element children of <w>:
    {http://www.tei-c.org/ns/1.0}gap      # This is OK: leave it where it is
    {http://www.tei-c.org/ns/1.0}anchor   # This is OK: leave it where it is
    {http://www.tei-c.org/ns/1.0}pc       # Delete this, if it's only n="space"
    {http://www.tei-c.org/ns/1.0}choice   # Extract the text
    {http://www.tei-c.org/ns/1.0}add      # Extract the text
This is the new list of word-like elements, possible children of <ref>:
    milestone   # leave it
    gap         # leave it
    anchor      # leave it (it occurs only once)
    unclear
    pc
    span
This was the original list of word-like elements, possible children of <ref>:
    {http://www.tei-c.org/ns/1.0}lb          # Turn into <anchor>... or just delete
    {http://www.tei-c.org/ns/1.0}cb          # same as above, but possibly anchor
    {http://www.tei-c.org/ns/1.0}pb          # same as above, but possibly anchor
    {http://www.tei-c.org/ns/1.0}pc          # Use @n as text content
    {http://www.tei-c.org/ns/1.0}note        # Replicate? Nope, delete
    {http://www.tei-c.org/ns/1.0}milestone   # Replicate?
    {http://www.tei-c.org/ns/1.0}w           # Replicate
    {http://www.tei-c.org/ns/1.0}anchor      # Replicate
    {http://www.tei-c.org/ns/1.0}add
    {http://www.tei-c.org/ns/1.0}unclear
    {http://www.tei-c.org/ns/1.0}gap
"""
tree.write('ALIM2_publication/casanatensis_AL.xml', encoding='UTF-8', method='xml', xml_declaration=True)
| gpl-2.0 | -5,554,226,811,947,690,000 | 38.486486 | 124 | 0.539185 | false |
alialiev/dj-stripe | djstripe/signals.py | 4 | 1499 | # -*- coding: utf-8 -*-
from django.dispatch import Signal
cancelled = Signal(providing_args=["stripe_response"])
card_changed = Signal(providing_args=["stripe_response"])
subscription_made = Signal(providing_args=["plan", "stripe_response"])
webhook_processing_error = Signal(providing_args=["data", "exception"])
WEBHOOK_SIGNALS = dict([
(hook, Signal(providing_args=["event"]))
for hook in [
"account.updated",
"account.application.deauthorized",
"charge.succeeded",
"charge.failed",
"charge.refunded",
"charge.dispute.created",
"charge.dispute.updated",
"charge.dispute.closed",
"customer.created",
"customer.updated",
"customer.deleted",
"customer.subscription.created",
"customer.subscription.updated",
"customer.subscription.deleted",
"customer.subscription.trial_will_end",
"customer.discount.created",
"customer.discount.updated",
"customer.discount.deleted",
"invoice.created",
"invoice.updated",
"invoice.payment_succeeded",
"invoice.payment_failed",
"invoiceitem.created",
"invoiceitem.updated",
"invoiceitem.deleted",
"plan.created",
"plan.updated",
"plan.deleted",
"coupon.created",
"coupon.updated",
"coupon.deleted",
"transfer.created",
"transfer.updated",
"transfer.failed",
"ping"
]
])
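# Editor's sketch (not part of dj-stripe itself): connecting a receiver to one
# of the webhook signals defined above. The handler and the choice of the
# "invoice.payment_succeeded" hook are illustrative assumptions; the connect
# call is left commented out so importing this module has no side effects.
def _example_receiver(sender, event=None, **kwargs):
    # react to the processed Stripe webhook event here (logging, emails, ...)
    pass
# WEBHOOK_SIGNALS["invoice.payment_succeeded"].connect(_example_receiver)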
| bsd-3-clause | -867,833,763,552,967,400 | 29.591837 | 71 | 0.603069 | false |
rapidpro/tracpro | tracpro/orgs_ext/migrations/0002_auto_20150724_1609.py | 2 | 1035 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.conf import settings
from django.db import models, migrations
def add_available_languages(apps, schema_editor):
"""Set default available_languages to all languages defined for this project."""
all_languages = [l[0] for l in settings.LANGUAGES]
for org in apps.get_model('orgs', 'Org').objects.all():
updated = False
config = json.loads(org.config) if org.config else {}
if not config.get('available_languages'):
config['available_languages'] = all_languages
org.config = json.dumps(config)
updated = True
if not org.language:
org.language = settings.DEFAULT_LANGUAGE
updated = True
if updated:
org.save()
class Migration(migrations.Migration):
dependencies = [
('orgs_ext', '0001_initial'),
]
operations = [
migrations.RunPython(add_available_languages, migrations.RunPython.noop),
]
| bsd-3-clause | -4,159,729,107,407,770,000 | 29.441176 | 84 | 0.634783 | false |
facebookresearch/Detectron | detectron/core/test_retinanet.py | 1 | 7825 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Test a RetinaNet network on an image database"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import logging
from collections import defaultdict
from caffe2.python import core, workspace
from detectron.core.config import cfg
from detectron.modeling.generate_anchors import generate_anchors
from detectron.utils.timer import Timer
import detectron.utils.blob as blob_utils
import detectron.utils.boxes as box_utils
logger = logging.getLogger(__name__)
def _create_cell_anchors():
"""
Generate all types of anchors for all fpn levels/scales/aspect ratios.
This function is called only once at the beginning of inference.
"""
k_max, k_min = cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.RPN_MIN_LEVEL
scales_per_octave = cfg.RETINANET.SCALES_PER_OCTAVE
aspect_ratios = cfg.RETINANET.ASPECT_RATIOS
anchor_scale = cfg.RETINANET.ANCHOR_SCALE
A = scales_per_octave * len(aspect_ratios)
anchors = {}
for lvl in range(k_min, k_max + 1):
# create cell anchors array
stride = 2. ** lvl
cell_anchors = np.zeros((A, 4))
a = 0
for octave in range(scales_per_octave):
octave_scale = 2 ** (octave / float(scales_per_octave))
for aspect in aspect_ratios:
anchor_sizes = (stride * octave_scale * anchor_scale, )
anchor_aspect_ratios = (aspect, )
cell_anchors[a, :] = generate_anchors(
stride=stride, sizes=anchor_sizes,
aspect_ratios=anchor_aspect_ratios)
a += 1
anchors[lvl] = cell_anchors
return anchors
def im_detect_bbox(model, im, timers=None):
"""Generate RetinaNet detections on a single image."""
if timers is None:
timers = defaultdict(Timer)
# Although anchors are input independent and could be precomputed,
# recomputing them per image only brings a small overhead
anchors = _create_cell_anchors()
timers['im_detect_bbox'].tic()
k_max, k_min = cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.RPN_MIN_LEVEL
A = cfg.RETINANET.SCALES_PER_OCTAVE * len(cfg.RETINANET.ASPECT_RATIOS)
inputs = {}
inputs['data'], im_scale, inputs['im_info'] = \
blob_utils.get_image_blob(im, cfg.TEST.SCALE, cfg.TEST.MAX_SIZE)
cls_probs, box_preds = [], []
for lvl in range(k_min, k_max + 1):
suffix = 'fpn{}'.format(lvl)
cls_probs.append(core.ScopedName('retnet_cls_prob_{}'.format(suffix)))
box_preds.append(core.ScopedName('retnet_bbox_pred_{}'.format(suffix)))
for k, v in inputs.items():
workspace.FeedBlob(core.ScopedName(k), v.astype(np.float32, copy=False))
workspace.RunNet(model.net.Proto().name)
cls_probs = workspace.FetchBlobs(cls_probs)
box_preds = workspace.FetchBlobs(box_preds)
# here the boxes_all are [x0, y0, x1, y1, score]
boxes_all = defaultdict(list)
cnt = 0
for lvl in range(k_min, k_max + 1):
# create cell anchors array
stride = 2. ** lvl
cell_anchors = anchors[lvl]
# fetch per level probability
cls_prob = cls_probs[cnt]
box_pred = box_preds[cnt]
cls_prob = cls_prob.reshape((
cls_prob.shape[0], A, int(cls_prob.shape[1] / A),
cls_prob.shape[2], cls_prob.shape[3]))
box_pred = box_pred.reshape((
box_pred.shape[0], A, 4, box_pred.shape[2], box_pred.shape[3]))
cnt += 1
if cfg.RETINANET.SOFTMAX:
cls_prob = cls_prob[:, :, 1::, :, :]
cls_prob_ravel = cls_prob.ravel()
# In some cases [especially for very small img sizes], it's possible that
# candidate_ind is empty if we impose threshold 0.05 at all levels. This
# will lead to errors since no detections are found for this image. Hence,
# for lvl 7 which has small spatial resolution, we take the threshold 0.0
th = cfg.RETINANET.INFERENCE_TH if lvl < k_max else 0.0
candidate_inds = np.where(cls_prob_ravel > th)[0]
if (len(candidate_inds) == 0):
continue
pre_nms_topn = min(cfg.RETINANET.PRE_NMS_TOP_N, len(candidate_inds))
inds = np.argpartition(
cls_prob_ravel[candidate_inds], -pre_nms_topn)[-pre_nms_topn:]
inds = candidate_inds[inds]
inds_5d = np.array(np.unravel_index(inds, cls_prob.shape)).transpose()
classes = inds_5d[:, 2]
anchor_ids, y, x = inds_5d[:, 1], inds_5d[:, 3], inds_5d[:, 4]
scores = cls_prob[:, anchor_ids, classes, y, x]
boxes = np.column_stack((x, y, x, y)).astype(dtype=np.float32)
boxes *= stride
boxes += cell_anchors[anchor_ids, :]
if not cfg.RETINANET.CLASS_SPECIFIC_BBOX:
box_deltas = box_pred[0, anchor_ids, :, y, x]
else:
box_cls_inds = classes * 4
box_deltas = np.vstack(
[box_pred[0, ind:ind + 4, yi, xi]
for ind, yi, xi in zip(box_cls_inds, y, x)]
)
pred_boxes = (
box_utils.bbox_transform(boxes, box_deltas)
if cfg.TEST.BBOX_REG else boxes)
pred_boxes /= im_scale
pred_boxes = box_utils.clip_tiled_boxes(pred_boxes, im.shape)
box_scores = np.zeros((pred_boxes.shape[0], 5))
box_scores[:, 0:4] = pred_boxes
box_scores[:, 4] = scores
for cls in range(1, cfg.MODEL.NUM_CLASSES):
inds = np.where(classes == cls - 1)[0]
if len(inds) > 0:
boxes_all[cls].extend(box_scores[inds, :])
timers['im_detect_bbox'].toc()
# Combine predictions across all levels and retain the top scoring by class
timers['misc_bbox'].tic()
detections = []
for cls, boxes in boxes_all.items():
cls_dets = np.vstack(boxes).astype(dtype=np.float32)
# do class specific nms here
if cfg.TEST.SOFT_NMS.ENABLED:
cls_dets, keep = box_utils.soft_nms(
cls_dets,
sigma=cfg.TEST.SOFT_NMS.SIGMA,
overlap_thresh=cfg.TEST.NMS,
score_thresh=0.0001,
method=cfg.TEST.SOFT_NMS.METHOD
)
else:
keep = box_utils.nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep, :]
out = np.zeros((len(keep), 6))
out[:, 0:5] = cls_dets
out[:, 5].fill(cls)
detections.append(out)
# detections (N, 6) format:
# detections[:, :4] - boxes
# detections[:, 4] - scores
# detections[:, 5] - classes
detections = np.vstack(detections)
# sort all again
inds = np.argsort(-detections[:, 4])
detections = detections[inds[0:cfg.TEST.DETECTIONS_PER_IM], :]
# Convert the detections to image cls_ format (see core/test_engine.py)
num_classes = cfg.MODEL.NUM_CLASSES
cls_boxes = [[] for _ in range(cfg.MODEL.NUM_CLASSES)]
for c in range(1, num_classes):
inds = np.where(detections[:, 5] == c)[0]
cls_boxes[c] = detections[inds, :5]
timers['misc_bbox'].toc()
return cls_boxes
| apache-2.0 | -7,050,057,338,502,659,000 | 38.125 | 82 | 0.604856 | false |
xia2/xia2 | tests/Modules/Indexer/test_DIALS_indexer.py | 1 | 2572 | from unittest import mock
import os
import pytest
import sys
from dxtbx.model import ExperimentList
from xia2.Handlers.Phil import PhilIndex
from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
from xia2.Schema.XCrystal import XCrystal
from xia2.Schema.XWavelength import XWavelength
from xia2.Schema.XSweep import XSweep
from xia2.Schema.XSample import XSample
def exercise_dials_indexer(dials_data, tmp_dir, nproc=None):
if nproc is not None:
PhilIndex.params.xia2.settings.multiprocessing.nproc = nproc
template = dials_data("insulin").join("insulin_1_###.img").strpath
indexer = DialsIndexer()
indexer.set_working_directory(tmp_dir)
experiments = ExperimentList.from_templates([template])
imageset = experiments.imagesets()[0]
indexer.add_indexer_imageset(imageset)
cryst = XCrystal("CRYST1", None)
wav = XWavelength("WAVE1", cryst, imageset.get_beam().get_wavelength())
samp = XSample("X1", cryst)
directory, image = os.path.split(imageset.get_path(1))
sweep = XSweep("SWEEP1", wav, samp, directory=directory, image=image)
indexer.set_indexer_sweep(sweep)
indexer.index()
assert indexer.get_indexer_cell() == pytest.approx(
(78.14, 78.14, 78.14, 90, 90, 90), rel=1e-3
)
solution = indexer.get_solution()
assert solution["rmsd"] == pytest.approx(0.03545, abs=1e-3)
assert solution["metric"] == pytest.approx(0.02517, abs=5e-3)
assert solution["number"] == 22
assert solution["lattice"] == "cI"
beam_centre = indexer.get_indexer_beam_centre()
assert beam_centre == pytest.approx(
(94.41567208118963, 94.51337522659865), abs=1e-3
)
print(indexer.get_indexer_experiment_list()[0].crystal)
print(indexer.get_indexer_experiment_list()[0].detector)
# test serialization of indexer
json_str = indexer.as_json()
indexer2 = DialsIndexer.from_json(string=json_str)
indexer2.index()
assert indexer.get_indexer_cell() == pytest.approx(indexer2.get_indexer_cell())
assert indexer.get_indexer_beam_centre() == pytest.approx(
indexer2.get_indexer_beam_centre()
)
indexer.eliminate()
indexer2.eliminate()
assert indexer.get_indexer_cell() == pytest.approx(indexer2.get_indexer_cell())
assert indexer.get_indexer_lattice() == "hR"
assert indexer2.get_indexer_lattice() == "hR"
def test_dials_indexer_serial(regression_test, ccp4, dials_data, run_in_tmpdir):
with mock.patch.object(sys, "argv", []):
exercise_dials_indexer(dials_data, run_in_tmpdir.strpath, nproc=1)
| bsd-3-clause | -5,237,932,397,015,187,000 | 33.756757 | 83 | 0.702955 | false |
BanzaiTokyo/askapp | askapp/migrations/0019_auto_20191119_0846.py | 2 | 1308 | # Generated by Django 2.2.5 on 2019-11-19 08:46
from django.db import migrations, models
import django.db.models.deletion
def add_level(apps, schema_editor):
UserLevel = apps.get_model("askapp", "UserLevel")
db_alias = schema_editor.connection.alias
UserLevel.objects.using(db_alias).bulk_create([
UserLevel(name="Level 1"),
])
class Migration(migrations.Migration):
dependencies = [
('askapp', '0018_auto_20191109_1248'),
]
operations = [
migrations.CreateModel(
name='UserLevel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('upvotes', models.IntegerField(default=3)),
('downvotes', models.IntegerField(default=0)),
('upvote_same', models.IntegerField(default=1)),
('downvote_same', models.IntegerField(default=1)),
],
),
migrations.RunPython(add_level),
migrations.AddField(
model_name='profile',
name='level',
field=models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='askapp.UserLevel'),
),
]
| apache-2.0 | 5,249,319,833,664,830,000 | 32.538462 | 129 | 0.59633 | false |
dutradda/myreco | tests/integration/engines/test_engines_integration.py | 1 | 19507 | # MIT License
# Copyright (c) 2016 Diogo Dutra <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import tempfile
from datetime import datetime
from time import sleep
from unittest import mock
from swaggerit.models._base import _all_models
from tests.integration.fixtures import EngineStrategyTest
import pytest
import ujson
@pytest.fixture
def init_db(models, session, api):
user = {
'name': 'test',
'email': 'test',
'password': 'test',
'admin': True
}
session.loop.run_until_complete(models['users'].insert(session, user))
tmp = tempfile.TemporaryDirectory()
store = {
'name': 'test',
'country': 'test',
'configuration': {}
}
session.loop.run_until_complete(models['stores'].insert(session, store))
item_type = {
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'stores': [{'id': 1}]
}
session.loop.run_until_complete(models['item_types'].insert(session, item_type))
strategy = {
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest'
}
session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy))
engine = {
'name': 'Seven Days Top Seller',
'objects': [{
'_operation': 'insert',
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7}
}],
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}
session.loop.run_until_complete(models['engines'].insert(session, engine))
tmp.cleanup()
_all_models.pop('store_items_products_1', None)
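# The init_db fixture above seeds one admin user, store, 'products' item type,
# strategy and engine (each with id 1) that the request tests below rely on.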
class TestEnginesModelPost(object):
async def test_post_without_body(self, init_db, headers, client):
client = await client
resp = await client.post('/engines/', headers=headers)
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is missing'}
async def test_post_with_invalid_body(self, init_db, headers, client):
client = await client
resp = await client.post('/engines/', headers=headers, data='[{}]')
assert resp.status == 400
assert await resp.json() == {
'message': "'store_id' is a required property. Failed validating "\
"instance['0'] for schema['items']['required']",
'schema': {
'type': 'object',
'additionalProperties': False,
'required': ['store_id', 'item_type_id', 'strategy_id'],
'properties': {
'name': {'type': 'string'},
'store_id': {'type': 'integer'},
'item_type_id': {'type': 'integer'},
'strategy_id': {'type': 'integer'},
'objects': {
'type': 'array',
'minItems': 1,
'items': {
'type': 'object',
'oneOf': [
{'$ref': '#/definitions/EnginesModel.insert_objects_schema'},
{'$ref': '#/definitions/EnginesModel.update_objects_schema'},
{'$ref': '#/definitions/EnginesModel.get_objects_schema'}
]
}
}
}
}
}
async def test_post_with_invalid_grant(self, init_db, client):
client = await client
resp = await client.post('/engines/', headers={'Authorization': 'invalid'})
assert resp.status == 401
assert await resp.json() == {'message': 'Invalid authorization'}
async def test_post_valid_with_insert_object(self, init_db, headers, client):
body = [{
'name': 'Seven Days Top Seller 2',
'objects': [{
'_operation': 'insert',
'name': 'Top Seller Object 2',
'type': 'top_seller_array',
'configuration': {'days_interval': 7}
}],
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
client = await client
resp = await client.post('/engines/', headers=headers, data=ujson.dumps(body))
assert resp.status == 201
body[0]['id'] = 2
body[0]['store'] = {'id': 1, 'name': 'test', 'country': 'test', 'configuration': {}}
body[0]['strategy_id'] = 1
body[0]['item_type'] = {
'id': 1,
'store_items_class': None,
'stores': [body[0]['store']],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
}
body[0]['strategy'] = {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
body[0]['objects'] = [{
'id': 2,
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'name': 'Top Seller Object 2',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'item_type': body[0]['item_type'],
'store': body[0]['store'],
'strategy': body[0]['strategy']
}]
body[0]['variables'] = []
assert await resp.json() == body
async def test_post_valid_with_update_object(self, init_db, headers, client):
body = [{
'name': 'Seven Days Top Seller 2',
'objects': [{
'_operation': 'update',
'id': 1,
'name': 'Top Seller Object 2'
}],
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
client = await client
resp = await client.post('/engines/', headers=headers, data=ujson.dumps(body))
assert resp.status == 201
body[0]['id'] = 2
body[0]['store'] = {'id': 1, 'name': 'test', 'country': 'test', 'configuration': {}}
body[0]['strategy_id'] = 1
body[0]['item_type'] = {
'id': 1,
'store_items_class': None,
'stores': [body[0]['store']],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
}
body[0]['strategy'] = {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
body[0]['objects'] = [{
'id': 1,
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'name': 'Top Seller Object 2',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'item_type': body[0]['item_type'],
'store': body[0]['store'],
'strategy': body[0]['strategy']
}]
body[0]['variables'] = []
assert await resp.json() == body
async def test_post_valid_with_get_object(self, init_db, headers, client):
body = [{
'name': 'Seven Days Top Seller 2',
'objects': [{
'_operation': 'get',
'id': 1
}],
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
client = await client
resp = await client.post('/engines/', headers=headers, data=ujson.dumps(body))
assert resp.status == 201
body[0]['id'] = 2
body[0]['store'] = {'id': 1, 'name': 'test', 'country': 'test', 'configuration': {}}
body[0]['strategy_id'] = 1
body[0]['item_type'] = {
'id': 1,
'store_items_class': None,
'stores': [body[0]['store']],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
}
body[0]['strategy'] = {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
body[0]['objects'] = [{
'id': 1,
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'item_type': body[0]['item_type'],
'store': body[0]['store'],
'strategy': body[0]['strategy']
}]
body[0]['variables'] = []
assert await resp.json() == body
class TestEnginesModelGet(object):
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get('/engines/?store_id=2', headers=headers_without_content_type)
assert resp.status == 404
async def test_get_invalid_with_body(self, init_db, headers, client):
client = await client
resp = await client.get('/engines/?store_id=1', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_valid(self, init_db, headers, headers_without_content_type, client):
body = [{
'name': 'Seven Days Top Seller',
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
body[0]['id'] = 1
body[0]['store'] = {'id': 1, 'name': 'test', 'country': 'test', 'configuration': {}}
body[0]['strategy_id'] = 1
body[0]['item_type'] = {
'id': 1,
'store_items_class': None,
'stores': [body[0]['store']],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
}
body[0]['strategy'] = {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
body[0]['objects'] = [{
'id': 1,
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'item_type': body[0]['item_type'],
'store': body[0]['store'],
'strategy': body[0]['strategy']
}]
body[0]['variables'] = []
client = await client
resp = await client.get('/engines/?store_id=1', headers=headers_without_content_type)
assert resp.status == 200
assert await resp.json() == body
class TestEnginesModelUriTemplatePatch(object):
async def test_patch_without_body(self, init_db, headers, client):
client = await client
resp = await client.patch('/engines/1/', headers=headers, data='')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is missing'}
async def test_patch_with_invalid_body(self, init_db, headers, client):
client = await client
resp = await client.patch('/engines/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {
'message': "{} does not have enough properties. "\
"Failed validating instance for schema['minProperties']",
'schema': {
'type': 'object',
'additionalProperties': False,
'minProperties': 1,
'properties': {
'name': {'type': 'string'},
'store_id': {'type': 'integer'},
'item_type_id': {'type': 'integer'},
'strategy_id': {'type': 'integer'},
'objects': {
'type': 'array',
'minItems': 1,
'items': {
'anyOf': [
{'$ref': '#/definitions/EnginesModel.insert_objects_schema'},
{'$ref': '#/definitions/EnginesModel.update_objects_schema'},
{'$ref': '#/definitions/EnginesModel.get_objects_schema'},
{'$ref': '#/definitions/EnginesModel.delete_remove_objects_schema'}
]
}
}
}
}
}
async def test_patch_not_found(self, init_db, headers, client):
body = {
'name': 'test',
'store_id': 1
}
client = await client
resp = await client.patch('/engines/2/', headers=headers, data=ujson.dumps(body))
assert resp.status == 404
async def test_patch(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engines/1', headers=headers_without_content_type)
obj = await resp.json()
body = {
'name': 'test2'
}
resp = await client.patch('/engines/1/', headers=headers, data=ujson.dumps(body))
obj['name'] = 'test2'
assert resp.status == 200
assert await resp.json() == obj
async def test_patch_with_invalid_update_object_configuration(self, init_db, headers, client):
body = {
'objects': [{'id': 1, '_operation': 'update', 'configuration': {}}]
}
client = await client
resp = await client.patch('/engines/1/', headers=headers, data=ujson.dumps(body))
assert resp.status == 400
assert await resp.json() == {
'message': "'days_interval' is a required property. "\
"Failed validating instance['top_seller_array'] for "\
"schema['properties']['top_seller_array']['required']",
'schema': {
'type': 'object',
'required': ['days_interval'],
'additionalProperties': False,
'properties': {
'days_interval': {'type': 'integer'}
}
}
}
class TestEnginesModelUriTemplateDelete(object):
async def test_delete_with_body(self, init_db, headers, client):
client = await client
resp = await client.delete('/engines/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_delete(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engines/1/', headers=headers_without_content_type)
assert resp.status == 200
resp = await client.delete('/engines/1/', headers=headers_without_content_type)
assert resp.status == 204
resp = await client.get('/engines/1/', headers=headers_without_content_type)
assert resp.status == 404
class TestEnginesModelUriTemplateGet(object):
async def test_get_with_body(self, init_db, headers, client):
client = await client
resp = await client.get('/engines/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get('/engines/2/', headers=headers_without_content_type)
assert resp.status == 404
async def test_get(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engines/1/', headers=headers_without_content_type)
body = [{
'name': 'Seven Days Top Seller',
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
body[0]['id'] = 1
body[0]['store'] = {'id': 1, 'name': 'test', 'country': 'test', 'configuration': {}}
body[0]['strategy_id'] = 1
body[0]['item_type'] = {
'id': 1,
'store_items_class': None,
'stores': [body[0]['store']],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
}
body[0]['strategy'] = {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
body[0]['objects'] = [{
'id': 1,
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'item_type': body[0]['item_type'],
'store': body[0]['store'],
'strategy': body[0]['strategy']
}]
body[0]['variables'] = []
assert resp.status == 200
assert await resp.json() == body[0]
| mit | -6,780,705,163,787,334,000 | 36.01518 | 99 | 0.499103 | false |
MarkTheF4rth/youtube-dl | youtube_dl/extractor/crunchyroll.py | 29 | 16834 | # encoding: utf-8
from __future__ import unicode_literals
import re
import json
import base64
import zlib
import xml.etree.ElementTree
from hashlib import sha1
from math import pow, sqrt, floor
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_parse_unquote,
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
ExtractorError,
bytes_to_intlist,
intlist_to_bytes,
int_or_none,
remove_end,
unified_strdate,
urlencode_postdata,
xpath_text,
)
from ..aes import (
aes_cbc_decrypt,
)
class CrunchyrollBaseIE(InfoExtractor):
def _download_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True, tries=1, timeout=5, encoding=None):
request = (url_or_request if isinstance(url_or_request, compat_urllib_request.Request)
else compat_urllib_request.Request(url_or_request))
# Accept-Language must be set explicitly to accept any language to avoid issues
# similar to https://github.com/rg3/youtube-dl/issues/6797.
# Along with IP address Crunchyroll uses Accept-Language to guess whether georestriction
# should be imposed or not (from what I can see it just takes the first language
        # ignoring the priority and requires it to correspond to the IP). This causes
# Crunchyroll to not work in georestriction cases in some browsers that don't place
# the locale lang first in header. However allowing any language seems to workaround the issue.
request.add_header('Accept-Language', '*')
return super(CrunchyrollBaseIE, self)._download_webpage(
request, video_id, note, errnote, fatal, tries, timeout, encoding)
class CrunchyrollIE(CrunchyrollBaseIE):
_VALID_URL = r'https?://(?:(?P<prefix>www|m)\.)?(?P<url>crunchyroll\.(?:com|fr)/(?:media(?:-|/\?id=)|[^/]*/[^/?&]*?)(?P<video_id>[0-9]+))(?:[/?&]|$)'
_NETRC_MACHINE = 'crunchyroll'
_TESTS = [{
'url': 'http://www.crunchyroll.com/wanna-be-the-strongest-in-the-world/episode-1-an-idol-wrestler-is-born-645513',
'info_dict': {
'id': '645513',
'ext': 'flv',
'title': 'Wanna be the Strongest in the World Episode 1 – An Idol-Wrestler is Born!',
'description': 'md5:2d17137920c64f2f49981a7797d275ef',
'thumbnail': 'http://img1.ak.crunchyroll.com/i/spire1-tmb/20c6b5e10f1a47b10516877d3c039cae1380951166_full.jpg',
'uploader': 'Yomiuri Telecasting Corporation (YTV)',
'upload_date': '20131013',
'url': 're:(?!.*&)',
},
'params': {
# rtmp
'skip_download': True,
},
}, {
'url': 'http://www.crunchyroll.com/media-589804/culture-japan-1',
'info_dict': {
'id': '589804',
'ext': 'flv',
'title': 'Culture Japan Episode 1 – Rebuilding Japan after the 3.11',
'description': 'md5:fe2743efedb49d279552926d0bd0cd9e',
'thumbnail': 're:^https?://.*\.jpg$',
'uploader': 'Danny Choo Network',
'upload_date': '20120213',
},
'params': {
# rtmp
'skip_download': True,
},
}, {
'url': 'http://www.crunchyroll.fr/girl-friend-beta/episode-11-goodbye-la-mode-661697',
'only_matching': True,
}]
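    # Maps the page's resolution label to the (video_quality, video_format) ids
    # sent to the RpcApiVideoPlayer_GetStandardConfig endpoint further down.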
_FORMAT_IDS = {
'360': ('60', '106'),
'480': ('61', '106'),
'720': ('62', '106'),
'1080': ('80', '108'),
}
def _login(self):
(username, password) = self._get_login_info()
if username is None:
return
self.report_login()
login_url = 'https://www.crunchyroll.com/?a=formhandler'
data = urlencode_postdata({
'formname': 'RpcApiUser_Login',
'name': username,
'password': password,
})
login_request = compat_urllib_request.Request(login_url, data)
login_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
self._download_webpage(login_request, None, False, 'Wrong login info')
def _real_initialize(self):
self._login()
def _decrypt_subtitles(self, data, iv, id):
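        # The payload is base64-encoded AES-CBC ciphertext: the key is derived
        # from the numeric subtitle id and the decrypted bytes are
        # zlib-compressed XML.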
data = bytes_to_intlist(base64.b64decode(data.encode('utf-8')))
iv = bytes_to_intlist(base64.b64decode(iv.encode('utf-8')))
id = int(id)
def obfuscate_key_aux(count, modulo, start):
output = list(start)
for _ in range(count):
output.append(output[-1] + output[-2])
# cut off start values
output = output[2:]
output = list(map(lambda x: x % modulo + 33, output))
return output
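        # Derive the AES key: mix the subtitle id with a fixed constant, SHA-1 a
        # Fibonacci-style prefix concatenated with that number, then zero-pad the
        # 160-bit digest to 256 bits.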
def obfuscate_key(key):
num1 = int(floor(pow(2, 25) * sqrt(6.9)))
num2 = (num1 ^ key) << 5
num3 = key ^ num1
num4 = num3 ^ (num3 >> 3) ^ num2
prefix = intlist_to_bytes(obfuscate_key_aux(20, 97, (1, 2)))
shaHash = bytes_to_intlist(sha1(prefix + str(num4).encode('ascii')).digest())
# Extend 160 Bit hash to 256 Bit
return shaHash + [0] * 12
key = obfuscate_key(id)
decrypted_data = intlist_to_bytes(aes_cbc_decrypt(data, key, iv))
return zlib.decompress(decrypted_data)
def _convert_subtitles_to_srt(self, sub_root):
output = ''
for i, event in enumerate(sub_root.findall('./events/event'), 1):
start = event.attrib['start'].replace('.', ',')
end = event.attrib['end'].replace('.', ',')
text = event.attrib['text'].replace('\\N', '\n')
output += '%d\n%s --> %s\n%s\n\n' % (i, start, end, text)
return output
def _convert_subtitles_to_ass(self, sub_root):
output = ''
def ass_bool(strvalue):
assvalue = '0'
if strvalue == '1':
assvalue = '-1'
return assvalue
output = '[Script Info]\n'
output += 'Title: %s\n' % sub_root.attrib["title"]
output += 'ScriptType: v4.00+\n'
output += 'WrapStyle: %s\n' % sub_root.attrib["wrap_style"]
output += 'PlayResX: %s\n' % sub_root.attrib["play_res_x"]
output += 'PlayResY: %s\n' % sub_root.attrib["play_res_y"]
output += """ScaledBorderAndShadow: yes
[V4+ Styles]
Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
"""
for style in sub_root.findall('./styles/style'):
output += 'Style: ' + style.attrib["name"]
output += ',' + style.attrib["font_name"]
output += ',' + style.attrib["font_size"]
output += ',' + style.attrib["primary_colour"]
output += ',' + style.attrib["secondary_colour"]
output += ',' + style.attrib["outline_colour"]
output += ',' + style.attrib["back_colour"]
output += ',' + ass_bool(style.attrib["bold"])
output += ',' + ass_bool(style.attrib["italic"])
output += ',' + ass_bool(style.attrib["underline"])
output += ',' + ass_bool(style.attrib["strikeout"])
output += ',' + style.attrib["scale_x"]
output += ',' + style.attrib["scale_y"]
output += ',' + style.attrib["spacing"]
output += ',' + style.attrib["angle"]
output += ',' + style.attrib["border_style"]
output += ',' + style.attrib["outline"]
output += ',' + style.attrib["shadow"]
output += ',' + style.attrib["alignment"]
output += ',' + style.attrib["margin_l"]
output += ',' + style.attrib["margin_r"]
output += ',' + style.attrib["margin_v"]
output += ',' + style.attrib["encoding"]
output += '\n'
output += """
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
"""
for event in sub_root.findall('./events/event'):
output += 'Dialogue: 0'
output += ',' + event.attrib["start"]
output += ',' + event.attrib["end"]
output += ',' + event.attrib["style"]
output += ',' + event.attrib["name"]
output += ',' + event.attrib["margin_l"]
output += ',' + event.attrib["margin_r"]
output += ',' + event.attrib["margin_v"]
output += ',' + event.attrib["effect"]
output += ',' + event.attrib["text"]
output += '\n'
return output
def _extract_subtitles(self, subtitle):
sub_root = xml.etree.ElementTree.fromstring(subtitle)
return [{
'ext': 'srt',
'data': self._convert_subtitles_to_srt(sub_root),
}, {
'ext': 'ass',
'data': self._convert_subtitles_to_ass(sub_root),
}]
def _get_subtitles(self, video_id, webpage):
subtitles = {}
for sub_id, sub_name in re.findall(r'\?ssid=([0-9]+)" title="([^"]+)', webpage):
sub_page = self._download_webpage(
'http://www.crunchyroll.com/xml/?req=RpcApiSubtitle_GetXml&subtitle_script_id=' + sub_id,
video_id, note='Downloading subtitles for ' + sub_name)
id = self._search_regex(r'id=\'([0-9]+)', sub_page, 'subtitle_id', fatal=False)
iv = self._search_regex(r'<iv>([^<]+)', sub_page, 'subtitle_iv', fatal=False)
data = self._search_regex(r'<data>([^<]+)', sub_page, 'subtitle_data', fatal=False)
if not id or not iv or not data:
continue
subtitle = self._decrypt_subtitles(data, iv, id).decode('utf-8')
lang_code = self._search_regex(r'lang_code=["\']([^"\']+)', subtitle, 'subtitle_lang_code', fatal=False)
if not lang_code:
continue
subtitles[lang_code] = self._extract_subtitles(subtitle)
return subtitles
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('video_id')
if mobj.group('prefix') == 'm':
mobile_webpage = self._download_webpage(url, video_id, 'Downloading mobile webpage')
webpage_url = self._search_regex(r'<link rel="canonical" href="([^"]+)" />', mobile_webpage, 'webpage_url')
else:
webpage_url = 'http://www.' + mobj.group('url')
webpage = self._download_webpage(webpage_url, video_id, 'Downloading webpage')
note_m = self._html_search_regex(
r'<div class="showmedia-trailer-notice">(.+?)</div>',
webpage, 'trailer-notice', default='')
if note_m:
raise ExtractorError(note_m)
mobj = re.search(r'Page\.messaging_box_controller\.addItems\(\[(?P<msg>{.+?})\]\)', webpage)
if mobj:
msg = json.loads(mobj.group('msg'))
if msg.get('type') == 'error':
raise ExtractorError('crunchyroll returned error: %s' % msg['message_body'], expected=True)
if 'To view this, please log in to verify you are 18 or older.' in webpage:
self.raise_login_required()
video_title = self._html_search_regex(r'<h1[^>]*>(.+?)</h1>', webpage, 'video_title', flags=re.DOTALL)
video_title = re.sub(r' {2,}', ' ', video_title)
video_description = self._html_search_regex(r'"description":"([^"]+)', webpage, 'video_description', default='')
if not video_description:
video_description = None
video_upload_date = self._html_search_regex(
[r'<div>Availability for free users:(.+?)</div>', r'<div>[^<>]+<span>\s*(.+?\d{4})\s*</span></div>'],
webpage, 'video_upload_date', fatal=False, flags=re.DOTALL)
if video_upload_date:
video_upload_date = unified_strdate(video_upload_date)
video_uploader = self._html_search_regex(
r'<a[^>]+href="/publisher/[^"]+"[^>]*>([^<]+)</a>', webpage,
'video_uploader', fatal=False)
playerdata_url = compat_urllib_parse_unquote(self._html_search_regex(r'"config_url":"([^"]+)', webpage, 'playerdata_url'))
playerdata_req = compat_urllib_request.Request(playerdata_url)
playerdata_req.data = compat_urllib_parse.urlencode({'current_page': webpage_url})
playerdata_req.add_header('Content-Type', 'application/x-www-form-urlencoded')
playerdata = self._download_webpage(playerdata_req, video_id, note='Downloading media info')
stream_id = self._search_regex(r'<media_id>([^<]+)', playerdata, 'stream_id')
video_thumbnail = self._search_regex(r'<episode_image_url>([^<]+)', playerdata, 'thumbnail', fatal=False)
formats = []
for fmt in re.findall(r'showmedia\.([0-9]{3,4})p', webpage):
stream_quality, stream_format = self._FORMAT_IDS[fmt]
video_format = fmt + 'p'
streamdata_req = compat_urllib_request.Request(
'http://www.crunchyroll.com/xml/?req=RpcApiVideoPlayer_GetStandardConfig&media_id=%s&video_format=%s&video_quality=%s'
% (stream_id, stream_format, stream_quality),
compat_urllib_parse.urlencode({'current_page': url}).encode('utf-8'))
streamdata_req.add_header('Content-Type', 'application/x-www-form-urlencoded')
streamdata = self._download_xml(
streamdata_req, video_id,
note='Downloading media info for %s' % video_format)
stream_info = streamdata.find('./{default}preload/stream_info')
video_url = stream_info.find('./host').text
video_play_path = stream_info.find('./file').text
metadata = stream_info.find('./metadata')
format_info = {
'format': video_format,
'format_id': video_format,
'height': int_or_none(xpath_text(metadata, './height')),
'width': int_or_none(xpath_text(metadata, './width')),
}
if '.fplive.net/' in video_url:
video_url = re.sub(r'^rtmpe?://', 'http://', video_url.strip())
parsed_video_url = compat_urlparse.urlparse(video_url)
direct_video_url = compat_urlparse.urlunparse(parsed_video_url._replace(
netloc='v.lvlt.crcdn.net',
path='%s/%s' % (remove_end(parsed_video_url.path, '/'), video_play_path.split(':')[-1])))
if self._is_valid_url(direct_video_url, video_id, video_format):
format_info.update({
'url': direct_video_url,
})
formats.append(format_info)
continue
format_info.update({
'url': video_url,
'play_path': video_play_path,
'ext': 'flv',
})
formats.append(format_info)
subtitles = self.extract_subtitles(video_id, webpage)
return {
'id': video_id,
'title': video_title,
'description': video_description,
'thumbnail': video_thumbnail,
'uploader': video_uploader,
'upload_date': video_upload_date,
'subtitles': subtitles,
'formats': formats,
}
class CrunchyrollShowPlaylistIE(CrunchyrollBaseIE):
IE_NAME = "crunchyroll:playlist"
_VALID_URL = r'https?://(?:(?P<prefix>www|m)\.)?(?P<url>crunchyroll\.com/(?!(?:news|anime-news|library|forum|launchcalendar|lineup|store|comics|freetrial|login))(?P<id>[\w\-]+))/?$'
_TESTS = [{
'url': 'http://www.crunchyroll.com/a-bridge-to-the-starry-skies-hoshizora-e-kakaru-hashi',
'info_dict': {
'id': 'a-bridge-to-the-starry-skies-hoshizora-e-kakaru-hashi',
'title': 'A Bridge to the Starry Skies - Hoshizora e Kakaru Hashi'
},
'playlist_count': 13,
}]
def _real_extract(self, url):
show_id = self._match_id(url)
webpage = self._download_webpage(url, show_id)
title = self._html_search_regex(
r'(?s)<h1[^>]*>\s*<span itemprop="name">(.*?)</span>',
webpage, 'title')
episode_paths = re.findall(
r'(?s)<li id="showview_videos_media_[0-9]+"[^>]+>.*?<a href="([^"]+)"',
webpage)
entries = [
self.url_result('http://www.crunchyroll.com' + ep, 'Crunchyroll')
for ep in episode_paths
]
entries.reverse()
return {
'_type': 'playlist',
'id': show_id,
'title': title,
'entries': entries,
}
| unlicense | -993,487,340,882,416,400 | 42.488372 | 237 | 0.552704 | false |
GuessWhoSamFoo/pandas | pandas/tests/indexes/multi/test_format.py | 2 | 3635 | # -*- coding: utf-8 -*-
import warnings
import pytest
from pandas.compat import PY3, range, u
import pandas as pd
from pandas import MultiIndex, compat
import pandas.util.testing as tm
def test_dtype_str(indices):
dtype = indices.dtype_str
assert isinstance(dtype, compat.string_types)
assert dtype == str(indices.dtype)
def test_format(idx):
idx.format()
idx[:0].format()
def test_format_integer_names():
index = MultiIndex(levels=[[0, 1], [0, 1]],
codes=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[0, 1])
index.format(names=True)
def test_format_sparse_config(idx):
warn_filters = warnings.filters
warnings.filterwarnings('ignore', category=FutureWarning,
module=".*format")
# GH1538
pd.set_option('display.multi_sparse', False)
result = idx.format()
assert result[1] == 'foo two'
tm.reset_display_options()
warnings.filters = warn_filters
def test_format_sparse_display():
index = MultiIndex(levels=[[0, 1], [0, 1], [0, 1], [0]],
codes=[[0, 0, 0, 1, 1, 1], [0, 0, 1, 0, 0, 1],
[0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0]])
result = index.format()
assert result[3] == '1 0 0 0'
def test_repr_with_unicode_data():
with pd.core.config.option_context("display.encoding", 'UTF-8'):
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
index = pd.DataFrame(d).set_index(["a", "b"]).index
assert "\\u" not in repr(index) # we don't want unicode-escaped
@pytest.mark.skip(reason="#22511 will remove this test")
def test_repr_roundtrip():
mi = MultiIndex.from_product([list('ab'), range(3)],
names=['first', 'second'])
str(mi)
if PY3:
tm.assert_index_equal(eval(repr(mi)), mi, exact=True)
else:
result = eval(repr(mi))
# string coerces to unicode
tm.assert_index_equal(result, mi, exact=False)
assert mi.get_level_values('first').inferred_type == 'string'
assert result.get_level_values('first').inferred_type == 'unicode'
mi_u = MultiIndex.from_product(
[list(u'ab'), range(3)], names=['first', 'second'])
result = eval(repr(mi_u))
tm.assert_index_equal(result, mi_u, exact=True)
# formatting
if PY3:
str(mi)
else:
compat.text_type(mi)
# long format
mi = MultiIndex.from_product([list('abcdefg'), range(10)],
names=['first', 'second'])
if PY3:
tm.assert_index_equal(eval(repr(mi)), mi, exact=True)
else:
result = eval(repr(mi))
# string coerces to unicode
tm.assert_index_equal(result, mi, exact=False)
assert mi.get_level_values('first').inferred_type == 'string'
assert result.get_level_values('first').inferred_type == 'unicode'
result = eval(repr(mi_u))
tm.assert_index_equal(result, mi_u, exact=True)
def test_unicode_string_with_unicode():
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
idx = pd.DataFrame(d).set_index(["a", "b"]).index
if PY3:
str(idx)
else:
compat.text_type(idx)
def test_bytestring_with_unicode():
d = {"a": [u("\u05d0"), 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
idx = pd.DataFrame(d).set_index(["a", "b"]).index
if PY3:
bytes(idx)
else:
str(idx)
def test_repr_max_seq_item_setting(idx):
# GH10182
idx = idx.repeat(50)
with pd.option_context("display.max_seq_items", None):
repr(idx)
assert '...' not in str(idx)
| bsd-3-clause | 7,238,619,758,494,744,000 | 26.537879 | 74 | 0.561486 | false |
aleksandar-mitrevski/fault_and_anomaly_detection | generative_model_fd/tests/multi-memory/test_td_trbm.py | 1 | 3598 | import numpy as np
from test_generic import *
def train1(y, f, model_manager, number_of_model_parameters, sequence_len):
x = np.linspace(0, sequence_len, sequence_len)
models = model_manager.optimise_td(x, y, f, sequence_len)
models, arr_min, arr_max = rescale(models)
network = TRBM(number_of_model_parameters,number_of_model_parameters*2)
network.train(models, epochs=10, learning_rate=0.1)
return network, arr_min, arr_max
def train2(y, f, model_manager, number_of_model_parameters, sequence_len):
x = np.linspace(0, sequence_len, sequence_len)
models = model_manager.optimise_td(x, y, f, sequence_len)
models, arr_min, arr_max = rescale(models)
network = TRBM(number_of_model_parameters,number_of_model_parameters*3)
network.train(models, epochs=10, learning_rate=0.1)
return network, arr_min, arr_max
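# train1 and train2 differ only in the hidden-layer width: 2x vs 3x the number
# of visible units (model parameters).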
def test(y, arr_min, arr_max, f, network, model_manager, number_of_model_parameters, sequence_len, initial_guess=None):
number_of_sequences = len(y) - sequence_len
x = np.linspace(0, sequence_len, sequence_len)
test_models = model_manager.optimise_td(x, y, f, sequence_len, initial_guess=initial_guess)
test_models,_,_ = rescale(test_models, arr_min, arr_max)
initial_data = np.zeros((number_of_model_parameters,1))
for i in xrange(number_of_model_parameters):
initial_data[i] = np.random.rand()
network.initialise(initial_data)
distances = list()
for i in xrange(0,number_of_sequences):
sample = network.sample_network(test_models[i])
dist = distance(sample, test_models[i])
distances.append(dist)
return distances
def retrain(y, arr_min, arr_max, network, f, model_manager, sequence_len):
x = np.linspace(0, sequence_len, sequence_len)
models = model_manager.optimise_td(x, y, f, sequence_len)
models,_,_ = rescale(models, arr_min, arr_max)
network.train(models, epochs=2, learning_rate=0.1)
return network
sequence_len = 10
number_of_model_parameters = [3,4,5]
model_manager = ModelFitLibrary()
number_of_test_sequences = 90
curve_functions = [lambda x, a, b, c: a * np.square(x) + b * x + c, \
lambda x, a, b, c, d: a * np.power(x,3) + b * np.square(x) + c * x + d,\
lambda x, a, b, c, d, e: a * np.power(x,4) + b * np.power(x,3) + c * np.square(x) + d * x + e]
number_of_model_parameters = [3]
curve_functions = [lambda x, a, b, c: a * np.square(x) + b * x + c]
# print 'Testing with hidden neurons = 2 * visible neurons'
generic_test(train1, retrain, test, curve_functions, model_manager, number_of_model_parameters, sequence_len, number_of_test_sequences)
# print 'Testing with hidden neurons = 3 * visible neurons'
# generic_test(train2, retrain, test, curve_functions, model_manager, number_of_model_parameters, sequence_len, number_of_test_sequences)
# train_data_front = np.genfromtxt('../../logs/laser_front.log')
# test_data_front = np.genfromtxt('../../logs/laser_front_test.log')
# anomalous_data_front = np.genfromtxt('../../logs/laser_front_faulty.log')
# retrain_data_front = np.genfromtxt('../../logs/laser_front_retrain.log')
# train_data = train_data_front.mean(axis=1)
# test_data = test_data_front.mean(axis=1)
# anomalous_data = anomalous_data_front.mean(axis=1)
# retrain_data = retrain_data_front.mean(axis=1)
# number_of_test_sequences = max(test_data_front.shape[0], anomalous_data.shape[0]) - sequence_len
# generic_sensor_test(train_data, retrain_data, test_data, anomalous_data, train1, retrain, test, curve_functions, model_manager, number_of_model_parameters, sequence_len, number_of_test_sequences) | bsd-2-clause | 257,775,259,246,922,800 | 44.556962 | 197 | 0.692885 | false |
cdriehuys/chmvh-website | chmvh_website/chmvh_website/settings.py | 1 | 6882 | """
Django settings for chmvh_website project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import logging
import os
def env_bool(name: str) -> bool:
raw_value = os.getenv(name, "")
return raw_value.lower() == "true"
def env_list(name: str) -> list[str]:
raw_value = os.getenv(name, "")
if not raw_value:
return []
return raw_value.split(",")
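# The helpers above parse deployment toggles such as CHMVH_DEBUG and
# CHMVH_ALLOWED_HOSTS from environment variables.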
SILENCED_SYSTEM_CHECKS = []
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env_bool("CHMVH_DEBUG")
SECRET_KEY = os.getenv("CHMVH_SECRET_KEY")
if DEBUG and not SECRET_KEY:
SECRET_KEY = "debug"
ALLOWED_HOSTS = env_list("CHMVH_ALLOWED_HOSTS")
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.messages",
"django.contrib.sessions",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
# Third Party Apps
"adminsortable2",
"captcha",
"rest_framework",
"sass_processor",
"solo",
# Custom Apps
"common",
"configuration",
"contact",
"gallery",
"resources",
"staticpages",
"team",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "chmvh_website.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(BASE_DIR, "templates")],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"common.context_processors.analytics",
"configuration.context_processors.practice_info",
],
},
},
]
WSGI_APPLICATION = "chmvh_website.wsgi.application"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"HOST": os.getenv("CHMVH_DB_HOST", "localhost"),
"PORT": os.getenv("CHMVH_DB_PORT", "5432"),
"USER": os.getenv("CHMVH_DB_USER", "postgres"),
"PASSWORD": os.getenv("CHMVH_DB_PASSWORD"),
"NAME": os.getenv("CHMVH_DB_NAME", "postgres"),
}
}
if os.getenv("CHMVH_TEST"):
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", # noqa
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", # noqa
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_ROOT = os.getenv("CHMVH_STATIC_ROOT")
STATIC_URL = "/static/"
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
"sass_processor.finders.CssFinder",
]
# Media Files (User Uploaded)
MEDIA_ROOT = os.getenv("CHMVH_MEDIA_ROOT", os.path.join(BASE_DIR, "media"))
MEDIA_URL = "/media/"
# HTTPS
if env_bool("CHMVH_HTTPS"):
CSRF_COOKIE_HTTPONLY = True
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_SSL_REDIRECT = True
X_FRAME_OPTIONS = "DENY"
# Email Settings
DEFAULT_FROM_EMAIL = "[email protected]"
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
EMAIL_SUBJECT_PREFIX = "[CHMVH Website] "
if os.getenv("CHMVH_EMAIL_USER"):
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
EMAIL_HOST = "smtp.sendgrid.net"
EMAIL_HOST_USER = os.getenv("CHMVH_EMAIL_USER")
EMAIL_HOST_PASSWORD = os.getenv("CHMVH_EMAIL_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
if os.getenv("CHMVH_ADMIN_NAME"):
ADMINS = ((os.getenv("CHMVH_ADMIN_NAME"), os.getenv("CHMVH_ADMIN_EMAIL")),)
# Google Analytics
GOOGLE_ANALYTICS_ID = os.getenv("CHMVH_GOOGLE_ANALYTICS_ID")
# ReCAPTCHA
if os.getenv("CHMVH_RECAPTCHA_PRIVATE_KEY"):
RECAPTCHA_PRIVATE_KEY = os.getenv("CHMVH_RECAPTCHA_PRIVATE_KEY")
RECAPTCHA_PUBLIC_KEY = os.getenv("CHMVH_RECAPTCHA_PUBLIC_KEY")
else:
NOCAPTCHA = True
SILENCED_SYSTEM_CHECKS.append("captcha.recaptcha_test_key_error")
# Gallery Settings
GALLERY_THUMBNAIL_SIZE = 300, 300
# Config for django-sass-processor
SASS_PROCESSOR_ROOT = os.path.join(BASE_DIR, "static")
# Config for djangorestframework
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.BasicAuthentication",
"rest_framework.authentication.SessionAuthentication",
),
"DEFAULT_PERMISSION_CLASSES": (
"rest_framework.permissions.IsAuthenticated",
),
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", # noqa
"datefmt": "%d/%b/%Y %H:%M:%S",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "standard",
},
"mail_admins": {
"level": "ERROR",
"class": "django.utils.log.AdminEmailHandler",
},
},
"loggers": {
"root": {
"handlers": ["console", "mail_admins"],
"level": logging.INFO,
}
},
}
| mit | 6,999,142,171,614,039,000 | 24.969811 | 94 | 0.647632 | false |
duncanmmacleod/gwpy | gwpy/table/tests/test_gravityspy.py | 3 | 2176 | # -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2014-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for `gwpy.table`
"""
from ...testing import utils
from ...testing.errors import pytest_skip_network_error
from .. import GravitySpyTable
from .test_table import TestEventTable as _TestEventTable
__author__ = 'Duncan Macleod <[email protected]>'
JSON_RESPONSE = {
"url4": [u"https://panoptes-uploads.zooniverse.org/production/"
"subject_location/08895951-ea30-4cf7-9374-135a335afe0e.png"],
"peak_frequency": [84.4759674072266],
"links_subjects": [5740011.0],
"ml_label": [u"Scratchy"],
"searchedID": [u"8FHTgA8MEu"],
"snr": [8.96664047241211],
"gravityspy_id": [u"8FHTgA8MEu"],
"searchedzooID": [5740011.0],
"ifo": [u"H1"],
"url3": [u"https://panoptes-uploads.zooniverse.org/production/"
"subject_location/415dde44-3109-434c-b3ad-b722a879c159.png"],
"url2": [u"https://panoptes-uploads.zooniverse.org/production/"
"subject_location/09ebb6f4-e839-466f-80a1-64d79ac4d934.png"],
"url1": [u"https://panoptes-uploads.zooniverse.org/production/"
"subject_location/5e89d817-583c-4646-8e6c-9391bb99ad41.png"],
}
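# Expected row returned by GravitySpyTable.search for gravityspy_id "8FHTgA8MEu",
# compared against the live query in test_search below.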
class TestGravitySpyTable(_TestEventTable):
TABLE = GravitySpyTable
@pytest_skip_network_error
def test_search(self):
table = self.TABLE.search(
gravityspy_id="8FHTgA8MEu",
howmany=1,
remote_timeout=60,
)
utils.assert_table_equal(table, self.TABLE(JSON_RESPONSE))
| gpl-3.0 | -1,615,811,458,453,927,200 | 35.881356 | 74 | 0.686581 | false |
galtys/galtys-addons | galtys_deploy/galtys_deploy.py | 1 | 24393 | from odoo import fields, models
#import openerp.addons.decimal_precision as dp
import bitcoin
#Steps
#golive register
#golive pull
#Main menu: Deploy
def secret_random_key(a):
return bitcoin.random_key()
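# Helper returning a fresh random private key via the pybitcointools `bitcoin`
# module; the `a` argument is unused.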
#class SkynetCode(models.Model):
# pass
class deploy_account(models.Model):
_order = "name"
_name = "deploy.account" #Accounts
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
name = fields.Char('Name', size=444)
#'login':fields.Char('Login',size=444),
#'host_ids':fields.Many2many('deploy.host', 'deploy_account_deploy_host_rel', 'host_id', 'account_id', 'Hosts'),
user_ids = fields.Many2many('deploy.host.user', 'deploy_account_deploy_host_user_rel', 'user_id', 'account_id', 'Users')
app_ids = fields.Many2many('deploy.application', 'deploy_account_deploy_application_rel', 'app_id', 'account_id', 'Apps')
class deploy_password(models.Model):
_name = "deploy.password" #Passwords
name = fields.Char('Name',size=1000)
password = fields.Text('Password') #password encrypted
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class host(models.Model):
_name = "deploy.host" #Hosts
name = fields.Char('Name',size=100)
memory_total = fields.Integer('Memory Total Pages')
memory_pagesize = fields.Integer('Memory PageSize')
memory_buffer_percent = fields.Integer('Buffer Size Percent')
user_ids = fields.One2many('deploy.host.user','host_id','Users')
group_ids = fields.One2many('deploy.host.group','host_id','Groups')
cluster_ids = fields.One2many('deploy.pg.cluster','host_id','PG Clusters')
control = fields.Boolean('Control')
ip_forward = fields.Boolean('ip_forward')
ssmtp_root = fields.Char("ssmtp_root", size=444)
ssmtp_mailhub = fields.Char("ssmtp_mailhub", size=444)
ssmtp_rewritedomain = fields.Char("rewritedomain", size=444)
ssmtp_authuser = fields.Char("authuser", size=444)
ssmtp_authpass = fields.Char("authpass", size=444)
#'deploy_ids':fields.One2many('deploy.deploy','host_id','Deployments'),
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
_defaults ={
'ip_forward':False,
'ssmtp_mailhub':'mailhub=smtp.deliverhq.com:2525',
}
class pg_cluster(models.Model):
_name = "deploy.pg.cluster" #Postgresql Clusters
# _rec_name = 'host_id'
host_id = fields.Many2one('deploy.host','Hostname')
port = fields.Integer('Port')
version = fields.Char('Version',size=10)
name = fields.Char('Name', size=20)
description = fields.Char('Description',size=4444)
active = fields.Boolean('Active')
listen_addresses = fields.Char('listen_addresses',size=444)
shared_buffers = fields.Char('shared_buffers',size=444)
fsync = fields.Char('fsync',size=444)
synchronous_commit = fields.Char('synchronous_commit',size=444)
full_page_writes = fields.Char('full_page_writes', size=444)
checkpoint_segments = fields.Char('checkpoint_segments',size=444)
checkpoint_timeout = fields.Char('checkpoint_timeout',size=444)
user_ids = fields.One2many("deploy.pg.user",'cluster_id','PG Users')
# 'database_ids':fields.One2many("deploy.pg.database",'cluster_id','Databases'),
hba_ids = fields.One2many("deploy.pg.hba",'cluster_id','HBA')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
_defaults = {
'listen_addresses':'127.0.0.1',
'shared_buffers':'24MB',
'fsync': 'off',
'synchronous_commit':'off',
'full_page_writes':'off',
'checkpoint_segments':7,
'checkpoint_timeout':'15min',
}
class pg_user(models.Model):
_name="deploy.pg.user"
_rec_name='login'
#_columns ={
cluster_id = fields.Many2one('deploy.pg.cluster','PG Cluster')
account_id = fields.Many2one('res.users','Account')
password_id = fields.Many2one('deploy.password','PG Password')
superuser = fields.Boolean('Superuser')
create_db = fields.Boolean('Create db')
create_role = fields.Boolean('Create Role')
login = fields.Char('Login',size=44)
type = fields.Selection([('real','real'),('virtual','virtual'),('system','system')],'Type' )
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
# }
class pg_hba(models.Model):
_name = "deploy.pg.hba"
name = fields.Char('Description', size=444)
type = fields.Selection([('local','local'),('host','host'),('hostssl','hostssl'),('hostnossl','hostnossl')], 'Type' )
database_ids = fields.Many2one('deploy.pg.database','database')
cluster_id = fields.Many2one('deploy.pg.cluster','PG Cluster')
user = fields.Many2one('deploy.pg.user','PG USER')
address = fields.Char('address',size=444)
ip_address = fields.Char('ip_address',size=444)
ip_mask = fields.Char('ip_mask',size=444)
auth_method = fields.Selection([('peer','peer'),('trust','trust'),('md5','md5')], 'auth_method')
auth_options = fields.Char('auth_options',size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
#}
class host_group(models.Model):
_name = "deploy.host.group" #Host Groups
_rec_name='name'
# _columns = {
name = fields.Char('Name',size=100)
gid = fields.Integer('GID')
host_id = fields.Many2one('deploy.host','Host')
sftp = fields.Boolean('Allow SFTP')
type = fields.Selection([('user','user'),('system','system')],'Type')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class host_user(models.Model):
_name = 'deploy.host.user' #Host User
_rec_name='login'
name = fields.Char('Name',size=100)
login = fields.Char('Login',size=100)
group_id = fields.Many2one('deploy.host.group','Main Group')
password_id = fields.Many2one('deploy.password','Password')
account_id = fields.Many2one('res.users','Account')
#'owner_id':fields.Many2one('res.users','Owner'),
uid = fields.Integer('UID')
ssh = fields.Boolean('Allow SSH')
sudo_x = fields.Boolean('sudo_x')
host_id = fields.Many2one('deploy.host','Host')
home = fields.Char('home',size=44)
shell = fields.Char('shell',size=44)
type = fields.Selection([('user','user'),('system','system')],'Type')
deploy_ids = fields.One2many('deploy.deploy','user_id','Deployments')
app_ids = fields.Many2many('deploy.application', 'host_user_application_rel', 'user_id', 'app_id', 'Apps')
validated_root = fields.Char('Validated ROOT',size=444)
backup_subdir = fields.Char('backup_subdir', size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
#'user_id':fields.Many2one('deploy.host.user','HostUser'),
def name_get2(self,cr, uid, ids):
ret={}
for u in self.browse(ids):
if u.host_id:
ret[u.id]="%s@%s"%(u.login,u.host_id.name)
return ret
# _defaults = {
# 'backup_subdir':'backups',
#Submenu: Applications
class repository(models.Model):
_name = 'deploy.repository' #Repositories
name = fields.Char('Name',size=100)
delete = fields.Boolean('Delete')
pull = fields.Boolean('Pull')
push = fields.Boolean('Push')
type = fields.Selection([('git','git'),('bzr','bzr'),('rsync','RSYNC')],'Type')
use = fields.Selection([('addon','addon'),('server','server'),('scripts','scripts'),('site','site')],'Use')
#
remote_id = fields.Many2one('deploy.repository','Parent Repository')
validated_addon_path = fields.Char('Validated Addon Path',size=444)
local_user_id = fields.Many2one('deploy.host.user','Local user')
host_id = fields.Many2one('deploy.host','Host')
version = fields.Char('Version',size=10)
#'remote_account_id':fields.Many2one('res.users','Remote Account'),
remote_login = fields.Char('Remote Login',size=122)
remote_location = fields.Char('Remote Location',size=1111)
remote_proto = fields.Selection([('git','git'),('bzr+ssh','bzr+ssh'),('http','http'),('https','https'),('ssh','ssh'),('lp','lp')],'Remote_Proto') #not all supported
remote_name = fields.Char('Remote Name',size=122) #used in git
local_location = fields.Char('Local Location',size=1111)
branch = fields.Char('Branch',size=100)
addon_subdir = fields.Char('Addon Subdir',size=100)
is_module_path = fields.Boolean('Is Module Path')
root_directory = fields.Char('Root Directory',size=100)
#'clone_ids':fields.One2many('deploy.repository.clone','remote_id','Reposisoty Clones'),
    clone_ids = fields.One2many('deploy.repository','remote_id','Repository Clones')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
# }
#_defaults = {
# 'delete':False,
# 'push':True,
# 'pull':True,
# }
#[root_directory, remote_host_id.name, local_location, remote_location]
class repository_clone(models.Model): #will likely be deprecated
_name ='deploy.repository.clone' #Repository clones
# _inherits = {'deploy.repository':'remote_id'}
name = fields.Char('Name',size=100)
owner_id = fields.Many2one('res.users','Owner')
remote_id = fields.Many2one('deploy.repository','Repository')
validated_addon_path = fields.Char('Validated Addon Path',size=444)
#remote_host_id > remote_id.host_id
#'remote_account_id':fields.Many2one('res.users','Remote Account'),
#'remote_login':fields.Char('Remote Login',size=122),
#'remote_location':fields.Char('Remote Location',size=1111),
#'remote_proto':fields.Selection([('git','git'),('bzr+ssh','bzr+ssh'),('http','http'),('https','https'),('ssh','ssh'),('lp','lp')],'Remote_Proto'),#not all supported
#'remote_name':fields.Char('Remote Name',size=122), #used in git
#'local_host_id':fields.Many2one('deploy.host','Local host'),
#'local_host_ids':fields.Many2many('deploy.host','repository_clone_host_rel','clone_id','host_id','Hosts'),
local_user_id = fields.Many2one('deploy.host.user','Local user')
    local_location = fields.Char('Local Location',size=1111)
branch_db = fields.Char('Branch',size=100)
addon_subdir_db = fields.Char('Addon Subdir',size=100)
is_module_path_db = fields.Boolean('Is Module Path')
root_directory = fields.Char('Root Directory',size=100)
#'URL':fnc
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class application(models.Model):
_name = 'deploy.application' #Applications
repository_ids = fields.Many2many('deploy.repository', 'application_repository_rel','app_id','repository_id', 'Repositories', domain=[('remote_id','=',False)])
name = fields.Char('Name',size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class options(models.Model):
_name='deploy.options' #Server options
unaccent = fields.Boolean('Unaccent')
xmlrpc_interface = fields.Char('xmlrpc_interface',size=100)
xmlrpc_port = fields.Integer('xmlrpc_port')
#'admin_password':fields.Many2one('deploy.password','Admin Password'),
name = fields.Char('Name',size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
#'logfile':
class deploy(models.Model):
_name='deploy.deploy' #Deployments
application_id = fields.Many2one('deploy.application', 'Application')
pg_user_id = fields.Many2one('deploy.pg.user','PG USER')
options_id = fields.Many2one('deploy.options','Options')
account_id = fields.Many2one('res.users','Account')
password_id = fields.Many2one('deploy.password','Admin Password')
#'clone_ids':fields.Many2many('deploy.repository', 'application_repository','app_id','repository_id', 'Repositories'),
name = fields.Char('Name',size=444)
# host_id = fields.Many2one('deploy.host', related='user_id', string='Host') #hostname
user_id = fields.Many2one('deploy.host.user','User' )
#'host_id_depr':fields.Many2one('deploy.host','HostDepr'),
#'host_id':fields.Many2one('deploy.host','Host'),
#host_id = fields.Many2one('user_id', 'host_id', string="Host",type="many2one",relation="deploy.host")
# host_id = fields.Many2one('user_id', 'host_id', string="Host",type="many2one",relation="deploy.host")
#'ROOT':fields.Char('site_name',size=444),
site_name = fields.Char('site_name',size=444)
daemon = fields.Boolean('daemon')
vhost = fields.Boolean('vhost')
wsgi = fields.Boolean('wsgi')
parse_config = fields.Boolean('parse_config')
ServerName = fields.Char('ServerName',size=444)
IP = fields.Char('IP',size=100)
PORT = fields.Integer('PORT')
IPSSL = fields.Char('IP',size=100)
PORTSSL = fields.Integer('PORT')
SSLCertificateFile = fields.Char('SSLCertificateFile',size=111)
SSLCertificateKeyFile = fields.Char('SSLCertificateKeyFile',size=111)
SSLCACertificateFile = fields.Char('SSLCACertificateFile', size=111)
ssl = fields.Boolean('ssl')
Redirect = fields.Char('Redirect',size=444)
ProxyPass = fields.Char('ProxyPass',size=444)
mode = fields.Selection([('dev','dev'),('live','live')],'Mode')
validated_server_path = fields.Char('Validated Server Path',size=444)
validated_config_file = fields.Char('Validated Config File',size=444)
validated_root = fields.Char('Validated ROOT',size=444)
db_ids = fields.One2many('deploy.pg.database','deploy_id',"db_ids")
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class pg_database(models.Model):
_name="deploy.pg.database"
name = fields.Char("name",size=444)
type = fields.Selection([('live','live'),('virtual','virtual'),('system','system'),('demo','demo'),('snapshot','snapshot'),('replicated','replicated')], 'type')
date = fields.Date('date')
backup = fields.Boolean('backup needed')
#'pg_user_id':fields.Many2one('deploy.pg.user','PG USER'),
deploy_id = fields.Many2one('deploy.deploy','Deployments')
# pg_user_id = fields.Related('deploy_id', 'pg_user_id', string="PG USER",type="many2one",relation="deploy.pg.user")
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class mako_template(models.Model):
_name = "deploy.mako.template"
name = fields.Char('name',size=444)
type = fields.Selection([('template','template'),('bash','bash'),('python','python')],'Type' )
gl_command = fields.Char('GoLive Command',size=444)
model = fields.Char('model',size=444)
module = fields.Char('module',size=444) #to locate template
path = fields.Char('path', size=444) #to locate template
fn = fields.Char('fn',size=4444) #to locate template
domain = fields.Char('domain',size=444)
out_fn = fields.Char('out_fn',size=444)
sequence = fields.Integer('Sequence')
python_function = fields.Char('python_function',size=444)
subprocess_arg = fields.Char('subprocess_arg',size=444)
chmod = fields.Char('chmod',size=444)
user_id = fields.Many2one('deploy.host.user','HostUser')
target_user = fields.Char('target_user',size=444)
target_group = fields.Char('target_group',size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class deploy_file(models.Model):
_name = "deploy.file"
    _rec_name = "command"
#_columns = {
command = fields.Char('Last Command',size=444)
#'model':fields.Char('model',size=444),
res_id = fields.Integer('res_id')
template_id = fields.Many2one('deploy.mako.template', 'Template Used')
encrypted = fields.Boolean('Encrypted')
user_id = fields.Many2one('deploy.host.user','User')
sequence = fields.Integer('Sequence')
file_written = fields.Char('File Written', size=444)
content_written = fields.Text('Content Written')
cmd_exit_code = fields.Char('cmd_exit_code', size=444)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class export_tag(models.Model):
_name = "deploy.export.tag"
name = fields.Char("name", size=100)
sequence = fields.Integer('sequence')
parent_id = fields.Many2one("deploy.export.tag", "Parent Tag")
    field_ids = fields.Many2many('ir.model.fields', 'deploy_export_tag_ir_model_fields_rel', 'tag_id', 'field_id', 'Fields')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class ir_model(models.Model):
_inherit = "ir.model"
_order = "sequence"
sequence = fields.Integer('Sequence')
export_domain = fields.Char("Export Domain", size=500)
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
def name_get2(self,cr, uid, ids):
ret={}
for m in self.browse(ids):
ret[m.id]="%s[%s]"%(m.name,m.model)
return ret
#_default = {
# 'sequence':100,
# 'export_domain':'[]',
# }
class ir_model_fields(models.Model):
_inherit = "ir.model.fields"
_order = "sequence"
sequence = fields.Integer('Sequence')
export_tag_ids = fields.Many2many('deploy.export.tag', 'deploy_export_tag_ir_model_fields_rel', 'field_id', 'tag_id', 'Export Tags')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
    def write(self, vals):
        # Temporarily mark the fields as 'manual' around the write so the ORM
        # accepts changes on base (non-manual) fields, then restore the state.
        cr = self.env.cr
        prev = {}
        for f in self:
            prev[f.id] = f.state
            cr.execute("update ir_model_fields set state='manual' where id=%s", (f.id,))
            # f.write({'state':'manual'})
        res = super(ir_model_fields, self.with_context(manual='manual')).write(vals)
        for f in self:
            cr.execute("update ir_model_fields set state=%s where id=%s", (prev[f.id], f.id,))
        return res
    _defaults = {
'sequence':100,
}
#import galtyslib.openerplib as openerplib
def export_data(pool, cr, uid, model, fn, field_list, arg):
if arg:
arg=eval(arg)
else:
arg=[]
obj=pool.get(model)
fields = obj.fields_get(cr, uid)
f_map={}
    for f, v in fields.items():
if f in field_list:
f_map[f]=v
fields = f_map
#id_ref_ids = pool.get('ir.model.data').search([('model','=',model)])
#ref_ids = [x.res_id for x in pool.get('ir.model.data').browse(id_ref_ids)]
    ids = pool.get(model).search(cr, uid, arg)
header=[]
header_export=['id']
    for f, v in fields.items():
if 'function' not in v:
if v['type'] in ['many2one', 'many2many']:
if v['relation'] in ['account.account', 'account.journal']:
header_export.append( "%s/code" % f )
#elif v['relation'] in ['account.tax']:
# header_export.append( "%s/description" % f )
else:
header_export.append( "%s/id" % f )
header.append(f)
elif v['type']=='one2many':
pass
else:
header.append(f)
header_export.append(f)
header_types = [fields[x]['type'] for x in header]
    data = pool.get(model).export_data(cr, uid, ids, header_export)
out=[]
for row in data['datas']:
out_row=[row[0]]
for i,h in enumerate(header):
v=row[i+1]
t=header_types[i]
if (v is False) and (t != 'boolean'):
out_row.append('')
else:
out_row.append(v.encode('utf8'))
out.append(out_row)
import csv
fp = open(fn, 'wb')
csv_writer=csv.writer(fp)
csv_writer.writerows( [header_export] )
csv_writer.writerows( out )
fp.close()
return out
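
# Note (added for clarity): export_data() writes one CSV per model, with an
# ``id`` column of external identifiers followed by the selected fields;
# many2one/many2many columns are exported as ``field/id`` (or ``field/code``
# for accounts and journals) so the files can be re-imported as module data.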
import os
class exported_file(models.Model):
_name = "deploy.exported.file"
_order = "sequence"
path = fields.Char('path')
fn = fields.Char('fn')
model_id = fields.Many2one('ir.model','Model')
company_id = fields.Many2one('res.company','Company')
tag_id = fields.Many2one('deploy.export.tag', 'Export Tag')
sequence = fields.Integer('sequence')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
class res_company(models.Model):
_inherit = "res.company"
export_module_name = fields.Char('Export Module Name', size=100)
export_module_repo = fields.Char('Export Module Repository', size=100)
exported_file_ids = fields.One2many('deploy.exported.file','company_id','Exported Files')
secret_key = fields.Char("Secret Key")
code = fields.Char("Code Stored")
signature = fields.Text("Signature")
def set_external_ids(self):
        for c in self:
tag_ids = self.env['deploy.export.tag'].search([])
model_ids = []
tag_id_map={}
for tag in self.env['deploy.export.tag'].browse(tag_ids):
tag_id_map[tag.name]=tag
for f in tag.field_ids:
model_ids.append( f.model_id.id )
for m in self.env['ir.model'].browse(model_ids):
fields = m.field_id
tag_map = {}
for f in fields:
for t in f.export_tag_ids:
v=tag_map.setdefault(t.name, [])
v.append(f.name)
for tag,flds in tag_map.items():
path = os.path.join(c.export_module_repo)
print path
if not os.path.isdir(path):
os.makedirs(path)
tag_inst=tag_id_map[tag]
sq = m.sequence * tag_inst.sequence
fn="%s_%d_%s"%(tag, sq, m.model+'_.csv')
file_path=os.path.join(path, fn)
arg=[('path','=',path ),
('fn','=',fn),
('model_id','=',m.id),
('company_id','=',c.id),
('tag_id','=',tag_inst.id),
]
val=dict( [(x[0],x[2]) for x in arg] )
val['sequence']=sq
                    ef_id = self.env['deploy.exported.file'].search(arg)
if not ef_id:
ef_id = self.env['deploy.exported.file'].create(val)
                    export_data(self.pool, self.env.cr, self.env.uid, m.model, file_path, flds, m.export_domain)
return True
class tag_wizzard(models.TransientModel):
_name = 'deploy.export.tag.wizzard'
_description="Export Tag"
tag_ids = fields.Many2many('deploy.export.tag', 'tag_wizzard_tag_rel', 'wiz_id', 'tag_id', 'Export Tags')
#'name':fields.Char('Name', size=444),
#'start_period': fields.Many2one('account.period','Start Period', required=True),
#'end_period': fields.Many2one('account.period','End Period', required=True),
def set_tags(self):
        active_ids = self.env.context.get('active_ids', [])
        print active_ids
        for w in self:
            val = {'export_tag_ids': [(6, 0, [t.id for t in w.tag_ids])]}
            self.env['ir.model.fields'].browse(active_ids).write(val)
return True
| agpl-3.0 | 6,955,040,989,887,281,000 | 38.663415 | 173 | 0.608494 | false |
peterjliu/rate_limit | test_lib.py | 2 | 2134 | #!/usr/bin/env python
"""
Test rate-limiter lib
"""
import logging
import sys
import time
import unittest
from google.appengine.api import memcache
from google.appengine.ext import testbed
from lib import Limiter, QuotaKey
class Events:
USER_READ = 1
USER_WRITE = 2
RATE_LIMIT_SPEC = {
Events.USER_READ: (2, 1),
Events.USER_WRITE: (5, 1)
}
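
# Note on the spec format (an inference from the assertions below, not from
# documented Limiter behaviour): each entry appears to map an event type to a
# (max_events, window_seconds) tuple, e.g. (2, 1) allows at most two
# USER_READ events per one-second window.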
class RateLimitTestCase(unittest.TestCase):
def setUp(self):
# First, create an instance of the Testbed class.
self.testbed = testbed.Testbed()
# Then activate the testbed, which prepares the service stubs for use.
self.testbed.activate()
# Next, declare which service stubs you want to use.
self.testbed.init_memcache_stub()
self.limiter = Limiter(RATE_LIMIT_SPEC)
def tearDown(self):
self.testbed.deactivate()
def testRateLimiter(self):
q = QuotaKey("user1", Events.USER_READ)
# Unfortunately there's no obvious way to inject a clock into the
# memcache stub, so we assume the following runs in less than 1 sec.
for _ in range(0, 2):
self.assertTrue(self.limiter.CanSpend(q))
# We used up our budget of 2 in less than 1 second
self.assertFalse(self.limiter.CanSpend(q))
q = QuotaKey("user2", Events.USER_WRITE)
for _ in range(0, 5):
self.assertTrue(self.limiter.CanSpend(q))
self.assertFalse(self.limiter.CanSpend(q))
def testRateLimiterWithExpiration(self):
l = Limiter(RATE_LIMIT_SPEC)
q = QuotaKey("user1", Events.USER_READ)
log = logging.getLogger("rate_limit.lib.test")
for _ in range(0, 2):
self.assertTrue(self.limiter.CanSpend(q))
# Expire cache by waiting. Too bad we can't inject the time, eh?
log.info("wait 1 second for cache to expire")
time.sleep(1)
for _ in range(0, 2):
self.assertTrue(self.limiter.CanSpend(q))
if __name__ == '__main__':
logging.basicConfig(stream=sys.stderr)
logging.getLogger("rate_limit.lib.test" ).setLevel( logging.DEBUG )
unittest.main()
| mit | 1,269,979,692,019,813,400 | 28.638889 | 78 | 0.64105 | false |
CUCWD/edx-platform | common/test/acceptance/tests/xblock/test_crowdsourcehinter_problem.py | 14 | 4260 | """
Javascript tests for the crowdsourcehinter xblock
"""
from textwrap import dedent
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.xblock.crowdsourcehinter_problem import CrowdsourcehinterProblemPage
from common.test.acceptance.tests.helpers import UniqueCourseTest
class CrowdsourcehinterProblemTest(UniqueCourseTest):
"""
Test scenario for the hinter.
"""
shard = 21
USERNAME = "STAFF_TESTER"
EMAIL = "[email protected]"
def setUp(self):
super(CrowdsourcehinterProblemTest, self).setUp()
self.courseware_page = CoursewarePage(self.browser, self.course_id)
# Install a course with sections/problems, tabs, updates, and handouts
course_fix = CourseFixture(
self.course_info['org'], self.course_info['number'],
self.course_info['run'], self.course_info['display_name']
)
problem_data = dedent('''
<problem>
<p>A text input problem accepts a line of text from the student, and evaluates the input for correctness based on an expected answer.</p>
<p>The answer is correct if it matches every character of the expected answer. This can be a problem with international spelling, dates, or anything where the format of the answer is not clear.</p>
<p>Which US state has Lansing as its capital?</p>
<stringresponse answer="Michigan" type="ci" >
<textline label="Which US state has Lansing as its capital?" size="20"/>
</stringresponse>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
<p>Lansing is the capital of Michigan, although it is not Michigan's largest city, or even the seat of the county in which it resides.</p>
</div>
</solution>
</problem>
''')
children = XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('problem', 'text input problem', data=problem_data),
XBlockFixtureDesc('crowdsourcehinter', 'test crowdsourcehinter')
)
)
)
course_fix.add_children(children).install()
# Auto-auth register for the course.
AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL,
course_id=self.course_id, staff=False).visit()
def _goto_csh_problem_page(self):
"""
Visit the page courseware page containing the hinter
"""
self.courseware_page.visit()
csh_problem_page = CrowdsourcehinterProblemPage(self.browser)
self.assertGreater(len(self.browser.find_elements_by_class_name('crowdsourcehinter_block')), 0)
return csh_problem_page
def test_student_hint_workflow(self):
"""
        Test the basic workflow of a student receiving hints. The student should submit an incorrect answer and
receive a hint (in this case no hint since none are set), be able to rate that hint, see a different UX
after submitting a correct answer, and be capable of contributing a new hint to the system.
"""
csh_problem_page = self._goto_csh_problem_page()
csh_problem_page.submit_text_answer("michigann")
csh_problem_page.wait_for_ajax()
self.assertEqual(csh_problem_page.get_hint_text()[0], u"Hint: Sorry, there are no hints for this answer.")
self.assertGreater(len(self.browser.find_elements_by_class_name('csh_rate_hint')), 0)
csh_problem_page.rate_hint()
csh_problem_page.wait_for_ajax()
csh_problem_page.submit_text_answer("michigan")
csh_problem_page.wait_for_ajax()
self.assertGreater(len(self.browser.find_elements_by_id('show_hint_rating_ux')), 0)
csh_problem_page.submit_new_hint("new hint text")
| agpl-3.0 | 8,654,292,654,914,702,000 | 45.304348 | 213 | 0.652582 | false |
nijel/translate | translate/storage/tiki.py | 2 | 6958 | #
# Copyright 2008 Mozilla Corporation, Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Class that manages TikiWiki files for translation. Tiki files are <strike>ugly and
inconsistent</strike> formatted as a single large PHP array with several special
sections identified by comments. Example current as of 2008-12-01:
.. code-block:: php
<?php
// Many comments at the top
$lang=Array(
// ### Start of unused words
"aaa" => "zzz",
// ### end of unused words
// ### start of untranslated words
// "bbb" => "yyy",
// ### end of untranslated words
// ### start of possibly untranslated words
"ccc" => "xxx",
// ### end of possibly untranslated words
"ddd" => "www",
"###end###"=>"###end###");
?>
In addition there are several auto-generated //-style comments scattered through the
page and array, some of which matter when being parsed.
This has all been gleaned from the
`TikiWiki source <http://tikiwiki.svn.sourceforge.net/viewvc/tikiwiki/trunk/get_strings.php?view=markup>`_.
As far as I know no detailed documentation exists for the tiki language.php files.
"""
import datetime
import re
from io import BytesIO
from translate.storage import base
class TikiUnit(base.TranslationUnit):
"""A tiki unit entry."""
def __init__(self, source=None, **kwargs):
self.location = []
super().__init__(source)
def __str__(self):
"""Returns a string formatted to be inserted into a tiki language.php file."""
ret = f'"{self.source}" => "{self.target}",'
if self.location == ["untranslated"]:
ret = "// " + ret
return ret + "\n"
def addlocation(self, location):
"""Location is defined by the comments in the file. This function will only
set valid locations.
:param location: Where the string is located in the file. Must be a valid location.
"""
if location in ["unused", "untranslated", "possiblyuntranslated", "translated"]:
self.location.append(location)
def getlocations(self):
"""Returns the a list of the location(s) of the string."""
return self.location
class TikiStore(base.TranslationStore):
"""Represents a tiki language.php file."""
UnitClass = TikiUnit
def __init__(self, inputfile=None):
"""If an inputfile is specified it will be parsed.
:param inputfile: Either a string or a filehandle of the source file
"""
super().__init__()
self.filename = getattr(inputfile, "name", "")
if inputfile is not None:
self.parse(inputfile)
def serialize(self, out):
"""Will return a formatted tiki-style language.php file."""
_unused = []
_untranslated = []
_possiblyuntranslated = []
_translated = []
out.write(self._tiki_header().encode(self.encoding))
# Reorder all the units into their groups
for unit in self.units:
if unit.getlocations() == ["unused"]:
_unused.append(unit)
elif unit.getlocations() == ["untranslated"]:
_untranslated.append(unit)
elif unit.getlocations() == ["possiblyuntranslated"]:
_possiblyuntranslated.append(unit)
else:
_translated.append(unit)
out.write(b"// ### Start of unused words\n")
for unit in _unused:
out.write(str(unit).encode(self.encoding))
out.write(
b"// ### end of unused words\n\n" b"// ### start of untranslated words\n"
)
for unit in _untranslated:
out.write(str(unit).encode(self.encoding))
out.write(
b"// ### end of untranslated words\n\n"
b"// ### start of possibly untranslated words\n"
)
for unit in _possiblyuntranslated:
out.write(str(unit).encode(self.encoding))
out.write(b"// ### end of possibly untranslated words\n\n")
for unit in _translated:
out.write(str(unit).encode(self.encoding))
out.write(self._tiki_footer().encode(self.encoding))
def _tiki_header(self):
"""Returns a tiki-file header string."""
return (
"<?php // -*- coding:utf-8 -*-\n// Generated from po2tiki on %s\n\n$lang=Array(\n"
% datetime.datetime.now()
)
def _tiki_footer(self):
"""Returns a tiki-file footer string."""
return '"###end###"=>"###end###");\n?>'
def parse(self, input):
"""Parse the given input into source units.
:param input: the source, either a string or filehandle
"""
if hasattr(input, "name"):
self.filename = input.name
if isinstance(input, bytes):
input = BytesIO(input)
_split_regex = re.compile(r"^(?:// )?\"(.*)\" => \"(.*)\",$", re.UNICODE)
try:
_location = "translated"
for line in input:
line = line.decode(self.encoding)
# The tiki file fails to identify each section so we have to look for start and end
# points and if we're outside of them we assume the string is translated
if line.count("### Start of unused words"):
_location = "unused"
elif line.count("### start of untranslated words"):
_location = "untranslated"
elif line.count("### start of possibly untranslated words"):
_location = "possiblyuntranslated"
elif line.count("### end of unused words"):
_location = "translated"
elif line.count("### end of untranslated words"):
_location = "translated"
elif line.count("### end of possibly untranslated words"):
_location = "translated"
match = _split_regex.match(line)
if match:
unit = self.addsourceunit("".join(match.group(1)))
# Untranslated words get an empty msgstr
if not _location == "untranslated":
unit.target = match.group(2)
unit.addlocation(_location)
finally:
input.close()
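

# --- Illustrative usage sketch (not part of the original module) ------------
# A minimal round-trip over made-up sample data, showing how a language.php
# snippet is parsed into units and serialized back.  Everything below is an
# example added for clarity, not TikiWiki or translate-toolkit reference code.
def _example_roundtrip():  # pragma: no cover
    sample = (
        b"<?php\n"
        b"$lang=Array(\n"
        b"// ### start of untranslated words\n"
        b'// "Hello" => "",\n'
        b"// ### end of untranslated words\n"
        b'"Save" => "Guardar",\n'
        b'"###end###"=>"###end###");\n'
        b"?>\n"
    )
    store = TikiStore(sample)
    for unit in store.units:
        # Each unit carries its source, target and section-derived location.
        print(unit.source, unit.target, unit.getlocations())
    out = BytesIO()
    store.serialize(out)
    return out.getvalue()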
| gpl-2.0 | 80,276,186,011,743,940 | 34.5 | 107 | 0.587525 | false |
bohdan-shramko/learning-python | source/chapter07/read_it.py | 1 | 1172 | # Read It
# Demonstrates reading from a text file
print("Opening and closing the file.")
text_file = open("read_it.txt", "r")
text_file.close()
print("\nReading characters from the file.")
text_file = open("read_it.txt", "r")
print(text_file.read(1))
print(text_file.read(5))
text_file.close()
print("\nReading the entire file at once.")
text_file = open("read_it.txt", "r")
whole_thing = text_file.read()
print(whole_thing)
text_file.close()
print("\nReading characters from a line.")
text_file = open("read_it.txt", "r")
print(text_file.readline(1))
print(text_file.readline(5))
text_file.close()
print("\nReading one line at a time.")
text_file = open("read_it.txt", "r")
print(text_file.readline())
print(text_file.readline())
print(text_file.readline())
text_file.close()
print("\nReading the entire file into a list.")
text_file = open("read_it.txt", "r")
lines = text_file.readlines()
print(lines)
print(len(lines))
for line in lines:
print(line)
text_file.close()
print("\nLooping through the file, line by line.")
text_file = open("read_it.txt", "r")
for line in text_file:
print(line)
text_file.close()
input("\n\nPress the enter key to exit.")
| mit | 8,929,293,620,233,847,000 | 23.416667 | 50 | 0.692833 | false |
azdkj532/sligen | src/generate.py | 1 | 1078 | import os
import sys
import random
from slidegen import Generator
from slidegen import DataProvider
from wordgen import Wordgen
if len(sys.argv) < 2:
print('Usage: %s output-path' % sys.argv[0])
exit(1)
class WordgenProvider(DataProvider):
def __init__(self):
self.wg = Wordgen()
def text(self):
return self.wg.moistPhrase()
class GeneratorBridge(object):
def __init__(self, generator, output_path):
self.generator = generator
self.path = output_path
self.index = 0
def __hook(self, val):
self.index += 1
with open(os.path.join(self.path, '%.3d.md' % self.index), 'w', encoding='utf-8') as fo:
fo.write(val)
return val
def __getattr__(self, name):
f = getattr(self.generator, name)
def _proc(*args):
return self.__hook(f(*args))
return _proc
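
# GeneratorBridge proxies every slide-producing call (cover, content,
# full_image, ...) to the wrapped Generator and, via __hook, also writes each
# returned slide to a numbered markdown file (001.md, 002.md, ...) in the
# output directory given on the command line.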
g = Generator(WordgenProvider())
g = GeneratorBridge(g, sys.argv[1])
g.cover()
for i in range(18):
if random.randint(0, 100) < 77:
g.content()
else:
g.full_image()
| mit | 2,383,481,675,161,381,400 | 22.955556 | 96 | 0.600186 | false |
r-icarus/openstack_microserver | horizon/tables/actions.py | 7 | 25614 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict # noqa
import logging
import new
from django.conf import settings # noqa
from django.core import urlresolvers
from django import shortcuts
from django.utils.functional import Promise # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import messages
from horizon.utils import functions
from horizon.utils import html
LOG = logging.getLogger(__name__)
# For Bootstrap integration; can be overridden in settings.
ACTION_CSS_CLASSES = ("btn", "btn-small")
STRING_SEPARATOR = "__"
class BaseAction(html.HTMLElement):
""" Common base class for all ``Action`` classes. """
table = None
handles_multiple = False
requires_input = False
preempt = False
policy_rules = None
def __init__(self, datum=None):
super(BaseAction, self).__init__()
self.datum = datum
def data_type_matched(self, datum):
""" Method to see if the action is allowed for a certain type of data.
Only affects mixed data type tables.
"""
if datum:
action_data_types = getattr(self, "allowed_data_types", [])
# If the data types of this action is empty, we assume it accepts
# all kinds of data and this method will return True.
if action_data_types:
datum_type = getattr(datum, self.table._meta.data_type_name,
None)
if datum_type and (datum_type not in action_data_types):
return False
return True
def get_policy_target(self, request, datum):
""" Provide the target for a policy request.
This method is meant to be overridden to return target details when
one of the policy checks requires them. E.g., {"user_id": datum.id}
"""
return {}
def allowed(self, request, datum):
""" Determine whether this action is allowed for the current request.
This method is meant to be overridden with more specific checks.
"""
return True
def _allowed(self, request, datum):
policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
if policy_check and self.policy_rules:
target = self.get_policy_target(request, datum)
return (policy_check(self.policy_rules, request, target) and
self.allowed(request, datum))
return self.allowed(request, datum)
def update(self, request, datum):
""" Allows per-action customization based on current conditions.
This is particularly useful when you wish to create a "toggle"
action that will be rendered differently based on the value of an
attribute on the current row's data.
By default this method is a no-op.
"""
pass
def get_default_classes(self):
"""
Returns a list of the default classes for the action. Defaults to
``["btn", "btn-small"]``.
"""
return getattr(settings, "ACTION_CSS_CLASSES", ACTION_CSS_CLASSES)
def get_default_attrs(self):
"""
Returns a list of the default HTML attributes for the action. Defaults
to returning an ``id`` attribute with the value
``{{ table.name }}__action_{{ action.name }}__{{ creation counter }}``.
"""
if self.datum is not None:
bits = (self.table.name,
"row_%s" % self.table.get_object_id(self.datum),
"action_%s" % self.name)
else:
bits = (self.table.name, "action_%s" % self.name)
return {"id": STRING_SEPARATOR.join(bits)}
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.name)
class Action(BaseAction):
""" Represents an action which can be taken on this table's data.
.. attribute:: name
Required. The short name or "slug" representing this
action. This name should not be changed at runtime.
.. attribute:: verbose_name
A descriptive name used for display purposes. Defaults to the
value of ``name`` with the first letter of each word capitalized.
.. attribute:: verbose_name_plural
Used like ``verbose_name`` in cases where ``handles_multiple`` is
``True``. Defaults to ``verbose_name`` with the letter "s" appended.
.. attribute:: method
The HTTP method for this action. Defaults to ``POST``. Other methods
may or may not succeed currently.
.. attribute:: requires_input
Boolean value indicating whether or not this action can be taken
without any additional input (e.g. an object id). Defaults to ``True``.
.. attribute:: preempt
Boolean value indicating whether this action should be evaluated in
the period after the table is instantiated but before the data has
been loaded.
This can allow actions which don't need access to the full table data
to bypass any API calls and processing which would otherwise be
required to load the table.
.. attribute:: allowed_data_types
A list that contains the allowed data types of the action. If the
datum's type is in this list, the action will be shown on the row
for the datum.
        Defaults to an empty list (``[]``). When set to empty, the action
        accepts any kind of data.
.. attribute:: policy_rules
list of scope and rule tuples to do policy checks on, the
composition of which is (scope, rule)
scope: service type managing the policy for action
rule: string representing the action to be checked
for a policy that requires a single rule check:
policy_rules should look like
"(("compute", "compute:create_instance"),)"
for a policy that requires multiple rule checks:
rules should look like
"(("identity", "identity:list_users"),
("identity", "identity:list_roles"))"
At least one of the following methods must be defined:
.. method:: single(self, data_table, request, object_id)
Handler for a single-object action.
.. method:: multiple(self, data_table, request, object_ids)
Handler for multi-object actions.
.. method:: handle(self, data_table, request, object_ids)
If a single function can work for both single-object and
multi-object cases then simply providing a ``handle`` function
will internally route both ``single`` and ``multiple`` requests
to ``handle`` with the calls from ``single`` being transformed
into a list containing only the single object id.
"""
method = "POST"
requires_input = True
def __init__(self, verbose_name=None, verbose_name_plural=None,
single_func=None, multiple_func=None, handle_func=None,
handles_multiple=False, attrs=None, requires_input=True,
allowed_data_types=[], datum=None):
super(Action, self).__init__(datum=datum)
# Priority: constructor, class-defined, fallback
self.verbose_name = verbose_name or getattr(self, 'verbose_name',
self.name.title())
self.verbose_name_plural = verbose_name_plural or \
getattr(self, 'verbose_name_plural',
"%ss" % self.verbose_name)
self.handles_multiple = getattr(self,
"handles_multiple",
handles_multiple)
self.requires_input = getattr(self,
"requires_input",
requires_input)
self.allowed_data_types = getattr(self, "allowed_data_types",
allowed_data_types)
if attrs:
self.attrs.update(attrs)
# Don't set these if they're None
if single_func:
self.single = single_func
if multiple_func:
self.multiple = multiple_func
if handle_func:
self.handle = handle_func
# Ensure we have the appropriate methods
has_handler = hasattr(self, 'handle') and callable(self.handle)
has_single = hasattr(self, 'single') and callable(self.single)
has_multiple = hasattr(self, 'multiple') and callable(self.multiple)
if has_handler or has_multiple:
self.handles_multiple = True
if not has_handler and (not has_single or has_multiple):
cls_name = self.__class__.__name__
raise NotImplementedError('You must define either a "handle" '
'method or a "single" or "multiple" '
'method on %s.' % cls_name)
if not has_single:
def single(self, data_table, request, object_id):
return self.handle(data_table, request, [object_id])
self.single = new.instancemethod(single, self)
if not has_multiple and self.handles_multiple:
def multiple(self, data_table, request, object_ids):
return self.handle(data_table, request, object_ids)
self.multiple = new.instancemethod(multiple, self)
def get_param_name(self):
""" Returns the full POST parameter name for this action.
Defaults to
``{{ table.name }}__{{ action.name }}``.
"""
return "__".join([self.table.name, self.name])
class LinkAction(BaseAction):
""" A table action which is simply a link rather than a form POST.
.. attribute:: name
Required. The short name or "slug" representing this
action. This name should not be changed at runtime.
.. attribute:: verbose_name
A string which will be rendered as the link text. (Required)
.. attribute:: url
A string or a callable which resolves to a url to be used as the link
target. You must either define the ``url`` attribute or override
the ``get_link_url`` method on the class.
.. attribute:: allowed_data_types
A list that contains the allowed data types of the action. If the
datum's type is in this list, the action will be shown on the row
for the datum.
        Defaults to an empty list (``[]``). When set to empty, the action
        accepts any kind of data.
"""
method = "GET"
bound_url = None
def __init__(self, verbose_name=None, allowed_data_types=[],
url=None, attrs=None):
super(LinkAction, self).__init__()
self.verbose_name = verbose_name or getattr(self,
"verbose_name",
self.name.title())
self.url = getattr(self, "url", url)
if not self.verbose_name:
raise NotImplementedError('A LinkAction object must have a '
'verbose_name attribute.')
self.allowed_data_types = getattr(self, "allowed_data_types",
allowed_data_types)
if attrs:
self.attrs.update(attrs)
def get_link_url(self, datum=None):
""" Returns the final URL based on the value of ``url``.
If ``url`` is callable it will call the function.
If not, it will then try to call ``reverse`` on ``url``.
Failing that, it will simply return the value of ``url`` as-is.
When called for a row action, the current row data object will be
passed as the first parameter.
"""
if not self.url:
raise NotImplementedError('A LinkAction class must have a '
'url attribute or define its own '
'get_link_url method.')
if callable(self.url):
return self.url(datum, **self.kwargs)
try:
if datum:
obj_id = self.table.get_object_id(datum)
return urlresolvers.reverse(self.url, args=(obj_id,))
else:
return urlresolvers.reverse(self.url)
except urlresolvers.NoReverseMatch as ex:
LOG.info('No reverse found for "%s": %s' % (self.url, ex))
return self.url
class FilterAction(BaseAction):
""" A base class representing a filter action for a table.
.. attribute:: name
The short name or "slug" representing this action. Defaults to
``"filter"``.
.. attribute:: verbose_name
A descriptive name used for display purposes. Defaults to the
value of ``name`` with the first letter of each word capitalized.
.. attribute:: param_name
A string representing the name of the request parameter used for the
search term. Default: ``"q"``.
    .. attribute:: filter_type
A string representing the type of this filter. Default: ``"query"``.
    .. attribute:: needs_preloading
If True, the filter function will be called for the initial
GET request with an empty ``filter_string``, regardless of the
value of ``method``.
"""
# TODO(gabriel): The method for a filter action should be a GET,
# but given the form structure of the table that's currently impossible.
# At some future date this needs to be reworked to get the filter action
# separated from the table's POST form.
method = "POST"
name = "filter"
verbose_name = _("Filter")
filter_type = "query"
needs_preloading = False
def __init__(self, verbose_name=None, param_name=None):
super(FilterAction, self).__init__()
self.verbose_name = verbose_name or self.name
self.param_name = param_name or 'q'
def get_param_name(self):
""" Returns the full query parameter name for this action.
Defaults to
``{{ table.name }}__{{ action.name }}__{{ action.param_name }}``.
"""
return "__".join([self.table.name, self.name, self.param_name])
def get_default_classes(self):
classes = super(FilterAction, self).get_default_classes()
classes += ("btn-search",)
return classes
def assign_type_string(self, table, data, type_string):
for datum in data:
setattr(datum, table._meta.data_type_name, type_string)
def data_type_filter(self, table, data, filter_string):
filtered_data = []
for data_type in table._meta.data_types:
func_name = "filter_%s_data" % data_type
filter_func = getattr(self, func_name, None)
if not filter_func and not callable(filter_func):
# The check of filter function implementation should happen
# in the __init__. However, the current workflow of DataTable
# and actions won't allow it. Need to be fixed in the future.
cls_name = self.__class__.__name__
raise NotImplementedError("You must define a %s method "
"for %s data type in %s." %
(func_name, data_type, cls_name))
_data = filter_func(table, data, filter_string)
self.assign_type_string(table, _data, data_type)
filtered_data.extend(_data)
return filtered_data
def filter(self, table, data, filter_string):
""" Provides the actual filtering logic.
This method must be overridden by subclasses and return
the filtered data.
"""
raise NotImplementedError("The filter method has not been "
"implemented by %s." % self.__class__)
class FixedFilterAction(FilterAction):
""" A filter action with fixed buttons.
"""
filter_type = 'fixed'
needs_preloading = True
def __init__(self, *args, **kwargs):
        super(FixedFilterAction, self).__init__(*args, **kwargs)
self.fixed_buttons = self.get_fixed_buttons()
self.filter_string = ''
def filter(self, table, images, filter_string):
self.filter_string = filter_string
categories = self.categorize(table, images)
self.categories = defaultdict(list, categories)
for button in self.fixed_buttons:
button['count'] = len(self.categories[button['value']])
if not filter_string:
return images
return self.categories[filter_string]
def get_fixed_buttons(self):
"""Returns a list of dictionaries describing the fixed buttons
to use for filtering.
Each list item should be a dict with the following keys:
* ``text``: Text to display on the button
* ``icon``: Icon class for icon element (inserted before text).
* ``value``: Value returned when the button is clicked. This value is
passed to ``filter()`` as ``filter_string``.
"""
raise NotImplementedError("The get_fixed_buttons method has "
"not been implemented by %s." %
self.__class__)
def categorize(self, table, images):
"""Override to separate images into categories.
Return a dict with a key for the value of each fixed button,
and a value that is a list of images in that category.
"""
raise NotImplementedError("The categorize method has not been "
"implemented by %s." % self.__class__)
class BatchAction(Action):
""" A table action which takes batch action on one or more
objects. This action should not require user input on a
per-object basis.
.. attribute:: name
An internal name for this action.
.. attribute:: action_present
String or tuple/list. The display forms of the name.
Should be a transitive verb, capitalized and translated. ("Delete",
"Rotate", etc.) If tuple or list - then setting
self.current_present_action = n will set the current active item
from the list(action_present[n])
.. attribute:: action_past
String or tuple/list. The past tense of action_present. ("Deleted",
"Rotated", etc.) If tuple or list - then
setting self.current_past_action = n will set the current active item
from the list(action_past[n])
.. attribute:: data_type_singular
A display name for the type of data that receives the
action. ("Keypair", "Floating IP", etc.)
.. attribute:: data_type_plural
Optional plural word for the type of data being acted
on. Defaults to appending 's'. Relying on the default is bad
for translations and should not be done.
.. attribute:: success_url
Optional location to redirect after completion of the delete
action. Defaults to the current page.
"""
success_url = None
def __init__(self):
self.current_present_action = 0
self.current_past_action = 0
self.data_type_plural = getattr(self, 'data_type_plural',
self.data_type_singular + 's')
# If setting a default name, don't initialise it too early
self.verbose_name = getattr(self, "verbose_name",
self._conjugate)
self.verbose_name_plural = getattr(self, "verbose_name_plural",
lambda: self._conjugate('plural'))
# Keep record of successfully handled objects
self.success_ids = []
super(BatchAction, self).__init__()
def _allowed(self, request, datum=None):
# Override the default internal action method to prevent batch
# actions from appearing on tables with no data.
if not self.table.data and not datum:
return False
return super(BatchAction, self)._allowed(request, datum)
def _conjugate(self, items=None, past=False):
"""
Builds combinations like 'Delete Object' and 'Deleted
Objects' based on the number of items and `past` flag.
"""
action_type = "past" if past else "present"
action_attr = getattr(self, "action_%s" % action_type)
if isinstance(action_attr, (basestring, Promise)):
action = action_attr
else:
toggle_selection = getattr(self, "current_%s_action" % action_type)
action = action_attr[toggle_selection]
if items is None or len(items) == 1:
data_type = self.data_type_singular
else:
data_type = self.data_type_plural
return _("%(action)s %(data_type)s") % {'action': action,
'data_type': data_type}
def action(self, request, datum_id):
"""
Required. Accepts a single object id and performs the specific action.
Return values are discarded, errors raised are caught and logged.
"""
raise NotImplementedError('action() must be defined for '
'BatchAction: %s' % self.data_type_singular)
def update(self, request, datum):
"""
Switches the action verbose name, if needed
"""
if getattr(self, 'action_present', False):
self.verbose_name = self._conjugate()
self.verbose_name_plural = self._conjugate('plural')
def get_success_url(self, request=None):
"""
Returns the URL to redirect to after a successful action.
"""
if self.success_url:
return self.success_url
return request.get_full_path()
def handle(self, table, request, obj_ids):
action_success = []
action_failure = []
action_not_allowed = []
for datum_id in obj_ids:
datum = table.get_object_by_id(datum_id)
datum_display = table.get_object_display(datum) or _("N/A")
if not table._filter_action(self, request, datum):
action_not_allowed.append(datum_display)
LOG.info('Permission denied to %s: "%s"' %
(self._conjugate(past=True).lower(), datum_display))
continue
try:
self.action(request, datum_id)
#Call update to invoke changes if needed
self.update(request, datum)
action_success.append(datum_display)
self.success_ids.append(datum_id)
LOG.info('%s: "%s"' %
(self._conjugate(past=True), datum_display))
except Exception as ex:
# Handle the exception but silence it since we'll display
# an aggregate error message later. Otherwise we'd get
# multiple error messages displayed to the user.
if getattr(ex, "_safe_message", None):
ignore = False
else:
ignore = True
action_failure.append(datum_display)
exceptions.handle(request, ignore=ignore)
# Begin with success message class, downgrade to info if problems.
success_message_level = messages.success
if action_not_allowed:
msg = _('You do not have permission to %(action)s: %(objs)s')
params = {"action": self._conjugate(action_not_allowed).lower(),
"objs": functions.lazy_join(", ", action_not_allowed)}
messages.error(request, msg % params)
success_message_level = messages.info
if action_failure:
msg = _('Unable to %(action)s: %(objs)s')
params = {"action": self._conjugate(action_failure).lower(),
"objs": functions.lazy_join(", ", action_failure)}
messages.error(request, msg % params)
success_message_level = messages.info
if action_success:
msg = _('%(action)s: %(objs)s')
params = {"action": self._conjugate(action_success, True),
"objs": functions.lazy_join(", ", action_success)}
success_message_level(request, msg % params)
return shortcuts.redirect(self.get_success_url(request))
class DeleteAction(BatchAction):
name = "delete"
action_present = _("Delete")
action_past = _("Deleted")
def action(self, request, obj_id):
return self.delete(request, obj_id)
def delete(self, request, obj_id):
raise NotImplementedError("DeleteAction must define a delete method.")
def get_default_classes(self):
classes = super(DeleteAction, self).get_default_classes()
classes += ("btn-danger", "btn-delete")
return classes
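

# --- Illustrative sketch (added for clarity; not part of Horizon) -----------
# A minimal DeleteAction subclass showing how the attributes documented above
# fit together.  The "instance" wording, the ``api.nova.server_delete`` call
# and the policy rule are made-up placeholders, not a real Horizon API.
#
# class DeleteInstance(DeleteAction):
#     data_type_singular = _("Instance")
#     data_type_plural = _("Instances")
#     policy_rules = (("compute", "compute:delete"),)
#
#     def allowed(self, request, instance=None):
#         # Hide the action for rows that are already being deleted.
#         return instance is None or instance.status != "DELETING"
#
#     def delete(self, request, obj_id):
#         api.nova.server_delete(request, obj_id)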
| apache-2.0 | -1,195,086,635,827,140,000 | 38.045732 | 79 | 0.590654 | false |
wyojustin/clockthreesr | arduino/libraries/ClockTHREE/examples/ClockTHREE_02/scanwin32.py | 26 | 8092 | import ctypes
import re
def ValidHandle(value):
if value == 0:
raise ctypes.WinError()
return value
NULL = 0
HDEVINFO = ctypes.c_int
BOOL = ctypes.c_int
CHAR = ctypes.c_char
PCTSTR = ctypes.c_char_p
HWND = ctypes.c_uint
DWORD = ctypes.c_ulong
PDWORD = ctypes.POINTER(DWORD)
ULONG = ctypes.c_ulong
ULONG_PTR = ctypes.POINTER(ULONG)
#~ PBYTE = ctypes.c_char_p
PBYTE = ctypes.c_void_p
class GUID(ctypes.Structure):
_fields_ = [
('Data1', ctypes.c_ulong),
('Data2', ctypes.c_ushort),
('Data3', ctypes.c_ushort),
('Data4', ctypes.c_ubyte*8),
]
def __str__(self):
return "{%08x-%04x-%04x-%s-%s}" % (
self.Data1,
self.Data2,
self.Data3,
''.join(["%02x" % d for d in self.Data4[:2]]),
''.join(["%02x" % d for d in self.Data4[2:]]),
)
class SP_DEVINFO_DATA(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('ClassGuid', GUID),
('DevInst', DWORD),
('Reserved', ULONG_PTR),
]
def __str__(self):
return "ClassGuid:%s DevInst:%s" % (self.ClassGuid, self.DevInst)
PSP_DEVINFO_DATA = ctypes.POINTER(SP_DEVINFO_DATA)
class SP_DEVICE_INTERFACE_DATA(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('InterfaceClassGuid', GUID),
('Flags', DWORD),
('Reserved', ULONG_PTR),
]
def __str__(self):
return "InterfaceClassGuid:%s Flags:%s" % (self.InterfaceClassGuid, self.Flags)
PSP_DEVICE_INTERFACE_DATA = ctypes.POINTER(SP_DEVICE_INTERFACE_DATA)
PSP_DEVICE_INTERFACE_DETAIL_DATA = ctypes.c_void_p
class dummy(ctypes.Structure):
_fields_=[("d1", DWORD), ("d2", CHAR)]
_pack_ = 1
SIZEOF_SP_DEVICE_INTERFACE_DETAIL_DATA_A = ctypes.sizeof(dummy)
SetupDiDestroyDeviceInfoList = ctypes.windll.setupapi.SetupDiDestroyDeviceInfoList
SetupDiDestroyDeviceInfoList.argtypes = [HDEVINFO]
SetupDiDestroyDeviceInfoList.restype = BOOL
SetupDiGetClassDevs = ctypes.windll.setupapi.SetupDiGetClassDevsA
SetupDiGetClassDevs.argtypes = [ctypes.POINTER(GUID), PCTSTR, HWND, DWORD]
SetupDiGetClassDevs.restype = ValidHandle # HDEVINFO
SetupDiEnumDeviceInterfaces = ctypes.windll.setupapi.SetupDiEnumDeviceInterfaces
SetupDiEnumDeviceInterfaces.argtypes = [HDEVINFO, PSP_DEVINFO_DATA, ctypes.POINTER(GUID), DWORD, PSP_DEVICE_INTERFACE_DATA]
SetupDiEnumDeviceInterfaces.restype = BOOL
SetupDiGetDeviceInterfaceDetail = ctypes.windll.setupapi.SetupDiGetDeviceInterfaceDetailA
SetupDiGetDeviceInterfaceDetail.argtypes = [HDEVINFO, PSP_DEVICE_INTERFACE_DATA, PSP_DEVICE_INTERFACE_DETAIL_DATA, DWORD, PDWORD, PSP_DEVINFO_DATA]
SetupDiGetDeviceInterfaceDetail.restype = BOOL
SetupDiGetDeviceRegistryProperty = ctypes.windll.setupapi.SetupDiGetDeviceRegistryPropertyA
SetupDiGetDeviceRegistryProperty.argtypes = [HDEVINFO, PSP_DEVINFO_DATA, DWORD, PDWORD, PBYTE, DWORD, PDWORD]
SetupDiGetDeviceRegistryProperty.restype = BOOL
GUID_CLASS_COMPORT = GUID(0x86e0d1e0L, 0x8089, 0x11d0,
(ctypes.c_ubyte*8)(0x9c, 0xe4, 0x08, 0x00, 0x3e, 0x30, 0x1f, 0x73))
DIGCF_PRESENT = 2
DIGCF_DEVICEINTERFACE = 16
INVALID_HANDLE_VALUE = 0
ERROR_INSUFFICIENT_BUFFER = 122
SPDRP_HARDWAREID = 1
SPDRP_FRIENDLYNAME = 12
SPDRP_LOCATION_INFORMATION = 13
ERROR_NO_MORE_ITEMS = 259
def comports(available_only=True):
"""This generator scans the device registry for com ports and yields
(order, port, desc, hwid). If available_only is true only return currently
existing ports. Order is a helper to get sorted lists. it can be ignored
otherwise."""
flags = DIGCF_DEVICEINTERFACE
if available_only:
flags |= DIGCF_PRESENT
g_hdi = SetupDiGetClassDevs(ctypes.byref(GUID_CLASS_COMPORT), None, NULL, flags);
#~ for i in range(256):
for dwIndex in range(256):
did = SP_DEVICE_INTERFACE_DATA()
did.cbSize = ctypes.sizeof(did)
if not SetupDiEnumDeviceInterfaces(
g_hdi,
None,
ctypes.byref(GUID_CLASS_COMPORT),
dwIndex,
ctypes.byref(did)
):
if ctypes.GetLastError() != ERROR_NO_MORE_ITEMS:
raise ctypes.WinError()
break
dwNeeded = DWORD()
# get the size
if not SetupDiGetDeviceInterfaceDetail(
g_hdi,
ctypes.byref(did),
None, 0, ctypes.byref(dwNeeded),
None
):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
raise ctypes.WinError()
# allocate buffer
class SP_DEVICE_INTERFACE_DETAIL_DATA_A(ctypes.Structure):
_fields_ = [
('cbSize', DWORD),
('DevicePath', CHAR*(dwNeeded.value - ctypes.sizeof(DWORD))),
]
def __str__(self):
return "DevicePath:%s" % (self.DevicePath,)
idd = SP_DEVICE_INTERFACE_DETAIL_DATA_A()
idd.cbSize = SIZEOF_SP_DEVICE_INTERFACE_DETAIL_DATA_A
devinfo = SP_DEVINFO_DATA()
devinfo.cbSize = ctypes.sizeof(devinfo)
if not SetupDiGetDeviceInterfaceDetail(
g_hdi,
ctypes.byref(did),
ctypes.byref(idd), dwNeeded, None,
ctypes.byref(devinfo)
):
raise ctypes.WinError()
# hardware ID
szHardwareID = ctypes.create_string_buffer(250)
if not SetupDiGetDeviceRegistryProperty(
g_hdi,
ctypes.byref(devinfo),
SPDRP_HARDWAREID,
None,
ctypes.byref(szHardwareID), ctypes.sizeof(szHardwareID) - 1,
None
):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
raise ctypes.WinError()
# friendly name
szFriendlyName = ctypes.create_string_buffer(1024)
if not SetupDiGetDeviceRegistryProperty(
g_hdi,
ctypes.byref(devinfo),
SPDRP_FRIENDLYNAME,
None,
ctypes.byref(szFriendlyName), ctypes.sizeof(szFriendlyName) - 1,
None
):
# Ignore ERROR_INSUFFICIENT_BUFFER
if ctypes.GetLastError() != ERROR_INSUFFICIENT_BUFFER:
#~ raise ctypes.WinError()
# not getting friendly name for com0com devices, try something else
szFriendlyName = ctypes.create_string_buffer(1024)
if SetupDiGetDeviceRegistryProperty(
g_hdi,
ctypes.byref(devinfo),
SPDRP_LOCATION_INFORMATION,
None,
ctypes.byref(szFriendlyName), ctypes.sizeof(szFriendlyName) - 1,
None
):
port_name = "\\\\.\\" + szFriendlyName.value
order = None
else:
port_name = szFriendlyName.value
order = None
else:
try:
m = re.search(r"\((.*?(\d+))\)", szFriendlyName.value)
#~ print szFriendlyName.value, m.groups()
port_name = m.group(1)
order = int(m.group(2))
except AttributeError, msg:
port_name = szFriendlyName.value
order = None
yield order, port_name, szFriendlyName.value, szHardwareID.value
SetupDiDestroyDeviceInfoList(g_hdi)
if __name__ == '__main__':
import serial
print "-"*78
print "Serial ports"
print "-"*78
for order, port, desc, hwid in sorted(comports()):
print "%-10s: %s (%s) ->" % (port, desc, hwid),
try:
serial.Serial(port) # test open
except serial.serialutil.SerialException:
print "can't be openend"
else:
print "Ready"
print
# list of all ports the system knows
print "-"*78
print "All serial ports (registry)"
print "-"*78
for order, port, desc, hwid in sorted(comports(False)):
print "%-10s: %s (%s)" % (port, desc, hwid)
| mit | -7,163,676,177,331,834,000 | 33.87931 | 147 | 0.605783 | false |
rwightman/tensorflow-litterbox | litterbox/models/google/nets/inception.py | 1 | 1833 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Brings inception_v1, inception_v2 and inception_v3 under one namespace."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from models.google.nets.inception_resnet_v2 import inception_resnet_v2
from models.google.nets.inception_resnet_v2 import inception_resnet_v2_arg_scope
from models.google.nets.inception_v1 import inception_v1
from models.google.nets.inception_v1 import inception_v1_arg_scope
from models.google.nets.inception_v1 import inception_v1_base
from models.google.nets.inception_v2 import inception_v2
from models.google.nets.inception_v2 import inception_v2_arg_scope
from models.google.nets.inception_v2 import inception_v2_base
from models.google.nets.inception_v3 import inception_v3
from models.google.nets.inception_v3 import inception_v3_arg_scope
from models.google.nets.inception_v3 import inception_v3_base
from models.google.nets.inception_v4 import inception_v4
from models.google.nets.inception_v4 import inception_v4_arg_scope
from models.google.nets.inception_v4 import inception_v4_base
# pylint: enable=unused-import
| apache-2.0 | -1,821,658,035,064,631,300 | 49.916667 | 80 | 0.768685 | false |
2PacIsAlive/deepnet.works | deep_networks/data/preprocessing/segment_lungs.py | 2 | 2713 | import logging
import numpy as np
from skimage import measure
class LungSegmenter(object):
"""Extract the lungs from a scan.
Attributes:
log (logging.Logger): The logger for this module.
"""
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def largest_label_volume(self, im, bg=-1):
"""
Adapted from:
https://www.kaggle.com/gzuidhof/data-science-bowl-2017/full-preprocessing-tutorial
        Args:
            im: labeled integer array, e.g. the output of
                ``skimage.measure.label``.
            bg: label value treated as background and excluded.

        Returns:
            The label of the largest connected volume, ignoring ``bg``.
"""
vals, counts = np.unique(im, return_counts=True)
counts = counts[vals != bg]
vals = vals[vals != bg]
biggest = vals[np.argmax(counts)]
return biggest
def mask(self, image, fill_lungs=True):
"""
Adapted from:
https://www.kaggle.com/gzuidhof/data-science-bowl-2017/full-preprocessing-tutorial
        Args:
            image: 3D array of Hounsfield units, shaped (slices, height, width).
            fill_lungs: if True, fill internal lung structures slice by slice
                so the mask covers the full lung volume.

        Returns:
            Binary array of the same shape with lung voxels set to 1.
"""
# not actually binary, but 1 and 2.
# 0 is treated as background, which we do not want
binary_image = np.array(image > -320, dtype=np.int8)+1
labels = measure.label(binary_image)
# Pick the pixel in the very corner to determine which label is air.
# Improvement: Pick multiple background labels from around the patient
# More resistant to "trays" on which the patient lays cutting the air
# around the person in half
background_label = labels[0,0,0]
#Fill the air around the person
binary_image[background_label == labels] = 2
# Method of filling the lung structures (that is superior to something like
# morphological closing)
if fill_lungs:
# For every slice we determine the largest solid structure
for i, axial_slice in enumerate(binary_image):
axial_slice = axial_slice - 1
labeling = measure.label(axial_slice)
l_max = self.largest_label_volume(labeling, bg=0)
if l_max is not None: #This slice contains some lung
binary_image[i][labeling != l_max] = 1
binary_image -= 1 #Make the image actual binary
binary_image = 1-binary_image # Invert it, lungs are now 1
# Remove other air pockets insided body
labels = measure.label(binary_image, background=0)
l_max = self.largest_label_volume(labels, bg=0)
if l_max is not None: # There are air pockets
binary_image[labels != l_max] = 0
return binary_image
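
# --- Illustrative usage sketch (added for clarity) ---------------------------
# Assuming ``scan`` is a 3D numpy array of Hounsfield units with shape
# (slices, height, width) -- obtaining it (e.g. from DICOM files) is outside
# the scope of this module:
#
#     segmenter = LungSegmenter()
#     lung_mask = segmenter.mask(scan, fill_lungs=True)  # lungs == 1
#     lungs_only = scan * lung_mask                       # zero out the rest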
| mit | 6,750,953,811,845,551,000 | 31.297619 | 90 | 0.573535 | false |
andrebellafronte/stoq | plugins/ecf/ecfprinterdialog.py | 2 | 14094 | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
##
import operator
import platform
from serial import SerialException
import gtk
from kiwi.enums import ListType
from kiwi.ui.objectlist import Column
from stoqdrivers.interfaces import ICouponPrinter
from stoqdrivers.printers.base import (get_supported_printers_by_iface,
get_baudrate_values)
from stoqdrivers.enum import PaymentMethodType, TaxType
from stoqlib.database.runtime import get_current_station
from stoqlib.domain.sellable import SellableTaxConstant
from stoqlib.domain.till import Till
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.base.lists import ModelListDialog, ModelListSlave
from stoqlib.gui.dialogs.progressdialog import ProgressDialog
from stoqlib.gui.editors.baseeditor import BaseEditor
from stoqlib.lib.environment import is_developer_mode
from stoqlib.lib.devicemanager import DeviceManager
from stoqlib.lib.message import info, yesno, warning
from stoqlib.lib.parameters import sysparam
from stoqlib.lib.translation import locale_sorted, stoqlib_gettext
from ecf.ecfprinterstatus import ECFAsyncPrinterStatus
from ecf.ecfdomain import ECFPrinter, DeviceConstant
from ecf.deviceconstanteditor import DeviceConstantsDialog
_ = stoqlib_gettext
class _PrinterModel(object):
def __init__(self, brand, printer_class):
self.brand = unicode(brand)
self.model = unicode(printer_class.__name__)
self.model_name = unicode(printer_class.model_name)
self.printer_class = printer_class
def get_description(self):
return self.model_name
class ECFEditor(BaseEditor):
translation_domain = 'stoq'
domain = 'ecf'
gladefile = 'FiscalPrinterDialog'
model_type = ECFPrinter
model_name = _('Fiscal Printer')
proxy_widgets = ['device_name', 'device_serial', 'is_active', 'baudrate',
'user_number', 'register_date', 'register_cro']
def __init__(self, store, model=None):
self._device_manager = DeviceManager()
BaseEditor.__init__(self, store, model)
self.progress_dialog = ProgressDialog()
self.progress_dialog.connect('cancel',
self._on_progress_dialog__cancel)
self.progress_dialog.set_transient_for(self.main_dialog)
if self.edit_mode:
self.printer.set_sensitive(False)
self.main_dialog.ok_button.grab_focus()
else:
self.edit_constants.hide()
self.device_serial.hide()
self.device_serial_label.hide()
self.is_active.hide()
#
# BaseEditor
#
def create_model(self, store):
model = ECFPrinter(brand=u'daruma',
model=u'FS345',
device_name=u'/dev/ttyS0',
device_serial=u'',
baudrate=9600,
station=get_current_station(store),
is_active=True,
store=store)
if platform.system() == 'Windows':
model.device_name = u'COM1'
return model
def setup_proxies(self):
self._populate_printers()
self._populate_serial_ports()
self._populate_baudrate()
self.proxy = self.add_proxy(self.model,
ECFEditor.proxy_widgets)
self.printer.select_item_by_label(self.model.get_description())
def validate_confirm(self):
if not self.can_activate_printer():
return False
if self.edit_mode:
return True
try:
self._status = ECFAsyncPrinterStatus(self.model.device_name,
self.model.printer_class,
self.model.baudrate)
except SerialException as e:
warning(_('Error opening serial port'), str(e))
return False
self._status.connect('reply', self._printer_status__reply)
self._status.connect('timeout', self._printer_status__timeout)
self.progress_dialog.set_label(_("Probing for a %s printer on %s") % (
self.model.model_name, self._status.get_device_name()))
self.progress_dialog.start()
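        # Returning False keeps the dialog open while the probe runs in the
        # background; _printer_status__reply() finishes the confirmation and
        # closes the dialog once the printer answers.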
return False
def can_activate_printer(self):
serial = self.model.device_serial
printers = self.store.find(ECFPrinter, is_active=True,
station=get_current_station(self.store))
till = self.store.find(Till, status=Till.STATUS_OPEN,
station=get_current_station(self.store)).one()
if till and printers:
warning(_("You need to close the till opened at %s before "
"changing this printer.") % till.opening_date.date())
return False
for p in printers:
if p.device_serial != serial and self.model.is_active:
warning(_(u'The ECF %s is already active for this '
'station. Deactivate that printer before '
'activating this one.') % p.model)
return False
return True
#
# Callbacks
#
def _on_progress_dialog__cancel(self, progress):
# FIXME:
# status.stop()
pass
def on_printer__content_changed(self, combo):
# Cannot change the model in edit mode!
if self.edit_mode:
return
printer = combo.get_selected()
self.model.model = printer.model
self.model.brand = printer.brand
# These are not persistent
self.model.model_name = printer.model_name
self.model.printer_class = printer.printer_class
def on_edit_constants__clicked(self, button):
run_dialog(DeviceConstantsDialog, self, self.store, self.model)
def _printer_status__reply(self, status, reply):
self.progress_dialog.stop()
if not self._populate_ecf_printer(status):
return
if yesno(_("An ECF Printer was added. You need to restart Stoq "
"before using it. Would you like to restart it now?"),
gtk.RESPONSE_YES, _("Restart now"), _("Restart later")):
self.store.commit()
raise SystemExit
# FIXME: move to base dialogs or base editor
self.retval = self.model
self.main_dialog.close()
def _printer_status__timeout(self, status):
self.progress_dialog.stop()
info(_("Could not find a %s printer connected to %s") % (
self.model.model_name, status.get_device_name()))
#
# Private
#
def _populate_baudrate(self):
values = get_baudrate_values()
self.baudrate.prefill(values)
def _populate_printers(self):
supported_ifaces = get_supported_printers_by_iface(ICouponPrinter).items()
printers = []
for brand, printer_classes in supported_ifaces:
for printer_class in printer_classes:
printer = _PrinterModel(brand, printer_class)
printers.append((printer.get_description(), printer))
        # Allow using the virtual printer in both demo mode and developer mode
        # so it's easier for testers and developers to test ECF functionality
if sysparam.get_bool('DEMO_MODE') or is_developer_mode():
from stoqdrivers.printers.virtual.Simple import Simple
printer = _PrinterModel('virtual', Simple)
printers.append((printer.get_description(), printer))
self.printer.prefill(locale_sorted(
printers, key=operator.itemgetter(0)))
def _populate_serial_ports(self):
values = []
for device in self._device_manager.get_serial_devices():
values.append(device.device_name)
        if self.model.device_name not in values:
values.append(self.model.device_name)
self.device_name.prefill(values)
def _populate_ecf_printer(self, status):
serial = unicode(status.printer.get_serial())
if self.store.find(ECFPrinter, device_serial=serial):
status.stop()
status.get_port().close()
info(_("This printer is already known to the system"))
return False
self.model.device_serial = serial
self._populate_constants(self.model, status)
return True
def _populate_constants(self, model, status):
driver = status.get_driver()
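        # For each tax constant reported by the driver, store a DeviceConstant
        # row tied to this printer; custom rates also get a SellableTaxConstant
        # created on the fly when one does not exist yet.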
for tax_enum, device_value, value in driver.get_tax_constants():
if tax_enum == TaxType.CUSTOM:
constant = self.store.find(SellableTaxConstant,
tax_value=value).one()
# If the constant is not defined in the system, create it
if not constant:
constant = SellableTaxConstant(tax_value=value,
tax_type=int(TaxType.CUSTOM),
description=u'%0.2f %%' % value,
store=self.store)
elif tax_enum == TaxType.SERVICE:
constant = self.store.find(DeviceConstant,
constant_enum=int(tax_enum),
printer=model).one()
                # Skip if we already have a service tax defined for this printer
# This needs to be improved when we support more than one
# service tax
if constant is not None:
continue
else:
constant = self.store.find(SellableTaxConstant,
tax_type=int(tax_enum)).one()
                # Ignore if it is an unknown tax
if not constant:
continue
if value:
constant_name = u'%0.2f %%' % (value, )
elif constant:
constant_name = constant.description
else:
constant_name = None
DeviceConstant(constant_enum=int(tax_enum),
constant_name=constant_name,
constant_type=DeviceConstant.TYPE_TAX,
constant_value=value,
device_value=device_value,
printer=model,
store=self.store)
# This is going to be ugly, most printers don't support
# a real constant for the payment methods, so we have to look
# at the description and guess
payment_enums = {'dinheiro': PaymentMethodType.MONEY,
'cheque': PaymentMethodType.CHECK,
'boleto': PaymentMethodType.BILL,
'cartao credito': PaymentMethodType.CREDIT_CARD,
'cartao debito': PaymentMethodType.DEBIT_CARD,
'financeira': PaymentMethodType.FINANCIAL,
'vale compra': PaymentMethodType.GIFT_CERTIFICATE
}
payment_methods = []
for device_value, constant_name in driver.get_payment_constants():
lower = constant_name.lower()
lower = lower.replace('é', 'e') # Workaround method names with
lower = lower.replace('ã', 'a') # accents
payment_enum = payment_enums.get(lower)
if payment_enum is None:
continue
            # Avoid registering the same method twice for the same device
if payment_enum in payment_methods:
continue
DeviceConstant(constant_enum=int(payment_enum),
constant_name=unicode(constant_name),
constant_type=DeviceConstant.TYPE_PAYMENT,
constant_value=None,
device_value=device_value,
printer=model,
store=self.store)
payment_methods.append(payment_enum)
class ECFListSlave(ModelListSlave):
editor_class = ECFEditor
model_type = ECFPrinter
columns = [
Column('description', title=_('Model'), data_type=str, expand=True),
Column('device_serial', title=_('Serial'), data_type=str, width=100),
Column('station.name', title=_('Computer'), data_type=str, width=100),
Column('is_active', title=_('Active'), data_type=bool, width=60),
]
def __init__(self, parent, store, reuse_store=False):
ModelListSlave.__init__(self, parent, store, reuse_store=reuse_store)
self.set_list_type(ListType.UNREMOVABLE)
def populate(self):
return self.store.find(ECFPrinter,
station=get_current_station(self.store))
def edit_item(self, item):
if item.brand == 'virtual':
info(_("Cant edit a virtual printer"))
return False
return ModelListSlave.edit_item(self, item)
class ECFListDialog(ModelListDialog):
list_slave_class = ECFListSlave
title = _('Fiscal Printers')
size = (600, 250)
| gpl-2.0 | 1,691,430,609,699,142,000 | 39.148148 | 83 | 0.584729 | false |
hguemar/cinder | cinder/api/views/transfers.py | 18 | 3335 | # Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import common
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
"""Model transfer API responses as a python dictionary."""
_collection_name = "os-volume-transfer"
def __init__(self):
"""Initialize view builder."""
super(ViewBuilder, self).__init__()
def summary_list(self, request, transfers):
"""Show a list of transfers without many details."""
return self._list_view(self.summary, request, transfers)
def detail_list(self, request, transfers):
"""Detailed view of a list of transfers ."""
return self._list_view(self.detail, request, transfers)
def summary(self, request, transfer):
"""Generic, non-detailed view of a transfer."""
return {
'transfer': {
'id': transfer['id'],
'volume_id': transfer.get('volume_id'),
'name': transfer['display_name'],
'links': self._get_links(request,
transfer['id']),
},
}
def detail(self, request, transfer):
"""Detailed view of a single transfer."""
return {
'transfer': {
'id': transfer.get('id'),
'created_at': transfer.get('created_at'),
'name': transfer.get('display_name'),
'volume_id': transfer.get('volume_id'),
'links': self._get_links(request, transfer['id'])
}
}
def create(self, request, transfer):
"""Detailed view of a single transfer when created."""
return {
'transfer': {
'id': transfer.get('id'),
'created_at': transfer.get('created_at'),
'name': transfer.get('display_name'),
'volume_id': transfer.get('volume_id'),
'auth_key': transfer.get('auth_key'),
'links': self._get_links(request, transfer['id'])
}
}
def _list_view(self, func, request, transfers):
"""Provide a view for a list of transfers."""
transfers_list = [func(request, transfer)['transfer'] for transfer in
transfers]
transfers_links = self._get_collection_links(request,
transfers,
self._collection_name)
transfers_dict = dict(transfers=transfers_list)
if transfers_links:
transfers_dict['transfers_links'] = transfers_links
return transfers_dict
| apache-2.0 | -4,970,848,433,441,745,000 | 36.47191 | 78 | 0.564918 | false |
ymilord/OctoPrint-MrBeam | docs/conf.py | 2 | 8692 | # -*- coding: utf-8 -*-
#
# OctoPrint documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 02 17:08:50 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../src/'))
import octoprint._version
from datetime import date
year_since = 2013
year_current = date.today().year
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.autodoc', 'sphinxcontrib.httpdomain']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OctoPrint'
copyright = u'%d-%d, Gina Häußge' % (year_since, year_current) if year_current > year_since else u'%d, Gina Häußge' % year_since
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = octoprint._version.get_versions()["version"]
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OctoPrintdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'OctoPrint.tex', u'OctoPrint Documentation',
u'Gina Häußge', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'octoprint', u'OctoPrint Documentation',
[u'Gina Häußge'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OctoPrint', u'OctoPrint Documentation',
u'Gina Häußge', 'OctoPrint', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| agpl-3.0 | -7,994,767,260,644,734,000 | 32.011407 | 128 | 0.708708 | false |
P4ELTE/t4p4s | src/hardware_indep/multi/actions.stage.c.py | 1 | 3080 | # SPDX-License-Identifier: Apache-2.0
# Copyright 2016 Eotvos Lorand University, Budapest, Hungary
from utils.codegen import format_expr, format_type
from compiler_common import get_hdrfld_name, generate_var_name, SugarStyle, make_const
compiler_common.current_compilation['is_multicompiled'] = True
part_count = compiler_common.current_compilation['multi']
multi_idx = compiler_common.current_compilation['multi_idx']
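# Actions with non-empty bodies are sorted by body length and dealt out
# round-robin across the 'multi' compilation units; this unit keeps only
# those whose index matches multi_idx.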
all_ctl_acts = sorted(((ctl, act) for ctl in hlir.controls for act in ctl.actions if len(act.body.components) != 0), key=lambda k: len(k[1].body.components))
ctl_acts = list((ctl, act) for idx, (ctl, act) in enumerate(all_ctl_acts) if idx % part_count == multi_idx)
if ctl_acts == []:
compiler_common.current_compilation['skip_output'] = True
else:
from compiler_log_warnings_errors import addError, addWarning
from utils.codegen import format_declaration, format_statement, format_expr, format_type, get_method_call_env
from compiler_common import types, unique_everseen
#[ #include <unistd.h>
#[ #include "dpdk_lib.h"
#[ #include "actions.h"
#[ #include "util_debug.h"
#[ #include "util_packet.h"
#[ #include "util_packet.h"
#[ extern const char* action_names[];
#[ extern const char* action_canonical_names[];
#[ extern const char* action_short_names[];
#[ extern ctrl_plane_backend bg;
for mcall in hlir.all_nodes.by_type('MethodCallStatement').map('methodCall').filter(lambda n: 'path' in n.method and n.method.path.name=='digest'):
digest = mcall.typeArguments[0]
funname = f'{mcall.method.path.name}__{digest.path.name}'
#[ extern ${format_type(mcall.urtype)} $funname(uint32_t /* ignored */ receiver, ctrl_plane_digest cpd, SHORT_STDPARAMS);
#[ extern void do_assignment(header_instance_t dst_hdr, header_instance_t src_hdr, SHORT_STDPARAMS);
################################################################################
for ctl, act in ctl_acts:
name = act.annotations.annotations.get('name')
if name:
#[ // action name: ${name.expr[0].value}
#{ void action_code_${act.name}(action_${act.name}_params_t parameters, SHORT_STDPARAMS) {
if len(act.body.components) != 0:
#[ uint32_t value32, res32;
#[ (void)value32, (void)res32;
#[ control_locals_${ctl.name}_t* local_vars = (control_locals_${ctl.name}_t*) pd->control_locals;
for stmt in act.body.components:
global pre_statement_buffer
global post_statement_buffer
pre_statement_buffer = ""
post_statement_buffer = ""
code = format_statement(stmt, ctl)
if pre_statement_buffer != "":
#= pre_statement_buffer
pre_statement_buffer = ""
#= code
if post_statement_buffer != "":
#= post_statement_buffer
post_statement_buffer = ""
#} }
#[
| apache-2.0 | 3,627,202,186,726,088,700 | 41.777778 | 157 | 0.602273 | false |
LiamMayfair/utils | timecalc.py | 1 | 1752 | """@@@"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
""" """
""" timecalc.py """
""" """
""" Accepts a timespan and returns the date time when that times """
""" pan elapses """
""" """
""" Author: Liam Mayfair """
""" Copyright (C) 2015 """
""" Licence: MIT """
""" """""""""""""""""""""""""""""""""""""""""""""""""""""""""""" """
#!/usr/bin/env python3
from datetime import datetime, timedelta
def calculate_date(origin, delta) -> datetime:
return origin + delta
def check_input(value) -> str:
    """Return the value if it is a decimal string, otherwise None."""
    if value.isdecimal():
        return value
    else:
        return None
def get_delta() -> timedelta:
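    # check_input() returns None for blank or non-numeric entries, so the
    # `or 0` below makes each field default to zero.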
in_weeks = check_input(input("Enter weeks [0]: ")) or 0
in_days = check_input(input("Enter days [0]: ")) or 0
in_hours = check_input(input("Enter hours [0]: ")) or 0
in_minutes = check_input(input("Enter minutes [0]: ")) or 0
return timedelta(weeks=int(in_weeks), days=int(in_days), hours=int(in_hours), minutes=int(in_minutes))
if __name__ == "__main__":
print("Enter time in numeric format. Blank or invalid values will take defaults.")
result = calculate_date(datetime.now(), get_delta())
    # Print the date formatted as e.g. Sunday, 01 May 2015, 16:43:02
print("The resulting date is: {0}".format(result.strftime("%A, %d %B %Y, %H:%M:%S")))
| mit | -8,306,291,390,187,111,000 | 45.105263 | 106 | 0.422945 | false |
bloomark/python-bitcoinlib | examples/make-bootstrap-rpc.py | 14 | 1584 | #!/usr/bin/env python3
# Copyright (C) 2013-2014 The python-bitcoinlib developers
#
# This file is part of python-bitcoinlib.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-bitcoinlib, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
"""Make a boostrap.dat file by getting the blocks from the RPC interface."""
import sys
if sys.version_info.major < 3:
sys.stderr.write('Sorry, Python 3.x required by this example.\n')
sys.exit(1)
import bitcoin
from bitcoin.core import CBlock
import bitcoin.rpc
import struct
import time
try:
if len(sys.argv) not in (2, 3):
raise Exception
n = int(sys.argv[1])
if len(sys.argv) == 3:
bitcoin.SelectParams(sys.argv[2])
except Exception as ex:
print('Usage: %s <block-height> [network=(mainnet|testnet|regtest)] > bootstrap.dat' % sys.argv[0], file=sys.stderr)
sys.exit(1)
proxy = bitcoin.rpc.Proxy()
total_bytes = 0
start_time = time.time()
fd = sys.stdout.buffer
for i in range(n + 1):
block = proxy.getblock(proxy.getblockhash(i))
block_bytes = block.serialize()
total_bytes += len(block_bytes)
print('%.2f KB/s, height %d, %d bytes' %
((total_bytes / 1000) / (time.time() - start_time),
i, len(block_bytes)),
file=sys.stderr)
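    # Each bootstrap.dat record is the network's magic bytes, a little-endian
    # 4-byte length prefix, then the serialized block itself.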
fd.write(bitcoin.params.MESSAGE_START)
fd.write(struct.pack('<i', len(block_bytes)))
fd.write(block_bytes)
| lgpl-3.0 | -5,203,403,247,920,980,000 | 24.967213 | 120 | 0.674874 | false |