code (stringlengths 2 to 1.05M) | repo_name (stringlengths 5 to 104) | path (stringlengths 4 to 251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2 to 1.05M)
---|---|---|---|---|---|
#!/usr/local/bin/python2
import os
print "PID:",str(os.getpid())
while True:
raw_input("press <RETURN> to open file")
fh = open("/tmp/test.txt",'w')
print "Opened /tmp/test.txt..."
raw_input("press <RETURN> to close")
fh.close()
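# Illustrative usage (not part of the original exercise): while the file is
# held open above, a second shell could identify the holding process with a
# command such as `fuser /tmp/test.txt`, which should report the PID printed
# at startup.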
| rbprogrammer/advanced_python_topics | course-material/py2/solutions/13 File system control/fuser_t.py | Python | apache-2.0 | 256 |
import sys
sys.path.insert(1, "../../../")
import h2o
import pandas as pd
import statsmodels.api as sm
def prostate(ip,port):
# Log.info("Importing prostate.csv data...\n")
h2o_data = h2o.upload_file(path=h2o.locate("smalldata/logreg/prostate.csv"))
#prostate.summary()
sm_data = pd.read_csv(h2o.locate("smalldata/logreg/prostate.csv")).as_matrix()
sm_data_response = sm_data[:,1]
sm_data_features = sm_data[:,2:]
#Log.info(cat("B)H2O GLM (binomial) with parameters:\nX:", myX, "\nY:", myY, "\n"))
h2o_glm = h2o.glm(y=h2o_data[1], x=h2o_data[2:], family="binomial", n_folds=10, alpha=[0.5])
h2o_glm.show()
sm_glm = sm.GLM(endog=sm_data_response, exog=sm_data_features, family=sm.families.Binomial()).fit()
assert abs(sm_glm.null_deviance - h2o_glm._model_json['output']['training_metrics']['null_deviance']) < 1e-5, "Expected null deviances to be the same"
if __name__ == "__main__":
h2o.run_test(sys.argv, prostate)
| PawarPawan/h2o-v3 | h2o-py/tests/testdir_algos/glm/pyunit_NOFEATURE_prostateGLM.py | Python | apache-2.0 | 956 |
# Copyright 2014, 2015 SAP SE.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import pytest
from pyhdb.cursor import format_operation
from pyhdb.exceptions import ProgrammingError
import tests.helper
TABLE = 'PYHDB_TEST_1'
TABLE_FIELDS = 'TEST VARCHAR(255)'
@pytest.fixture
def test_table_1(request, connection):
"""Fixture to create table for testing, and dropping it after test run"""
tests.helper.create_table_fixture(request, connection, TABLE, TABLE_FIELDS)
@pytest.fixture
def content_table_1(request, connection):
"""Additional fixture to test_table_1, inserts some rows for testing"""
cursor = connection.cursor()
cursor.execute("insert into PYHDB_TEST_1 values('row1')")
cursor.execute("insert into PYHDB_TEST_1 values('row2')")
cursor.execute("insert into PYHDB_TEST_1 values('row3')")
@pytest.mark.parametrize("parameters", [
None,
(),
[]
])
def test_format_operation_without_parameters(parameters):
"""Test that providing no parameter produces correct result."""
operation = "SELECT * FROM TEST WHERE fuu = 'bar'"
assert format_operation(operation, parameters) == operation
def test_format_operation_with_positional_parameters():
"""Test that correct number of parameters produces correct result."""
assert format_operation(
"INSERT INTO TEST VALUES(%s, %s)", ('Hello World', 2)
) == "INSERT INTO TEST VALUES('Hello World', 2)"
def test_format_operation_with_too_few_positional_parameters_raises():
"""Test that providing too few parameters raises exception"""
with pytest.raises(ProgrammingError):
format_operation("INSERT INTO TEST VALUES(%s, %s)", ('Hello World',))
def test_format_operation_with_too_many_positional_parameters_raises():
"""Test that providing too many parameters raises exception"""
with pytest.raises(ProgrammingError):
format_operation("INSERT INTO TEST VALUES(%s)", ('Hello World', 2))
def test_format_operation_with_named_parameters():
"""format_operation() is used for Python style parameter expansion"""
assert format_operation(
"INSERT INTO TEST VALUES(%(name)s, %(val)s)",
{'name': 'Hello World', 'val': 2}
) == "INSERT INTO TEST VALUES('Hello World', 2)"
@pytest.mark.hanatest
def test_cursor_fetch_without_execution(connection):
cursor = connection.cursor()
with pytest.raises(ProgrammingError):
cursor.fetchone()
@pytest.mark.hanatest
def test_cursor_fetchall_single_row(connection):
cursor = connection.cursor()
cursor.execute("SELECT 1 FROM DUMMY")
result = cursor.fetchall()
assert result == [(1,)]
@pytest.mark.hanatest
def test_cursor_fetchall_multiple_rows(connection):
cursor = connection.cursor()
cursor.execute('SELECT "VIEW_NAME" FROM "PUBLIC"."VIEWS" LIMIT 10')
result = cursor.fetchall()
assert len(result) == 10
# Test cases for different parameter style expansion
#
# paramstyle Meaning
# ---------------------------------------------------------
# 1) qmark Question mark style, e.g. ...WHERE name=?
# 2) numeric Numeric, positional style, e.g. ...WHERE name=:1
# 3) named Named style, e.g. ...WHERE name=:name -> NOT IMPLEMENTED !!
# 4) format ANSI C printf format codes, e.g. ...WHERE name=%s
# 5) pyformat Python extended format codes, e.g. ...WHERE name=%(name)s
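# Illustrative sketch of how the styles above would look when passed to
# cursor.execute(); the table/column names and values here are made up:
#   1) qmark:    cursor.execute("SELECT * FROM T WHERE name = ?", ['foo'])
#   2) numeric:  cursor.execute("SELECT * FROM T WHERE name = :1", ['foo'])
#   4) format:   cursor.execute("SELECT * FROM T WHERE name = %s", ['foo'])
#   5) pyformat: cursor.execute("SELECT * FROM T WHERE name = %(name)s",
#                               {'name': 'foo'})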
@pytest.mark.hanatest
def test_cursor_execute_with_params1(connection, test_table_1, content_table_1):
"""Test qmark parameter expansion style - uses cursor.prepare*() methods"""
# Note: use fetchall() to check that only one row gets returned
cursor = connection.cursor()
sql = 'select test from PYHDB_TEST_1 where test=?'
# correct way:
assert cursor.execute(sql, ['row2']).fetchall() == [('row2',)]
# invalid - extra unexpected parameter
with pytest.raises(ProgrammingError):
cursor.execute(sql, ['row2', 'extra']).fetchall()
@pytest.mark.hanatest
def test_cursor_execute_with_params2(connection, test_table_1, content_table_1):
"""Test numeric parameter expansion style - uses cursor.prepare() methods"""
# Note: use fetchall() to check that only one row gets returned
cursor = connection.cursor()
sql = 'select test from PYHDB_TEST_1 where test=?'
# correct way:
assert cursor.execute(sql, ['row2']).fetchall() == [('row2',)]
# invalid - extra unexpected parameter
with pytest.raises(ProgrammingError):
cursor.execute(sql, ['row2', 'extra']).fetchall()
@pytest.mark.hanatest
def test_cursor_execute_with_params4(connection, test_table_1, content_table_1):
"""Test format (positional) parameter expansion style"""
# Uses prepare_operation method
cursor = connection.cursor()
sql = 'select test from PYHDB_TEST_1 where test=%s'
# correct way:
assert cursor.execute(sql, ['row2']).fetchall() == [('row2',)]
# invalid - extra unexpected parameter
with pytest.raises(ProgrammingError):
cursor.execute(sql, ['row2', 'extra']).fetchall()
@pytest.mark.hanatest
def test_cursor_execute_with_params5(connection, test_table_1, content_table_1):
"""Test pyformat (named) parameter expansion style"""
# Note: use fetchall() to check that only one row gets returned
cursor = connection.cursor()
sql = 'select test from {} where test=%(test)s'.format(TABLE)
# correct way:
assert cursor.execute(sql, {'test': 'row2'}).fetchall() == [('row2',)]
# also correct way, additional dict value should just be ignored
assert cursor.execute(sql, {'test': 'row2', 'd': 2}).fetchall() == \
[('row2',)]
@pytest.mark.hanatest
def test_cursor_insert_commit(connection, test_table_1):
cursor = connection.cursor()
cursor.execute("SELECT COUNT(*) FROM %s" % TABLE)
assert cursor.fetchone() == (0,)
cursor.execute("INSERT INTO %s VALUES('Hello World')" % TABLE)
assert cursor.rowcount == 1
cursor.execute("SELECT COUNT(*) FROM %s" % TABLE)
assert cursor.fetchone() == (1,)
connection.commit()
@pytest.mark.hanatest
def test_cursor_create_and_drop_table(connection):
cursor = connection.cursor()
if tests.helper.exists_table(connection, TABLE):
cursor.execute('DROP TABLE "%s"' % TABLE)
assert not tests.helper.exists_table(connection, TABLE)
cursor.execute('CREATE TABLE "%s" ("TEST" VARCHAR(255))' % TABLE)
assert tests.helper.exists_table(connection, TABLE)
cursor.execute('DROP TABLE "%s"' % TABLE)
@pytest.mark.hanatest
def test_received_last_resultset_part_resets_after_execute(connection):
# The private attribute was not reset to False after
# executing another statement
cursor = connection.cursor()
cursor.execute("SELECT 1 FROM DUMMY")
# Result is very small, so we got everything directly into the buffer
assert cursor._received_last_resultset_part
cursor.execute("SELECT VIEW_NAME FROM PUBLIC.VIEWS")
# Result is not small enough for a single resultset part
assert not cursor._received_last_resultset_part
@pytest.mark.hanatest
@pytest.mark.parametrize("method", [
'fetchone',
'fetchall',
'fetchmany',
])
def test_fetch_raises_error_after_close(connection, method):
cursor = connection.cursor()
cursor.close()
with pytest.raises(ProgrammingError):
getattr(cursor, method)()
@pytest.mark.hanatest
def test_execute_raises_error_after_close(connection):
cursor = connection.cursor()
cursor.close()
with pytest.raises(ProgrammingError):
cursor.execute("SELECT TEST FROM DUMMY")
@pytest.mark.hanatest
def test_cursor_description_after_execution(connection):
cursor = connection.cursor()
assert cursor.description is None
cursor.execute("SELECT 'Hello World' AS TEST FROM DUMMY")
assert cursor.description == ((u'TEST', 9, None, 11, 0, None, 0),)
@pytest.mark.hanatest
def test_cursor_executemany_python_expansion(connection, test_table_1):
cursor = connection.cursor()
cursor.executemany(
"INSERT INTO {} VALUES(%s)".format(TABLE),
(
("Statement 1",),
("Statement 2",)
)
)
cursor.execute("SELECT * FROM %s" % TABLE)
result = cursor.fetchall()
assert result == [('Statement 1',), ('Statement 2',)]
@pytest.mark.hanatest
def test_cursor_executemany_hana_expansion(connection, test_table_1):
cursor = connection.cursor()
cursor.executemany(
"INSERT INTO %s VALUES(:1)" % TABLE,
(
("Statement 1",),
("Statement 2",)
)
)
cursor.execute("SELECT * FROM %s" % TABLE)
result = cursor.fetchall()
assert result == [('Statement 1',), ('Statement 2',)]
| ralhei/PyHDB | tests/test_cursor.py | Python | apache-2.0 | 9,174 |
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import base as test_base
from oslo.db.sqlalchemy import test_migrations as migrate
class TestWalkVersions(test_base.BaseTestCase, migrate.WalkVersionsMixin):
def setUp(self):
super(TestWalkVersions, self).setUp()
self.migration_api = mock.MagicMock()
self.engine = mock.MagicMock()
self.REPOSITORY = mock.MagicMock()
self.INIT_VERSION = 4
def test_migrate_up(self):
self.migration_api.db_version.return_value = 141
self._migrate_up(self.engine, 141)
self.migration_api.upgrade.assert_called_with(
self.engine, self.REPOSITORY, 141)
self.migration_api.db_version.assert_called_with(
self.engine, self.REPOSITORY)
def test_migrate_up_with_data(self):
test_value = {"a": 1, "b": 2}
self.migration_api.db_version.return_value = 141
self._pre_upgrade_141 = mock.MagicMock()
self._pre_upgrade_141.return_value = test_value
self._check_141 = mock.MagicMock()
self._migrate_up(self.engine, 141, True)
self._pre_upgrade_141.assert_called_with(self.engine)
self._check_141.assert_called_with(self.engine, test_value)
def test_migrate_down(self):
self.migration_api.db_version.return_value = 42
self.assertTrue(self._migrate_down(self.engine, 42))
self.migration_api.db_version.assert_called_with(
self.engine, self.REPOSITORY)
def test_migrate_down_not_implemented(self):
self.migration_api.downgrade.side_effect = NotImplementedError
self.assertFalse(self._migrate_down(self.engine, 42))
def test_migrate_down_with_data(self):
self._post_downgrade_043 = mock.MagicMock()
self.migration_api.db_version.return_value = 42
self._migrate_down(self.engine, 42, True)
self._post_downgrade_043.assert_called_with(self.engine)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_default(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions()
self.migration_api.version_control.assert_called_with(
None, self.REPOSITORY, self.INIT_VERSION)
self.migration_api.db_version.assert_called_with(
None, self.REPOSITORY)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = [mock.call(None, v, with_data=True) for v in versions]
self.assertEqual(self._migrate_up.call_args_list, upgraded)
downgraded = [mock.call(None, v - 1) for v in reversed(versions)]
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_true(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=True, downgrade=True)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = []
for v in versions:
upgraded.append(mock.call(self.engine, v, with_data=True))
upgraded.append(mock.call(self.engine, v))
upgraded.extend(
[mock.call(self.engine, v) for v in reversed(versions)]
)
self.assertEqual(upgraded, self._migrate_up.call_args_list)
downgraded_1 = [
mock.call(self.engine, v - 1, with_data=True) for v in versions
]
downgraded_2 = []
for v in reversed(versions):
downgraded_2.append(mock.call(self.engine, v - 1))
downgraded_2.append(mock.call(self.engine, v - 1))
downgraded = downgraded_1 + downgraded_2
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_true_false(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=True, downgrade=False)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = []
for v in versions:
upgraded.append(mock.call(self.engine, v, with_data=True))
upgraded.append(mock.call(self.engine, v))
self.assertEqual(upgraded, self._migrate_up.call_args_list)
downgraded = [
mock.call(self.engine, v - 1, with_data=True) for v in versions
]
self.assertEqual(self._migrate_down.call_args_list, downgraded)
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up')
@mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down')
def test_walk_versions_all_false(self, _migrate_up, _migrate_down):
self.REPOSITORY.latest = 20
self.migration_api.db_version.return_value = self.INIT_VERSION
self._walk_versions(self.engine, snake_walk=False, downgrade=False)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
upgraded = [
mock.call(self.engine, v, with_data=True) for v in versions
]
self.assertEqual(upgraded, self._migrate_up.call_args_list)
| malor/oslo.db | tests/sqlalchemy/test_migrations.py | Python | apache-2.0 | 6,274 |
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Stackdriver Logging API"""
import traceback
import google.cloud.logging.client
import six
class HTTPContext(object):
"""HTTPContext defines an object that captures the parameter for the
httpRequest part of Error Reporting API
:type method: str
:param method: The type of HTTP request, such as GET, POST, etc.
:type url: str
:param url: The URL of the request
:type user_agent: str
:param user_agent: The user agent information that is provided with the
request.
:type referrer: str
:param referrer: The referrer information that is provided with the
request.
:type response_status_code: int
:param response_status_code: The HTTP response status code for the request.
:type remote_ip: str
:param remote_ip: The IP address from which the request originated. This
can be IPv4, IPv6, or a token which is derived from
the IP address, depending on the data that has been
provided in the error report.
"""
def __init__(self, method=None, url=None,
user_agent=None, referrer=None,
response_status_code=None, remote_ip=None):
self.method = method
self.url = url
# intentionally camel case for mapping to JSON API expects
# pylint: disable=invalid-name
self.userAgent = user_agent
self.referrer = referrer
self.responseStatusCode = response_status_code
self.remoteIp = remote_ip
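# Minimal usage sketch (all values below are made up for illustration):
#   http_context = HTTPContext(method='GET', url='/home',
#                              user_agent='test agent',
#                              referrer='http://example.com',
#                              response_status_code=500,
#                              remote_ip='127.0.0.1')
#   client.report("Something went wrong!", http_context=http_context)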
class Client(object):
"""Error Reporting client. Currently Error Reporting is done by creating
a Logging client.
:type project: str
:param project: the project which the client acts on behalf of. If not
passed falls back to the default inferred from the
environment.
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
:class:`NoneType`
:param credentials: The OAuth2 Credentials to use for the connection
owned by this client. If not passed (and if no ``http``
object is passed), falls back to the default inferred
from the environment.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: An optional HTTP object to make requests. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
:type service: str
:param service: An identifier of the service, such as the name of the
executable, job, or Google App Engine service name. This
field is expected to have a low number of values that are
relatively stable over time, as opposed to version,
which can be changed whenever new code is deployed.
:type version: str
:param version: Represents the source code version that the developer
provided, which could represent a version label or a Git
SHA-1 hash, for example. If the developer did not provide
a version, the value is set to default.
:raises: :class:`ValueError` if the project is neither passed in nor
set in the environment.
"""
def __init__(self, project=None,
credentials=None,
http=None,
service=None,
version=None):
self.logging_client = google.cloud.logging.client.Client(
project, credentials, http)
self.service = service if service else self.DEFAULT_SERVICE
self.version = version
DEFAULT_SERVICE = 'python'
def _send_error_report(self, message,
report_location=None, http_context=None, user=None):
"""Makes the call to the Error Reporting API via the log stream.
This is the lower-level interface to build the payload, generally
users will use either report() or report_exception() to automatically
gather the parameters for this method.
Currently this method sends the Error Report by formatting a structured
log message according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: The stack trace that was reported or logged by the
service.
:type report_location: dict
:param report_location: The location in the source code where the
decision was made to report the error, usually the place
where it was logged. For a logged exception this would be the
source line where the exception is logged, usually close to
the place where it was caught.
This should be a Python dict that contains the keys 'filePath',
'lineNumber', and 'functionName'
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This can
be a user ID, an email address, or an arbitrary token that
uniquely identifies the user. When sending an error
report, leave this field empty if the user was not
logged in. In this case the Error Reporting system will
use other data, such as remote IP address,
to distinguish affected users.
"""
payload = {
'serviceContext': {
'service': self.service,
},
'message': '{0}'.format(message)
}
if self.version:
payload['serviceContext']['version'] = self.version
if report_location or http_context or user:
payload['context'] = {}
if report_location:
payload['context']['reportLocation'] = report_location
if http_context:
http_context_dict = http_context.__dict__
# strip out None values
payload['context']['httpContext'] = {
key: value for key, value in six.iteritems(http_context_dict)
if value is not None
}
if user:
payload['context']['user'] = user
logger = self.logging_client.logger('errors')
logger.log_struct(payload)
def report(self, message, http_context=None, user=None):
""" Reports a message to Stackdriver Error Reporting
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: A user-supplied message to report
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending
an error report, leave this field empty if the user
was not logged in. In this case the Error Reporting
system will use other data, such as remote IP address,
to distinguish affected users.
Example:
.. code-block:: python
>>> client.report("Something went wrong!")
"""
stack = traceback.extract_stack()
last_call = stack[-2]
file_path = last_call[0]
line_number = last_call[1]
function_name = last_call[2]
report_location = {
'filePath': file_path,
'lineNumber': line_number,
'functionName': function_name
}
self._send_error_report(message,
http_context=http_context,
user=user,
report_location=report_location)
def report_exception(self, http_context=None, user=None):
""" Reports the details of the latest exceptions to Stackdriver Error
Reporting.
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending an
error report, leave this field empty if the user was
not logged in. In this case the Error Reporting system
will use other data, such as remote IP address,
to distinguish affected users.
Example::
>>> try:
>>> raise NameError
>>> except Exception:
>>> client.report_exception()
"""
self._send_error_report(traceback.format_exc(),
http_context=http_context,
user=user)
| jgeewax/gcloud-python | error_reporting/google/cloud/error_reporting/client.py | Python | apache-2.0 | 10,193 |
"""Common functions for Rflink component tests and generic platform tests."""
import asyncio
from unittest.mock import Mock
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.rflink import CONF_RECONNECT_INTERVAL
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF
from tests.common import assert_setup_component
@asyncio.coroutine
def mock_rflink(hass, config, domain, monkeypatch, failures=None):
"""Create mock Rflink asyncio protocol, test component setup."""
transport, protocol = (Mock(), Mock())
@asyncio.coroutine
def send_command_ack(*command):
return True
protocol.send_command_ack = Mock(wraps=send_command_ack)
@asyncio.coroutine
def send_command(*command):
return True
protocol.send_command = Mock(wraps=send_command)
@asyncio.coroutine
def create_rflink_connection(*args, **kwargs):
"""Return mocked transport and protocol."""
# failures can be a list of booleans indicating in which sequence
# creating a connection should succeed or fail
if failures:
fail = failures.pop()
else:
fail = False
if fail:
raise ConnectionRefusedError
else:
return transport, protocol
mock_create = Mock(wraps=create_rflink_connection)
monkeypatch.setattr(
'rflink.protocol.create_rflink_connection',
mock_create)
# verify instantiation of component with given config
with assert_setup_component(1, domain):
yield from async_setup_component(hass, domain, config)
# hook into mock config for injecting events
event_callback = mock_create.call_args_list[0][1]['event_callback']
assert event_callback
disconnect_callback = mock_create.call_args_list[
0][1]['disconnect_callback']
return event_callback, mock_create, protocol, disconnect_callback
@asyncio.coroutine
def test_version_banner(hass, monkeypatch):
"""Test sending unknown commands doesn't cause issues."""
# use sensor domain during testing main platform
domain = 'sensor'
config = {
'rflink': {'port': '/dev/ttyABC0', },
domain: {
'platform': 'rflink',
'devices': {
'test': {'name': 'test', 'sensor_type': 'temperature', },
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = yield from mock_rflink(
hass, config, domain, monkeypatch)
event_callback({
'hardware': 'Nodo RadioFrequencyLink',
'firmware': 'RFLink Gateway',
'version': '1.1',
'revision': '45',
})
@asyncio.coroutine
def test_send_no_wait(hass, monkeypatch):
"""Test command sending without ack."""
domain = 'switch'
config = {
'rflink': {
'port': '/dev/ttyABC0',
'wait_for_ack': False,
},
domain: {
'platform': 'rflink',
'devices': {
'protocol_0_0': {
'name': 'test',
'aliasses': ['test_alias_0_0'],
},
},
},
}
# setup mocking rflink module
_, _, protocol, _ = yield from mock_rflink(
hass, config, domain, monkeypatch)
hass.async_add_job(
hass.services.async_call(domain, SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: 'switch.test'}))
yield from hass.async_block_till_done()
assert protocol.send_command.call_args_list[0][0][0] == 'protocol_0_0'
assert protocol.send_command.call_args_list[0][0][1] == 'off'
@asyncio.coroutine
def test_reconnecting_after_disconnect(hass, monkeypatch):
"""An unexpected disconnect should cause a reconnect."""
domain = 'sensor'
config = {
'rflink': {
'port': '/dev/ttyABC0',
CONF_RECONNECT_INTERVAL: 0,
},
domain: {
'platform': 'rflink',
},
}
# setup mocking rflink module
_, mock_create, _, disconnect_callback = yield from mock_rflink(
hass, config, domain, monkeypatch)
assert disconnect_callback, 'disconnect callback not passed to rflink'
# rflink initiated disconnect
disconnect_callback(None)
yield from hass.async_block_till_done()
# we expect 2 calls, the initial and the reconnect
assert mock_create.call_count == 2
@asyncio.coroutine
def test_reconnecting_after_failure(hass, monkeypatch):
"""A failure to reconnect should be retried."""
domain = 'sensor'
config = {
'rflink': {
'port': '/dev/ttyABC0',
CONF_RECONNECT_INTERVAL: 0,
},
domain: {
'platform': 'rflink',
},
}
# success first time but fail second
failures = [False, True, False]
# setup mocking rflink module
_, mock_create, _, disconnect_callback = yield from mock_rflink(
hass, config, domain, monkeypatch, failures=failures)
# rflink initiated disconnect
disconnect_callback(None)
# wait for reconnects to have happened
yield from hass.async_block_till_done()
yield from hass.async_block_till_done()
# we expect 3 calls, the initial and 2 reconnects
assert mock_create.call_count == 3
@asyncio.coroutine
def test_error_when_not_connected(hass, monkeypatch):
"""Sending command should error when not connected."""
domain = 'switch'
config = {
'rflink': {
'port': '/dev/ttyABC0',
CONF_RECONNECT_INTERVAL: 0,
},
domain: {
'platform': 'rflink',
'devices': {
'protocol_0_0': {
'name': 'test',
'aliasses': ['test_alias_0_0'],
},
},
},
}
# success first time but fail second
failures = [False, True, False]
# setup mocking rflink module
_, mock_create, _, disconnect_callback = yield from mock_rflink(
hass, config, domain, monkeypatch, failures=failures)
assert hass.states.get('rflink.connection_status').state == 'connected'
# rflink initiated disconnect
disconnect_callback(None)
yield from asyncio.sleep(0, loop=hass.loop)
assert hass.states.get('rflink.connection_status').state == 'error'
success = yield from hass.services.async_call(
domain, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: 'switch.test'})
assert not success, 'changing state should not succeed when disconnected'
| open-homeautomation/home-assistant | tests/components/test_rflink.py | Python | apache-2.0 | 6,534 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Cloud TPU profiler package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from setuptools import setup
_VERSION = '1.7.0'
CONSOLE_SCRIPTS = [
'capture_tpu_profile=cloud_tpu_profiler.main:run_main',
]
setup(
name='cloud_tpu_profiler',
version=_VERSION.replace('-', ''),
description='Trace and profile Cloud TPU performance',
long_description='Tools for capturing TPU profiles',
url='https://www.tensorflow.org/tfrc/',
author='Google Inc.',
author_email='[email protected]',
packages=['cloud_tpu_profiler'],
package_data={
'cloud_tpu_profiler': ['data/*'],
},
entry_points={
'console_scripts': CONSOLE_SCRIPTS,
},
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
keywords='tensorflow performance tpu',
)
| gojira/tensorflow | tensorflow/contrib/tpu/profiler/pip_package/setup.py | Python | apache-2.0 | 2,551 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for creating HTTP health checks."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import health_checks_utils
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
class Create(base_classes.BaseAsyncCreator):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
health_checks_utils.AddHttpRelatedCreationArgs(parser)
health_checks_utils.AddProtocolAgnosticCreationArgs(parser, 'HTTP')
@property
def service(self):
return self.compute.healthChecks
@property
def method(self):
return 'Insert'
@property
def resource_type(self):
return 'healthChecks'
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
health_check_ref = self.CreateGlobalReference(
args.name, resource_type='healthChecks')
proxy_header = self.messages.HTTPHealthCheck.ProxyHeaderValueValuesEnum(
args.proxy_header)
request = self.messages.ComputeHealthChecksInsertRequest(
healthCheck=self.messages.HealthCheck(
name=health_check_ref.Name(),
description=args.description,
type=self.messages.HealthCheck.TypeValueValuesEnum.HTTP,
httpHealthCheck=self.messages.HTTPHealthCheck(
host=args.host,
port=args.port,
portName=args.port_name,
requestPath=args.request_path,
proxyHeader=proxy_header),
checkIntervalSec=args.check_interval,
timeoutSec=args.timeout,
healthyThreshold=args.healthy_threshold,
unhealthyThreshold=args.unhealthy_threshold,
),
project=self.project)
return [request]
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
Create.Args(parser)
health_checks_utils.AddHttpRelatedResponseArg(parser)
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
requests = super(CreateAlpha, self).CreateRequests(args)
requests[0].healthCheck.httpHealthCheck.response = args.response
return requests
Create.detailed_help = {
'brief': ('Create an HTTP health check to monitor load balanced instances'),
'DESCRIPTION': """\
*{command}* is used to create an HTTP health check. HTTP health checks
monitor instances in a load balancer controlled by a target pool. All
arguments to the command are optional except for the name of the health
check. For more information on load balancing, see
[](https://cloud.google.com/compute/docs/load-balancing-and-autoscaling/)
""",
}
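# Illustrative command line for this subcommand (flag names are inferred from
# the arguments registered above, so treat them as an assumption):
#   gcloud compute health-checks create http my-http-check \
#       --port 80 --request-path /healthz --check-interval 10s --timeout 5s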
| KaranToor/MA450 | google-cloud-sdk/lib/surface/compute/health_checks/create/http.py | Python | apache-2.0 | 3,471 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest import exceptions
from tempest import test
class VolumeQuotasNegativeTestJSON(base.BaseVolumeV1AdminTest):
_interface = "json"
force_tenant_isolation = True
@classmethod
@test.safe_setup
def setUpClass(cls):
super(VolumeQuotasNegativeTestJSON, cls).setUpClass()
demo_user = cls.isolated_creds.get_primary_creds()
cls.demo_tenant_id = demo_user.tenant_id
cls.shared_quota_set = {'gigabytes': 3, 'volumes': 1, 'snapshots': 1}
# NOTE(gfidente): no need to restore original quota set
# after the tests as they only work with tenant isolation.
resp, quota_set = cls.quotas_client.update_quota_set(
cls.demo_tenant_id,
**cls.shared_quota_set)
# NOTE(gfidente): no need to delete in tearDown as
# they are created using utility wrapper methods.
cls.volume = cls.create_volume()
cls.snapshot = cls.create_snapshot(cls.volume['id'])
@test.attr(type='negative')
def test_quota_volumes(self):
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
@test.attr(type='negative')
def test_quota_volume_snapshots(self):
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
@test.attr(type='negative')
def test_quota_volume_gigabytes(self):
# NOTE(gfidente): quota set needs to be changed for this test
# or we may be limited by the volumes or snaps quota number, not by
# actual gigs usage; next line ensures shared set is restored.
self.addCleanup(self.quotas_client.update_quota_set,
self.demo_tenant_id,
**self.shared_quota_set)
new_quota_set = {'gigabytes': 2, 'volumes': 2, 'snapshots': 1}
resp, quota_set = self.quotas_client.update_quota_set(
self.demo_tenant_id,
**new_quota_set)
self.assertRaises(exceptions.OverLimit,
self.volumes_client.create_volume,
size=1)
new_quota_set = {'gigabytes': 2, 'volumes': 1, 'snapshots': 2}
resp, quota_set = self.quotas_client.update_quota_set(
self.demo_tenant_id,
**self.shared_quota_set)
self.assertRaises(exceptions.OverLimit,
self.snapshots_client.create_snapshot,
self.volume['id'])
class VolumeQuotasNegativeTestXML(VolumeQuotasNegativeTestJSON):
_interface = "xml"
| Mirantis/tempest | tempest/api/volume/admin/test_volume_quotas_negative.py | Python | apache-2.0 | 3,331 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utilities with minimum-depends for use in setup.py
"""
import datetime
import os
import re
import subprocess
import sys
from setuptools.command import sdist
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias] = canonical_email
return mapping
def canonicalize_emails(changelog, mapping):
"""Takes in a string and an email alias mapping and replaces all
instances of the aliases in the string with their real email.
"""
for alias, email in mapping.iteritems():
changelog = changelog.replace(alias, email)
return changelog
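# Illustrative behaviour (addresses are made up): a .mailmap line of
#   "jane@canonical.example jane@alias.example"
# makes parse_mailmap() return {'jane@alias.example': 'jane@canonical.example'},
# and canonicalize_emails('Jane <jane@alias.example>', mapping) then yields
# 'Jane <jane@canonical.example>'.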
# Get requirements from the first file that exists
def get_reqs_from_files(requirements_files):
reqs_in = []
for requirements_file in requirements_files:
if os.path.exists(requirements_file):
return open(requirements_file, 'r').read().split('\n')
return []
def parse_requirements(requirements_files=['requirements.txt',
'tools/pip-requires']):
requirements = []
for line in get_reqs_from_files(requirements_files):
# For the requirements list, we need to inject only the portion
# after egg= so that distutils knows the package it's looking for
# such as:
# -e git://github.com/openstack/nova/master#egg=nova
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
line))
# such as:
# http://github.com/openstack/nova/zipball/master#egg=nova
elif re.match(r'\s*https?:', line):
requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
line))
# -f lines are for index locations, and don't get used here
elif re.match(r'\s*-f\s+', line):
pass
# argparse is part of the standard library starting with 2.7
# adding it to the requirements list screws distro installs
elif line == 'argparse' and sys.version_info >= (2, 7):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(requirements_files=['requirements.txt',
'tools/pip-requires']):
dependency_links = []
# dependency_links inject alternate locations to find packages listed
# in requirements
for line in get_reqs_from_files(requirements_files):
# skip comments and blank lines
if re.match(r'(\s*#)|(\s*$)', line):
continue
# lines with -e or -f need the whole line, minus the flag
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
# lines that are only urls can go in unmolested
elif re.match(r'\s*https?:', line):
dependency_links.append(line)
return dependency_links
def write_requirements():
venv = os.environ.get('VIRTUAL_ENV', None)
if venv is not None:
with open("requirements.txt", "w") as req_file:
output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
stdout=subprocess.PIPE)
requirements = output.communicate()[0].strip()
req_file.write(requirements)
def _run_shell_command(cmd):
output = subprocess.Popen(["/bin/sh", "-c", cmd],
stdout=subprocess.PIPE)
out = output.communicate()
if len(out) == 0:
return None
if len(out[0].strip()) == 0:
return None
return out[0].strip()
def _get_git_next_version_suffix(branch_name):
datestamp = datetime.datetime.now().strftime('%Y%m%d')
if branch_name == 'milestone-proposed':
revno_prefix = "r"
else:
revno_prefix = ""
_run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
milestone_cmd = "git show meta/openstack/release:%s" % branch_name
milestonever = _run_shell_command(milestone_cmd)
if not milestonever:
milestonever = ""
post_version = _get_git_post_version()
revno = post_version.split(".")[-1]
return "%s~%s.%s%s" % (milestonever, datestamp, revno_prefix, revno)
def _get_git_current_tag():
return _run_shell_command("git tag --contains HEAD")
def _get_git_tag_info():
return _run_shell_command("git describe --tags")
def _get_git_post_version():
current_tag = _get_git_current_tag()
if current_tag is not None:
return current_tag
else:
tag_info = _get_git_tag_info()
if tag_info is None:
base_version = "0.0"
cmd = "git --no-pager log --oneline"
out = _run_shell_command(cmd)
revno = len(out.split("\n"))
else:
tag_infos = tag_info.split("-")
base_version = "-".join(tag_infos[:-2])
revno = tag_infos[-2]
return "%s.%s" % (base_version, revno)
def write_git_changelog():
"""Write a changelog based on the git changelog."""
if os.path.isdir('.git'):
git_log_cmd = 'git log --stat'
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open("ChangeLog", "w") as changelog_file:
changelog_file.write(canonicalize_emails(changelog, mailmap))
def generate_authors():
"""Create AUTHORS file using git commits."""
jenkins_email = '[email protected]'
old_authors = 'AUTHORS.in'
new_authors = 'AUTHORS'
if os.path.isdir('.git'):
# don't include jenkins email address in AUTHORS file
git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
"grep -v " + jenkins_email)
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open(new_authors, 'w') as new_authors_fh:
new_authors_fh.write(canonicalize_emails(changelog, mailmap))
if os.path.exists(old_authors):
with open(old_authors, "r") as old_authors_fh:
new_authors_fh.write('\n' + old_authors_fh.read())
_rst_template = """%(heading)s
%(underline)s
.. automodule:: %(module)s
:members:
:undoc-members:
:show-inheritance:
"""
def read_versioninfo(project):
"""Read the versioninfo file. If it doesn't exist, we're in a github
zipball, and there's really no way to know what version we really
are, but that should be ok, because the utility of that should be
just about nil if this code path is in use in the first place."""
versioninfo_path = os.path.join(project, 'versioninfo')
if os.path.exists(versioninfo_path):
with open(versioninfo_path, 'r') as vinfo:
version = vinfo.read().strip()
else:
version = "0.0.0"
return version
def write_versioninfo(project, version):
"""Write a simple file containing the version of the package."""
open(os.path.join(project, 'versioninfo'), 'w').write("%s\n" % version)
def get_cmdclass():
"""Return dict of commands to run from setup.py."""
cmdclass = dict()
def _find_modules(arg, dirname, files):
for filename in files:
if filename.endswith('.py') and filename != '__init__.py':
arg["%s.%s" % (dirname.replace('/', '.'),
filename[:-3])] = True
class LocalSDist(sdist.sdist):
"""Builds the ChangeLog and Authors files from VC first."""
def run(self):
write_git_changelog()
generate_authors()
# sdist.sdist is an old style class, can't use super()
sdist.sdist.run(self)
cmdclass['sdist'] = LocalSDist
# If Sphinx is installed on the box running setup.py,
# enable setup.py to build the documentation, otherwise,
# just ignore it
try:
from sphinx.setup_command import BuildDoc
class LocalBuildDoc(BuildDoc):
def generate_autoindex(self):
print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
modules = {}
option_dict = self.distribution.get_option_dict('build_sphinx')
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
if not os.path.exists(source_dir):
os.makedirs(source_dir)
for pkg in self.distribution.packages:
if '.' not in pkg:
os.path.walk(pkg, _find_modules, modules)
module_list = modules.keys()
module_list.sort()
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
with open(autoindex_filename, 'w') as autoindex:
autoindex.write(""".. toctree::
:maxdepth: 1
""")
for module in module_list:
output_filename = os.path.join(source_dir,
"%s.rst" % module)
heading = "The :mod:`%s` Module" % module
underline = "=" * len(heading)
values = dict(module=module, heading=heading,
underline=underline)
print "Generating %s" % output_filename
with open(output_filename, 'w') as output_file:
output_file.write(_rst_template % values)
autoindex.write(" %s.rst\n" % module)
def run(self):
if not os.getenv('SPHINX_DEBUG'):
self.generate_autoindex()
for builder in ['html', 'man']:
self.builder = builder
self.finalize_options()
self.project = self.distribution.get_name()
self.version = self.distribution.get_version()
self.release = self.distribution.get_version()
BuildDoc.run(self)
cmdclass['build_sphinx'] = LocalBuildDoc
except ImportError:
pass
return cmdclass
def get_git_branchname():
for branch in _run_shell_command("git branch --color=never").split("\n"):
if branch.startswith('*'):
_branch_name = branch.split()[1].strip()
if _branch_name == "(no":
_branch_name = "no-branch"
return _branch_name
def get_pre_version(projectname, base_version):
"""Return a version which is based"""
if os.path.isdir('.git'):
current_tag = _get_git_current_tag()
if current_tag is not None:
version = current_tag
else:
branch_name = os.getenv('BRANCHNAME',
os.getenv('GERRIT_REFNAME',
get_git_branchname()))
version_suffix = _get_git_next_version_suffix(branch_name)
version = "%s~%s" % (base_version, version_suffix)
write_versioninfo(projectname, version)
return version.split('~')[0]
else:
version = read_versioninfo(projectname)
return version.split('~')[0]
def get_post_version(projectname):
"""Return a version which is equal to the tag that's on the current
revision if there is one, or tag plus number of additional revisions
if the current revision has no tag."""
if os.path.isdir('.git'):
version = _get_git_post_version()
write_versioninfo(projectname, version)
return version
return read_versioninfo(projectname)
| chmouel/python-swiftclient | swiftclient/openstack/common/setup.py | Python | apache-2.0 | 12,438 |
"""Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import utils
import random
import constraint
from bitstring import BitArray, BitStream
class riscv_instr_base:
max_instr_length = 11
# Missing parts: latency
def __init__(self, name=""):
self.name = name
self.instr_group = "Instruction Group"
self.instr_format = "Instruction Format"
self.instr_category = "Instruction Category"
self.instr_name = "Instruction Name"
self.instr_imm_t = "Instruction Immediate Type"
self.instr_src2 = "Instruction Source 2"
self.instr_src1 = "Instruction Source 1"
self.instr_rd = "Instruction Destination"
self.imm = "Instruction Immediate"
self.imm_length = "Instruction Immediate Length"
self.imm_str = ""
self.csr = "CSR"
self.comment = ""
self.has_label = 1
self.label = ""
self.idx = -1
self.atomic = 0 # As of now, we don't support atomic instructions.
self.is_compressed = 0 # As of now, compressed instructions are not supported
self.is_illegal_instr = 0
self.is_local_numeric_label = 0
self.is_pseudo_instr = "Is it a pseudo instruction or not"
self.branch_assigned = 0
self.process_load_store = 1
self.solution = "A random solution which meets given constraints"
self.problem = constraint.Problem(constraint.MinConflictsSolver())
# Convert an instruction to its assembly form.
def convert2asm(self):
asm = name = self.solution[self.instr_name]
format = self.solution[self.instr_format]
category = self.solution[self.instr_category]
src2 = self.solution[self.instr_src2]
src1 = self.solution[self.instr_src1]
destination = self.solution[self.instr_rd]
csr = self.solution[self.csr]
if category != "SYSTEM":
if format == "J_FORMAT" or format == "U_FORMAT":
asm += " {}, {}".format(destination, self.get_imm())
elif format == "I_FORMAT":
if name == "NOP":
asm = "nop"
elif name == "FENCE":
asm = "fence"
elif name == "FENCEI":
asm = "fence.i"
elif category == "LOAD":
asm += " {}, {}({})".format(destination, self.get_imm(), src1)
elif category == "CSR":
asm += " {}, {}, {}".format(destination, hex(csr), self.get_imm())
else:
asm += " {}, {}, {}".format(destination, src1, self.get_imm())
elif format == "S_FORMAT" or format == "B_FORMAT":
if category == "STORE":
asm += " {}, {}({})".format(src2, self.get_imm(), src1)
else:
asm += " {}, {}, {}".format(src1, src2, self.get_imm())
elif format == "R_FORMAT":
if category == "CSR":
asm += " {}, {}, {}".format(destination, hex(csr), src1)
else:
asm += " {}, {}, {}".format(destination, src1, src2)
else:
if name == "BREAK":
asm = ".option norvc;ebreak;.option rvc;"
if self.comment != "":
asm += " # {}".format(self.comment)
return asm.lower()
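# For example (purely illustrative register/immediate values): an I_FORMAT
# LOAD such as LW with rd=A0, rs1=S1 and an immediate of 8 would be rendered
# as "lw a0, 8(s1)" after the final .lower() call.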
# Instruction to binary format
# TODO: to do
def convert2bin(self, sol):
name = sol[self.instr_name]
format = sol[self.instr_format]
imm = sol[self.imm]
rd = sol[self.instr_rd]
if format == "J_FORMAT":
binary = ""
def post_randomize(self):
imm_length = self.solution[self.imm_length]
imm_t = self.solution[self.instr_imm_t]
imm = self.solution[self.imm]
imm_bit = BitArray(int=imm, length=32)
imm_mask = BitArray(uint=4294967295, length=32)
imm_mask = imm_mask << imm_length
if imm_t == "UIMM" or imm_t == "NZUIMM":
imm_bit = imm_bit & ~imm_mask
imm = imm_bit.int
else:
if imm_bit[-imm_length]:
imm_bit = imm_bit | imm_mask
imm = imm_bit.int
else:
imm_bit = imm_bit & ~imm_mask
imm = imm_bit.int
if (imm_t == "NZIMM" or imm_t == "NZUIMM") and imm == 0:
imm = random.randrange(1, 2**(imm_length - 1) - 1)
if self.imm_str == "":
self.imm_str = int(imm)
def get_imm(self):
return self.imm_str
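# Worked example of the masking in post_randomize() (values are assumptions
# for illustration): with imm_length == 5 and a signed immediate type, a raw
# imm whose bit 4 is set gets the mask 0xFFFFFFE0 OR-ed in, so it
# sign-extends to a negative 32-bit value; with UIMM/NZUIMM the same mask is
# AND-ed out instead, keeping only the low 5 bits.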
def problem_definition(self,
no_branch=0,
no_load_store=1,
enable_hint_instr=0,
no_name_c=0):
# Adding variables to the problem
self.problem.addVariable(self.instr_group, utils.riscv_instr_group_t)
self.problem.addVariable(self.instr_format, utils.riscv_instr_format_t)
self.problem.addVariable(self.instr_category, utils.riscv_instr_category_t)
self.problem.addVariable(self.instr_name, utils.riscv_instr_name_t)
self.problem.addVariable(self.instr_imm_t, utils.imm_t)
self.problem.addVariables([self.instr_src2, self.instr_src1, self.instr_rd],
utils.riscv_reg_t)
self.problem.addVariable(self.imm_length, [5, 6, 8, 11, 20])
# problem.addVariable(self.imm, range(0x00000000, 0xffffffff)) # doesn't
# work because: OverflowError: Python int too large to convert to C ssize_t
# Need to change the constraint to a soft constraint, as the default_c in
# the pseudo instruction class is in conflict with this one
if self.imm not in self.problem._variables:
self.problem.addVariable(self.imm, range(0x0000, 0xffff))
self.problem.addVariable(self.csr, range(0x000, 0xfff))
def default_c(is_pseudo_instr):
if not is_pseudo_instr:
return True
def name_c(name, group, format, category, imm_t):
condition = (
# Load instructions
(name == "LB" and group == "RV32I" and format == "I_FORMAT" and
category == "LOAD" and imm_t == "IMM") or
(name == "LH" and group == "RV32I" and format == "I_FORMAT" and
category == "LOAD" and imm_t == "IMM") or
(name == "LW" and group == "RV32I" and format == "I_FORMAT" and
category == "LOAD" and imm_t == "IMM") or
(name == "LBU" and group == "RV32I" and format == "I_FORMAT" and
category == "LOAD" and imm_t == "IMM") or
(name == "LHU" and group == "RV32I" and format == "I_FORMAT" and
category == "LOAD" and imm_t == "IMM")
# Store instructions
or (name == "SB" and group == "RV32I" and format == "S_FORMAT" and
category == "STORE" and imm_t == "IMM") or
(name == "SH" and group == "RV32I" and format == "S_FORMAT" and
category == "STORE" and imm_t == "IMM") or
(name == "SW" and group == "RV32I" and format == "S_FORMAT" and
category == "STORE" and imm_t == "IMM")
# Shift instructions
or (name == "SLL" and group == "RV32I" and format == "R_FORMAT" and
category == "SHIFT" and imm_t == "IMM") or
(name == "SLLI" and group == "RV32I" and format == "I_FORMAT" and
category == "SHIFT" and imm_t == "IMM") or
(name == "SRL" and group == "RV32I" and format == "R_FORMAT" and
category == "SHIFT" and imm_t == "IMM") or
(name == "SRLI" and group == "RV32I" and format == "I_FORMAT" and
category == "SHIFT" and imm_t == "IMM") or
(name == "SRA" and group == "RV32I" and format == "R_FORMAT" and
category == "SHIFT" and imm_t == "IMM") or
(name == "SRAI" and group == "RV32I" and format == "I_FORMAT" and
category == "SHIFT" and imm_t == "IMM")
# Arithmetic instructions
or (name == "ADD" and group == "RV32I" and format == "R_FORMAT" and
category == "ARITHMETIC" and imm_t == "IMM") or
(name == "ADDI" and group == "RV32I" and format == "I_FORMAT" and
category == "ARITHMETIC" and imm_t == "IMM") or
(name == "NOP" and group == "RV32I" and format == "I_FORMAT" and
category == "ARITHMETIC" and imm_t == "IMM") or
(name == "SUB" and group == "RV32I" and format == "R_FORMAT" and
category == "ARITHMETIC" and imm_t == "IMM") or
(name == "LUI" and group == "RV32I" and format == "U_FORMAT" and
category == "ARITHMETIC" and imm_t == "UIMM") or
(name == "AUIPC" and group == "RV32I" and format == "U_FORMAT" and
category == "ARITHMETIC" and imm_t == "UIMM")
# Logical instructions
or (name == "XOR" and group == "RV32I" and format == "R_FORMAT" and
category == "LOGICAL" and imm_t == "IMM") or
(name == "XORI" and group == "RV32I" and format == "I_FORMAT" and
category == "LOGICAL" and imm_t == "IMM") or
(name == "OR" and group == "RV32I" and format == "R_FORMAT" and
category == "LOGICAL" and imm_t == "IMM") or
(name == "ORI" and group == "RV32I" and format == "I_FORMAT" and
category == "LOGICAL" and imm_t == "IMM") or
(name == "AND" and group == "RV32I" and format == "R_FORMAT" and
category == "LOGICAL" and imm_t == "IMM") or
(name == "ANDI" and group == "RV32I" and format == "I_FORMAT" and
category == "LOGICAL" and imm_t == "IMM")
# Compare instructions
or (name == "SLT" and group == "RV32I" and format == "R_FORMAT" and
category == "COMPARE" and imm_t == "IMM") or
(name == "SLTI" and group == "RV32I" and format == "I_FORMAT" and
category == "COMPARE" and imm_t == "IMM") or
(name == "SLTU" and group == "RV32I" and format == "R_FORMAT" and
category == "COMPARE" and imm_t == "IMM") or
(name == "SLTIU" and group == "RV32I" and format == "I_FORMAT" and
category == "COMPARE" and imm_t == "IMM")
# Branch instructions
or (name == "BEQ" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM") or
(name == "BNE" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM") or
(name == "BLT" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM") or
(name == "BGE" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM") or
(name == "BLTU" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM") or
(name == "BGEU" and group == "RV32I" and format == "B_FORMAT" and
category == "BRANCH" and imm_t == "IMM")
# Jump instructions
or (name == "JAL" and group == "RV32I" and format == "J_FORMAT" and
category == "JUMP" and imm_t == "IMM") or
(name == "JALR" and group == "RV32I" and format == "I_FORMAT" and
category == "JUMP" and imm_t == "IMM")
# Synch instructions
or (name == "FENCE" and group == "RV32I" and format == "I_FORMAT" and
category == "SYNCH" and imm_t == "IMM") or
(name == "FENCEI" and group == "RV32I" and format == "I_FORMAT" and
category == "SYNCH" and imm_t == "IMM")
# System instructions
or (name == "ECALL" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM") or
(name == "EBREAK" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM") or
(name == "URET" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM") or
(name == "SRET" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM") or
(name == "MRET" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM") or
(name == "WFI" and group == "RV32I" and format == "I_FORMAT" and
category == "SYSTEM" and imm_t == "IMM")
# CSR instructions
or (name == "CSRRW" and group == "RV32I" and format == "R_FORMAT" and
category == "CSR" and imm_t == "UIMM") or
(name == "CSRRS" and group == "RV32I" and format == "R_FORMAT" and
category == "CSR" and imm_t == "UIMM") or
(name == "CSRRC" and group == "RV32I" and format == "R_FORMAT" and
category == "CSR" and imm_t == "UIMM") or
(name == "CSRRWI" and group == "RV32I" and format == "I_FORMAT" and
category == "CSR" and imm_t == "UIMM") or
(name == "CSRRSI" and group == "RV32I" and format == "I_FORMAT" and
category == "CSR" and imm_t == "UIMM") or
(name == "CSRRCI" and group == "RV32I" and format == "I_FORMAT" and
category == "CSR" and imm_t == "UIMM"))
if condition:
return True
def fence_c(name, source1, destination, imm):
if name == "FENCE" or name == "FENCEI":
if source1 == "ZERO" and destination == "ZERO" and imm == 0:
return True
else:
return True
def load_store_c(category, source1):
if category == "LOAD" or category == "STORE":
if source1 != "ZERO":
return True
else:
return True
def nop_c(name, source1, source2, destination):
if name == "NOP":
if source1 == "ZERO" and source2 == "ZERO" and destination == "ZERO":
return True
else:
return True
def system_instr_c(category, source1, destination):
if category == "SYSTEM" or category == "SYNCH":
if source1 == "ZERO" and destination == "ZERO":
return True
else:
return True
def imm_len_c(format, imm_t, imm_length):
if format == "U_FORMAT" or format == "J_FORMAT":
return imm_length == 20
elif format == "I_FORMAT" or format == "S_FORMAT" or format == "B_FORMAT":
if imm_t == "UIMM":
return imm_length == 5
else:
return imm_length == 11
else:
return True
def imm_val_c(imm_type, imm):
if imm_type == "NZIMM" or imm_type == "NZUIMM":
return imm != 0
else:
return True
def shift_imm_val_c(category, imm):
if category == "SHIFT":
return imm < utils.XLEN
else:
return True
def only_arithmetic_and_logical_c(category):
if category == "ARITHMETIC" or category == "LOGICAL" or \
category == "BRANCH" or category == "LOAD" or category == "STORE":
return True
def non_system(category):
if category != "SYSTEM":
return True
def non_csr(category):
if category != "CSR":
return True
def non_synch(category):
if category != "SYNCH":
return True
def no_branch_c(category):
if category != "BRANCH":
return True
def no_load_store_c(category):
if category != "LOAD" and category != "STORE":
return True
# Refer to pseudo class for explanation
if not no_name_c:
self.problem.addConstraint(name_c, [
self.instr_name, self.instr_group, self.instr_format,
self.instr_category, self.instr_imm_t
])
        # TODO: add a temporary constraint for generating only arithmetic random instructions
# self.problem.addConstraint(only_arithmetic_and_logical_c, [self.instr_category])
# self.problem.addConstraint(default_c, [self.is_pseudo_instr])
self.problem.addConstraint(non_csr, [self.instr_category])
self.problem.addConstraint(non_system, [self.instr_category])
self.problem.addConstraint(non_synch, [self.instr_category])
if no_branch:
self.problem.addConstraint(no_branch_c, [self.instr_category])
if no_load_store:
self.problem.addConstraint(no_load_store_c, [self.instr_category])
self.problem.addConstraint(
fence_c, [self.instr_name, self.instr_src1, self.instr_rd, self.imm])
self.problem.addConstraint(load_store_c,
[self.instr_category, self.instr_src1])
self.problem.addConstraint(
nop_c,
[self.instr_name, self.instr_src1, self.instr_src2, self.instr_rd
]) #: takes too long, don't know why
self.problem.addConstraint(
system_instr_c, [self.instr_category, self.instr_src1, self.instr_rd])
self.problem.addConstraint(
imm_len_c, [self.instr_format, self.instr_imm_t, self.imm_length])
self.problem.addConstraint(imm_val_c, [self.instr_imm_t, self.imm])
self.problem.addConstraint(shift_imm_val_c, [self.instr_category, self.imm])
# return
# return self.problem.getSolution()
def randomize(self):
# old randomize()
# self.solution = self.problem.getSolution()
# self.post_randomize()
self.solution = self.problem.getSolution()
if self.solution:
# print("TODO: randomized with steps: {}".format(self.problem._solver._steps))
pass
else:
i = 1
while self.solution is None:
for j in range(10):
self.solution = self.problem.getSolution()
if self.solution:
# print("TODO: randomized with steps: {}".format(self.problem._solver._steps))
break
i *= 5
self.problem._solver._steps *= i
self.post_randomize()
# Pseudo instructions are used to simplify assembly program writing
class riscv_pseudo_instr(riscv_instr_base):
def __init__(self, name=""):
# calling super constructor
riscv_instr_base.__init__(self, name)
# Important: Constraint solver gets too slow in pseudo class. We have three solutions:
# 1- change the type of the constraint solver, from MinConflict to regular, this one
# also takes fairly good amount of time, but it's good for validity check, to see
# if constraints are valid and there is no conflict between them.
# 2- Increase the number of steps for MinConflict...
# 3- Since we don't need to check the name_c constraint here, we can get rid of it
# for pseudo class! We're going to use this option for now
# self.problem = constraint.Problem(constraint.MinConflictsSolver(steps=10000))
# self.problem = constraint.Problem()
self.process_load_store = 0
self.pseudo_instr_name = "Pseudo instruction name"
def problem_definition(self, la_instr=0):
# Calling the super problem_definition, to apply all the constraints to the base object
# super().problem_definition(no_load_store=0, no_name_c=1)
super().problem_definition(no_load_store=0)
# To add the new constraint carried by the problem_definition
# fun()
self.problem.addVariable(self.pseudo_instr_name,
utils.riscv_pseudo_instr_name_t)
self.problem.addVariable(self.is_pseudo_instr, range(2))
def pseudo_name_c(name, group, format, category):
condition = (((name == "LI" or name == "LA") and group == "RV32I" and
format == "I_FORMAT" and category == "LOAD"))
if condition:
return True
def la_c(name):
if name == "LA":
return True
def default_c(is_pseudo_instr):
if is_pseudo_instr:
return True
self.problem.addConstraint(pseudo_name_c, [
self.pseudo_instr_name, self.instr_group, self.instr_format,
self.instr_category
])
if la_instr:
self.problem.addConstraint(la_c, [self.pseudo_instr_name])
self.problem.addConstraint(default_c, [self.is_pseudo_instr])
return
# Convert the instruction to assembly code
def convert2asm(self):
asm_str = self.get_instr_name()
destination = self.solution[self.instr_rd]
# instr rd,imm
asm_str = "{} {}, {}".format(asm_str, destination, self.get_imm())
if self.comment != "":
asm_str = asm_str + " #" + self.comment
return asm_str.lower()
def get_instr_name(self):
return self.solution[self.pseudo_instr_name]
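# --- Editor's note: the block below is a hypothetical usage sketch added for
# illustration; it is not part of the original file. It assumes the base class
# sets up `self.problem` with python-constraint's MinConflictsSolver, as the
# comments above describe. Guarded so it only runs when executed directly.
if __name__ == "__main__":
    pseudo = riscv_pseudo_instr()
    pseudo.problem_definition(la_instr=0)  # base constraints + pseudo-specific ones
    pseudo.randomize()                     # retries with a growing step budget
    if pseudo.solution:
        print(pseudo.convert2asm())        # e.g. "li t5, 100" (purely illustrative)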
| lowRISC/ibex | vendor/google_riscv-dv/pygen/experimental/riscv_instr_base.py | Python | apache-2.0 | 20,190 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# Generated by Django 1.9 on 2016-01-18 00:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0006_auto_20151218_1741'),
]
operations = [
migrations.AddField(
model_name='usersettings',
name='stripe_customer_id',
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AddField(
model_name='usersettings',
name='stripe_payout_recipient',
field=models.CharField(blank=True, max_length=128, null=True),
),
]
| Pinecast/pinecast | accounts/migrations/0007_auto_20160118_0017.py | Python | apache-2.0 | 726 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for documentation parser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import inspect
import os
import sys
from tensorflow.python.platform import googletest
from tensorflow.tools.docs import parser
def test_function_for_markdown_reference(unused_arg):
"""Docstring with reference to @{test_function}."""
pass
def test_function(unused_arg, unused_kwarg='default'):
"""Docstring for test function."""
pass
def test_function_with_args_kwargs(unused_arg, *unused_args, **unused_kwargs):
"""Docstring for second test function."""
pass
def test_function_with_fancy_docstring(arg):
"""Function with a fancy docstring.
Args:
arg: An argument.
Returns:
arg: the input, and
arg: the input, again.
"""
return arg, arg
class TestClass(object):
"""Docstring for TestClass itself."""
def a_method(self, arg='default'):
"""Docstring for a method."""
pass
class ChildClass(object):
"""Docstring for a child class."""
pass
@property
def a_property(self):
"""Docstring for a property."""
pass
CLASS_MEMBER = 'a class member'
class ParserTest(googletest.TestCase):
def test_documentation_path(self):
self.assertEqual('test.md', parser.documentation_path('test'))
self.assertEqual('test/module.md', parser.documentation_path('test.module'))
def test_documentation_path_empty(self):
self.assertEqual('index.md', parser.documentation_path(''))
def test_replace_references(self):
string = 'A @{reference}, another @{tf.reference}, and a @{third}.'
duplicate_of = {'third': 'fourth'}
result = parser.replace_references(string, '../..', duplicate_of)
self.assertEqual(
'A [`reference`](../../reference.md), another '
'[`tf.reference`](../../reference.md), '
'and a [`third`](../../fourth.md).',
result)
def test_generate_markdown_for_class(self):
index = {
'TestClass': TestClass,
'TestClass.a_method': TestClass.a_method,
'TestClass.a_property': TestClass.a_property,
'TestClass.ChildClass': TestClass.ChildClass,
'TestClass.CLASS_MEMBER': TestClass.CLASS_MEMBER
}
tree = {
'TestClass': ['a_method', 'a_property', 'ChildClass', 'CLASS_MEMBER']
}
docs = parser.generate_markdown(full_name='TestClass', py_object=TestClass,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
# Make sure all required docstrings are present.
self.assertTrue(inspect.getdoc(TestClass) in docs)
self.assertTrue(inspect.getdoc(TestClass.a_method) in docs)
self.assertTrue(inspect.getdoc(TestClass.a_property) in docs)
# Make sure that the signature is extracted properly and omits self.
self.assertTrue('a_method(arg=\'default\')' in docs)
# Make sure there is a link to the child class and it points the right way.
self.assertTrue('[`class ChildClass`](./TestClass/ChildClass.md)' in docs)
# Make sure CLASS_MEMBER is mentioned.
self.assertTrue('CLASS_MEMBER' in docs)
# Make sure this file is contained as the definition location.
self.assertTrue(os.path.relpath(__file__, '/') in docs)
def test_generate_markdown_for_module(self):
module = sys.modules[__name__]
index = {
'TestModule': module,
'TestModule.test_function': test_function,
'TestModule.test_function_with_args_kwargs':
test_function_with_args_kwargs,
'TestModule.TestClass': TestClass,
}
tree = {
'TestModule': ['TestClass', 'test_function',
'test_function_with_args_kwargs']
}
docs = parser.generate_markdown(full_name='TestModule', py_object=module,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
# Make sure all required docstrings are present.
self.assertTrue(inspect.getdoc(module) in docs)
# Make sure that links to the members are there (not asserting on exact link
# text for functions).
self.assertTrue('./TestModule/test_function.md' in docs)
self.assertTrue('./TestModule/test_function_with_args_kwargs.md' in docs)
# Make sure there is a link to the child class and it points the right way.
self.assertTrue('[`class TestClass`](./TestModule/TestClass.md)' in docs)
# Make sure this file is contained as the definition location.
self.assertTrue(os.path.relpath(__file__, '/') in docs)
def test_generate_markdown_for_function(self):
index = {
'test_function': test_function
}
tree = {
'': ['test_function']
}
docs = parser.generate_markdown(full_name='test_function',
py_object=test_function,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
# Make sure docstring shows up.
self.assertTrue(inspect.getdoc(test_function) in docs)
# Make sure the extracted signature is good.
self.assertTrue(
'test_function(unused_arg, unused_kwarg=\'default\')' in docs)
# Make sure this file is contained as the definition location.
self.assertTrue(os.path.relpath(__file__, '/') in docs)
def test_generate_markdown_for_function_with_kwargs(self):
index = {
'test_function_with_args_kwargs': test_function_with_args_kwargs
}
tree = {
'': ['test_function_with_args_kwargs']
}
docs = parser.generate_markdown(full_name='test_function_with_args_kwargs',
py_object=test_function_with_args_kwargs,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
# Make sure docstring shows up.
self.assertTrue(inspect.getdoc(test_function_with_args_kwargs) in docs)
# Make sure the extracted signature is good.
self.assertTrue(
'test_function_with_args_kwargs(unused_arg,'
' *unused_args, **unused_kwargs)' in docs)
def test_references_replaced_in_generated_markdown(self):
index = {
'test_function_for_markdown_reference':
test_function_for_markdown_reference
}
tree = {
'': ['test_function_for_markdown_reference']
}
docs = parser.generate_markdown(
full_name='test_function_for_markdown_reference',
py_object=test_function_for_markdown_reference,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
# Make sure docstring shows up and is properly processed.
expected_docs = parser.replace_references(
inspect.getdoc(test_function_for_markdown_reference),
relative_path_to_root='.', duplicate_of={})
self.assertTrue(expected_docs in docs)
def test_docstring_special_section(self):
index = {
'test_function': test_function_with_fancy_docstring
}
tree = {
'': 'test_function'
}
docs = parser.generate_markdown(
full_name='test_function',
py_object=test_function_with_fancy_docstring,
duplicate_of={}, duplicates={},
index=index, tree=tree, base_dir='/')
expected = '\n'.join([
'Function with a fancy docstring.',
'',
'#### Args:',
'',
'* <b>`arg`</b>: An argument.',
'',
'',
'#### Returns:',
'',
'* <b>`arg`</b>: the input, and',
'* <b>`arg`</b>: the input, again.',
''])
self.assertTrue(expected in docs)
def test_generate_index(self):
module = sys.modules[__name__]
index = {
'TestModule': module,
'test_function': test_function,
'TestModule.test_function': test_function,
'TestModule.TestClass': TestClass,
'TestModule.TestClass.a_method': TestClass.a_method,
'TestModule.TestClass.a_property': TestClass.a_property,
'TestModule.TestClass.ChildClass': TestClass.ChildClass,
}
duplicate_of = {
'TestModule.test_function': 'test_function'
}
docs = parser.generate_global_index('TestLibrary', 'test',
index=index,
duplicate_of=duplicate_of)
# Make sure duplicates and non-top-level symbols are in the index, but
# methods and properties are not.
self.assertTrue('a_method' not in docs)
self.assertTrue('a_property' not in docs)
self.assertTrue('TestModule.TestClass' in docs)
self.assertTrue('TestModule.TestClass.ChildClass' in docs)
self.assertTrue('TestModule.test_function' in docs)
# Leading backtick to make sure it's included top-level.
# This depends on formatting, but should be stable.
self.assertTrue('`test_function' in docs)
  def test_argspec_for_functools_partial(self):
# pylint: disable=unused-argument
def test_function_for_partial1(arg1, arg2, kwarg1=1, kwarg2=2):
pass
def test_function_for_partial2(arg1, arg2, *my_args, **my_kwargs):
pass
# pylint: enable=unused-argument
# pylint: disable=protected-access
# Make sure everything works for regular functions.
expected = inspect.ArgSpec(['arg1', 'arg2', 'kwarg1', 'kwarg2'], None, None,
(1, 2))
self.assertEqual(expected, parser._get_arg_spec(test_function_for_partial1))
# Make sure doing nothing works.
expected = inspect.ArgSpec(['arg1', 'arg2', 'kwarg1', 'kwarg2'], None, None,
(1, 2))
partial = functools.partial(test_function_for_partial1)
self.assertEqual(expected, parser._get_arg_spec(partial))
# Make sure setting args from the front works.
expected = inspect.ArgSpec(['arg2', 'kwarg1', 'kwarg2'], None, None, (1, 2))
partial = functools.partial(test_function_for_partial1, 1)
self.assertEqual(expected, parser._get_arg_spec(partial))
expected = inspect.ArgSpec(['kwarg2',], None, None, (2,))
partial = functools.partial(test_function_for_partial1, 1, 2, 3)
self.assertEqual(expected, parser._get_arg_spec(partial))
# Make sure setting kwargs works.
expected = inspect.ArgSpec(['arg1', 'arg2', 'kwarg2'], None, None, (2,))
partial = functools.partial(test_function_for_partial1, kwarg1=0)
self.assertEqual(expected, parser._get_arg_spec(partial))
expected = inspect.ArgSpec(['arg1', 'arg2', 'kwarg1'], None, None, (1,))
partial = functools.partial(test_function_for_partial1, kwarg2=0)
self.assertEqual(expected, parser._get_arg_spec(partial))
expected = inspect.ArgSpec(['arg1'], None, None, ())
partial = functools.partial(test_function_for_partial1,
arg2=0, kwarg1=0, kwarg2=0)
self.assertEqual(expected, parser._get_arg_spec(partial))
# Make sure *args, *kwargs is accounted for.
expected = inspect.ArgSpec([], 'my_args', 'my_kwargs', ())
partial = functools.partial(test_function_for_partial2, 0, 1)
self.assertEqual(expected, parser._get_arg_spec(partial))
# pylint: enable=protected-access
if __name__ == '__main__':
googletest.main()
| odejesush/tensorflow | tensorflow/tools/docs/parser_test.py | Python | apache-2.0 | 12,002 |
import unittest
from openmdao.main.api import set_as_top
from openmdao.util.testutil import assert_rel_error
from pycycle import duct, flowstation
class DuctTestCase(unittest.TestCase):
def test_start(self):
comp = set_as_top(duct.Duct())
comp.dPqP = 0
comp.Q_dot = -237.8
comp.MNexit_des = .4
fs = flowstation.FlowStation()
fs.W = 1.080
fs.setTotalTP(1424.01, .34)
fs.Mach = .4
comp.Fl_I = fs
comp.design = True
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .34, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .001566, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
#check off design
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .34, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .001566, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
#vary something
comp.dPqP = .1
comp.run()
assert_rel_error(self,comp.Fl_O.W, 1.080, .005)
assert_rel_error(self,comp.Fl_O.Pt, .306, .005)
assert_rel_error(self,comp.Fl_O.Tt, 540.00, .005)
assert_rel_error(self,comp.Fl_O.rhos, .0013783, .005)
assert_rel_error(self,comp.Fl_O.Mach, 0.4572, .005)
assert_rel_error(self,comp.Fl_O.area, 221.4, .005)
if __name__ == "__main__":
unittest.main()
| whiplash01/pyCycle | src/pycycle/test/test_duct.py | Python | apache-2.0 | 1,762 |
"""Utilities to help with aiohttp."""
import json
from typing import Any, Dict, Optional
from urllib.parse import parse_qsl
from multidict import CIMultiDict, MultiDict
class MockRequest:
"""Mock an aiohttp request."""
def __init__(
self,
content: bytes,
method: str = "GET",
status: int = 200,
headers: Optional[Dict[str, str]] = None,
query_string: Optional[str] = None,
url: str = "",
) -> None:
"""Initialize a request."""
self.method = method
self.url = url
self.status = status
self.headers: CIMultiDict[str] = CIMultiDict(headers or {})
self.query_string = query_string or ""
self._content = content
@property
def query(self) -> "MultiDict[str]":
"""Return a dictionary with the query variables."""
return MultiDict(parse_qsl(self.query_string, keep_blank_values=True))
@property
def _text(self) -> str:
"""Return the body as text."""
return self._content.decode("utf-8")
async def json(self) -> Any:
"""Return the body as JSON."""
return json.loads(self._text)
async def post(self) -> "MultiDict[str]":
"""Return POST parameters."""
return MultiDict(parse_qsl(self._text, keep_blank_values=True))
async def text(self) -> str:
"""Return the body as text."""
return self._text
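# --- Editor's note: hypothetical usage sketch added for illustration; not part
# of the original module. MockRequest stands in for an aiohttp request in
# handler tests: the body is given as raw bytes and read back through the same
# coroutines a real request exposes.
if __name__ == "__main__":
    import asyncio

    _req = MockRequest(
        content=b'{"state": "on"}', method="POST", query_string="token=abc"
    )
    assert _req.query["token"] == "abc"
    assert asyncio.run(_req.json()) == {"state": "on"}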
| leppa/home-assistant | homeassistant/util/aiohttp.py | Python | apache-2.0 | 1,424 |
import struct
import numpy
import io
import pickle
import pyctrl.packet as packet
def testA():
# test A
assert packet.pack('A','C') == b'AC'
assert packet.pack('A','B') == b'AB'
assert packet.pack('A','C') != b'AB'
assert packet.unpack_stream(io.BytesIO(b'AC')) == ('A', 'C')
assert packet.unpack_stream(io.BytesIO(b'AB')) == ('A', 'B')
assert packet.unpack_stream(io.BytesIO(b'AB')) != ('A', 'C')
def testC():
# test C
assert packet.pack('C','C') == b'CC'
assert packet.pack('C','B') == b'CB'
assert packet.pack('C','C') != b'CB'
assert packet.unpack_stream(io.BytesIO(b'CC')) == ('C', 'C')
assert packet.unpack_stream(io.BytesIO(b'CB')) == ('C', 'B')
assert packet.unpack_stream(io.BytesIO(b'CB')) != ('C', 'C')
def testS():
# test S
assert packet.pack('S','abc') == struct.pack('<cI3s', b'S', 3, b'abc')
assert packet.pack('S','abcd') != struct.pack('<cI3s', b'S', 3, b'abc')
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) == ('S', 'abc')
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) != ('S', 'abcd')
def testIFD():
# test I
assert packet.pack('I',3) == struct.pack('<ci', b'I', 3)
assert packet.pack('I',3) != struct.pack('<ci', b'I', 4)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<ci', b'I', 3))) == ('I', 3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<ci', b'I', 4))) != ('I', 3)
# test F
assert packet.pack('F',3.3) == struct.pack('<cf', b'F', 3.3)
assert packet.pack('F',3.3) != struct.pack('<cf', b'F', 4.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cf', b'F', numpy.float32(3.3)))) == ('F', numpy.float32(3.3))
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cf', b'F', 4.3))) != ('F', 3.3)
# test D
assert packet.pack('D',3.3) == struct.pack('<cd', b'D', 3.3)
assert packet.pack('D',3.3) != struct.pack('<cd', b'D', 4.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cd', b'D', 3.3))) == ('D', 3.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cd', b'D', 4.3))) != ('D', 3.3)
def testV():
# test VI
vector = numpy.array((1,2,3), int)
assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
vector = numpy.array((1,-2,3), int)
assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
# test VF
vector = numpy.array((1.3,-2,3), numpy.float32)
assert packet.pack('V',vector) == struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
# test VD
vector = numpy.array((1.3,-2,3), float)
assert packet.pack('V',vector) == struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
def testM():
# test MI
vector = numpy.array(((1,2,3), (3,4,5)), int)
assert packet.pack('M',vector) == struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, 2, 3, 3, 4, 5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, 2, 3, 3, 4, 5)))
assert type == 'M'
assert numpy.all(rvector == vector)
vector = numpy.array(((1,-2,3), (3,4,-5)), int)
assert packet.pack('M',vector) == struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, -2, 3, 3, 4, -5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, -2, 3, 3, 4, -5)))
assert type == 'M'
assert numpy.all(rvector == vector)
# test MF
vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), numpy.float32)
assert packet.pack('M',vector) == struct.pack('<cIccIffffff', b'M', 2, b'V', b'F', 6, 1.3, -2, 3, 0, -1, 2.5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIffffff', b'M', 2, b'V', b'F', 6, 1.3, -2, 3, 0, -1, 2.5)))
assert type == 'M'
assert numpy.all(rvector == vector)
# test MD
vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), numpy.float)
assert packet.pack('M',vector) == struct.pack('<cIccIdddddd', b'M', 2, b'V', b'D', 6, 1.3, -2, 3, 0, -1, 2.5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIdddddd', b'M', 2, b'V', b'D', 6, 1.3, -2, 3, 0, -1, 2.5)))
assert type == 'M'
assert numpy.all(rvector == vector)
def testP():
vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), numpy.float)
string = packet.pack('P', vector)
(type, rvector) = packet.unpack_stream(io.BytesIO(string))
assert type == 'P'
assert numpy.all(rvector == vector)
def testKR():
args = { 'a': 1, 'b': 2 }
string = packet.pack('K', args)
(type, rargs) = packet.unpack_stream(io.BytesIO(string))
assert type == 'K'
assert (args == rargs)
args = ('a', 1, 'b', 2)
string = packet.pack('R', args)
(type, rargs) = packet.unpack_stream(io.BytesIO(string))
assert type == 'R'
assert (args == rargs)
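# --- Editor's note: hypothetical illustration added for clarity; not part of the
# original tests. Every packet is a one-byte type code followed by a
# type-specific payload (e.g. '<ci' for 'I'), and unpack_stream() reverses
# pack(). This helper is not invoked by the main block below.
def _roundtrip_demo():
    buf = io.BytesIO(packet.pack('I', 42))
    assert packet.unpack_stream(buf) == ('I', 42)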
if __name__ == "__main__":
testA()
testC()
testS()
testIFD()
testV()
testM()
testP()
testKR()
| mcdeoliveira/ctrl | test/test_packet.py | Python | apache-2.0 | 5,810 |
import contextlib
from django.core.exceptions import ValidationError as DjangoValidationError
# Remnants from MODM days
# TODO: Remove usages of aliased Exceptions
ValidationError = DjangoValidationError
ValidationValueError = DjangoValidationError
ValidationTypeError = DjangoValidationError
class TokenError(Exception):
pass
class TokenHandlerNotFound(TokenError):
def __init__(self, action, *args, **kwargs):
super(TokenHandlerNotFound, self).__init__(*args, **kwargs)
self.action = action
class UnsupportedSanctionHandlerKind(Exception):
pass
class OSFError(Exception):
"""Base class for exceptions raised by the Osf application"""
pass
class NodeError(OSFError):
"""Raised when an action cannot be performed on a Node model"""
pass
class NodeStateError(NodeError):
"""Raised when the Node's state is not suitable for the requested action
Example: Node.remove_node() is called, but the node has non-deleted children
"""
pass
class UserStateError(OSFError):
"""Raised when the user's state is not suitable for the requested action
Example: user.gdpr_delete() is called, but the user has resources that cannot be deleted.
"""
pass
class SanctionTokenError(TokenError):
"""Base class for errors arising from the user of a sanction token."""
pass
class MaxRetriesError(OSFError):
"""Raised when an operation has been attempted a pre-determined number of times"""
pass
class InvalidSanctionRejectionToken(TokenError):
"""Raised if a Sanction subclass disapproval token submitted is invalid
or associated with another admin authorizer
"""
message_short = 'Invalid Token'
message_long = 'This disapproval link is invalid. Are you logged into the correct account?'
class InvalidSanctionApprovalToken(TokenError):
"""Raised if a Sanction subclass approval token submitted is invalid
or associated with another admin authorizer
"""
message_short = 'Invalid Token'
message_long = 'This approval link is invalid. Are you logged into the correct account?'
class InvalidTagError(OSFError):
"""Raised when attempting to perform an invalid operation on a tag"""
pass
class TagNotFoundError(OSFError):
"""Raised when attempting to perform an operation on an absent tag"""
pass
class UserNotAffiliatedError(OSFError):
"""Raised if a user attempts to add an institution that is not currently
one of its affiliations.
"""
message_short = 'User not affiliated'
message_long = 'This user is not affiliated with this institution.'
@contextlib.contextmanager
def reraise_django_validation_errors():
"""Context manager to reraise DjangoValidationErrors as `osf.exceptions.ValidationErrors` (for
MODM compat).
"""
try:
yield
except DjangoValidationError as err:
raise ValidationError(*err.args)
class NaiveDatetimeException(Exception):
pass
class InvalidTriggerError(Exception):
def __init__(self, trigger, state, valid_triggers):
self.trigger = trigger
self.state = state
self.valid_triggers = valid_triggers
self.message = 'Cannot trigger "{}" from state "{}". Valid triggers: {}'.format(trigger, state, valid_triggers)
super(Exception, self).__init__(self.message)
class InvalidTransitionError(Exception):
def __init__(self, machine, transition):
self.message = 'Machine "{}" received invalid transitions: "{}" expected but not defined'.format(machine, transition)
class PreprintError(OSFError):
"""Raised when an action cannot be performed on a Preprint model"""
pass
class PreprintStateError(PreprintError):
"""Raised when the Preprint's state is not suitable for the requested action"""
pass
class DraftRegistrationStateError(OSFError):
"""Raised when an action cannot be performed on a Draft Registration model"""
pass
class PreprintProviderError(PreprintError):
"""Raised when there is an error with the preprint provider"""
pass
class BlockedEmailError(OSFError):
"""Raised if a user tries to register an email that is included
in the blocked domains list
"""
pass
class SchemaBlockConversionError(OSFError):
"""Raised if unexpected data breaks the conversion between the legacy
nested registration schema/metadata format and the new, flattened,
'schema block' format.
"""
pass
class SchemaResponseError(OSFError):
    """Superclass for errors arising from unexpected SchemaResponse behavior."""
    pass
class SchemaResponseStateError(SchemaResponseError):
"""Raised when attempting to perform an operation against a
SchemaResponse with an invalid state.
"""
pass
class PreviousSchemaResponseError(SchemaResponseError):
"""Raised when attempting to create a new SchemaResponse for a parent that
already has a SchemaResponse in an unsupported state
"""
pass
class RegistrationBulkCreationContributorError(OSFError):
"""Raised if contributor preparation has failed"""
def __init__(self, error=None):
self.error = error if error else 'Contributor preparation error'
class RegistrationBulkCreationRowError(OSFError):
"""Raised if a draft registration failed creation during bulk upload"""
def __init__(self, upload_id, row_id, title, external_id, draft_id=None, error=None, approval_failure=False):
# `draft_id` is provided when the draft is created but not related to the row object
self.draft_id = draft_id
# `approval_failure` determines whether the error happens during the approval process
self.approval_failure = approval_failure
# The error information for logging, sentry and email
self.error = error if error else 'Draft registration creation error'
# The short error message to be added to the error list that will be returned to the initiator via email
self.short_message = 'Title: {}, External ID: {}, Error: {}'.format(title, external_id, self.error)
# The long error message for logging and sentry
self.long_message = 'Draft registration creation failed: [upload_id="{}", row_id="{}", title="{}", ' \
'external_id="{}", error="{}"]'.format(upload_id, row_id, title, external_id, self.error)
class SchemaResponseUpdateError(SchemaResponseError):
"""Raised when assigning an invalid value (or key) to a SchemaResponseBlock."""
def __init__(self, response, invalid_responses=None, unsupported_keys=None):
self.invalid_responses = invalid_responses
self.unsupported_keys = unsupported_keys
invalid_response_message = ''
unsupported_keys_message = ''
if invalid_responses:
invalid_response_message = (
f'\nThe following responses had invalid values: {invalid_responses}'
)
if unsupported_keys:
            unsupported_keys_message = (
                f'\nThe following responses had unsupported keys: {unsupported_keys}'
            )
error_message = (
            f'Error updating SchemaResponse with id [{response._id}]:'
f'{invalid_response_message}{unsupported_keys_message}'
)
super().__init__(error_message)
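# --- Editor's note: hypothetical demonstration added for illustration; not part
# of the original module. It shows the intended use of
# reraise_django_validation_errors(): a DjangoValidationError raised inside the
# block surfaces as the aliased osf ValidationError.
if __name__ == '__main__':
    try:
        with reraise_django_validation_errors():
            raise DjangoValidationError('name is required')
    except ValidationError:
        pass  # re-raised by the context manager as expected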
| Johnetordoff/osf.io | osf/exceptions.py | Python | apache-2.0 | 7,309 |
"""Playbook Args"""
from argparse import ArgumentParser
class Args:
"""Playbook Args"""
def __init__(self, parser: ArgumentParser):
"""Initialize class properties."""
| kstilwell/tcex | app_init/service_webhook/args.py | Python | apache-2.0 | 186 |
from src.utils import glove
import numpy as np
import string
class jester_vectorize():
def __init__(self, user_interactions, content, user_vector_type, content_vector_type, **support_files):
"""Set up the Jester Vectorizer.
Args:
user_interactions (rdd): The raw data of users interactions with
the system. For Jester, each "row" is as follows:
Row(joke_id, rating, user_id)
content (rdd): The raw data about the items in the dataset. For
Jester, each row is as follows: Row(joke_id, joke_text)
user_vector_type (str): The type of user vector desired. One of
'ratings', 'pos_ratings', 'ratings_to_interact', or None.
content_vector_type: The type of content vector desired. One of
'glove' or None.
support_files: Only one support file is used for this class:
glove_model: An instantiated glove model.
"""
self.user_vector_type = user_vector_type
self.content_vector_type = content_vector_type
self.user_interactions = user_interactions
self.content = content
# If no support files were passed in, initialize an empty support file
if support_files:
self.support_files = support_files
else:
self.support_files = {}
def get_user_vector(self):
"""Produce an RDD containing tuples of the form (user, item, rating).
There are three options when producing these user vectors:
ratings: The ratings the users assigned
pos_ratings: Only ratings > 0, all others are discarded
ratings_to_interact: Positive ratings are mapped to 1, negative to -1.
"""
uir = self.user_interactions.map(lambda row: (row.user_id, row.joke_id, row.rating))
if self.user_vector_type == 'ratings':
return uir
elif self.user_vector_type == 'pos_ratings':
return uir.filter(lambda (u, i, r): r > 0)
elif self.user_vector_type == 'ratings_to_interact':
return uir.map(lambda (u, i, r): (u, i, 1 if r > 0 else -1))
elif self.user_vector_type == 'none' or self.user_vector_type is None:
return None
else:
            print "Please choose a user_vector_type from 'ratings', 'pos_ratings', 'ratings_to_interact', or 'none'"
return None
def get_content_vector(self):
"""Produce an RDD containing tuples of the form (item, content_vector).
There is one method of producing content vectors:
glove: Use the Stanford GloVe model to sum vector ratings of all
the words in the joke.
"""
if self.content_vector_type == 'glove':
# The model is initialized by the user and passed in via the
# support_file object
glove_model = self.support_files["glove_model"]
# Transformation function
def joke_to_glove(row, glove):
vector = np.zeros(glove.vector_size)
for chunk in row.joke_text.split():
word = chunk.lower().strip(string.punctuation)
vector += glove[word]
return (row.joke_id, vector)
# Run the transformation function over the data
return self.content.map(lambda row: joke_to_glove(row, glove_model))
elif self.content_vector_type == 'none' or self.content_vector_type is None:
return None
else:
            print "Please choose a content_vector_type of either 'glove' or None"
return None
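# --- Editor's note: hypothetical illustration added for clarity; not part of the
# original module. It mimics the three user_vector_type mappings on plain tuples
# (the class applies the same per-row mapping to a Spark RDD); the glove content
# option simply sums the word vectors of each joke's text.
if __name__ == "__main__":
    raw = [(1, 10, 4.5), (1, 11, -7.2), (2, 10, 0.0)]
    ratings = raw                                                    # 'ratings'
    pos_ratings = [t for t in raw if t[2] > 0]                       # 'pos_ratings'
    interactions = [(u, i, 1 if r > 0 else -1) for u, i, r in raw]   # 'ratings_to_interact'
    print ratings, pos_ratings, interactions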
| tiffanyj41/hermes | src/data_prep/jester_vectorize.py | Python | apache-2.0 | 3,684 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import functools
import os
import shutil
import tempfile
import time
import weakref
from eventlet import semaphore
from nova.openstack.common import cfg
from nova.openstack.common import fileutils
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
util_opts = [
cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'),
cfg.StrOpt('lock_path',
default=os.path.abspath(os.path.join(os.path.dirname(__file__),
'../')),
help='Directory to use for lock files')
]
CONF = cfg.CONF
CONF.register_opts(util_opts)
class _InterProcessLock(object):
"""Lock implementation which allows multiple locks, working around
issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
not require any cleanup. Since the lock is always held on a file
descriptor rather than outside of the process, the lock gets dropped
automatically if the process crashes, even if __exit__ is not executed.
There are no guarantees regarding usage by multiple green threads in a
single process here. This lock works only between processes. Exclusive
access between local threads should be achieved using the semaphores
in the @synchronized decorator.
Note these locks are released when the descriptor is closed, so it's not
safe to close the file descriptor while another green thread holds the
lock. Just opening and closing the lock file can break synchronisation,
so lock files must be accessed only using this abstraction.
"""
def __init__(self, name):
self.lockfile = None
self.fname = name
def __enter__(self):
self.lockfile = open(self.fname, 'w')
while True:
try:
# Using non-blocking locks since green threads are not
# patched to deal with blocking locking calls.
# Also upon reading the MSDN docs for locking(), it seems
# to have a laughable 10 attempts "blocking" mechanism.
self.trylock()
return self
except IOError, e:
if e.errno in (errno.EACCES, errno.EAGAIN):
# external locks synchronise things like iptables
# updates - give it some time to prevent busy spinning
time.sleep(0.01)
else:
raise
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self.unlock()
self.lockfile.close()
except IOError:
LOG.exception(_("Could not release the acquired lock `%s`"),
self.fname)
def trylock(self):
raise NotImplementedError()
def unlock(self):
raise NotImplementedError()
class _WindowsLock(_InterProcessLock):
def trylock(self):
msvcrt.locking(self.lockfile, msvcrt.LK_NBLCK, 1)
def unlock(self):
msvcrt.locking(self.lockfile, msvcrt.LK_UNLCK, 1)
class _PosixLock(_InterProcessLock):
def trylock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
def unlock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
if os.name == 'nt':
import msvcrt
InterProcessLock = _WindowsLock
else:
import fcntl
InterProcessLock = _PosixLock
_semaphores = weakref.WeakValueDictionary()
def synchronized(name, lock_file_prefix, external=False, lock_path=None):
"""Synchronization decorator.
Decorating a method like so::
@synchronized('mylock')
def foo(self, *args):
...
    ensures that only one thread will execute the foo method at a time.
Different methods can share the same lock::
@synchronized('mylock')
def foo(self, *args):
...
@synchronized('mylock')
def bar(self, *args):
...
This way only one of either foo or bar can be executing at a time.
The lock_file_prefix argument is used to provide lock files on disk with a
meaningful prefix. The prefix should end with a hyphen ('-') if specified.
The external keyword argument denotes whether this lock should work across
    multiple processes. This means that if two different workers both run a
    method decorated with @synchronized('mylock', external=True), only one
of them will execute at a time.
The lock_path keyword argument is used to specify a special location for
external lock files to live. If nothing is set, then CONF.lock_path is
used as a default.
"""
def wrap(f):
@functools.wraps(f)
def inner(*args, **kwargs):
# NOTE(soren): If we ever go natively threaded, this will be racy.
            # See http://stackoverflow.com/questions/5390569/dynamically-allocating-and-destroying-mutexes
sem = _semaphores.get(name, semaphore.Semaphore())
if name not in _semaphores:
# this check is not racy - we're already holding ref locally
# so GC won't remove the item and there was no IO switch
# (only valid in greenthreads)
_semaphores[name] = sem
with sem:
LOG.debug(_('Got semaphore "%(lock)s" for method '
'"%(method)s"...'), {'lock': name,
'method': f.__name__})
if external and not CONF.disable_process_locking:
LOG.debug(_('Attempting to grab file lock "%(lock)s" for '
'method "%(method)s"...'),
{'lock': name, 'method': f.__name__})
cleanup_dir = False
# We need a copy of lock_path because it is non-local
local_lock_path = lock_path
if not local_lock_path:
local_lock_path = CONF.lock_path
if not local_lock_path:
cleanup_dir = True
local_lock_path = tempfile.mkdtemp()
if not os.path.exists(local_lock_path):
cleanup_dir = True
fileutils.ensure_tree(local_lock_path)
# NOTE(mikal): the lock name cannot contain directory
# separators
safe_name = name.replace(os.sep, '_')
lock_file_name = '%s%s' % (lock_file_prefix, safe_name)
lock_file_path = os.path.join(local_lock_path,
lock_file_name)
try:
lock = InterProcessLock(lock_file_path)
with lock:
LOG.debug(_('Got file lock "%(lock)s" at %(path)s '
'for method "%(method)s"...'),
{'lock': name,
'path': lock_file_path,
'method': f.__name__})
retval = f(*args, **kwargs)
finally:
# NOTE(vish): This removes the tempdir if we needed
# to create one. This is used to cleanup
# the locks left behind by unit tests.
if cleanup_dir:
shutil.rmtree(local_lock_path)
else:
retval = f(*args, **kwargs)
return retval
return inner
return wrap
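# --- Editor's note: hypothetical usage sketch added for illustration; not part
# of the original module. Besides the @synchronized decorator shown in the
# docstring above, InterProcessLock can be used directly as a context manager:
# the file lock is acquired on entry (retrying on EACCES/EAGAIN) and released,
# with the descriptor closed, on exit.
if __name__ == '__main__':
    _demo_lock_file = os.path.join(tempfile.mkdtemp(), 'demo.lock')
    with InterProcessLock(_demo_lock_file):
        pass  # critical section guarded against other processes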
| houshengbo/nova_vmware_compute_driver | nova/openstack/common/lockutils.py | Python | apache-2.0 | 8,446 |
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
from wlauto import AndroidUiAutoBenchmark, Parameter, Alias
from wlauto.exceptions import ConfigError
class Andebench(AndroidUiAutoBenchmark):
name = 'andebench'
description = """
AndEBench is an industry standard Android benchmark provided by The
Embedded Microprocessor Benchmark Consortium (EEMBC).
http://www.eembc.org/andebench/about.php
From the website:
- Initial focus on CPU and Dalvik interpreter performance
- Internal algorithms concentrate on integer operations
- Compares the difference between native and Java performance
- Implements flexible multicore performance analysis
- Results displayed in Iterations per second
- Detailed log file for comprehensive engineering analysis
"""
package = 'com.eembc.coremark'
activity = 'com.eembc.coremark.splash'
summary_metrics = ['AndEMark Java', 'AndEMark Native']
parameters = [
Parameter('number_of_threads', kind=int,
description='Number of threads that will be spawned by AndEBench.'),
Parameter('single_threaded', kind=bool,
description="""
If ``true``, AndEBench will run with a single thread. Note: this must
not be specified if ``number_of_threads`` has been specified.
"""),
]
aliases = [
Alias('andebenchst', number_of_threads=1),
]
regex = re.compile('\s*(?P<key>(AndEMark Native|AndEMark Java))\s*:'
'\s*(?P<value>\d+)')
def validate(self):
if (self.number_of_threads is not None) and (self.single_threaded is not None): # pylint: disable=E1101
raise ConfigError('Can\'t specify both number_of_threads and single_threaded parameters.')
def setup(self, context):
if self.number_of_threads is None: # pylint: disable=access-member-before-definition
if self.single_threaded: # pylint: disable=E1101
self.number_of_threads = 1 # pylint: disable=attribute-defined-outside-init
else:
self.number_of_threads = self.device.number_of_cores # pylint: disable=W0201
self.logger.debug('Using {} threads'.format(self.number_of_threads))
self.uiauto_params['number_of_threads'] = self.number_of_threads
# Called after this setup as modifying uiauto_params
super(Andebench, self).setup(context)
def update_result(self, context):
super(Andebench, self).update_result(context)
results = {}
with open(self.logcat_log) as fh:
for line in fh:
match = self.regex.search(line)
if match:
data = match.groupdict()
results[data['key']] = data['value']
for key, value in results.iteritems():
context.result.add_metric(key, value)
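# --- Editor's note: hypothetical illustration added for clarity; not part of the
# original workload. It shows what Andebench.regex extracts from a logcat line
# of the expected form (the line itself is made up for the example).
if __name__ == '__main__':
    _m = Andebench.regex.search('AndEMark Native : 12345')
    assert _m is not None and _m.groupdict() == {'key': 'AndEMark Native',
                                                 'value': '12345'}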
| freedomtan/workload-automation | wlauto/workloads/andebench/__init__.py | Python | apache-2.0 | 3,485 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import time
import timeit
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python.util import nest
class Plus1RNNCell(tf.nn.rnn_cell.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 5
@property
def state_size(self):
return 5
def __call__(self, input_, state, scope=None):
return (input_ + 1, state + 1)
class DummyMultiDimensionalLSTM(tf.nn.rnn_cell.RNNCell):
"""LSTM Cell generating (output, new_state) = (input + 1, state + 1).
The input to this cell may have an arbitrary number of dimensions that follow
the preceding 'Time' and 'Batch' dimensions.
"""
def __init__(self, dims):
"""Initialize the Multi-dimensional LSTM cell.
Args:
dims: tuple that contains the dimensions of the output of the cell,
without including 'Time' or 'Batch' dimensions.
"""
if not isinstance(dims, tuple):
      raise TypeError("The dimensions passed to DummyMultiDimensionalLSTM "
                      "should be a tuple of ints.")
self._dims = dims
self._output_size = tf.TensorShape(self._dims)
self._state_size = (tf.TensorShape(self._dims), tf.TensorShape(self._dims))
@property
def output_size(self):
return self._output_size
@property
def state_size(self):
return self._state_size
def __call__(self, input_, state, scope=None):
h, c = state
return (input_ + 1, (h + 1, c + 1))
class NestedRNNCell(tf.nn.rnn_cell.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1).
The input, output and state of this cell is a tuple of two tensors.
"""
@property
def output_size(self):
return (5, 5)
@property
def state_size(self):
return (6, 6)
def __call__(self, input_, state, scope=None):
h, c = state
x, y = input_
return ((x + 1, y + 1), (h + 1, c + 1))
class TestStateSaver(object):
def __init__(self, batch_size, state_size):
self._batch_size = batch_size
self._state_size = state_size
self.saved_state = {}
def state(self, name):
if isinstance(self._state_size, dict):
state_size = self._state_size[name]
else:
state_size = self._state_size
if isinstance(state_size, int):
state_size = (state_size,)
elif isinstance(state_size, tuple):
pass
else:
raise TypeError("state_size should either be an int or a tuple")
return tf.zeros((self._batch_size,) + state_size)
def save_state(self, name, state):
self.saved_state[name] = state
return tf.identity(state)
class RNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testInvalidSequenceLengthShape(self):
cell = Plus1RNNCell()
inputs = [tf.placeholder(tf.float32, shape=(3, 4))]
with self.assertRaisesRegexp(ValueError, "must be a vector"):
tf.nn.rnn(cell, inputs, dtype=tf.float32, sequence_length=4)
with self.assertRaisesRegexp(ValueError, "must be a vector"):
tf.nn.dynamic_rnn(
cell, tf.pack(inputs), dtype=tf.float32, sequence_length=[[4]])
def testRNN(self):
cell = Plus1RNNCell()
batch_size = 2
input_size = 5
max_length = 8 # unrolled up to this length
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, state = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out, inp in zip(outputs, inputs):
self.assertEqual(out.get_shape(), inp.get_shape())
self.assertEqual(out.dtype, inp.dtype)
with self.test_session(use_gpu=False) as sess:
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs + [state],
feed_dict={inputs[0]: input_value})
# Outputs
for v in values[:-1]:
self.assertAllClose(v, input_value + 1.0)
# Final state
self.assertAllClose(
values[-1],
max_length * np.ones((batch_size, input_size), dtype=np.float32))
def testDropout(self):
cell = Plus1RNNCell()
full_dropout_cell = tf.nn.rnn_cell.DropoutWrapper(
cell, input_keep_prob=1e-12, seed=0)
batch_size = 2
input_size = 5
max_length = 8
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("drop_scope"):
dropped_outputs, _ = tf.nn.rnn(
full_dropout_cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out, inp in zip(outputs, inputs):
self.assertEqual(out.get_shape().as_list(), inp.get_shape().as_list())
self.assertEqual(out.dtype, inp.dtype)
with self.test_session(use_gpu=False) as sess:
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs + [state],
feed_dict={inputs[0]: input_value})
full_dropout_values = sess.run(dropped_outputs,
feed_dict={inputs[0]: input_value})
for v in values[:-1]:
self.assertAllClose(v, input_value + 1.0)
for d_v in full_dropout_values[:-1]: # Add 1.0 to dropped_out (all zeros)
self.assertAllClose(d_v, np.ones_like(input_value))
def _testDynamicCalculation(self, use_gpu):
cell = Plus1RNNCell()
sequence_length = tf.placeholder(tf.int64)
batch_size = 2
input_size = 5
max_length = 8
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("drop_scope"):
dynamic_outputs, dynamic_state = tf.nn.rnn(
cell, inputs, sequence_length=sequence_length, dtype=tf.float32)
self.assertEqual(len(dynamic_outputs), len(inputs))
with self.test_session(use_gpu=use_gpu) as sess:
input_value = np.random.randn(batch_size, input_size)
dynamic_values = sess.run(dynamic_outputs,
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
dynamic_state_value = sess.run([dynamic_state],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# outputs are fully calculated for t = 0, 1
for v in dynamic_values[:2]:
self.assertAllClose(v, input_value + 1.0)
# outputs at t = 2 are zero for entry 0, calculated for entry 1
self.assertAllClose(
dynamic_values[2],
np.vstack((
np.zeros((input_size)),
1.0 + input_value[1, :])))
# outputs at t = 3+ are zero
for v in dynamic_values[3:]:
self.assertAllEqual(v, np.zeros_like(input_value))
# the final states are:
# entry 0: the values from the calculation at t=1
# entry 1: the values from the calculation at t=2
self.assertAllEqual(
dynamic_state_value[0],
np.vstack((
1.0 * (1 + 1) * np.ones((input_size)),
1.0 * (2 + 1) * np.ones((input_size)))))
def testDynamicCalculation(self):
self._testDynamicCalculation(True)
self._testDynamicCalculation(False)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
      # check that all the variable names start
      # with the proper scope.
tf.initialize_all_variables()
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testScope(self):
def factory(scope):
cell = Plus1RNNCell()
batch_size = 2
input_size = 5
max_length = 8 # unrolled up to this length
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
return tf.nn.rnn(cell, inputs, dtype=tf.float32, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class GRUTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testDynamic(self, use_gpu):
time_steps = 8
num_units = 3
input_size = 5
batch_size = 2
input_values = np.random.randn(time_steps, batch_size, input_size)
sequence_length = np.random.randint(0, time_steps, size=batch_size)
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(
tf.float32, shape=(time_steps, batch_size, input_size))
cell = tf.nn.rnn_cell.GRUCell(num_units=num_units)
with tf.variable_scope("dynamic_scope"):
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs=concat_inputs, sequence_length=sequence_length,
time_major=True, dtype=tf.float32)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
sess.run([outputs_dynamic, state_dynamic], feed_dict=feeds)
def testDynamic(self):
self._testDynamic(use_gpu=False)
self._testDynamic(use_gpu=True)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
      # check that all the variable names start
      # with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testDynamicScope(self):
time_steps = 8
num_units = 3
input_size = 5
batch_size = 2
sequence_length = np.random.randint(0, time_steps, size=batch_size)
def factory(scope):
concat_inputs = tf.placeholder(
tf.float32, shape=(time_steps, batch_size, input_size))
cell = tf.nn.rnn_cell.GRUCell(num_units=num_units)
return tf.nn.dynamic_rnn(cell, inputs=concat_inputs,
sequence_length=sequence_length,
time_major=True, dtype=tf.float32,
scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class LSTMTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testNoProjNoSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(num_units, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def _testCellClipping(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True, cell_clip=0.0, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
values = sess.run(outputs, feed_dict={inputs[0]: input_value})
for value in values:
# if cell c is clipped to 0, tanh(c) = 0 => m==0
self.assertAllEqual(value, np.zeros((batch_size, num_units)))
def _testNoProjNoShardingSimpleStateSaver(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, 2 * num_units)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name="save_lstm")
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
(last_state_value, saved_state_value) = sess.run(
[state, state_saver.saved_state["save_lstm"]],
feed_dict={inputs[0]: input_value})
self.assertAllEqual(last_state_value, saved_state_value)
def testNoProjNoShardingTupleStateSaver(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, num_units)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=True)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name=("c", "m"))
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
last_and_saved_states = sess.run(
state + (state_saver.saved_state["c"], state_saver.saved_state["m"]),
feed_dict={inputs[0]: input_value})
self.assertEqual(4, len(last_and_saved_states))
self.assertAllEqual(last_and_saved_states[:2], last_and_saved_states[2:])
def testNoProjNoShardingNestedTupleStateSaver(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, {"c0": num_units,
"m0": num_units,
"c1": num_units + 1,
"m1": num_units + 1,
"c2": num_units + 2,
"m2": num_units + 2,
"c3": num_units + 3,
"m3": num_units + 3})
def _cell(i):
return tf.nn.rnn_cell.LSTMCell(
num_units + i, use_peepholes=False, initializer=initializer,
state_is_tuple=True)
# This creates a state tuple which has 4 sub-tuples of length 2 each.
cell = tf.nn.rnn_cell.MultiRNNCell(
[_cell(i) for i in range(4)], state_is_tuple=True)
self.assertEqual(len(cell.state_size), 4)
for i in range(4):
self.assertEqual(len(cell.state_size[i]), 2)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
state_names = (("c0", "m0"), ("c1", "m1"),
("c2", "m2"), ("c3", "m3"))
with tf.variable_scope("share_scope"):
outputs, state = tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver, state_name=state_names)
self.assertEqual(len(outputs), len(inputs))
# Final output comes from _cell(3) which has state size num_units + 3
for out in outputs:
self.assertEqual(out.get_shape().as_list(), [batch_size, num_units + 3])
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
last_states = sess.run(
list(nest.flatten(state)), feed_dict={inputs[0]: input_value})
saved_states = sess.run(
list(state_saver.saved_state.values()),
feed_dict={inputs[0]: input_value})
self.assertEqual(8, len(last_states))
self.assertEqual(8, len(saved_states))
flat_state_names = nest.flatten(state_names)
named_saved_states = dict(
zip(state_saver.saved_state.keys(), saved_states))
for i in range(8):
self.assertAllEqual(
last_states[i],
named_saved_states[flat_state_names[i]])
def _testProjNoSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def testStateTupleWithProjAndSequenceLength(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell_notuple = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=False)
cell_tuple = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=True)
outputs_notuple, state_notuple = tf.nn.rnn(
cell_notuple, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_tuple, state_tuple = tf.nn.rnn(
cell_tuple, inputs, dtype=tf.float32,
sequence_length=sequence_length)
self.assertEqual(len(outputs_notuple), len(inputs))
self.assertEqual(len(outputs_tuple), len(inputs))
self.assertTrue(isinstance(state_tuple, tuple))
self.assertTrue(isinstance(state_notuple, tf.Tensor))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_notuple_v = sess.run(
outputs_notuple, feed_dict={inputs[0]: input_value})
outputs_tuple_v = sess.run(
outputs_tuple, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_notuple_v, outputs_tuple_v)
(state_notuple_v,) = sess.run(
(state_notuple,), feed_dict={inputs[0]: input_value})
state_tuple_v = sess.run(
state_tuple, feed_dict={inputs[0]: input_value})
self.assertAllEqual(state_notuple_v, np.hstack(state_tuple_v))
def _testProjSharding(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
sess.run(outputs, feed_dict={inputs[0]: input_value})
def _testTooManyShards(self, use_gpu):
num_units = 3
input_size = 5
num_proj = 4
num_proj_shards = 4
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()):
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
with self.assertRaises(ValueError):
tf.nn.rnn(cell, inputs, dtype=tf.float32)
def _testDoubleInput(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float64, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
outputs, _ = tf.nn.rnn(
cell, inputs, initial_state=cell.zero_state(batch_size, tf.float64))
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run()
input_value = np.asarray(np.random.randn(batch_size, input_size),
dtype=np.float64)
values = sess.run(outputs, feed_dict={inputs[0]: input_value})
self.assertEqual(values[0].dtype, input_value.dtype)
def _testShardNoShardEquivalentOutput(self, use_gpu):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
initializer = tf.constant_initializer(0.001)
cell_noshard = tf.nn.rnn_cell.LSTMCell(
num_units,
num_proj=num_proj,
use_peepholes=True,
initializer=initializer,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
state_is_tuple=False)
cell_shard = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj,
state_is_tuple=False)
with tf.variable_scope("noshard_scope"):
outputs_noshard, state_noshard = tf.nn.rnn(
cell_noshard, inputs, dtype=tf.float32)
with tf.variable_scope("shard_scope"):
outputs_shard, state_shard = tf.nn.rnn(
cell_shard, inputs, dtype=tf.float32)
self.assertEqual(len(outputs_noshard), len(inputs))
self.assertEqual(len(outputs_noshard), len(outputs_shard))
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
feeds = dict((x, input_value) for x in inputs)
values_noshard = sess.run(outputs_noshard, feed_dict=feeds)
values_shard = sess.run(outputs_shard, feed_dict=feeds)
state_values_noshard = sess.run([state_noshard], feed_dict=feeds)
state_values_shard = sess.run([state_shard], feed_dict=feeds)
self.assertEqual(len(values_noshard), len(values_shard))
self.assertEqual(len(state_values_noshard), len(state_values_shard))
for (v_noshard, v_shard) in zip(values_noshard, values_shard):
self.assertAllClose(v_noshard, v_shard, atol=1e-3)
for (s_noshard, s_shard) in zip(state_values_noshard, state_values_shard):
self.assertAllClose(s_noshard, s_shard, atol=1e-3)
def _testDoubleInputWithDropoutAndDynamicCalculation(
self, use_gpu):
"""Smoke test for using LSTM with doubles, dropout, dynamic calculation."""
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
num_proj_shards = 3
num_unit_shards = 2
max_length = 8
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
sequence_length = tf.placeholder(tf.int64)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float64, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units,
use_peepholes=True,
num_proj=num_proj,
num_unit_shards=num_unit_shards,
num_proj_shards=num_proj_shards,
initializer=initializer,
state_is_tuple=False)
dropout_cell = tf.nn.rnn_cell.DropoutWrapper(cell, 0.5, seed=0)
outputs, state = tf.nn.rnn(
dropout_cell, inputs, sequence_length=sequence_length,
initial_state=cell.zero_state(batch_size, tf.float64))
self.assertEqual(len(outputs), len(inputs))
tf.initialize_all_variables().run(feed_dict={sequence_length: [2, 3]})
input_value = np.asarray(np.random.randn(batch_size, input_size),
dtype=np.float64)
values = sess.run(outputs, feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
state_value = sess.run([state], feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
self.assertEqual(values[0].dtype, input_value.dtype)
self.assertEqual(state_value[0].dtype, input_value.dtype)
def testSharingWeightsWithReuse(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
initializer_d = tf.random_uniform_initializer(-1, 1, seed=self._seed+1)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
cell_d = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer_d,
state_is_tuple=False)
with tf.variable_scope("share_scope"):
outputs0, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("share_scope", reuse=True):
outputs1, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.variable_scope("diff_scope"):
outputs2, _ = tf.nn.rnn(cell_d, inputs, dtype=tf.float32)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
output_values = sess.run(
outputs0 + outputs1 + outputs2, feed_dict={inputs[0]: input_value})
outputs0_values = output_values[:max_length]
outputs1_values = output_values[max_length:2*max_length]
outputs2_values = output_values[2*max_length:]
self.assertEqual(len(outputs0_values), len(outputs1_values))
self.assertEqual(len(outputs0_values), len(outputs2_values))
for o1, o2, o3 in zip(outputs0_values, outputs1_values, outputs2_values):
# Same weights used by both RNNs so outputs should be the same.
self.assertAllEqual(o1, o2)
# Different weights used so outputs should be different.
self.assertTrue(np.linalg.norm(o1-o3) > 1e-6)
def testSharingWeightsWithDifferentNamescope(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-1, 1, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer,
state_is_tuple=False)
with tf.name_scope("scope0"):
with tf.variable_scope("share_scope"):
outputs0, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
with tf.name_scope("scope1"):
with tf.variable_scope("share_scope", reuse=True):
outputs1, _ = tf.nn.rnn(cell, inputs, dtype=tf.float32)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
output_values = sess.run(
outputs0 + outputs1, feed_dict={inputs[0]: input_value})
outputs0_values = output_values[:max_length]
outputs1_values = output_values[max_length:]
self.assertEqual(len(outputs0_values), len(outputs1_values))
for out0, out1 in zip(outputs0_values, outputs1_values):
self.assertAllEqual(out0, out1)
def testDynamicRNNWithTupleStates(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
inputs_c = tf.pack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
num_proj=num_proj, initializer=initializer, state_is_tuple=True)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
self.assertTrue(isinstance(state_static, tf.nn.rnn_cell.LSTMStateTuple))
self.assertTrue(isinstance(state_dynamic, tf.nn.rnn_cell.LSTMStateTuple))
self.assertEqual(state_static[0], state_static.c)
self.assertEqual(state_static[1], state_static.h)
self.assertEqual(state_dynamic[0], state_dynamic.c)
self.assertEqual(state_dynamic[1], state_dynamic.h)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
state_static_v = sess.run(
state_static, feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
state_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
def testDynamicRNNWithNestedTupleStates(self):
num_units = 3
input_size = 5
batch_size = 2
num_proj = 4
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None, input_size))]
inputs_c = tf.pack(inputs)
def _cell(i):
return tf.nn.rnn_cell.LSTMCell(
num_units + i, use_peepholes=True,
num_proj=num_proj + i, initializer=initializer, state_is_tuple=True)
# This creates a state tuple which has 4 sub-tuples of length 2 each.
cell = tf.nn.rnn_cell.MultiRNNCell(
[_cell(i) for i in range(4)], state_is_tuple=True)
self.assertEqual(len(cell.state_size), 4)
for i in range(4):
self.assertEqual(len(cell.state_size[i]), 2)
test_zero = cell.zero_state(1, tf.float32)
self.assertEqual(len(test_zero), 4)
for i in range(4):
self.assertEqual(test_zero[i][0].get_shape()[1], cell.state_size[i][0])
self.assertEqual(test_zero[i][1].get_shape()[1], cell.state_size[i][1])
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
tf.get_variable_scope().reuse_variables()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
tf.initialize_all_variables().run()
input_value = np.random.randn(batch_size, input_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
state_static_v = sess.run(
nest.flatten(state_static), feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
nest.flatten(state_dynamic), feed_dict={inputs[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
def _testDynamicEquivalentToStaticRNN(self, use_gpu, use_sequence_length):
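    """Checks that dynamic_rnn matches the statically unrolled rnn in
    outputs, final state, and gradients."""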
time_steps = 8
num_units = 3
num_proj = 4
input_size = 5
batch_size = 2
input_values = np.random.randn(time_steps, batch_size, input_size)
if use_sequence_length:
sequence_length = np.random.randint(0, time_steps, size=batch_size)
else:
sequence_length = None
########### Step 1: Run static graph and generate readouts
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(tf.float32,
shape=(time_steps, batch_size, input_size))
inputs = tf.unpack(concat_inputs)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj, state_is_tuple=False)
with tf.variable_scope("dynamic_scope"):
outputs_static, state_static = tf.nn.rnn(
cell, inputs, sequence_length=sequence_length, dtype=tf.float32)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
# Generate gradients of sum of outputs w.r.t. inputs
static_gradients = tf.gradients(
outputs_static + [state_static], [concat_inputs])
# Generate gradients of individual outputs w.r.t. inputs
static_individual_gradients = nest.flatten([
tf.gradients(y, [concat_inputs])
for y in [outputs_static[0],
outputs_static[-1],
state_static]])
# Generate gradients of individual variables w.r.t. inputs
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
assert len(trainable_variables) > 1, (
"Count of trainable variables: %d" % len(trainable_variables))
# pylint: disable=bad-builtin
static_individual_variable_gradients = nest.flatten([
tf.gradients(y, trainable_variables)
for y in [outputs_static[0],
outputs_static[-1],
state_static]])
# Test forward pass
values_static = sess.run(outputs_static, feed_dict=feeds)
(state_value_static,) = sess.run((state_static,), feed_dict=feeds)
# Test gradients to inputs and variables w.r.t. outputs & final state
static_grad_values = sess.run(static_gradients, feed_dict=feeds)
static_individual_grad_values = sess.run(
static_individual_gradients, feed_dict=feeds)
static_individual_var_grad_values = sess.run(
static_individual_variable_gradients, feed_dict=feeds)
########## Step 2: Run dynamic graph and generate readouts
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
concat_inputs = tf.placeholder(tf.float32,
shape=(time_steps, batch_size, input_size))
inputs = tf.unpack(concat_inputs)
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=True,
initializer=initializer, num_proj=num_proj, state_is_tuple=False)
with tf.variable_scope("dynamic_scope"):
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs=concat_inputs, sequence_length=sequence_length,
time_major=True, dtype=tf.float32)
split_outputs_dynamic = tf.unpack(outputs_dynamic, time_steps)
feeds = {concat_inputs: input_values}
# Initialize
tf.initialize_all_variables().run(feed_dict=feeds)
# Generate gradients of sum of outputs w.r.t. inputs
dynamic_gradients = tf.gradients(
split_outputs_dynamic + [state_dynamic], [concat_inputs])
# Generate gradients of several individual outputs w.r.t. inputs
dynamic_individual_gradients = nest.flatten([
tf.gradients(y, [concat_inputs])
for y in [split_outputs_dynamic[0],
split_outputs_dynamic[-1],
state_dynamic]])
# Generate gradients of individual variables w.r.t. inputs
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
assert len(trainable_variables) > 1, (
"Count of trainable variables: %d" % len(trainable_variables))
dynamic_individual_variable_gradients = nest.flatten([
tf.gradients(y, trainable_variables)
for y in [split_outputs_dynamic[0],
split_outputs_dynamic[-1],
state_dynamic]])
# Test forward pass
values_dynamic = sess.run(split_outputs_dynamic, feed_dict=feeds)
(state_value_dynamic,) = sess.run(
(state_dynamic,), feed_dict=feeds)
# Test gradients to inputs and variables w.r.t. outputs & final state
dynamic_grad_values = sess.run(dynamic_gradients, feed_dict=feeds)
dynamic_individual_grad_values = sess.run(
dynamic_individual_gradients, feed_dict=feeds)
dynamic_individual_var_grad_values = sess.run(
dynamic_individual_variable_gradients, feed_dict=feeds)
######### Step 3: Comparisons
self.assertEqual(len(values_static), len(values_dynamic))
for (value_static, value_dynamic) in zip(values_static, values_dynamic):
self.assertAllEqual(value_static, value_dynamic)
self.assertAllEqual(state_value_static, state_value_dynamic)
self.assertAllEqual(static_grad_values, dynamic_grad_values)
self.assertEqual(len(static_individual_grad_values),
len(dynamic_individual_grad_values))
self.assertEqual(len(static_individual_var_grad_values),
len(dynamic_individual_var_grad_values))
for i, (a, b) in enumerate(zip(static_individual_grad_values,
dynamic_individual_grad_values)):
tf.logging.info("Comparing individual gradients iteration %d" % i)
self.assertAllEqual(a, b)
for i, (a, b) in enumerate(zip(static_individual_var_grad_values,
dynamic_individual_var_grad_values)):
tf.logging.info(
"Comparing individual variable gradients iteration %d" % i)
self.assertAllEqual(a, b)
def testNoProjNoShardingSimpleStateSaver(self):
self._testNoProjNoShardingSimpleStateSaver(use_gpu=False)
self._testNoProjNoShardingSimpleStateSaver(use_gpu=True)
def testNoProjNoSharding(self):
self._testNoProjNoSharding(use_gpu=False)
self._testNoProjNoSharding(use_gpu=True)
def testCellClipping(self):
self._testCellClipping(use_gpu=False)
self._testCellClipping(use_gpu=True)
def testProjNoSharding(self):
self._testProjNoSharding(use_gpu=False)
self._testProjNoSharding(use_gpu=True)
def testProjSharding(self):
self._testProjSharding(use_gpu=False)
self._testProjSharding(use_gpu=True)
def testTooManyShards(self):
self._testTooManyShards(use_gpu=False)
self._testTooManyShards(use_gpu=True)
def testShardNoShardEquivalentOutput(self):
self._testShardNoShardEquivalentOutput(use_gpu=False)
self._testShardNoShardEquivalentOutput(use_gpu=True)
def testDoubleInput(self):
self._testDoubleInput(use_gpu=False)
self._testDoubleInput(use_gpu=True)
def testDoubleInputWithDropoutAndDynamicCalculation(self):
self._testDoubleInputWithDropoutAndDynamicCalculation(use_gpu=False)
self._testDoubleInputWithDropoutAndDynamicCalculation(use_gpu=True)
def testDynamicEquivalentToStaticRNN(self):
self._testDynamicEquivalentToStaticRNN(
use_gpu=False, use_sequence_length=False)
self._testDynamicEquivalentToStaticRNN(
use_gpu=True, use_sequence_length=False)
self._testDynamicEquivalentToStaticRNN(
use_gpu=False, use_sequence_length=True)
self._testDynamicEquivalentToStaticRNN(
use_gpu=True, use_sequence_length=True)
class BidirectionalRNNTest(tf.test.TestCase):
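  """Tests tf.nn.bidirectional_rnn and tf.nn.bidirectional_dynamic_rnn."""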
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _createBidirectionalRNN(self,
use_gpu,
use_shape,
use_sequence_length,
scope=None):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64) if use_sequence_length else None
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units,
input_size,
initializer=initializer,
state_is_tuple=False)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units,
input_size,
initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(
tf.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
outputs, state_fw, state_bw = tf.nn.bidirectional_rnn(
cell_fw,
cell_bw,
inputs,
dtype=tf.float32,
sequence_length=sequence_length,
scope=scope)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(
out.get_shape().as_list(),
[batch_size if use_shape else None, 2 * num_units])
input_value = np.random.randn(batch_size, input_size)
outputs = tf.pack(outputs)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testBidirectionalRNN(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createBidirectionalRNN(use_gpu, use_shape, True))
tf.initialize_all_variables().run()
# Run with pre-specified sequence length of 2, 3
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# First sequence in batch is length=2
# Check that the time=0 forward output is equal to time=1 backward output
self.assertEqual(out[0][0][0], out[1][0][3])
self.assertEqual(out[0][0][1], out[1][0][4])
self.assertEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is equal to time=0 backward output
self.assertEqual(out[1][0][0], out[0][0][3])
self.assertEqual(out[1][0][1], out[0][0][4])
self.assertEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is equal to time=2 backward output
self.assertEqual(out[0][1][0], out[2][1][3])
self.assertEqual(out[0][1][1], out[2][1][4])
self.assertEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is equal to time=1 backward output
self.assertEqual(out[1][1][0], out[1][1][3])
self.assertEqual(out[1][1][1], out[1][1][4])
self.assertEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is equal to time=0 backward output
self.assertEqual(out[2][1][0], out[0][1][3])
self.assertEqual(out[2][1][1], out[0][1][4])
self.assertEqual(out[2][1][2], out[0][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def _testBidirectionalRNNWithoutSequenceLength(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, _ = (
self._createBidirectionalRNN(use_gpu, use_shape, False))
tf.initialize_all_variables().run()
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# Both sequences in batch are length=8. Check that the time=i
# forward output is equal to time=8-1-i backward output
for i in xrange(8):
self.assertEqual(out[i][0][0], out[8 - 1 - i][0][3])
self.assertEqual(out[i][0][1], out[8 - 1 - i][0][4])
self.assertEqual(out[i][0][2], out[8 - 1 - i][0][5])
for i in xrange(8):
self.assertEqual(out[i][1][0], out[8 - 1 - i][1][3])
self.assertEqual(out[i][1][1], out[8 - 1 - i][1][4])
self.assertEqual(out[i][1][2], out[8 - 1 - i][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def testBidirectionalRNN(self):
self._testBidirectionalRNN(use_gpu=False, use_shape=False)
self._testBidirectionalRNN(use_gpu=True, use_shape=False)
self._testBidirectionalRNN(use_gpu=False, use_shape=True)
self._testBidirectionalRNN(use_gpu=True, use_shape=True)
def testBidirectionalRNNWithoutSequenceLength(self):
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=False,
use_shape=False)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=True,
use_shape=False)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=False,
use_shape=True)
self._testBidirectionalRNNWithoutSequenceLength(use_gpu=True,
use_shape=True)
def _createBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple, use_time_major,
scope=None):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64)
cell_fw = tf.nn.rnn_cell.LSTMCell(num_units,
initializer=initializer,
state_is_tuple=use_state_tuple)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units,
initializer=initializer,
state_is_tuple=use_state_tuple)
inputs = max_length * [
tf.placeholder(tf.float32,
shape=(batch_size if use_shape else None, input_size))]
inputs_c = tf.pack(inputs)
if not use_time_major:
inputs_c = tf.transpose(inputs_c, [1, 0, 2])
outputs, states = tf.nn.bidirectional_dynamic_rnn(
cell_fw,
cell_bw,
inputs_c,
sequence_length,
dtype=tf.float32,
time_major=use_time_major,
scope=scope)
outputs = tf.concat(2, outputs)
state_fw, state_bw = states
outputs_shape = [None, max_length, 2 * num_units]
if use_shape:
outputs_shape[0] = batch_size
if use_time_major:
outputs_shape[0], outputs_shape[1] = outputs_shape[1], outputs_shape[0]
self.assertEqual(
outputs.get_shape().as_list(),
outputs_shape)
input_value = np.random.randn(batch_size, input_size)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple, use_time_major):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createBidirectionalDynamicRNN(
use_gpu, use_shape, use_state_tuple, use_time_major))
tf.initialize_all_variables().run()
# Run with pre-specified sequence length of 2, 3
if use_state_tuple:
out, c_fw, m_fw, c_bw, m_bw = sess.run(
[outputs, state_fw[0], state_fw[1], state_bw[0], state_bw[1]],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
s_fw = (c_fw, m_fw)
s_bw = (c_bw, m_bw)
else:
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
      # Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward outputs have to be the same,
      # but reversed in time. The format is output[time][batch][depth], and
      # due to depth concatenation (as num_units=3 for both RNNs):
      # - forward output:  out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
#
# First sequence in batch is length=2
# Check that the time=0 forward output is equal to time=1 backward output
if not use_time_major:
out = np.swapaxes(out, 0, 1)
self.assertEqual(out[0][0][0], out[1][0][3])
self.assertEqual(out[0][0][1], out[1][0][4])
self.assertEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is equal to time=0 backward output
self.assertEqual(out[1][0][0], out[0][0][3])
self.assertEqual(out[1][0][1], out[0][0][4])
self.assertEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is equal to time=2 backward output
self.assertEqual(out[0][1][0], out[2][1][3])
self.assertEqual(out[0][1][1], out[2][1][4])
self.assertEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is equal to time=1 backward output
self.assertEqual(out[1][1][0], out[1][1][3])
self.assertEqual(out[1][1][1], out[1][1][4])
self.assertEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is equal to time=0 backward output
self.assertEqual(out[2][1][0], out[0][1][3])
self.assertEqual(out[2][1][1], out[0][1][4])
self.assertEqual(out[2][1][2], out[0][1][5])
# Via the reasoning above, the forward and backward final state should be
# exactly the same
self.assertAllClose(s_fw, s_bw)
def testBidirectionalDynamicRNN(self):
# Generate 2^4 option values
# from [True, True, True, True] to [False, False, False, False]
options = itertools.product([True, False], repeat=4)
for option in options:
self._testBidirectionalDynamicRNN(use_gpu=option[0], use_shape=option[1],
use_state_tuple=option[2],
use_time_major=option[3])
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
    # REMARKS: factory(scope) is a function accepting a scope
    #          as an argument; the scope can be None, a string,
    #          or a VariableScope instance.
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
      # Check that all the variable names start
      # with the proper scope.
tf.initialize_all_variables()
all_vars = tf.all_variables()
prefix = prefix or "BiRNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("BiRNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testBidirectionalRNNScope(self):
def factory(scope):
return self._createBidirectionalRNN(
use_gpu=True, use_shape=True,
use_sequence_length=True, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
def testBidirectionalDynamicRNNScope(self):
def get_factory(use_time_major):
def factory(scope):
return self._createBidirectionalDynamicRNN(
use_gpu=True, use_shape=True, use_state_tuple=True,
use_time_major=use_time_major, scope=scope)
return factory
self._testScope(get_factory(True), use_outer_scope=True)
self._testScope(get_factory(True), use_outer_scope=False)
self._testScope(get_factory(True), prefix=None, use_outer_scope=False)
self._testScope(get_factory(False), use_outer_scope=True)
self._testScope(get_factory(False), use_outer_scope=False)
self._testScope(get_factory(False), prefix=None, use_outer_scope=False)
class MultiDimensionalLSTMTest(tf.test.TestCase):
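  """Tests RNN containers with a cell using multi-dimensional inputs/outputs."""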
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testMultiDimensionalLSTMAllRNNContainers(self):
feature_dims = (3, 4, 5)
input_size = feature_dims
batch_size = 2
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
inputs = max_length * [
tf.placeholder(tf.float32, shape=(None,) + input_size)]
inputs_using_dim = max_length * [
tf.placeholder(tf.float32, shape=(batch_size,) + input_size)]
inputs_c = tf.pack(inputs)
# Create a cell for the whole test. This is fine because the cell has no
# variables.
cell = DummyMultiDimensionalLSTM(feature_dims)
state_saver = TestStateSaver(batch_size, input_size)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
outputs_bid, state_bid_fw, state_bid_bw = tf.nn.bidirectional_rnn(
cell, cell, inputs_using_dim, dtype=tf.float32,
sequence_length=sequence_length)
outputs_sav, state_sav = tf.nn.state_saving_rnn(
cell, inputs_using_dim, sequence_length=sequence_length,
state_saver=state_saver, state_name=("h", "c"))
for out, inp in zip(outputs_static, inputs):
self.assertEqual(out.get_shape().as_list(), inp.get_shape().as_list())
self.assertEqual(outputs_dynamic.get_shape().as_list(),
inputs_c.get_shape().as_list())
for out, inp in zip(outputs_bid, inputs_using_dim):
input_shape_list = inp.get_shape().as_list()
# fwd and bwd activations are concatenated along the second dim.
input_shape_list[1] *= 2
self.assertEqual(out.get_shape().as_list(), input_shape_list)
tf.initialize_all_variables().run()
input_total_size = (batch_size,) + input_size
input_value = np.random.randn(*input_total_size)
outputs_static_v = sess.run(
outputs_static, feed_dict={inputs[0]: input_value})
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={inputs[0]: input_value})
outputs_bid_v = sess.run(
outputs_bid, feed_dict={inputs_using_dim[0]: input_value})
outputs_sav_v = sess.run(
outputs_sav, feed_dict={inputs_using_dim[0]: input_value})
self.assertAllEqual(outputs_static_v, outputs_dynamic_v)
self.assertAllEqual(outputs_static_v, outputs_sav_v)
outputs_static_array = np.array(outputs_static_v)
outputs_static_array_double = np.concatenate(
(outputs_static_array, outputs_static_array), axis=2)
outputs_bid_array = np.array(outputs_bid_v)
self.assertAllEqual(outputs_static_array_double, outputs_bid_array)
state_static_v = sess.run(
state_static, feed_dict={inputs[0]: input_value})
state_dynamic_v = sess.run(
state_dynamic, feed_dict={inputs[0]: input_value})
state_bid_fw_v = sess.run(
state_bid_fw, feed_dict={inputs_using_dim[0]: input_value})
state_bid_bw_v = sess.run(
state_bid_bw, feed_dict={inputs_using_dim[0]: input_value})
state_sav_v = sess.run(
state_sav, feed_dict={inputs_using_dim[0]: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_sav_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_fw_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_bw_v))
class NestedLSTMTest(tf.test.TestCase):
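  """Tests RNN containers with a cell using nested tuple inputs and outputs."""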
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def testNestedIOLSTMAllRNNContainers(self):
input_size = 5
batch_size = 2
state_size = 6
max_length = 8
sequence_length = [4, 6]
with self.test_session(graph=tf.Graph()) as sess:
state_saver = TestStateSaver(batch_size, state_size)
single_input = (tf.placeholder(tf.float32, shape=(None, input_size)),
tf.placeholder(tf.float32, shape=(None, input_size)))
inputs = max_length * [single_input]
inputs_c = (tf.pack([input_[0] for input_ in inputs]),
tf.pack([input_[1] for input_ in inputs]))
single_input_using_dim = (
tf.placeholder(tf.float32, shape=(batch_size, input_size)),
tf.placeholder(tf.float32, shape=(batch_size, input_size)))
inputs_using_dim = max_length * [single_input_using_dim]
# Create a cell for the whole test. This is fine because the cell has no
# variables.
cell = NestedRNNCell()
outputs_dynamic, state_dynamic = tf.nn.dynamic_rnn(
cell, inputs_c, dtype=tf.float32, time_major=True,
sequence_length=sequence_length)
outputs_static, state_static = tf.nn.rnn(
cell, inputs, dtype=tf.float32,
sequence_length=sequence_length)
outputs_bid, state_bid_fw, state_bid_bw = tf.nn.bidirectional_rnn(
cell, cell, inputs_using_dim, dtype=tf.float32,
sequence_length=sequence_length)
outputs_sav, state_sav = tf.nn.state_saving_rnn(
cell, inputs_using_dim, sequence_length=sequence_length,
state_saver=state_saver, state_name=("h", "c"))
def _assert_same_shape(input1, input2, double=False):
flat_input1 = nest.flatten(input1)
flat_input2 = nest.flatten(input2)
for inp1, inp2 in zip(flat_input1, flat_input2):
input_shape = inp1.get_shape().as_list()
if double:
input_shape[1] *= 2
self.assertEqual(input_shape, inp2.get_shape().as_list())
_assert_same_shape(inputs_c, outputs_dynamic)
_assert_same_shape(inputs, outputs_static)
_assert_same_shape(inputs_using_dim, outputs_sav)
_assert_same_shape(inputs_using_dim, outputs_bid, double=True)
tf.initialize_all_variables().run()
input_total_size = (batch_size, input_size)
input_value = (np.random.randn(*input_total_size),
np.random.randn(*input_total_size))
outputs_dynamic_v = sess.run(
outputs_dynamic, feed_dict={single_input: input_value})
outputs_static_v = sess.run(
outputs_static, feed_dict={single_input: input_value})
outputs_sav_v = sess.run(
outputs_sav, feed_dict={single_input_using_dim: input_value})
outputs_bid_v = sess.run(
outputs_bid, feed_dict={single_input_using_dim: input_value})
self.assertAllEqual(outputs_static_v,
np.transpose(outputs_dynamic_v, (1, 0, 2, 3)))
self.assertAllEqual(outputs_static_v, outputs_sav_v)
outputs_static_array = np.array(outputs_static_v)
outputs_static_array_double = np.concatenate(
(outputs_static_array, outputs_static_array), axis=3)
outputs_bid_array = np.array(outputs_bid_v)
self.assertAllEqual(outputs_static_array_double, outputs_bid_array)
state_dynamic_v = sess.run(
state_dynamic, feed_dict={single_input: input_value})
state_static_v = sess.run(
state_static, feed_dict={single_input: input_value})
state_bid_fw_v = sess.run(
state_bid_fw, feed_dict={single_input_using_dim: input_value})
state_bid_bw_v = sess.run(
state_bid_bw, feed_dict={single_input_using_dim: input_value})
state_sav_v = sess.run(
state_sav, feed_dict={single_input_using_dim: input_value})
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_dynamic_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_sav_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_fw_v))
self.assertAllEqual(
np.hstack(state_static_v), np.hstack(state_bid_bw_v))
class RawRNNTest(tf.test.TestCase):
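  """Tests tf.nn.raw_rnn and its user-supplied loop_fn."""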
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testRawRNN(self, max_time):
with self.test_session(graph=tf.Graph()) as sess:
batch_size = 16
input_depth = 4
num_units = 3
inputs = tf.placeholder(shape=(max_time, batch_size, input_depth),
dtype=tf.float32)
sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
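      # loop_fn implements the raw_rnn contract: given (time, cell_output,
      # cell_state, loop_state) it returns (elements_finished, next_input,
      # next_cell_state, emit_output, next_loop_state).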
def loop_fn(time_, cell_output, cell_state, unused_loop_state):
emit_output = cell_output # == None for time == 0
if cell_output is None: # time == 0
next_state = cell.zero_state(batch_size, tf.float32)
else:
next_state = cell_state # copy state through
elements_finished = (time_ >= sequence_length)
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
outputs_ta, final_state, _ = tf.nn.raw_rnn(cell, loop_fn)
outputs = outputs_ta.pack()
tf.get_variable_scope().reuse_variables()
outputs_dynamic_rnn, final_state_dynamic_rnn = tf.nn.dynamic_rnn(
cell, inputs, time_major=True, dtype=tf.float32,
sequence_length=sequence_length)
variables = tf.trainable_variables()
gradients = tf.gradients([outputs, final_state], [inputs] + variables)
gradients_dynamic_rnn = tf.gradients(
[outputs_dynamic_rnn, final_state_dynamic_rnn], [inputs] + variables)
tf.initialize_all_variables().run()
rand_input = np.random.randn(max_time, batch_size, input_depth)
if max_time == 0:
rand_seq_len = np.zeros(batch_size)
else:
rand_seq_len = np.random.randint(max_time, size=batch_size)
# To ensure same output lengths for dynamic_rnn and raw_rnn
rand_seq_len[0] = max_time
(outputs_val, outputs_dynamic_rnn_val,
final_state_val, final_state_dynamic_rnn_val) = sess.run(
[outputs, outputs_dynamic_rnn, final_state, final_state_dynamic_rnn],
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
self.assertAllClose(outputs_dynamic_rnn_val, outputs_val)
self.assertAllClose(final_state_dynamic_rnn_val, final_state_val)
      # NOTE: With 0 time steps, raw_rnn has no shape information about the
      # input, so the gradient evaluation would fail. This case therefore
      # skips the gradients comparison.
if max_time > 0:
self.assertEqual(len(gradients), len(gradients_dynamic_rnn))
gradients_val = sess.run(
gradients,
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
gradients_dynamic_rnn_val = sess.run(
gradients_dynamic_rnn,
feed_dict={inputs: rand_input, sequence_length: rand_seq_len})
self.assertEqual(len(gradients_val), len(gradients_dynamic_rnn_val))
input_gradients_val = gradients_val[0]
input_gradients_dynamic_rnn_val = gradients_dynamic_rnn_val[0]
self.assertAllClose(
input_gradients_val, input_gradients_dynamic_rnn_val)
for i in range(1, len(gradients_val)):
self.assertAllClose(gradients_dynamic_rnn_val[i], gradients_val[i])
def testRawRNNZeroLength(self):
    # NOTE: With 0 time steps, raw_rnn has no shape information about the
    # input, so the gradient evaluation would fail. This case therefore
    # skips the gradients comparison.
self._testRawRNN(max_time=0)
def testRawRNN(self):
self._testRawRNN(max_time=10)
def testLoopState(self):
with self.test_session(graph=tf.Graph()):
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, loop_state):
if cell_output is None:
loop_state = tf.constant([0])
next_state = cell.zero_state(batch_size, tf.float32)
else:
loop_state = tf.pack([tf.squeeze(loop_state) + 1])
next_state = cell_state
emit_output = cell_output # == None for time == 0
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input,
next_state, emit_output, loop_state)
r = tf.nn.raw_rnn(cell, loop_fn)
loop_state = r[-1]
self.assertEqual([10], loop_state.eval())
def testLoopStateWithTensorArray(self):
with self.test_session(graph=tf.Graph()):
max_time = 4
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, loop_state):
if cell_output is None:
loop_state = tf.TensorArray(
dynamic_size=True, size=0, dtype=tf.int32, clear_after_read=False)
loop_state = loop_state.write(0, 1)
next_state = cell.zero_state(batch_size, tf.float32)
else:
loop_state = loop_state.write(
time_, loop_state.read(time_ - 1) + time_)
next_state = cell_state
emit_output = cell_output # == None for time == 0
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input,
next_state, emit_output, loop_state)
r = tf.nn.raw_rnn(cell, loop_fn)
loop_state = r[-1]
loop_state = loop_state.pack()
self.assertAllEqual([1, 2, 2 + 2, 4 + 3, 7 + 4], loop_state.eval())
def testEmitDifferentStructureThanCellOutput(self):
with self.test_session(graph=tf.Graph()) as sess:
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
inputs = np.random.randn(max_time, batch_size, input_depth)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, _):
if cell_output is None:
emit_output = (tf.zeros([2, 3], dtype=tf.int32),
tf.zeros([1], dtype=tf.int64))
next_state = cell.zero_state(batch_size, tf.float32)
else:
emit_output = (tf.ones([batch_size, 2, 3], dtype=tf.int32),
tf.ones([batch_size, 1], dtype=tf.int64))
next_state = cell_state
elements_finished = tf.tile([time_ >= max_time], [batch_size])
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
r = tf.nn.raw_rnn(cell, loop_fn)
output_ta = r[0]
self.assertEqual(2, len(output_ta))
self.assertEqual([tf.int32, tf.int64], [ta.dtype for ta in output_ta])
output = [ta.pack() for ta in output_ta]
output_vals = sess.run(output)
self.assertAllEqual(
np.ones((max_time, batch_size, 2, 3), np.int32), output_vals[0])
self.assertAllEqual(
np.ones((max_time, batch_size, 1), np.int64), output_vals[1])
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
# check that all the variables names starts
# with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testRawRNNScope(self):
max_time = 10
batch_size = 16
input_depth = 4
num_units = 3
def factory(scope):
inputs = tf.placeholder(shape=(max_time, batch_size, input_depth),
dtype=tf.float32)
sequence_length = tf.placeholder(shape=(batch_size,), dtype=tf.int32)
inputs_ta = tf.TensorArray(dtype=tf.float32, size=tf.shape(inputs)[0])
inputs_ta = inputs_ta.unpack(inputs)
cell = tf.nn.rnn_cell.LSTMCell(num_units, state_is_tuple=True)
def loop_fn(time_, cell_output, cell_state, unused_loop_state):
emit_output = cell_output # == None for time == 0
if cell_output is None: # time == 0
next_state = cell.zero_state(batch_size, tf.float32)
else:
next_state = cell_state
elements_finished = (time_ >= sequence_length)
finished = tf.reduce_all(elements_finished)
# For the very final iteration, we must emit a dummy input
next_input = tf.cond(
finished,
lambda: tf.zeros([batch_size, input_depth], dtype=tf.float32),
lambda: inputs_ta.read(time_))
return (elements_finished, next_input, next_state, emit_output, None)
return tf.nn.raw_rnn(cell, loop_fn, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
class StateSaverRNNTest(tf.test.TestCase):
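  """Tests variable scoping for tf.nn.state_saving_rnn."""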
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
tf.initialize_all_variables()
      # Check that all the variable names start
      # with the proper scope.
all_vars = tf.all_variables()
prefix = prefix or "RNN"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("RNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testStateSaverRNNScope(self):
num_units = 3
input_size = 5
batch_size = 2
max_length = 8
def factory(scope):
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
state_saver = TestStateSaver(batch_size, 2 * num_units)
cell = tf.nn.rnn_cell.LSTMCell(
num_units, use_peepholes=False, initializer=initializer,
state_is_tuple=False)
inputs = max_length * [
tf.placeholder(tf.float32, shape=(batch_size, input_size))]
return tf.nn.state_saving_rnn(
cell, inputs, state_saver=state_saver,
state_name="save_lstm", scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
######### Benchmarking RNN code
def _static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length):
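  """Builds a statically unrolled LSTM (tf.nn.rnn) and returns an op that
  groups its outputs, final state, and gradients."""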
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + [final_state], trainable_variables)
return tf.group(final_state, *(gradients + outputs))
def _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length):
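  """Builds a dynamically unrolled LSTM (tf.nn.dynamic_rnn) and returns an op
  that groups its outputs, final state, and gradients."""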
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients([outputs, final_state], trainable_variables)
return tf.group(final_state, outputs, *gradients)
def graph_creation_static_vs_dynamic_rnn_benchmark(max_time):
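  """Compares graph-construction time of static vs. dynamic RNN unrolling."""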
config = tf.ConfigProto()
config.allow_soft_placement = True
# These parameters don't matter
batch_size = 512
num_units = 512
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
def _create_static_rnn():
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
def _create_dynamic_rnn():
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(
inputs_t, sequence_length)
delta_static = timeit.timeit(_create_static_rnn, number=5)
delta_dynamic = timeit.timeit(_create_dynamic_rnn, number=5)
print("%d \t %f \t %f \t %f" %
(max_time, delta_static, delta_dynamic, delta_dynamic/delta_static))
return delta_static, delta_dynamic
def _timer(sess, ops):
  # Warm up
for _ in range(2):
sess.run(ops)
# Timing run
runs = 20
start = time.time()
for _ in range(runs):
sess.run(ops)
end = time.time()
return (end - start)/float(runs)
def static_vs_dynamic_rnn_benchmark(batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# Using rnn()
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
tf.initialize_all_variables().run()
delta_static = _timer(sess, ops)
# Using dynamic_rnn()
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(
inputs_t, sequence_length)
tf.initialize_all_variables().run()
delta_dynamic = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f \t %f" %
(batch_size, max_time, num_units, use_gpu, delta_static,
delta_dynamic, delta_dynamic/delta_static))
return delta_static, delta_dynamic
def _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + [final_state], trainable_variables)
return tf.group(final_state, *(gradients + outputs))
def half_seq_len_vs_unroll_half_rnn_benchmark(
batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
# Halve the sequence length, full static unroll
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t, sequence_length / 2)
tf.initialize_all_variables().run()
delta_half_seq_len = _timer(sess, ops)
# Halve the unroll size, don't use sequence length
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t[:(max_time // 2)], sequence_length / 2)
tf.initialize_all_variables().run()
delta_unroll_half = _timer(sess, ops)
print("%d \t %d \t\t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_half_seq_len,
delta_unroll_half, delta_half_seq_len/delta_unroll_half))
return delta_half_seq_len, delta_unroll_half
def _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True,
initializer=initializer, state_is_tuple=state_is_tuple)
outputs, final_state = tf.nn.rnn(
cell, inputs_list_t, sequence_length=sequence_length, dtype=tf.float32)
final_state = list(final_state) if state_is_tuple else [final_state]
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients(outputs + final_state, trainable_variables)
return tf.group(*(final_state + gradients + outputs))
def concat_state_vs_tuple_state_rnn_benchmark(
batch_size, max_time, num_units, use_gpu):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
# Run with concatenated states (default)
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=False)
tf.initialize_all_variables().run()
delta_concat_state = _timer(sess, ops)
# Run with tuple states (new)
with tf.Session(config=config, graph=tf.Graph()) as sess:
with tf.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=True)
tf.initialize_all_variables().run()
delta_tuple_state = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_concat_state,
delta_tuple_state, delta_concat_state/delta_tuple_state))
return delta_concat_state, delta_tuple_state
def _dynamic_rnn_swap_memory_benchmark(inputs_t, sequence_length,
swap_memory):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = tf.nn.rnn_cell.LSTMCell(
num_units=input_size, use_peepholes=True, initializer=initializer,
state_is_tuple=False)
outputs, final_state = tf.nn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length,
swap_memory=swap_memory, dtype=tf.float32)
trainable_variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
gradients = tf.gradients([outputs, final_state], trainable_variables)
return tf.group(final_state, outputs, *gradients)
def dynamic_rnn_swap_memory_benchmark(batch_size, max_time, num_units):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# No memory swap
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=False)
tf.initialize_all_variables().run()
no_swap = _timer(sess, ops)
# Memory swap
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=True)
tf.initialize_all_variables().run()
swap = _timer(sess, ops)
print("%d \t %d \t %d \t %f \t %f \t %f" %
(batch_size, max_time, num_units, no_swap, swap, swap/no_swap))
return no_swap, swap
def rnn_long_sequence_benchmark(batch_size, seqlen, num_units,
dynamic, swap_memory):
config = tf.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = [seqlen for _ in range(batch_size)]
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(seqlen)]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
for _ in range(5):
if dynamic:
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_t = tf.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=swap_memory)
tf.initialize_all_variables().run()
elapsed = _timer(sess, ops)
else:
with tf.Session(config=config, graph=tf.Graph()) as sess:
inputs_list_t = [
tf.Variable(x, trainable=False).value() for x in inputs_list]
ops = _static_vs_dynamic_rnn_benchmark_static(
inputs_list_t, sequence_length)
tf.initialize_all_variables().run()
elapsed = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f" %
(batch_size, seqlen, num_units, dynamic, elapsed,
elapsed/seqlen))
class BenchmarkRNN(tf.test.Benchmark):
def benchmarkGraphCreationStaticVsDynamicLSTM(self):
print("Graph Creation: Static Unroll vs. Dynamic Unroll LSTM")
print("max_t \t dt(static) \t dt(dynamic) \t dt(dynamic)/dt(static)")
for max_time in (1, 25, 50):
s_dt, d_dt = graph_creation_static_vs_dynamic_rnn_benchmark(max_time)
self.report_benchmark(name="graph_creation_time_static_T%02d" % max_time,
iters=5, wall_time=s_dt)
self.report_benchmark(name="graph_creation_time_dynamic_T%02d" % max_time,
iters=5, wall_time=d_dt)
def benchmarkStaticUnrollVsDynamicFlowLSTM(self):
print("Calculation: Static Unroll with Dynamic Flow LSTM "
"vs. Dynamic Unroll LSTM")
print("batch \t max_t \t units \t gpu \t dt(static) \t dt(dynamic) "
"\t dt(dynamic)/dt(static)")
for batch_size in (256,):
for max_time in (50,):
for num_units in (512, 256, 128):
for use_gpu in (False, True):
s_dt, d_dt = static_vs_dynamic_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="static_unroll_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=s_dt)
self.report_benchmark(
name="dynamic_unroll_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=d_dt)
def benchmarkDynamicLSTMNoMemorySwapVsMemorySwap(self):
print("Calculation: Dynamic LSTM No Memory Swap vs. Memory Swap")
print("batch \t max_t \t units \t no_swap \t swap \t swap/no_swap")
for batch_size in (256, 512):
for max_time in (100,):
for num_units in (512, 256, 128):
no_swap, swap = dynamic_rnn_swap_memory_benchmark(
batch_size, max_time, num_units)
self.report_benchmark(
name="dynamic_lstm_no_memory_swap_T%02d_B%03d_N%03d"
% (max_time, batch_size, num_units),
iters=20, wall_time=no_swap)
self.report_benchmark(
name="dynamic_lstm_with_memory_swap_T%02d_B%03d_N%03d"
% (max_time, batch_size, num_units),
iters=20, wall_time=swap)
def benchmarkStaticUnrollHalfSequenceLengthVsHalfUnroll(self):
print("Calculation: Static Unroll with Halved Sequence Length "
"vs. Half Static Unroll")
print("batch \t full_t \t units \t gpu \t dt(half_seq_len) "
"\t dt(unroll_half) \t dt(half_seq_len)/dt(unroll_half)")
for batch_size in (128,):
for max_time in (50,):
for num_units in (256,):
for use_gpu in (False, True):
s_dt, d_dt = half_seq_len_vs_unroll_half_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="half_seq_len_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=s_dt)
self.report_benchmark(
name="unroll_half_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=d_dt)
def benchmarkStaticUnrollStateConcatVsStateTuple(self):
print("Calculation: Static Unroll with Concatenated State "
"vs. Tuple State")
print("batch \t time \t units \t gpu \t dt(concat_state) "
"\t dt(tuple_state) \t dt(concat_state)/dt(tuple_state)")
for batch_size in (16, 128,):
for max_time in (50,):
for num_units in (16, 128,):
for use_gpu in (False, True):
c_dt, t_dt = concat_state_vs_tuple_state_rnn_benchmark(
batch_size, max_time, num_units, use_gpu)
self.report_benchmark(
name="concat_state_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=c_dt)
self.report_benchmark(
name="tuple_state_time_T%02d_B%03d_N%03d_gpu_%s"
% (max_time, batch_size, num_units, use_gpu),
iters=20, wall_time=t_dt)
if __name__ == "__main__":
tf.test.main()
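# Note: the *Test classes above run as ordinary unit tests under
# tf.test.main(), while BenchmarkRNN is only picked up by TensorFlow's
# benchmark runner. A hedged invocation sketch (the --benchmarks regex flag
# is handled by the tf.test.Benchmark machinery; the pattern below is only
# an example):
#
#   python rnn_test.py --benchmarks=BenchmarkRNN.benchmarkGraphCreationStaticVsDynamicLSTM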
| neilhan/tensorflow | tensorflow/python/kernel_tests/rnn_test.py | Python | apache-2.0 | 95,137 |
# coding=utf-8
from django_sae.contrib.tasks.cron import OperationView
from django_sae.contrib.tasks.operations import TaskOperationMixin
class OperationViewMock(OperationView):
def get_operation(self, request):
return [TaskOperationMixin() for _ in range(0, 3)] | zl352773277/django-sae | django_sae/contrib/tasks/tests/views.py | Python | apache-2.0 | 276 |
import os
import yaml
from google.cloud import storage
from google.oauth2 import service_account
from .storage import Storage
class GcsStorage(Storage):
def __init__(self, bucket, path, project=None, json_path=None):
if bucket is None:
raise ValueError('Bucket must be supplied to GCS storage')
if path is None:
path = 'spinbot/cache'
self.path = path
if json_path is not None:
json_path = os.path.expanduser(json_path)
credentials = service_account.Credentials.from_service_account_file(json_path)
if credentials.requires_scopes:
credentials = credentials.with_scopes(['https://www.googleapis.com/auth/devstorage.read_write'])
self.client = storage.Client(project=project, credentials=credentials)
else:
self.client = storage.Client()
if self.client.lookup_bucket(bucket) is None:
self.client.create_bucket(bucket)
self.bucket = self.client.get_bucket(bucket)
super().__init__()
def store(self, key, val):
b = self.bucket.get_blob(self.path)
contents = '{}'
if b:
contents = b.download_as_string()
else:
b = self.bucket.blob(self.path)
props = yaml.safe_load(contents)
if props is None:
props = {}
props[key] = val
b.upload_from_string(yaml.safe_dump(props))
def load(self, key):
b = self.bucket.get_blob(self.path)
contents = '{}'
if b:
contents = b.download_as_string()
else:
b = self.bucket.blob(self.path)
props = yaml.safe_load(contents)
if props is None:
props = {}
return props.get(key)
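# A minimal usage sketch (not part of the original module): the bucket,
# project and key-file names are hypothetical, and valid Google Cloud
# credentials plus network access are assumed.
#
#   store = GcsStorage('my-spinbot-bucket', 'spinbot/cache',
#                      project='my-gcp-project',
#                      json_path='~/gcp-service-account.json')
#   store.store('last_seen_issue', 1234)
#   assert store.load('last_seen_issue') == 1234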
| skim1420/spinnaker | spinbot/storage/gcs_storage.py | Python | apache-2.0 | 1,786 |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import ssl
from oslo.config import cfg
from volt.openstack.common.gettextutils import _
ssl_opts = [
cfg.StrOpt('ca_file',
default=None,
help="CA certificate file to use to verify "
"connecting clients."),
cfg.StrOpt('cert_file',
default=None,
help="Certificate file to use when starting "
"the server securely."),
cfg.StrOpt('key_file',
default=None,
help="Private key file to use when starting "
"the server securely."),
]
CONF = cfg.CONF
CONF.register_opts(ssl_opts, "ssl")
def is_enabled():
cert_file = CONF.ssl.cert_file
key_file = CONF.ssl.key_file
ca_file = CONF.ssl.ca_file
use_ssl = cert_file or key_file
if cert_file and not os.path.exists(cert_file):
raise RuntimeError(_("Unable to find cert_file : %s") % cert_file)
if ca_file and not os.path.exists(ca_file):
raise RuntimeError(_("Unable to find ca_file : %s") % ca_file)
if key_file and not os.path.exists(key_file):
raise RuntimeError(_("Unable to find key_file : %s") % key_file)
if use_ssl and (not cert_file or not key_file):
raise RuntimeError(_("When running server in SSL mode, you must "
"specify both a cert_file and key_file "
"option value in your configuration file"))
return use_ssl
def wrap(sock):
ssl_kwargs = {
'server_side': True,
'certfile': CONF.ssl.cert_file,
'keyfile': CONF.ssl.key_file,
'cert_reqs': ssl.CERT_NONE,
}
if CONF.ssl.ca_file:
ssl_kwargs['ca_certs'] = CONF.ssl.ca_file
ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED
return ssl.wrap_socket(sock, **ssl_kwargs)
_SSL_PROTOCOLS = {
"tlsv1": ssl.PROTOCOL_TLSv1,
"sslv23": ssl.PROTOCOL_SSLv23,
"sslv3": ssl.PROTOCOL_SSLv3
}
try:
_SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2
except AttributeError:
pass
def validate_ssl_version(version):
key = version.lower()
try:
return _SSL_PROTOCOLS[key]
except KeyError:
raise RuntimeError(_("Invalid SSL version : %s") % version)
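# A hedged usage sketch (not part of the original module; assumes cert_file
# and key_file are set in the [ssl] config group and point at real files;
# the address and port below are arbitrary):
#
#   import socket
#
#   sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   sock.bind(('0.0.0.0', 8780))
#   sock.listen(5)
#   if is_enabled():
#       sock = wrap(sock)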
| zhangwenyu/packages | volt/volt/openstack/common/sslutils.py | Python | apache-2.0 | 2,842 |
"""Support to serve the Home Assistant API as WSGI application."""
from __future__ import annotations
from ipaddress import ip_network
import logging
import os
import ssl
from typing import Any, Final, Optional, TypedDict, cast
from aiohttp import web
from aiohttp.typedefs import StrOrURL
from aiohttp.web_exceptions import HTTPMovedPermanently, HTTPRedirection
import voluptuous as vol
from homeassistant.components.network import async_get_source_ip
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, SERVER_PORT
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers import storage
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.setup import async_start_setup, async_when_setup_or_start
from homeassistant.util import ssl as ssl_util
from .auth import setup_auth
from .ban import setup_bans
from .const import KEY_AUTHENTICATED, KEY_HASS, KEY_HASS_USER # noqa: F401
from .cors import setup_cors
from .forwarded import async_setup_forwarded
from .request_context import current_request, setup_request_context
from .security_filter import setup_security_filter
from .static import CACHE_HEADERS, CachingStaticResource
from .view import HomeAssistantView
from .web_runner import HomeAssistantTCPSite
DOMAIN: Final = "http"
CONF_SERVER_HOST: Final = "server_host"
CONF_SERVER_PORT: Final = "server_port"
CONF_BASE_URL: Final = "base_url"
CONF_SSL_CERTIFICATE: Final = "ssl_certificate"
CONF_SSL_PEER_CERTIFICATE: Final = "ssl_peer_certificate"
CONF_SSL_KEY: Final = "ssl_key"
CONF_CORS_ORIGINS: Final = "cors_allowed_origins"
CONF_USE_X_FORWARDED_FOR: Final = "use_x_forwarded_for"
CONF_TRUSTED_PROXIES: Final = "trusted_proxies"
CONF_LOGIN_ATTEMPTS_THRESHOLD: Final = "login_attempts_threshold"
CONF_IP_BAN_ENABLED: Final = "ip_ban_enabled"
CONF_SSL_PROFILE: Final = "ssl_profile"
SSL_MODERN: Final = "modern"
SSL_INTERMEDIATE: Final = "intermediate"
_LOGGER: Final = logging.getLogger(__name__)
DEFAULT_DEVELOPMENT: Final = "0"
# Cast to be able to load custom cards.
# My Home Assistant to be able to check url and version info.
DEFAULT_CORS: Final[list[str]] = ["https://cast.home-assistant.io"]
NO_LOGIN_ATTEMPT_THRESHOLD: Final = -1
MAX_CLIENT_SIZE: Final = 1024 ** 2 * 16
STORAGE_KEY: Final = DOMAIN
STORAGE_VERSION: Final = 1
SAVE_DELAY: Final = 180
HTTP_SCHEMA: Final = vol.All(
cv.deprecated(CONF_BASE_URL),
vol.Schema(
{
vol.Optional(CONF_SERVER_HOST): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
),
vol.Optional(CONF_SERVER_PORT, default=SERVER_PORT): cv.port,
vol.Optional(CONF_BASE_URL): cv.string,
vol.Optional(CONF_SSL_CERTIFICATE): cv.isfile,
vol.Optional(CONF_SSL_PEER_CERTIFICATE): cv.isfile,
vol.Optional(CONF_SSL_KEY): cv.isfile,
vol.Optional(CONF_CORS_ORIGINS, default=DEFAULT_CORS): vol.All(
cv.ensure_list, [cv.string]
),
vol.Inclusive(CONF_USE_X_FORWARDED_FOR, "proxy"): cv.boolean,
vol.Inclusive(CONF_TRUSTED_PROXIES, "proxy"): vol.All(
cv.ensure_list, [ip_network]
),
vol.Optional(
CONF_LOGIN_ATTEMPTS_THRESHOLD, default=NO_LOGIN_ATTEMPT_THRESHOLD
): vol.Any(cv.positive_int, NO_LOGIN_ATTEMPT_THRESHOLD),
vol.Optional(CONF_IP_BAN_ENABLED, default=True): cv.boolean,
vol.Optional(CONF_SSL_PROFILE, default=SSL_MODERN): vol.In(
[SSL_INTERMEDIATE, SSL_MODERN]
),
}
),
)
CONFIG_SCHEMA: Final = vol.Schema({DOMAIN: HTTP_SCHEMA}, extra=vol.ALLOW_EXTRA)
class ConfData(TypedDict, total=False):
"""Typed dict for config data."""
server_host: list[str]
server_port: int
base_url: str
ssl_certificate: str
ssl_peer_certificate: str
ssl_key: str
cors_allowed_origins: list[str]
use_x_forwarded_for: bool
trusted_proxies: list[str]
login_attempts_threshold: int
ip_ban_enabled: bool
ssl_profile: str
@bind_hass
async def async_get_last_config(hass: HomeAssistant) -> dict | None:
"""Return the last known working config."""
store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY)
return cast(Optional[dict], await store.async_load())
class ApiConfig:
"""Configuration settings for API server."""
def __init__(
self,
local_ip: str,
host: str,
port: int,
use_ssl: bool,
) -> None:
"""Initialize a new API config object."""
self.local_ip = local_ip
self.host = host
self.port = port
self.use_ssl = use_ssl
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the HTTP API and debug interface."""
conf: ConfData | None = config.get(DOMAIN)
if conf is None:
conf = cast(ConfData, HTTP_SCHEMA({}))
server_host = conf.get(CONF_SERVER_HOST)
server_port = conf[CONF_SERVER_PORT]
ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)
ssl_key = conf.get(CONF_SSL_KEY)
cors_origins = conf[CONF_CORS_ORIGINS]
use_x_forwarded_for = conf.get(CONF_USE_X_FORWARDED_FOR, False)
trusted_proxies = conf.get(CONF_TRUSTED_PROXIES) or []
is_ban_enabled = conf[CONF_IP_BAN_ENABLED]
login_threshold = conf[CONF_LOGIN_ATTEMPTS_THRESHOLD]
ssl_profile = conf[CONF_SSL_PROFILE]
server = HomeAssistantHTTP(
hass,
server_host=server_host,
server_port=server_port,
ssl_certificate=ssl_certificate,
ssl_peer_certificate=ssl_peer_certificate,
ssl_key=ssl_key,
cors_origins=cors_origins,
use_x_forwarded_for=use_x_forwarded_for,
trusted_proxies=trusted_proxies,
login_threshold=login_threshold,
is_ban_enabled=is_ban_enabled,
ssl_profile=ssl_profile,
)
async def stop_server(event: Event) -> None:
"""Stop the server."""
await server.stop()
async def start_server(*_: Any) -> None:
"""Start the server."""
with async_start_setup(hass, ["http"]):
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_server)
# We already checked it's not None.
assert conf is not None
await start_http_server_and_save_config(hass, dict(conf), server)
async_when_setup_or_start(hass, "frontend", start_server)
hass.http = server
local_ip = await async_get_source_ip(hass)
host = local_ip
if server_host is not None:
        # Assume the first server host name provided is the API host
host = server_host[0]
hass.config.api = ApiConfig(
local_ip, host, server_port, ssl_certificate is not None
)
return True
class HomeAssistantHTTP:
"""HTTP server for Home Assistant."""
def __init__(
self,
hass: HomeAssistant,
ssl_certificate: str | None,
ssl_peer_certificate: str | None,
ssl_key: str | None,
server_host: list[str] | None,
server_port: int,
cors_origins: list[str],
use_x_forwarded_for: bool,
trusted_proxies: list[str],
login_threshold: int,
is_ban_enabled: bool,
ssl_profile: str,
) -> None:
"""Initialize the HTTP Home Assistant server."""
app = self.app = web.Application(
middlewares=[], client_max_size=MAX_CLIENT_SIZE
)
app[KEY_HASS] = hass
        # Order matters: the security filter middleware needs to go first,
        # the forwarded middleware needs to go second.
setup_security_filter(app)
async_setup_forwarded(app, use_x_forwarded_for, trusted_proxies)
setup_request_context(app, current_request)
if is_ban_enabled:
setup_bans(hass, app, login_threshold)
setup_auth(hass, app)
setup_cors(app, cors_origins)
self.hass = hass
self.ssl_certificate = ssl_certificate
self.ssl_peer_certificate = ssl_peer_certificate
self.ssl_key = ssl_key
self.server_host = server_host
self.server_port = server_port
self.trusted_proxies = trusted_proxies
self.is_ban_enabled = is_ban_enabled
self.ssl_profile = ssl_profile
self._handler = None
self.runner: web.AppRunner | None = None
self.site: HomeAssistantTCPSite | None = None
def register_view(self, view: HomeAssistantView) -> None:
"""Register a view with the WSGI server.
The view argument must be a class that inherits from HomeAssistantView.
It is optional to instantiate it before registering; this method will
handle it either way.
"""
if isinstance(view, type):
# Instantiate the view, if needed
view = view()
if not hasattr(view, "url"):
class_name = view.__class__.__name__
raise AttributeError(f'{class_name} missing required attribute "url"')
if not hasattr(view, "name"):
class_name = view.__class__.__name__
raise AttributeError(f'{class_name} missing required attribute "name"')
view.register(self.app, self.app.router)
def register_redirect(
self,
url: str,
redirect_to: StrOrURL,
*,
redirect_exc: type[HTTPRedirection] = HTTPMovedPermanently,
) -> None:
"""Register a redirect with the server.
If given this must be either a string or callable. In case of a
callable it's called with the url adapter that triggered the match and
the values of the URL as keyword arguments and has to return the target
for the redirect, otherwise it has to be a string with placeholders in
rule syntax.
"""
async def redirect(request: web.Request) -> web.StreamResponse:
"""Redirect to location."""
# Should be instance of aiohttp.web_exceptions._HTTPMove.
raise redirect_exc(redirect_to) # type: ignore[arg-type,misc]
self.app["allow_configured_cors"](
self.app.router.add_route("GET", url, redirect)
)
def register_static_path(
self, url_path: str, path: str, cache_headers: bool = True
) -> None:
"""Register a folder or file to serve as a static path."""
if os.path.isdir(path):
if cache_headers:
resource: CachingStaticResource | web.StaticResource = (
CachingStaticResource(url_path, path)
)
else:
resource = web.StaticResource(url_path, path)
self.app.router.register_resource(resource)
self.app["allow_configured_cors"](resource)
return
async def serve_file(request: web.Request) -> web.FileResponse:
"""Serve file from disk."""
if cache_headers:
return web.FileResponse(path, headers=CACHE_HEADERS)
return web.FileResponse(path)
self.app["allow_configured_cors"](
self.app.router.add_route("GET", url_path, serve_file)
)
async def start(self) -> None:
"""Start the aiohttp server."""
context: ssl.SSLContext | None
if self.ssl_certificate:
try:
if self.ssl_profile == SSL_INTERMEDIATE:
context = ssl_util.server_context_intermediate()
else:
context = ssl_util.server_context_modern()
await self.hass.async_add_executor_job(
context.load_cert_chain, self.ssl_certificate, self.ssl_key
)
except OSError as error:
_LOGGER.error(
"Could not read SSL certificate from %s: %s",
self.ssl_certificate,
error,
)
return
if self.ssl_peer_certificate:
context.verify_mode = ssl.CERT_REQUIRED
await self.hass.async_add_executor_job(
context.load_verify_locations, self.ssl_peer_certificate
)
else:
context = None
# Aiohttp freezes apps after start so that no changes can be made.
        # However, in Home Assistant components can be discovered after boot,
        # which would then raise a RuntimeError.
        # To work around this we prevent the router from getting frozen.
# pylint: disable=protected-access
self.app._router.freeze = lambda: None # type: ignore[assignment]
self.runner = web.AppRunner(self.app)
await self.runner.setup()
self.site = HomeAssistantTCPSite(
self.runner, self.server_host, self.server_port, ssl_context=context
)
try:
await self.site.start()
except OSError as error:
_LOGGER.error(
"Failed to create HTTP server at port %d: %s", self.server_port, error
)
_LOGGER.info("Now listening on port %d", self.server_port)
async def stop(self) -> None:
"""Stop the aiohttp server."""
if self.site is not None:
await self.site.stop()
if self.runner is not None:
await self.runner.cleanup()
async def start_http_server_and_save_config(
hass: HomeAssistant, conf: dict, server: HomeAssistantHTTP
) -> None:
"""Startup the http server and save the config."""
await server.start()
    # If we are set up successfully, we store the HTTP settings for safe mode.
store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY)
if CONF_TRUSTED_PROXIES in conf:
conf[CONF_TRUSTED_PROXIES] = [
str(ip.network_address) for ip in conf[CONF_TRUSTED_PROXIES]
]
store.async_delay_save(lambda: conf, SAVE_DELAY)
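# A minimal view-registration sketch (the view below is hypothetical and
# assumes a running Home Assistant instance exposing `hass.http`; it is not
# executed as part of this module):
#
#   class HelloWorldView(HomeAssistantView):
#       url = "/api/hello_world"
#       name = "api:hello_world"
#
#       async def get(self, request):
#           return self.json({"message": "hello"})
#
#   hass.http.register_view(HelloWorldView)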
| mezz64/home-assistant | homeassistant/components/http/__init__.py | Python | apache-2.0 | 14,018 |
"""Remove uniqueness in Repo
Revision ID: 51d493c4d3e1
Revises: 5ac5404bfcd9
Create Date: 2015-05-11 18:55:46.065354
"""
# revision identifiers, used by Alembic.
revision = '51d493c4d3e1'
down_revision = '5ac5404bfcd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(u'ix_RepositoryApps_url', table_name='RepositoryApps')
op.create_index(u'ix_RepositoryApps_url', 'RepositoryApps', ['url'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(u'ix_RepositoryApps_url', table_name='RepositoryApps')
op.create_index(u'ix_RepositoryApps_url', 'RepositoryApps', [u'url'], unique=True)
### end Alembic commands ###
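# Typically applied with the standard Alembic CLI from the project root
# (a sketch; an alembic.ini pointing at this versions directory is assumed):
#   alembic upgrade 51d493c4d3e1
#   alembic downgrade 5ac5404bfcd9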
| go-lab/appcomposer | alembic/versions/51d493c4d3e1_remove_uniqueness_in_repo.py | Python | bsd-2-clause | 820 |
from traits.api import Int, Tuple
from enable.tools.api import ViewportPanTool
class MPViewportPanTool(ViewportPanTool):
cur_bid = Int(-1)
_last_blob_pos = Tuple
def normal_blob_down(self, event):
if self.cur_bid == -1 and self.is_draggable(event.x, event.y):
self.cur_bid = event.bid
self.drag_start(event)
def dragging_blob_up(self, event):
if event.bid == self.cur_bid:
self.cur_bid = -1
self.drag_end(event)
def dragging_blob_move(self, event):
if event.bid == self.cur_bid:
self._last_blob_pos = (event.x, event.y)
self.dragging(event)
def drag_start(self, event):
if self.component:
self.original_padding = self.component.padding
if hasattr(event, "bid"):
event.window.capture_blob(self, event.bid,
event.net_transform())
else:
event.window.set_mouse_owner(self, event.net_transform())
self._last_blob_pos = (event.x, event.y)
self.mouse_down_position = (event.x,event.y)
self.event_state = "dragging"
event.handled = True
ViewportPanTool.drag_start(self, event)
return
def drag_end(self, event):
event.x, event.y = self._last_blob_pos
if hasattr(event, "bid"):
event.window.release_blob(event.bid)
self.event_state = "normal"
ViewportPanTool.drag_end(self, event)
| tommy-u/chaco | examples/demo/canvas/mp_viewport_pan_tool.py | Python | bsd-3-clause | 1,535 |
# Copyright (c) 2012-2013, Itzik Kotler
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the author nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import networkx
# Local imports
import pythonect.internal.parsers.p2y
class TestPythonectScriptParser(unittest.TestCase):
def test_program_empty(self):
g = networkx.DiGraph()
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('').nodes()) == len(g.nodes()), True)
def test_expr_atom(self):
g = networkx.DiGraph()
g.add_node('1')
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('1').nodes()) == len(g.nodes()), True)
def test_shebang_line_with_even_expr_atom_op_expr(self):
g = networkx.DiGraph()
g.add_node('1')
g.add_node('2')
g.add_edge('1', '2')
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('#! /usr/bin/env pythonect\n1 -> 1').edges()) == len(g.edges()), True)
def test_even_expr_atom_op_expr(self):
g = networkx.DiGraph()
g.add_node('1')
g.add_node('2')
g.add_edge('1', '2')
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('1 -> 1').edges()) == len(g.edges()), True)
def test_odd_expr_atom_op_expr(self):
g = networkx.DiGraph()
g.add_node('1')
g.add_node('2')
g.add_node('3')
g.add_edge('1', '2')
g.add_edge('2', '3')
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('1 -> 1 -> 1').edges()) == len(g.edges()), True)
def test_program_expr_list(self):
g = networkx.DiGraph()
g.add_node('1')
g.add_node('2')
self.assertEqual(len(pythonect.internal.parsers.p2y.PythonectScriptParser().parse('1 , 2').nodes()) == len(g.nodes()), True)
| fr34k8/pythonect | pythonect/internal/parsers/test/test_p2y.py | Python | bsd-3-clause | 3,355 |
import numpy as np
from menpo.image import Image, BooleanImage, MaskedImage
from menpo.shape import PointCloud
from menpo.testing import is_same_array
def test_image_copy():
pixels = np.ones([1, 10, 10])
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = Image(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
def test_booleanimage_copy():
pixels = np.ones([10, 10], dtype=np.bool)
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = BooleanImage(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
def test_maskedimage_copy():
pixels = np.ones([1, 10, 10])
landmarks = PointCloud(np.ones([3, 2]), copy=False)
im = MaskedImage(pixels, copy=False)
im.landmarks['test'] = landmarks
im_copy = im.copy()
assert (not is_same_array(im.pixels, im_copy.pixels))
assert (not is_same_array(im_copy.landmarks['test'].points,
im.landmarks['test'].points))
| grigorisg9gr/menpo | menpo/image/test/image_copy_test.py | Python | bsd-3-clause | 1,372 |
#!/usr/bin/env python
from __future__ import print_function
from builtins import input
import sys
import pmagpy.pmag as pmag
def main():
"""
NAME
gofish.py
DESCRIPTION
calculates fisher parameters from dec inc data
INPUT FORMAT
takes dec/inc as first two columns in space delimited file
SYNTAX
gofish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file
-F FILE, specifies output file name
< filename for reading from standard input
OUTPUT
mean dec, mean inc, N, R, k, a95, csd
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with dec, inc data: ")
f=open(file,'r')
data=f.readlines()
elif '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data = sys.stdin.readlines() # read from standard input
ofile = ""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
        out = open(ofile, 'a')
DIs= [] # set up list for dec inc data
for line in data: # read in the data from standard input
if '\t' in line:
rec=line.split('\t') # split each line on space to get records
else:
rec=line.split() # split each line on space to get records
DIs.append((float(rec[0]),float(rec[1])))
#
fpars=pmag.fisher_mean(DIs)
outstring='%7.1f %7.1f %i %10.4f %8.1f %7.1f %7.1f'%(fpars['dec'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'], fpars['csd'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n')
#
if __name__ == "__main__":
main()
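# Example invocations (the file name is hypothetical; the file holds one
# "dec inc" pair per line, space or tab delimited, as described above):
#   gofish.py -f directions.txt
#   gofish.py < directions.txt
#   gofish.py -f directions.txt -F fisher_means.txt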
| Caoimhinmg/PmagPy | programs/gofish.py | Python | bsd-3-clause | 1,976 |
#!/usr/bin/env python
"""
fs.tests: testcases for the fs module
"""
from __future__ import with_statement
# Send any output from the logging module to stdout, so it will
# be captured by nose and reported appropriately
import sys
import logging
logging.basicConfig(level=logging.ERROR, stream=sys.stdout)
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.filelike import StringIO
import datetime
import unittest
import os
import os.path
import pickle
import random
import copy
import time
try:
import threading
except ImportError:
import dummy_threading as threading
import six
from six import PY3, b
class FSTestCases(object):
"""Base suite of testcases for filesystem implementations.
Any FS subclass should be capable of passing all of these tests.
To apply the tests to your own FS implementation, simply use FSTestCase
as a mixin for your own unittest.TestCase subclass and have the setUp
method set self.fs to an instance of your FS implementation.
NB. The Filesystem being tested must have a capacity of at least 3MB.
This class is designed as a mixin so that it's not detected by test
loading tools such as nose.
"""
def check(self, p):
"""Check that a file exists within self.fs"""
return self.fs.exists(p)
def test_invalid_chars(self):
"""Check paths validate ok"""
        # Will have to be overridden selectively for custom validatepath methods
self.assertEqual(self.fs.validatepath(''), None)
self.assertEqual(self.fs.validatepath('.foo'), None)
self.assertEqual(self.fs.validatepath('foo'), None)
self.assertEqual(self.fs.validatepath('foo/bar'), None)
self.assert_(self.fs.isvalidpath('foo/bar'))
def test_meta(self):
"""Checks getmeta / hasmeta are functioning"""
# getmeta / hasmeta are hard to test, since there is no way to validate
# the implementation's response
meta_names = ["read_only",
"network",
"unicode_paths"]
stupid_meta = 'thismetashouldnotexist!"r$$%^&&*()_+'
self.assertRaises(NoMetaError, self.fs.getmeta, stupid_meta)
self.assertFalse(self.fs.hasmeta(stupid_meta))
self.assertEquals(None, self.fs.getmeta(stupid_meta, None))
self.assertEquals(3.14, self.fs.getmeta(stupid_meta, 3.14))
for meta_name in meta_names:
try:
meta = self.fs.getmeta(meta_name)
self.assertTrue(self.fs.hasmeta(meta_name))
except NoMetaError:
self.assertFalse(self.fs.hasmeta(meta_name))
def test_root_dir(self):
self.assertTrue(self.fs.isdir(""))
self.assertTrue(self.fs.isdir("/"))
# These may be false (e.g. empty dict) but mustn't raise errors
self.fs.getinfo("")
self.assertTrue(self.fs.getinfo("/") is not None)
def test_getsyspath(self):
try:
syspath = self.fs.getsyspath("/")
except NoSysPathError:
pass
else:
self.assertTrue(isinstance(syspath, unicode))
syspath = self.fs.getsyspath("/", allow_none=True)
if syspath is not None:
self.assertTrue(isinstance(syspath, unicode))
def test_debug(self):
str(self.fs)
repr(self.fs)
self.assert_(hasattr(self.fs, 'desc'))
def test_open_on_directory(self):
self.fs.makedir("testdir")
try:
f = self.fs.open("testdir")
except ResourceInvalidError:
pass
except Exception:
raise
ecls = sys.exc_info()[0]
assert False, "%s raised instead of ResourceInvalidError" % (ecls,)
else:
f.close()
assert False, "ResourceInvalidError was not raised"
def test_writefile(self):
self.assertRaises(ResourceNotFoundError, self.fs.open, "test1.txt")
f = self.fs.open("test1.txt", "wb")
f.write(b("testing"))
f.close()
self.assertTrue(self.check("test1.txt"))
f = self.fs.open("test1.txt", "rb")
self.assertEquals(f.read(), b("testing"))
f.close()
f = self.fs.open("test1.txt", "wb")
f.write(b("test file overwrite"))
f.close()
self.assertTrue(self.check("test1.txt"))
f = self.fs.open("test1.txt", "rb")
self.assertEquals(f.read(), b("test file overwrite"))
f.close()
def test_createfile(self):
test = b('now with content')
self.fs.createfile("test.txt")
self.assert_(self.fs.exists("test.txt"))
self.assertEqual(self.fs.getcontents("test.txt", "rb"), b(''))
self.fs.setcontents("test.txt", test)
self.fs.createfile("test.txt")
self.assertEqual(self.fs.getcontents("test.txt", "rb"), test)
self.fs.createfile("test.txt", wipe=True)
self.assertEqual(self.fs.getcontents("test.txt", "rb"), b(''))
def test_setcontents(self):
# setcontents() should accept both a string...
self.fs.setcontents("hello", b("world"))
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents("hello", StringIO(b("to you, good sir!")))
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
# setcontents() should accept both a string...
self.fs.setcontents("hello", b("world"), chunk_size=2)
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents("hello", StringIO(
b("to you, good sir!")), chunk_size=2)
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
self.fs.setcontents("hello", b(""))
self.assertEquals(self.fs.getcontents("hello", "rb"), b(""))
def test_setcontents_async(self):
# setcontents() should accept both a string...
self.fs.setcontents_async("hello", b("world")).wait()
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents_async("hello", StringIO(
b("to you, good sir!"))).wait()
self.assertEquals(self.fs.getcontents("hello"), b("to you, good sir!"))
self.fs.setcontents_async("hello", b("world"), chunk_size=2).wait()
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents_async("hello", StringIO(
b("to you, good sir!")), chunk_size=2).wait()
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
def test_isdir_isfile(self):
self.assertFalse(self.fs.exists("dir1"))
self.assertFalse(self.fs.isdir("dir1"))
self.assertFalse(self.fs.isfile("a.txt"))
self.fs.setcontents("a.txt", b(''))
self.assertFalse(self.fs.isdir("dir1"))
self.assertTrue(self.fs.exists("a.txt"))
self.assertTrue(self.fs.isfile("a.txt"))
self.assertFalse(self.fs.exists("a.txt/thatsnotadir"))
self.fs.makedir("dir1")
self.assertTrue(self.fs.isdir("dir1"))
self.assertTrue(self.fs.exists("dir1"))
self.assertTrue(self.fs.exists("a.txt"))
self.fs.remove("a.txt")
self.assertFalse(self.fs.exists("a.txt"))
def test_listdir(self):
def check_unicode(items):
for item in items:
self.assertTrue(isinstance(item, unicode))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
self.fs.setcontents("bar", b(''))
# Test listing of the root directory
d1 = self.fs.listdir()
self.assertEqual(len(d1), 4)
self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdir("")
self.assertEqual(len(d1), 4)
self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdir("/")
self.assertEqual(len(d1), 4)
check_unicode(d1)
# Test listing absolute paths
d2 = self.fs.listdir(absolute=True)
self.assertEqual(len(d2), 4)
self.assertEqual(sorted(d2), [u"/a", u"/b", u"/bar", u"/foo"])
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
        # contents are not inadvertently included
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
self.fs.setcontents("p/1/2/3/bar", b(''))
self.fs.makedir("q")
# Test listing just files, just dirs, and wildcards
dirs_only = self.fs.listdir(dirs_only=True)
files_only = self.fs.listdir(files_only=True)
contains_a = self.fs.listdir(wildcard="*a*")
self.assertEqual(sorted(dirs_only), [u"p", u"q"])
self.assertEqual(sorted(files_only), [u"a", u"b", u"bar", u"foo"])
self.assertEqual(sorted(contains_a), [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
# Test listing a subdirectory
d3 = self.fs.listdir("p/1/2/3")
self.assertEqual(len(d3), 4)
self.assertEqual(sorted(d3), [u"a", u"b", u"bar", u"foo"])
check_unicode(d3)
        # Test listing a subdirectory with absolute and full paths
d4 = self.fs.listdir("p/1/2/3", absolute=True)
self.assertEqual(len(d4), 4)
self.assertEqual(sorted(d4), [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
check_unicode(d4)
d4 = self.fs.listdir("p/1/2/3", full=True)
self.assertEqual(len(d4), 4)
self.assertEqual(sorted(d4), [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError, self.fs.listdir, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdir, "foo")
def test_listdirinfo(self):
def check_unicode(items):
for (nm, info) in items:
self.assertTrue(isinstance(nm, unicode))
def check_equal(items, target):
names = [nm for (nm, info) in items]
self.assertEqual(sorted(names), sorted(target))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
self.fs.setcontents("bar", b(''))
# Test listing of the root directory
d1 = self.fs.listdirinfo()
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdirinfo("")
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdirinfo("/")
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
# Test listing absolute paths
d2 = self.fs.listdirinfo(absolute=True)
self.assertEqual(len(d2), 4)
check_equal(d2, [u"/a", u"/b", u"/bar", u"/foo"])
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
        # contents are not inadvertently included
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
self.fs.setcontents("p/1/2/3/bar", b(''))
self.fs.makedir("q")
# Test listing just files, just dirs, and wildcards
dirs_only = self.fs.listdirinfo(dirs_only=True)
files_only = self.fs.listdirinfo(files_only=True)
contains_a = self.fs.listdirinfo(wildcard="*a*")
check_equal(dirs_only, [u"p", u"q"])
check_equal(files_only, [u"a", u"b", u"bar", u"foo"])
check_equal(contains_a, [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
# Test listing a subdirectory
d3 = self.fs.listdirinfo("p/1/2/3")
self.assertEqual(len(d3), 4)
check_equal(d3, [u"a", u"b", u"bar", u"foo"])
check_unicode(d3)
        # Test listing a subdirectory with absolute and full paths
d4 = self.fs.listdirinfo("p/1/2/3", absolute=True)
self.assertEqual(len(d4), 4)
check_equal(d4, [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
check_unicode(d4)
d4 = self.fs.listdirinfo("p/1/2/3", full=True)
self.assertEqual(len(d4), 4)
check_equal(d4, [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError, self.fs.listdirinfo, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdirinfo, "foo")
def test_walk(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
self.assertEquals(sorted_walk,
[("/", ["a.txt", "b.txt"]),
("/foo", ["c"])])
# When searching breadth-first, shallow entries come first
found_a = False
for _, files in self.fs.walk(search="breadth"):
if "a.txt" in files:
found_a = True
if "c" in files:
break
assert found_a, "breadth search order was wrong"
# When searching depth-first, deep entries come first
found_c = False
for _, files in self.fs.walk(search="depth"):
if "c" in files:
found_c = True
if "a.txt" in files:
break
assert found_c, "depth search order was wrong: " + \
str(list(self.fs.walk(search="depth")))
def test_walk_wildcard(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
self.fs.makeopendir('.svn').setcontents('ignored', b(''))
for dir_path, paths in self.fs.walk(wildcard='*.txt'):
for path in paths:
self.assert_(path.endswith('.txt'))
for dir_path, paths in self.fs.walk(wildcard=lambda fn: fn.endswith('.txt')):
for path in paths:
self.assert_(path.endswith('.txt'))
def test_walk_dir_wildcard(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
self.fs.makeopendir('.svn').setcontents('ignored', b(''))
for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn: not fn.endswith('.svn')):
for path in paths:
self.assert_('.svn' not in path)
def test_walkfiles(self):
self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
self.fs.makeopendir('foo').setcontents('b', b('123'))
self.assertEquals(sorted(
self.fs.walkfiles()), ["/bar/a.txt", "/foo/b"])
self.assertEquals(sorted(self.fs.walkfiles(
dir_wildcard="*foo*")), ["/foo/b"])
self.assertEquals(sorted(self.fs.walkfiles(
wildcard="*.txt")), ["/bar/a.txt"])
def test_walkdirs(self):
self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
self.fs.makeopendir('foo').makeopendir(
"baz").setcontents('b', b('123'))
self.assertEquals(sorted(self.fs.walkdirs()), [
"/", "/bar", "/foo", "/foo/baz"])
self.assertEquals(sorted(self.fs.walkdirs(
wildcard="*foo*")), ["/", "/foo", "/foo/baz"])
def test_unicode(self):
alpha = u"\N{GREEK SMALL LETTER ALPHA}"
beta = u"\N{GREEK SMALL LETTER BETA}"
self.fs.makedir(alpha)
self.fs.setcontents(alpha + "/a", b(''))
self.fs.setcontents(alpha + "/" + beta, b(''))
self.assertTrue(self.check(alpha))
self.assertEquals(sorted(self.fs.listdir(alpha)), ["a", beta])
def test_makedir(self):
check = self.check
self.fs.makedir("a")
self.assertTrue(check("a"))
self.assertRaises(
ParentDirectoryMissingError, self.fs.makedir, "a/b/c")
self.fs.makedir("a/b/c", recursive=True)
self.assert_(check("a/b/c"))
self.fs.makedir("foo/bar/baz", recursive=True)
self.assert_(check("foo/bar/baz"))
self.fs.makedir("a/b/child")
self.assert_(check("a/b/child"))
self.assertRaises(DestinationExistsError, self.fs.makedir, "/a/b")
self.fs.makedir("/a/b", allow_recreate=True)
self.fs.setcontents("/a/file", b(''))
self.assertRaises(ResourceInvalidError, self.fs.makedir, "a/file")
def test_remove(self):
self.fs.setcontents("a.txt", b(''))
self.assertTrue(self.check("a.txt"))
self.fs.remove("a.txt")
self.assertFalse(self.check("a.txt"))
self.assertRaises(ResourceNotFoundError, self.fs.remove, "a.txt")
self.fs.makedir("dir1")
self.assertRaises(ResourceInvalidError, self.fs.remove, "dir1")
self.fs.setcontents("/dir1/a.txt", b(''))
self.assertTrue(self.check("dir1/a.txt"))
self.fs.remove("dir1/a.txt")
self.assertFalse(self.check("/dir1/a.txt"))
def test_removedir(self):
check = self.check
self.fs.makedir("a")
self.assert_(check("a"))
self.fs.removedir("a")
self.assertRaises(ResourceNotFoundError, self.fs.removedir, "a")
self.assert_(not check("a"))
self.fs.makedir("a/b/c/d", recursive=True)
self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "a/b")
self.fs.removedir("a/b/c/d")
self.assert_(not check("a/b/c/d"))
self.fs.removedir("a/b/c")
self.assert_(not check("a/b/c"))
self.fs.removedir("a/b")
self.assert_(not check("a/b"))
# Test recursive removal of empty parent dirs
self.fs.makedir("foo/bar/baz", recursive=True)
self.fs.removedir("foo/bar/baz", recursive=True)
self.assert_(not check("foo/bar/baz"))
self.assert_(not check("foo/bar"))
self.assert_(not check("foo"))
self.fs.makedir("foo/bar/baz", recursive=True)
self.fs.setcontents("foo/file.txt", b("please don't delete me"))
self.fs.removedir("foo/bar/baz", recursive=True)
self.assert_(not check("foo/bar/baz"))
self.assert_(not check("foo/bar"))
self.assert_(check("foo/file.txt"))
# Ensure that force=True works as expected
self.fs.makedir("frollic/waggle", recursive=True)
self.fs.setcontents("frollic/waddle.txt", b("waddlewaddlewaddle"))
self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "frollic")
self.assertRaises(
ResourceInvalidError, self.fs.removedir, "frollic/waddle.txt")
self.fs.removedir("frollic", force=True)
self.assert_(not check("frollic"))
# Test removing unicode dirs
kappa = u"\N{GREEK CAPITAL LETTER KAPPA}"
self.fs.makedir(kappa)
self.assert_(self.fs.isdir(kappa))
self.fs.removedir(kappa)
self.assertRaises(ResourceNotFoundError, self.fs.removedir, kappa)
self.assert_(not self.fs.isdir(kappa))
self.fs.makedir(pathjoin("test", kappa), recursive=True)
self.assert_(check(pathjoin("test", kappa)))
self.fs.removedir("test", force=True)
self.assert_(not check("test"))
def test_rename(self):
check = self.check
# test renaming a file in the same directory
self.fs.setcontents("foo.txt", b("Hello, World!"))
self.assert_(check("foo.txt"))
self.fs.rename("foo.txt", "bar.txt")
self.assert_(check("bar.txt"))
self.assert_(not check("foo.txt"))
# test renaming a directory in the same directory
self.fs.makedir("dir_a")
self.fs.setcontents("dir_a/test.txt", b("testerific"))
self.assert_(check("dir_a"))
self.fs.rename("dir_a", "dir_b")
self.assert_(check("dir_b"))
self.assert_(check("dir_b/test.txt"))
self.assert_(not check("dir_a/test.txt"))
self.assert_(not check("dir_a"))
# test renaming a file into a different directory
self.fs.makedir("dir_a")
self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
self.assert_(not check("dir_b/test.txt"))
self.assert_(check("dir_a/test.txt"))
# test renaming a file into a non-existent directory
self.assertRaises(ParentDirectoryMissingError,
self.fs.rename, "dir_a/test.txt", "nonexistent/test.txt")
def test_info(self):
test_str = b("Hello, World!")
self.fs.setcontents("info.txt", test_str)
info = self.fs.getinfo("info.txt")
self.assertEqual(info['size'], len(test_str))
self.fs.desc("info.txt")
self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "notafile")
self.assertRaises(
ResourceNotFoundError, self.fs.getinfo, "info.txt/inval")
def test_infokeys(self):
test_str = b("Hello, World!")
self.fs.setcontents("info.txt", test_str)
info = self.fs.getinfo("info.txt")
for k, v in info.iteritems():
self.assertEqual(self.fs.getinfokeys('info.txt', k), {k: v})
test_info = {}
if 'modified_time' in info:
test_info['modified_time'] = info['modified_time']
if 'size' in info:
test_info['size'] = info['size']
self.assertEqual(self.fs.getinfokeys('info.txt', 'size', 'modified_time'), test_info)
self.assertEqual(self.fs.getinfokeys('info.txt', 'thiscantpossiblyexistininfo'), {})
def test_getsize(self):
test_str = b("*") * 23
self.fs.setcontents("info.txt", test_str)
size = self.fs.getsize("info.txt")
self.assertEqual(size, len(test_str))
def test_movefile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
self.fs.move("foo/bar/a.txt", "foo/b.txt")
self.assert_(not check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
self.assert_(checkcontents("foo/b.txt"))
self.fs.move("foo/b.txt", "c.txt")
self.assert_(not check("foo/b.txt"))
self.assert_(check("/c.txt"))
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt")
self.assertRaises(
DestinationExistsError, self.fs.move, "foo/bar/a.txt", "/c.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
self.fs.move("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(not check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
def test_movedir(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
self.assertRaises(ResourceNotFoundError, self.fs.movedir, "a", "b")
self.fs.makedir("a")
self.fs.makedir("b")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/3.txt")
self.fs.makedir("a/foo/bar", recursive=True)
makefile("a/foo/bar/baz.txt")
self.fs.movedir("a", "copy of a")
self.assert_(self.fs.isdir("copy of a"))
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/3.txt"))
self.assert_(check("copy of a/foo/bar/baz.txt"))
self.assert_(not check("a/1.txt"))
self.assert_(not check("a/2.txt"))
self.assert_(not check("a/3.txt"))
self.assert_(not check("a/foo/bar/baz.txt"))
self.assert_(not check("a/foo/bar"))
self.assert_(not check("a/foo"))
self.assert_(not check("a"))
self.fs.makedir("a")
self.assertRaises(
DestinationExistsError, self.fs.movedir, "copy of a", "a")
self.fs.movedir("copy of a", "a", overwrite=True)
self.assert_(not check("copy of a"))
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/3.txt"))
self.assert_(check("a/foo/bar/baz.txt"))
def test_cant_copy_from_os(self):
sys_executable = os.path.abspath(os.path.realpath(sys.executable))
self.assertRaises(FSError, self.fs.copy, sys_executable, "py.exe")
def test_copyfile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path, contents=contents):
self.fs.setcontents(path, contents)
def checkcontents(path, contents=contents):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
# import rpdb2; rpdb2.start_embedded_debugger('password');
self.fs.copy("foo/bar/a.txt", "foo/b.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
self.assert_(checkcontents("foo/b.txt"))
self.fs.copy("foo/b.txt", "c.txt")
self.assert_(check("foo/b.txt"))
self.assert_(check("/c.txt"))
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt", b("different contents"))
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assertRaises(
DestinationExistsError, self.fs.copy, "foo/bar/a.txt", "/c.txt")
self.assert_(checkcontents("/c.txt"))
self.fs.copy("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assert_(checkcontents("/c.txt", b("different contents")))
def test_copydir(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path)
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("a")
self.fs.makedir("b")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/3.txt")
self.fs.makedir("a/foo/bar", recursive=True)
makefile("a/foo/bar/baz.txt")
self.fs.copydir("a", "copy of a")
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/3.txt"))
self.assert_(check("copy of a/foo/bar/baz.txt"))
checkcontents("copy of a/1.txt")
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/3.txt"))
self.assert_(check("a/foo/bar/baz.txt"))
checkcontents("a/1.txt")
self.assertRaises(DestinationExistsError, self.fs.copydir, "a", "b")
self.fs.copydir("a", "b", overwrite=True)
self.assert_(check("b/1.txt"))
self.assert_(check("b/2.txt"))
self.assert_(check("b/3.txt"))
self.assert_(check("b/foo/bar/baz.txt"))
checkcontents("b/1.txt")
def test_copydir_with_dotfile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
self.fs.makedir("a")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/.hidden.txt")
self.fs.copydir("a", "copy of a")
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/.hidden.txt"))
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/.hidden.txt"))
def test_readwriteappendseek(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
test_strings = [b("Beautiful is better than ugly."),
b("Explicit is better than implicit."),
b("Simple is better than complex.")]
all_strings = b("").join(test_strings)
self.assertRaises(ResourceNotFoundError, self.fs.open, "a.txt", "r")
self.assert_(not self.fs.exists("a.txt"))
f1 = self.fs.open("a.txt", "wb")
pos = 0
for s in test_strings:
f1.write(s)
pos += len(s)
self.assertEqual(pos, f1.tell())
f1.close()
self.assert_(self.fs.exists("a.txt"))
self.assert_(checkcontents("a.txt", all_strings))
f2 = self.fs.open("b.txt", "wb")
f2.write(test_strings[0])
f2.close()
self.assert_(checkcontents("b.txt", test_strings[0]))
f3 = self.fs.open("b.txt", "ab")
# On win32, tell() gives zero until you actually write to the file
# self.assertEquals(f3.tell(),len(test_strings[0]))
f3.write(test_strings[1])
self.assertEquals(f3.tell(), len(test_strings[0])+len(test_strings[1]))
f3.write(test_strings[2])
self.assertEquals(f3.tell(), len(all_strings))
f3.close()
self.assert_(checkcontents("b.txt", all_strings))
f4 = self.fs.open("b.txt", "wb")
f4.write(test_strings[2])
f4.close()
self.assert_(checkcontents("b.txt", test_strings[2]))
f5 = self.fs.open("c.txt", "wb")
for s in test_strings:
f5.write(s+b("\n"))
f5.close()
f6 = self.fs.open("c.txt", "rb")
for s, t in zip(f6, test_strings):
self.assertEqual(s, t+b("\n"))
f6.close()
f7 = self.fs.open("c.txt", "rb")
f7.seek(13)
word = f7.read(6)
self.assertEqual(word, b("better"))
f7.seek(1, os.SEEK_CUR)
word = f7.read(4)
self.assertEqual(word, b("than"))
f7.seek(-9, os.SEEK_END)
word = f7.read(7)
self.assertEqual(word, b("complex"))
f7.close()
self.assertEqual(self.fs.getcontents("a.txt", "rb"), all_strings)
def test_truncate(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
self.fs.setcontents("hello", b("world"))
checkcontents("hello", b("world"))
self.fs.setcontents("hello", b("hi"))
checkcontents("hello", b("hi"))
self.fs.setcontents("hello", b("1234567890"))
checkcontents("hello", b("1234567890"))
with self.fs.open("hello", "rb+") as f:
f.truncate(7)
checkcontents("hello", b("1234567"))
with self.fs.open("hello", "rb+") as f:
f.seek(5)
f.truncate()
checkcontents("hello", b("12345"))
def test_truncate_to_larger_size(self):
with self.fs.open("hello", "wb") as f:
f.truncate(30)
self.assertEquals(self.fs.getsize("hello"), 30)
# Some file systems (FTPFS) don't support both reading and writing
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("hello", "rb+") as f:
f.seek(25)
f.write(b("123456"))
with self.fs.open("hello", "rb") as f:
f.seek(25)
self.assertEquals(f.read(), b("123456"))
def test_write_past_end_of_file(self):
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("write_at_end", "wb") as f:
f.seek(25)
f.write(b("EOF"))
with self.fs.open("write_at_end", "rb") as f:
self.assertEquals(f.read(), b("\x00")*25 + b("EOF"))
def test_with_statement(self):
# This is a little tricky since 'with' is actually new syntax.
# We use eval() to make this method safe for old python versions.
import sys
        if sys.version_info >= (2, 5):
# A successful 'with' statement
contents = "testing the with statement"
code = "from __future__ import with_statement\n"
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += "self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)"
code = compile(code, "<string>", 'exec')
eval(code)
# A 'with' statement raising an error
contents = "testing the with statement"
code = "from __future__ import with_statement\n"
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += " raise ValueError\n"
code = compile(code, "<string>", 'exec')
self.assertRaises(ValueError, eval, code, globals(), locals())
self.assertEquals(self.fs.getcontents('f.txt', 'rb'), contents)
def test_pickling(self):
if self.fs.getmeta('pickle_contents', True):
self.fs.setcontents("test1", b("hello world"))
fs2 = pickle.loads(pickle.dumps(self.fs))
self.assert_(fs2.isfile("test1"))
fs3 = pickle.loads(pickle.dumps(self.fs, -1))
self.assert_(fs3.isfile("test1"))
else:
# Just make sure it doesn't throw an exception
fs2 = pickle.loads(pickle.dumps(self.fs))
def test_big_file(self):
"""Test handling of a big file (1MB)"""
chunk_size = 1024 * 256
num_chunks = 4
def chunk_stream():
"""Generate predictable-but-randomy binary content."""
r = random.Random(0)
randint = r.randint
int2byte = six.int2byte
for _i in xrange(num_chunks):
c = b("").join(int2byte(randint(
0, 255)) for _j in xrange(chunk_size//8))
yield c * 8
f = self.fs.open("bigfile", "wb")
try:
for chunk in chunk_stream():
f.write(chunk)
finally:
f.close()
chunks = chunk_stream()
f = self.fs.open("bigfile", "rb")
try:
try:
while True:
if chunks.next() != f.read(chunk_size):
assert False, "bigfile was corrupted"
except StopIteration:
if f.read() != b(""):
assert False, "bigfile was corrupted"
finally:
f.close()
def test_settimes(self):
def cmp_datetimes(d1, d2):
"""Test datetime objects are the same to within the timestamp accuracy"""
dts1 = time.mktime(d1.timetuple())
dts2 = time.mktime(d2.timetuple())
return int(dts1) == int(dts2)
d1 = datetime.datetime(2010, 6, 20, 11, 0, 9, 987699)
d2 = datetime.datetime(2010, 7, 5, 11, 0, 9, 500000)
self.fs.setcontents('/dates.txt', b('check dates'))
# If the implementation supports settimes, check that the times
# can be set and then retrieved
try:
self.fs.settimes('/dates.txt', d1, d2)
except UnsupportedError:
pass
else:
info = self.fs.getinfo('/dates.txt')
self.assertTrue(cmp_datetimes(d1, info['accessed_time']))
self.assertTrue(cmp_datetimes(d2, info['modified_time']))
def test_removeroot(self):
self.assertRaises(RemoveRootError, self.fs.removedir, "/")
def test_zero_read(self):
"""Test read(0) returns empty string"""
self.fs.setcontents('foo.txt', b('Hello, World'))
with self.fs.open('foo.txt', 'rb') as f:
self.assert_(len(f.read(0)) == 0)
with self.fs.open('foo.txt', 'rt') as f:
self.assert_(len(f.read(0)) == 0)
# May be disabled - see end of file
class ThreadingTestCases(object):
"""Testcases for thread-safety of FS implementations."""
# These are either too slow to be worth repeating,
# or cannot possibly break cross-thread.
_dont_retest = ("test_pickling", "test_multiple_overwrite",)
__lock = threading.RLock()
def _yield(self):
# time.sleep(0.001)
# Yields without a delay
time.sleep(0)
def _lock(self):
self.__lock.acquire()
def _unlock(self):
self.__lock.release()
def _makeThread(self, func, errors):
def runThread():
try:
func()
except Exception:
errors.append(sys.exc_info())
thread = threading.Thread(target=runThread)
thread.daemon = True
return thread
def _runThreads(self, *funcs):
check_interval = sys.getcheckinterval()
sys.setcheckinterval(1)
try:
errors = []
threads = [self._makeThread(f, errors) for f in funcs]
for t in threads:
t.start()
for t in threads:
t.join()
for (c, e, t) in errors:
raise e, None, t
finally:
sys.setcheckinterval(check_interval)
def test_setcontents_threaded(self):
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("thread1.txt", c)
self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)
def thread2():
c = b("thread2 was 'ere")
setcontents("thread2.txt", c)
self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
def test_setcontents_threaded_samefile(self):
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread2():
c = b("thread2 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread3():
c = b("thread3 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
try:
self._runThreads(thread1, thread2, thread3)
except ResourceLockedError:
# that's ok, some implementations don't support concurrent writes
pass
def test_cases_in_separate_dirs(self):
class TestCases_in_subdir(self.__class__, unittest.TestCase):
"""Run all testcases against a subdir of self.fs"""
def __init__(this, subdir):
super(TestCases_in_subdir, this).__init__("test_listdir")
this.subdir = subdir
for meth in dir(this):
if not meth.startswith("test_"):
continue
if meth in self._dont_retest:
continue
if not hasattr(FSTestCases, meth):
continue
if self.fs.exists(subdir):
self.fs.removedir(subdir, force=True)
self.assertFalse(self.fs.isdir(subdir))
self.assertTrue(self.fs.isdir("/"))
self.fs.makedir(subdir)
self._yield()
getattr(this, meth)()
@property
def fs(this):
return self.fs.opendir(this.subdir)
def check(this, p):
return self.check(pathjoin(this.subdir, relpath(p)))
def thread1():
TestCases_in_subdir("thread1")
def thread2():
TestCases_in_subdir("thread2")
def thread3():
TestCases_in_subdir("thread3")
self._runThreads(thread1, thread2, thread3)
def test_makedir_winner(self):
errors = []
def makedir():
try:
self.fs.makedir("testdir")
except DestinationExistsError, e:
errors.append(e)
def makedir_noerror():
try:
self.fs.makedir("testdir", allow_recreate=True)
except DestinationExistsError, e:
errors.append(e)
def removedir():
try:
self.fs.removedir("testdir")
except (ResourceNotFoundError, ResourceLockedError), e:
errors.append(e)
# One thread should succeed, one should error
self._runThreads(makedir, makedir)
self.assertEquals(len(errors), 1)
self.fs.removedir("testdir")
# One thread should succeed, two should error
errors = []
self._runThreads(makedir, makedir, makedir)
if len(errors) != 2:
raise AssertionError(errors)
self.fs.removedir("testdir")
# All threads should succeed
errors = []
self._runThreads(makedir_noerror, makedir_noerror, makedir_noerror)
self.assertEquals(len(errors), 0)
self.assertTrue(self.fs.isdir("testdir"))
self.fs.removedir("testdir")
# makedir() can beat removedir() and vice-versa
errors = []
self._runThreads(makedir, removedir)
if self.fs.isdir("testdir"):
self.assertEquals(len(errors), 1)
self.assertFalse(isinstance(errors[0], DestinationExistsError))
self.fs.removedir("testdir")
else:
self.assertEquals(len(errors), 0)
def test_concurrent_copydir(self):
self.fs.makedir("a")
self.fs.makedir("a/b")
self.fs.setcontents("a/hello.txt", b("hello world"))
self.fs.setcontents("a/guido.txt", b("is a space alien"))
self.fs.setcontents("a/b/parrot.txt", b("pining for the fiords"))
def copydir():
self._yield()
self.fs.copydir("a", "copy of a")
def copydir_overwrite():
self._yield()
self.fs.copydir("a", "copy of a", overwrite=True)
# This should error out since we're not overwriting
self.assertRaises(
DestinationExistsError, self._runThreads, copydir, copydir)
self.assert_(self.fs.isdir('a'))
self.assert_(self.fs.isdir('a'))
copydir_overwrite()
self.assert_(self.fs.isdir('a'))
# This should run to completion and give a valid state, unless
# files get locked when written to.
try:
self._runThreads(copydir_overwrite, copydir_overwrite)
except ResourceLockedError:
pass
self.assertTrue(self.fs.isdir("copy of a"))
self.assertTrue(self.fs.isdir("copy of a/b"))
self.assertEqual(self.fs.getcontents(
"copy of a/b/parrot.txt", 'rb'), b("pining for the fiords"))
self.assertEqual(self.fs.getcontents(
"copy of a/hello.txt", 'rb'), b("hello world"))
self.assertEqual(self.fs.getcontents(
"copy of a/guido.txt", 'rb'), b("is a space alien"))
def test_multiple_overwrite(self):
contents = [b("contents one"), b(
"contents the second"), b("number three")]
def thread1():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread1.txt", c)
self.assertEquals(self.fs.getsize("thread1.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread1.txt", 'rb'), c)
def thread2():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread2.txt", c)
self.assertEquals(self.fs.getsize("thread2.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
# Uncomment to temporarily disable threading tests
# class ThreadingTestCases(object):
# _dont_retest = ()
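# A minimal usage sketch: FSTestCases and ThreadingTestCases are mixins that
# expect a concrete ``self.fs`` filesystem to be supplied by a subclass. The
# MemoryFS import and the mixin order below are assumptions about how the
# surrounding test package wires these classes up, not taken from this module.
if __name__ == "__main__":
    import unittest
    from fs.memoryfs import MemoryFS

    class MemoryFSTestCases(unittest.TestCase, FSTestCases, ThreadingTestCases):
        """Run the generic filesystem tests against an in-memory filesystem."""

        def setUp(self):
            self.fs = MemoryFS()

        def tearDown(self):
            self.fs.close()

    unittest.main()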
| duedil-ltd/pyfilesystem | fs/tests/__init__.py | Python | bsd-3-clause | 46,382 |
"""Integration test for Notifications."""
import github3
from .helper import IntegrationHelper
class TestThread(IntegrationHelper):
"""Integration test for methods on Test class"""
def test_subscription(self):
"""Show that a user can retrieve notifications for repository"""
self.token_login()
cassette_name = self.cassette_name("subscription")
with self.recorder.use_cassette(cassette_name):
repository = self.gh.repository('sigmavirus24', 'github3.py')
threads = list(repository.notifications(all=True))
assert len(threads) > 0
thread = threads[0]
assert isinstance(thread, github3.notifications.Thread)
assert isinstance(thread.subscription(),
github3.notifications.Subscription)
class TestSubscription(IntegrationHelper):
"""Integration test for methods on Test class"""
def test_set(self):
"""Show that user can successful set subscription"""
self.token_login()
cassette_name = self.cassette_name("set")
with self.recorder.use_cassette(cassette_name):
repository = self.gh.repository('sigmavirus24', 'github3.py')
threads = list(repository.notifications(all='true'))
assert len(threads) > 0
subscription = threads[0].subscription()
assert subscription.set(True, False) is None
assert isinstance(subscription, github3.notifications.Subscription)
| christophelec/github3.py | tests/integration/test_notifications.py | Python | bsd-3-clause | 1,512 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from builtins import zip
import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
"""
Compare two models. Given a list of strings, prints out tokens & tags
    whenever the two taggers parse a string differently. This is for spot-checking models.
    :param model1: a .crfsuite filename
    :param model2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens:
features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print('\n')
print("%s. %s" %(count_discrepancies, string))
print('-'*75)
print_spaced('token', model1, model2)
print('-'*75)
for token in zip(tokens, tags1, tags2):
print_spaced(token[0], token[1], token[2])
print("\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list)))
def print_spaced(s1, s2, s3):
n = 25
print(s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3)
def validateTaggers(model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = list(zip(*components))
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_both += 1
elif (tags1 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("%s: "%model2, tags2)
wrong_count_1 += 1
elif (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_2 += 1
else:
correct_count += 1
print("\n\nBOTH WRONG: ", wrong_count_both)
print("%s WRONG: %s" %(model1, wrong_count_1))
print("%s WRONG: %s" %(model2, wrong_count_2))
print("BOTH CORRECT: ", correct_count)
| et-al-Health/parserator | parserator/spotcheck.py | Python | mit | 3,434 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('statmaps', '0026_populate_cogatlas'),
]
operations = [
migrations.AddField(
model_name='statisticmap',
name='cognitive_paradigm_cogatlas',
field=models.CharField(help_text=b"Task (or lack of it) performed by the subjects in the scanner described using <a href='http://www.cognitiveatlas.org/'>Cognitive Atlas</a> terms", max_length=200, null=True, verbose_name=b'Cognitive Paradigm'),
preserve_default=True,
),
migrations.AlterField(
model_name='statisticmap',
name='modality',
field=models.CharField(help_text=b'Brain imaging procedure that was used to acquire the data.', max_length=200, verbose_name=b'Modality & Acquisition Type', choices=[(b'fMRI-BOLD', b'fMRI-BOLD'), (b'fMRI-CBF', b'fMRI-CBF'), (b'fMRI-CBV', b'fMRI-CBV'), (b'Diffusion MRI', b'Diffusion MRI'), (b'Structural MRI', b'Structural MRI'), (b'PET FDG', b'PET FDG'), (b'PET [15O]-water', b'PET [15O]-water'), (b'PET other', b'PET other'), (b'MEG', b'MEG'), (b'EEG', b'EEG'), (b'Other', b'Other')]),
preserve_default=True,
),
]
| chrisfilo/NeuroVault | neurovault/apps/statmaps/migrations/0027_auto_20150220_0305.py | Python | mit | 1,314 |
import numpy as np
import cudarray as ca
from .base import PickleMixin
_FLT_MIN = np.finfo(ca.float_).tiny
class Loss(PickleMixin):
# abll: I suspect that this interface is not ideal. It would be more
# elegant if Loss only provided loss() and grad(). However, where should
# we place the logic from fprop()?
@classmethod
def from_any(cls, arg):
if isinstance(arg, Loss):
return arg
elif isinstance(arg, str):
if arg == 'softmaxce':
return SoftmaxCrossEntropy()
elif arg == 'bce':
return BinaryCrossEntropy()
elif arg == 'mse':
return MeanSquaredError()
raise ValueError('Invalid constructor arguments: %s' % arg)
def _setup(self, x_shape):
pass
def fprop(self, x):
return x
def loss(self, target, x):
""" Returns the loss calculated from the target and the input. """
raise NotImplementedError()
def grad(self, target, x):
""" Returns the input gradient. """
raise NotImplementedError()
def y_shape(self, x_shape):
return x_shape
class SoftmaxCrossEntropy(Loss):
"""
Softmax + cross entropy (aka. multinomial logistic loss)
"""
def __init__(self):
self.name = 'softmaxce'
self._tmp_x = None
self._tmp_y = None
self._tmp_target = None
self._tmp_one_hot = None
self.n_classes = None
def _setup(self, x_shape):
self.n_classes = x_shape[1]
def _softmax(self, x):
# caching wrapper
if self._tmp_x is not x:
self._tmp_y = ca.nnet.softmax(x)
self._tmp_x = x
return self._tmp_y
def _one_hot(self, target):
# caching wrapper
if self._tmp_target is not target:
self._tmp_one_hot = ca.nnet.one_hot_encode(target, self.n_classes)
self._tmp_target = target
return self._tmp_one_hot
def fprop(self, x):
return ca.nnet.one_hot_decode(self._softmax(x))
def loss(self, target, x):
y = self._softmax(x)
target = self._one_hot(target)
return ca.nnet.categorical_cross_entropy(y_pred=y, y_true=target)
def grad(self, target, x):
y = self._softmax(x)
target = self._one_hot(target)
return -(target - y)
def y_shape(self, x_shape):
return (x_shape[0],)
class BinaryCrossEntropy(Loss):
def __init__(self):
self.name = 'bce'
def loss(self, y, y_pred):
y_pred = ca.maximum(y_pred, _FLT_MIN)
return -ca.mean(y*ca.log(y_pred) + (1 - y)*ca.log(1 - y_pred), axis=1)
def grad(self, y, y_pred):
y_pred = ca.maximum(y_pred, _FLT_MIN)
return -(y/y_pred - (1-y)/(1-y_pred))
class MeanSquaredError(Loss):
def __init__(self):
self.name = 'mse'
self.n_targets = None
def _setup(self, x_shape):
self.n_targets = x_shape[1]
def loss(self, y, y_pred):
return ca.mean((y-y_pred)**2, axis=1)
def grad(self, y, y_pred):
return 2.0 / self.n_targets * (y_pred - y)
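# A minimal usage sketch: build a loss by name and evaluate it on a tiny,
# made-up batch. ca.array is assumed to mirror numpy's array constructor, as
# cudarray generally does; shapes and values are illustrative only.
if __name__ == '__main__':
    loss = Loss.from_any('mse')
    loss._setup((2, 3))  # batch of 2 samples, 3 targets each
    y_true = ca.array(np.array([[0.0, 1.0, 0.0],
                                [1.0, 0.0, 0.0]]))
    y_pred = ca.array(np.array([[0.1, 0.8, 0.1],
                                [0.7, 0.2, 0.1]]))
    print(loss.loss(y_true, y_pred))   # per-sample mean squared error
    print(loss.grad(y_true, y_pred))   # gradient with respect to y_pred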
| lre/deeppy | deeppy/loss.py | Python | mit | 3,134 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Message.updated'
db.alter_column(u'mailer_message', 'updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True))
def backwards(self, orm):
# Changing field 'Message.updated'
db.alter_column(u'mailer_message', 'updated', self.gf('django.db.models.fields.DateTimeField')())
models = {
u'mailer.message': {
'Meta': {'object_name': 'Message'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_data': ('django.db.models.fields.TextField', [], {}),
'priority': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'recipients': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'subject': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['mailer'] | zamazaljiri/django-mailer | mailer/migrations/0002_auto__chg_field_message_updated.py | Python | mit | 1,493 |
# Copyright (c) 2012-2013 Paul Tagliamonte <[email protected]>
# Copyright (c) 2013 Leo Cavaille <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debile.slave.wrappers.pep8 import parse_pep8
from debile.slave.utils import cd
from debile.utils.commands import run_command
def pep8(dsc, analysis):
run_command(["dpkg-source", "-x", dsc, "source-pep8"])
with cd('source-pep8'):
out, _, ret = run_command(['pep8', '.'])
failed = ret != 0
for issue in parse_pep8(out.splitlines()):
analysis.results.append(issue)
return (analysis, out, failed, None, None)
def version():
out, _, ret = run_command(['pep8', '--version'])
if ret != 0:
raise Exception("pep8 is not installed")
return ('pep8', out.strip())
| lucaskanashiro/debile | debile/slave/runners/pep8.py | Python | mit | 1,811 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from devil.android import device_errors
class FlagChanger(object):
"""Changes the flags Chrome runs with.
There are two different use cases for this file:
* Flags are permanently set by calling Set().
* Flags can be temporarily set for a particular set of unit tests. These
tests should call Restore() to revert the flags to their original state
once the tests have completed.
"""
def __init__(self, device, cmdline_file):
"""Initializes the FlagChanger and records the original arguments.
Args:
device: A DeviceUtils instance.
cmdline_file: Path to the command line file on the device.
"""
self._device = device
self._cmdline_file = cmdline_file
# Save the original flags.
try:
self._orig_line = self._device.ReadFile(self._cmdline_file).strip()
except device_errors.CommandFailedError:
self._orig_line = ''
# Parse out the flags into a list to facilitate adding and removing flags.
self._current_flags = self._TokenizeFlags(self._orig_line)
def Get(self):
"""Returns list of current flags."""
return self._current_flags
def Set(self, flags):
"""Replaces all flags on the current command line with the flags given.
Args:
flags: A list of flags to set, eg. ['--single-process'].
"""
if flags:
assert flags[0] != 'chrome'
self._current_flags = flags
self._UpdateCommandLineFile()
def AddFlags(self, flags):
"""Appends flags to the command line if they aren't already there.
Args:
flags: A list of flags to add on, eg. ['--single-process'].
"""
if flags:
assert flags[0] != 'chrome'
# Avoid appending flags that are already present.
for flag in flags:
if flag not in self._current_flags:
self._current_flags.append(flag)
self._UpdateCommandLineFile()
def RemoveFlags(self, flags):
"""Removes flags from the command line, if they exist.
Args:
flags: A list of flags to remove, eg. ['--single-process']. Note that we
expect a complete match when removing flags; if you want to remove
a switch with a value, you must use the exact string used to add
it in the first place.
"""
if flags:
assert flags[0] != 'chrome'
for flag in flags:
if flag in self._current_flags:
self._current_flags.remove(flag)
self._UpdateCommandLineFile()
def Restore(self):
"""Restores the flags to their original state."""
self._current_flags = self._TokenizeFlags(self._orig_line)
self._UpdateCommandLineFile()
def _UpdateCommandLineFile(self):
"""Writes out the command line to the file, or removes it if empty."""
logging.info('Current flags: %s', self._current_flags)
# Root is not required to write to /data/local/tmp/.
use_root = '/data/local/tmp/' not in self._cmdline_file
if self._current_flags:
# The first command line argument doesn't matter as we are not actually
# launching the chrome executable using this command line.
cmd_line = ' '.join(['_'] + self._current_flags)
self._device.WriteFile(
self._cmdline_file, cmd_line, as_root=use_root)
file_contents = self._device.ReadFile(
self._cmdline_file, as_root=use_root).rstrip()
assert file_contents == cmd_line, (
'Failed to set the command line file at %s' % self._cmdline_file)
else:
self._device.RunShellCommand('rm ' + self._cmdline_file,
as_root=use_root)
assert not self._device.FileExists(self._cmdline_file), (
'Failed to remove the command line file at %s' % self._cmdline_file)
@staticmethod
def _TokenizeFlags(line):
"""Changes the string containing the command line into a list of flags.
Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
* Flags are split using whitespace, unless the whitespace is within a
pair of quotation marks.
* Unlike the Java version, we keep the quotation marks around switch
values since we need them to re-create the file when new flags are
appended.
Args:
line: A string containing the entire command line. The first token is
assumed to be the program name.
"""
if not line:
return []
tokenized_flags = []
current_flag = ""
within_quotations = False
# Move through the string character by character and build up each flag
# along the way.
for c in line.strip():
      if c == '"':
if len(current_flag) > 0 and current_flag[-1] == '\\':
# Last char was a backslash; pop it, and treat this " as a literal.
current_flag = current_flag[0:-1] + '"'
else:
within_quotations = not within_quotations
current_flag += c
      elif not within_quotations and (c == ' ' or c == '\t'):
        if current_flag != "":
tokenized_flags.append(current_flag)
current_flag = ""
else:
current_flag += c
# Tack on the last flag.
if not current_flag:
if within_quotations:
logging.warn('Unterminated quoted argument: ' + line)
else:
tokenized_flags.append(current_flag)
# Return everything but the program name.
return tokenized_flags[1:]
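# A minimal usage sketch: temporarily add a flag for a test run and restore the
# original command line afterwards. The DeviceUtils construction, serial number
# and command-line file path are placeholders/assumptions; check the devil
# documentation for the exact API in your checkout.
if __name__ == '__main__':
  from devil.android import device_utils
  device = device_utils.DeviceUtils('0123456789ABCDEF')  # hypothetical serial
  changer = FlagChanger(device, '/data/local/tmp/chrome-command-line')
  changer.AddFlags(['--enable-test-intents'])
  try:
    pass  # ... run whatever needs the extra flag here ...
  finally:
    changer.Restore()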
| Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/build/android/pylib/flag_changer.py | Python | mit | 5,496 |
#!/usr/bin/env python
"""
This module contains the :class:`Column` class, which defines a "vertical"
array of tabular data. Whereas :class:`.Row` instances are independent of their
parent :class:`.Table`, columns depend on knowledge of both their position in
the parent (column name, data type) as well as the rows that contain their data.
"""
import six
from agate.mapped_sequence import MappedSequence
from agate.utils import NullOrder, memoize
if six.PY3: # pragma: no cover
# pylint: disable=W0622
xrange = range
def null_handler(k):
"""
Key method for sorting nulls correctly.
"""
if k is None:
return NullOrder()
return k
class Column(MappedSequence):
"""
Proxy access to column data. Instances of :class:`Column` should
not be constructed directly. They are created by :class:`.Table`
instances and are unique to them.
Columns are implemented as subclass of :class:`.MappedSequence`. They
deviate from the underlying implementation in that loading of their data
is deferred until it is needed.
:param name:
The name of this column.
:param data_type:
An instance of :class:`.DataType`.
:param rows:
A :class:`.MappedSequence` that contains the :class:`.Row` instances
containing the data for this column.
:param row_names:
An optional list of row names (keys) for this column.
"""
__slots__ = ['_index', '_name', '_data_type', '_rows', '_row_names']
def __init__(self, index, name, data_type, rows, row_names=None):
self._index = index
self._name = name
self._data_type = data_type
self._rows = rows
self._keys = row_names
def __getstate__(self):
"""
Return state values to be pickled.
This is necessary on Python2.7 when using :code:`__slots__`.
"""
return {
'_index': self._index,
'_name': self._name,
'_data_type': self._data_type,
'_rows': self._rows,
'_keys': self._keys
}
def __setstate__(self, data):
"""
Restore pickled state.
This is necessary on Python2.7 when using :code:`__slots__`.
"""
self._index = data['_index']
self._name = data['_name']
self._data_type = data['_data_type']
self._rows = data['_rows']
self._keys = data['_keys']
@property
def index(self):
"""
This column's index.
"""
return self._index
@property
def name(self):
"""
This column's name.
"""
return self._name
@property
def data_type(self):
"""
This column's data type.
"""
return self._data_type
@memoize
def values(self):
"""
Get the values in this column, as a tuple.
"""
return tuple(row[self._index] for row in self._rows)
@memoize
def values_distinct(self):
"""
Get the distinct values in this column, as a tuple.
"""
return tuple(set(self.values()))
@memoize
def values_without_nulls(self):
"""
Get the values in this column with any null values removed.
"""
return tuple(d for d in self.values() if d is not None)
@memoize
def values_sorted(self):
"""
Get the values in this column sorted.
"""
return sorted(self.values(), key=null_handler)
@memoize
def values_without_nulls_sorted(self):
"""
Get the values in this column with any null values removed and sorted.
"""
return sorted(self.values_without_nulls(), key=null_handler)
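# A minimal usage sketch: columns are normally obtained from an agate Table
# rather than constructed directly. The Table constructor call below reflects
# the public agate API as understood here and may need adjusting for the
# installed version.
if __name__ == '__main__':
    import agate
    table = agate.Table(
        rows=[('a', 1), ('b', 2), ('c', None)],
        column_names=['letter', 'number'],
        column_types=[agate.Text(), agate.Number()]
    )
    number = table.columns['number']
    print(number.values())                # (Decimal('1'), Decimal('2'), None)
    print(number.values_without_nulls())  # (Decimal('1'), Decimal('2'))
    print(number.values_sorted())         # nulls ordered via NullOrder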
| flother/agate | agate/columns.py | Python | mit | 3,731 |
from __future__ import absolute_import
from .base import *
| cr8ivecodesmith/pyort | pyort/pyort/settings/production.py | Python | gpl-2.0 | 59 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from contextlib import contextmanager
import sqlalchemy as sa
from sqlalchemy.ext import compiler
from sqlalchemy.sql.expression import ClauseElement
from sqlalchemy.sql.expression import Executable
# from http:
# //www.sqlalchemy.org/docs/core/compiler.html#compiling-sub-elements-of-a-custom-expression-construct # noqa pylint: disable=line-too-long
# _execution_options per
# http://docs.sqlalchemy.org/en/rel_0_7/core/compiler.html#enabling-compiled-autocommit
# (UpdateBase requires sqlalchemy 0.7.0)
class InsertFromSelect(Executable, ClauseElement):
_execution_options = \
Executable._execution_options.union({'autocommit': True})
def __init__(self, table, select):
self.table = table
self.select = select
@compiler.compiles(InsertFromSelect)
def _visit_insert_from_select(element, compiler, **kw):
return "INSERT INTO {} {}".format(compiler.process(element.table, asfrom=True),
compiler.process(element.select))
def sa_version():
if hasattr(sa, '__version__'):
def tryint(s):
try:
return int(s)
except (ValueError, TypeError):
return -1
return tuple(map(tryint, sa.__version__.split('.')))
return (0, 0, 0) # "it's old"
def Table(*args, **kwargs):
"""Wrap table creation to add any necessary dialect-specific options"""
# work around the case where a database was created for us with
# a non-utf8 character set (mysql's default)
kwargs['mysql_character_set'] = 'utf8'
return sa.Table(*args, **kwargs)
@contextmanager
def withoutSqliteForeignKeys(engine, connection=None):
conn = connection
if engine.dialect.name == 'sqlite':
if conn is None:
conn = engine.connect()
# This context is not re-entrant. Ensure it.
assert not getattr(engine, 'fk_disabled', False)
engine.fk_disabled = True
conn.execute('pragma foreign_keys=OFF')
try:
yield
finally:
if engine.dialect.name == 'sqlite':
engine.fk_disabled = False
conn.execute('pragma foreign_keys=ON')
if connection is None:
conn.close()
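# A minimal usage sketch of InsertFromSelect against an in-memory SQLite
# database. Table and column names are made up; the old-style sa.select([...])
# call matches the SQLAlchemy versions this module appears to target.
if __name__ == '__main__':
    engine = sa.create_engine('sqlite://')
    metadata = sa.MetaData()
    src = Table('src', metadata, sa.Column('name', sa.String(32)))
    dst = Table('dst', metadata, sa.Column('name', sa.String(32)))
    metadata.create_all(engine)
    conn = engine.connect()
    conn.execute(src.insert(), [{'name': 'alpha'}, {'name': 'beta'}])
    # Copies every row of src into dst in a single INSERT ... SELECT statement.
    conn.execute(InsertFromSelect(dst, sa.select([src.c.name])))
    print(conn.execute(sa.select([dst.c.name])).fetchall())
    conn.close()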
| anish/buildbot | master/buildbot/util/sautils.py | Python | gpl-2.0 | 2,925 |
"""
A HTML5 target.
"""
from targets import _
from html import TYPE
import html
NAME = _('HTML5 page')
EXTENSION = 'html'
HEADER = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
<style>
body{background-color:#fff;color:#000;}
hr{background-color:#000;border:0;color:#000;}
hr.heavy{height:5px;}
hr.light{height:1px;}
img{border:0;display:block;}
img.right{margin:0 0 0 auto;}
img.center{border:0;margin:0 auto;}
table th,table td{padding:4px;}
.center,header{text-align:center;}
table.center {margin-left:auto; margin-right:auto;}
.right{text-align:right;}
.left{text-align:left;}
.tableborder,.tableborder td,.tableborder th{border:1px solid #000;}
.underline{text-decoration:underline;}
</style>
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
HEADERCSS = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
TAGS = html.TAGS.copy()
for tag in TAGS:
TAGS[tag] = TAGS[tag].lower()
HTML5TAGS = {
'title1Open' : '<section~A~>\n<h1>\a</h1>' ,
'title1Close' : '</section>' ,
'title2Open' : '<section~A~>\n<h2>\a</h2>' ,
'title2Close' : '</section>' ,
'title3Open' : '<section~A~>\n<h3>\a</h3>' ,
'title3Close' : '</section>' ,
'title4Open' : '<section~A~>\n<h4>\a</h4>' ,
'title4Close' : '</section>' ,
'title5Open' : '<section~A~>\n<h5>\a</h5>' ,
'title5Close' : '</section>' ,
'fontBoldOpen' : '<strong>' ,
'fontBoldClose' : '</strong>' ,
'fontItalicOpen' : '<em>' ,
'fontItalicClose' : '</em>' ,
'fontUnderlineOpen' : '<span class="underline">',
'fontUnderlineClose' : '</span>' ,
'fontStrikeOpen' : '<del>' ,
'fontStrikeClose' : '</del>' ,
'listItemClose' : '</li>' ,
'numlistItemClose' : '</li>' ,
'deflistItem2Close' : '</dd>' ,
'bar1' : '<hr class="light">' ,
'bar2' : '<hr class="heavy">' ,
'img' : '<img~a~ src="\a" alt="">' ,
'imgEmbed' : '<img~a~ src="\a" alt="">' ,
'_imgAlignLeft' : ' class="left"' ,
'_imgAlignCenter' : ' class="center"',
'_imgAlignRight' : ' class="right"' ,
'tableOpen' : '<table~a~~b~>' ,
'_tableBorder' : ' class="tableborder"' ,
'_tableAlignCenter' : ' style="margin-left: auto; margin-right: auto;"',
'_tableCellAlignRight' : ' class="right"' ,
'_tableCellAlignCenter': ' class="center"',
'cssOpen' : '<style>' ,
'tocOpen' : '<nav>' ,
'tocClose' : '</nav>' ,
'EOD' : '</article></body></html>'
}
TAGS.update(HTML5TAGS)
RULES = html.RULES.copy()
#Update the rules to use explicit <section> </section> tags
HTML5RULES = {
'titleblocks' : 1,
}
RULES.update(HTML5RULES)
| farvardin/txt2tags-test | targets/html5.py | Python | gpl-2.0 | 3,582 |
import unittest
from circular_buffer import (
CircularBuffer,
BufferFullException,
BufferEmptyException
)
class CircularBufferTest(unittest.TestCase):
def test_read_empty_buffer(self):
buf = CircularBuffer(1)
with self.assertRaises(BufferEmptyException):
buf.read()
def test_write_and_read_back_one_item(self):
buf = CircularBuffer(1)
buf.write('1')
self.assertEqual('1', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
def test_write_and_read_back_multiple_items(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
self.assertEqual('1', buf.read())
self.assertEqual('2', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
def test_clearing_buffer(self):
buf = CircularBuffer(3)
for c in '123':
buf.write(c)
buf.clear()
with self.assertRaises(BufferEmptyException):
buf.read()
buf.write('1')
buf.write('2')
self.assertEqual('1', buf.read())
buf.write('3')
self.assertEqual('2', buf.read())
def test_alternate_write_and_read(self):
buf = CircularBuffer(2)
buf.write('1')
self.assertEqual('1', buf.read())
buf.write('2')
self.assertEqual('2', buf.read())
def test_read_back_oldest_item(self):
buf = CircularBuffer(3)
buf.write('1')
buf.write('2')
buf.read()
buf.write('3')
buf.read()
self.assertEqual('3', buf.read())
def test_write_full_buffer(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
with self.assertRaises(BufferFullException):
buf.write('A')
def test_overwrite_full_buffer(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
buf.overwrite('A')
self.assertEqual('2', buf.read())
self.assertEqual('A', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
def test_overwrite_non_full_buffer(self):
buf = CircularBuffer(2)
buf.overwrite('1')
buf.overwrite('2')
self.assertEqual('1', buf.read())
self.assertEqual('2', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
def test_alternate_read_and_overwrite(self):
buf = CircularBuffer(5)
for c in '123':
buf.write(c)
buf.read()
buf.read()
buf.write('4')
buf.read()
for c in '5678':
buf.write(c)
buf.overwrite('A')
buf.overwrite('B')
self.assertEqual('6', buf.read())
self.assertEqual('7', buf.read())
self.assertEqual('8', buf.read())
self.assertEqual('A', buf.read())
self.assertEqual('B', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
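# For readers without the exercise scaffolding, the block below is a minimal
# sketch of a circular_buffer.py that satisfies the tests above. It is kept
# commented out so it does not shadow the module imported at the top of this
# file; copy it into circular_buffer.py to try it.
#
# class BufferFullException(Exception):
#     pass
#
#
# class BufferEmptyException(Exception):
#     pass
#
#
# class CircularBuffer(object):
#     def __init__(self, capacity):
#         self.capacity = capacity
#         self.buffer = []
#
#     def read(self):
#         if not self.buffer:
#             raise BufferEmptyException('buffer is empty')
#         return self.buffer.pop(0)
#
#     def write(self, item):
#         if len(self.buffer) >= self.capacity:
#             raise BufferFullException('buffer is full')
#         self.buffer.append(item)
#
#     def overwrite(self, item):
#         if len(self.buffer) >= self.capacity:
#             self.buffer.pop(0)   # drop the oldest item
#         self.buffer.append(item)
#
#     def clear(self):
#         self.buffer = []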
if __name__ == '__main__':
unittest.main()
| GregMilway/Exercism | python/circular-buffer/circular_buffer_test.py | Python | gpl-3.0 | 3,084 |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for interacting with Google Compute Engine firewalls."""
import socket
from google.apputils import appcommands
import gflags as flags
from gcutil_lib import command_base
from gcutil_lib import gcutil_errors
from gcutil_lib import utils
FLAGS = flags.FLAGS
class FirewallCommand(command_base.GoogleComputeCommand):
"""Base command for working with the firewalls collection."""
print_spec = command_base.ResourcePrintSpec(
summary=['name', 'network'],
field_mappings=(
('name', 'name'),
('description', 'description'),
('network', 'network'),
('source-ips', 'sourceRanges'),
('source-tags', 'sourceTags'),
('target-tags', 'targetTags')),
detail=(
('name', 'name'),
('description', 'description'),
('creation-time', 'creationTimestamp'),
('network', 'network'),
('source-ips', 'sourceRanges'),
('source-tags', 'sourceTags'),
('target-tags', 'targetTags')),
sort_by='name')
resource_collection_name = 'firewalls'
def __init__(self, name, flag_values):
super(FirewallCommand, self).__init__(name, flag_values)
def GetDetailRow(self, result):
"""Returns an associative list of items for display in a detail table.
Args:
result: A dict returned by the server.
Returns:
A list.
"""
data = []
# Add the rules
for allowed in result.get('allowed', []):
as_string = str(allowed['IPProtocol'])
if allowed.get('ports'):
as_string += ': %s' % ', '.join(allowed['ports'])
data.append(('allowed', as_string))
return data
class FirewallRules(object):
"""Class representing the list of a firewall's rules.
  This class is only used for parsing a firewall from command-line flags;
  for printing the firewall, we simply dump the JSON.
"""
@staticmethod
def ParsePortSpecs(port_spec_strings):
"""Parse the port-specification portion of firewall rules.
This takes the value of the 'allowed' flag and builds the
corresponding firewall rules, excluding the 'source' fields.
Args:
port_spec_strings: A list of strings specifying the port-specific
components of a firewall rule. These are of the form
"(<protocol>)?(:<port>('-'<port>)?)?"
Returns:
A list of dict values containing a protocol string and a list
of port range strings. This is a substructure of the firewall
rule dictionaries, which additionally contain a 'source' field.
Raises:
ValueError: If any of the input strings are malformed.
"""
def _AddToPortSpecs(protocol, port_string, port_specs):
"""Ensure the specified rule for this protocol allows the given port(s).
If there is no port_string specified it implies all ports are allowed,
and whatever is in the port_specs map for that protocol get clobbered.
This method also makes sure that any protocol entry without a ports
member does not get further restricted.
Args:
protocol: The protocol under which the given port range is allowed.
port_string: The string specification of what ports are allowed.
port_specs: The mapping from protocols to firewall rules.
"""
port_spec_entry = port_specs.setdefault(protocol,
{'IPProtocol': str(protocol),
'ports': []})
if 'ports' in port_spec_entry:
# We only handle the 'then' case because in the other case the
# existing entry already allows all ports.
if not port_string:
# A missing 'ports' field indicates all ports are allowed.
port_spec_entry.pop('ports')
else:
port_spec_entry['ports'].append(port_string)
port_specs = {}
for port_spec_string in port_spec_strings:
protocol = None
port_string = None
parts = port_spec_string.split(':')
if len(parts) > 2:
raise ValueError('Invalid allowed entry: %s' %
port_spec_string)
elif len(parts) == 2:
if parts[0]:
protocol = utils.ParseProtocol(parts[0])
port_string = utils.ReplacePortNames(parts[1])
else:
protocol = utils.ParseProtocol(parts[0])
if protocol:
_AddToPortSpecs(protocol, port_string, port_specs)
else:
# Add entries for both UPD and TCP
_AddToPortSpecs(socket.getprotobyname('tcp'), port_string, port_specs)
_AddToPortSpecs(socket.getprotobyname('udp'), port_string, port_specs)
return port_specs.values()
def __init__(self, allowed, allowed_ip_sources):
self.port_specs = FirewallRules.ParsePortSpecs(allowed)
self.source_ranges = allowed_ip_sources
self.source_tags = []
self.target_tags = []
def SetTags(self, source_tags, target_tags):
self.source_tags = sorted(set(source_tags))
self.target_tags = sorted(set(target_tags))
def AddToFirewall(self, firewall):
if self.source_ranges:
firewall['sourceRanges'] = self.source_ranges
if self.source_tags:
firewall['sourceTags'] = self.source_tags
if self.target_tags:
firewall['targetTags'] = self.target_tags
firewall['allowed'] = self.port_specs
class AddFirewall(FirewallCommand):
"""Create a new firewall rule to allow incoming traffic to a network."""
positional_args = '<firewall-name>'
def __init__(self, name, flag_values):
super(AddFirewall, self).__init__(name, flag_values)
flags.DEFINE_string('description',
'',
'An optional Firewall description.',
flag_values=flag_values)
flags.DEFINE_string('network',
'default',
'Specifies which network this firewall applies to.',
flag_values=flag_values)
flags.DEFINE_list('allowed',
None,
'[Required] Specifies a list of allowed ports for this '
'firewall. Each entry must be a combination of the '
'protocol and the port or port range in the following '
'form: \'<protocol>:<port>-<port>\' or '
'\'<protocol>:<port>\'. To specify multiple ports, '
'protocols, or ranges, provide them as comma'
'-separated entries. For example: '
'\'--allowed=tcp:ssh,udp:5000-6000,tcp:80,icmp\'.',
flag_values=flag_values)
flags.DEFINE_list('allowed_ip_sources',
[],
'Specifies a list of IP addresses that are allowed '
'to talk to instances within the network, through the '
'<protocols>:<ports> described by the \'--allowed\' '
'flag. If no IP or tag sources are listed, all sources '
'will be allowed.',
flag_values=flag_values)
flags.DEFINE_list('allowed_tag_sources',
[],
'Specifies a list of instance tags that are allowed to '
'talk to instances within the network, through the '
'<protocols>:<ports> described by the \'--allowed\' '
'flag. If specifying multiple tags, provide them as '
'comma-separated entries. For example, '
'\'--allowed_tag_sources=www,database,frontend\'. '
'If no tag or ip sources are listed, all sources will '
'be allowed.',
flag_values=flag_values)
flags.DEFINE_list('target_tags',
[],
'Specifies a set of tagged instances that this '
'firewall applies to. To specify multiple tags, '
'provide them as comma-separated entries. If no tags '
'are listed, this firewall applies to all instances in '
'the network.',
flag_values=flag_values)
def Handle(self, firewall_name):
"""Add the specified firewall.
Args:
firewall_name: The name of the firewall to add.
Returns:
The result of inserting the firewall.
Raises:
gcutil_errors.CommandError: If the passed flag values cannot be
interpreted.
"""
if not self._flags.allowed:
raise gcutil_errors.CommandError(
'You must specify at least one rule through --allowed.')
firewall_context = self._context_parser.ParseContextOrPrompt('firewalls',
firewall_name)
firewall_resource = {
'kind': self._GetResourceApiKind('firewall'),
'name': firewall_context['firewall'],
'description': self._flags.description,
}
if self._flags.network is not None:
firewall_resource['network'] = self._context_parser.NormalizeOrPrompt(
'networks', self._flags.network)
if (not self._flags.allowed_ip_sources and
not self._flags.allowed_tag_sources):
self._flags.allowed_ip_sources.append('0.0.0.0/0')
try:
firewall_rules = FirewallRules(self._flags.allowed,
self._flags.allowed_ip_sources)
firewall_rules.SetTags(self._flags.allowed_tag_sources,
self._flags.target_tags)
firewall_rules.AddToFirewall(firewall_resource)
firewall_request = self.api.firewalls.insert(
project=firewall_context['project'], body=firewall_resource)
return firewall_request.execute()
except ValueError, e:
raise gcutil_errors.CommandError(e)
class GetFirewall(FirewallCommand):
"""Get a firewall."""
positional_args = '<firewall-name>'
def __init__(self, name, flag_values):
super(GetFirewall, self).__init__(name, flag_values)
def Handle(self, firewall_name):
"""Get the specified firewall.
Args:
firewall_name: The name of the firewall to get.
Returns:
The result of getting the firewall.
"""
firewall_context = self._context_parser.ParseContextOrPrompt('firewalls',
firewall_name)
firewall_request = self.api.firewalls.get(
project=firewall_context['project'],
firewall=firewall_context['firewall'])
return firewall_request.execute()
class DeleteFirewall(FirewallCommand):
"""Delete one or more firewall rules.
Specify multiple firewalls as multiple arguments. The firewalls will be
deleted in parallel.
"""
positional_args = '<firewall-name-1> ... <firewall-name-n>'
safety_prompt = 'Delete firewall'
def __init__(self, name, flag_values):
super(DeleteFirewall, self).__init__(name, flag_values)
def Handle(self, *firewall_names):
"""Delete the specified firewall.
Args:
*firewall_names: The names of the firewalls to delete.
Returns:
Tuple (results, exceptions) - results of deleting the firewalls.
"""
requests = []
for name in firewall_names:
firewall_context = self._context_parser.ParseContextOrPrompt('firewalls',
name)
requests.append(self.api.firewalls.delete(
project=firewall_context['project'],
firewall=firewall_context['firewall']))
results, exceptions = self.ExecuteRequests(requests)
return (self.MakeListResult(results, 'operationList'), exceptions)
class ListFirewalls(FirewallCommand, command_base.GoogleComputeListCommand):
"""List the firewall rules for a project."""
def ListFunc(self):
"""Returns the function for listing firewalls."""
return self.api.firewalls.list
def AddCommands():
appcommands.AddCmd('addfirewall', AddFirewall)
appcommands.AddCmd('getfirewall', GetFirewall)
appcommands.AddCmd('deletefirewall', DeleteFirewall)
appcommands.AddCmd('listfirewalls', ListFirewalls)
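# A minimal usage sketch of FirewallRules, assuming the package-local utils
# module is importable; the flag values are made up and nothing here talks to
# the Compute API. It builds the same 'allowed' structure AddFirewall would.
if __name__ == '__main__':
  rules = FirewallRules(allowed=['tcp:80', 'tcp:8000-8080', 'icmp'],
                        allowed_ip_sources=['10.0.0.0/8'])
  rules.SetTags(source_tags=[], target_tags=['frontend'])
  firewall_resource = {'name': 'example-firewall'}
  rules.AddToFirewall(firewall_resource)
  print(firewall_resource)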
| harshilasu/LinkurApp | y/google-cloud-sdk/platform/gcutil/lib/google_compute_engine/gcutil_lib/firewall_cmds.py | Python | gpl-3.0 | 12,747 |
from unittest import TestCase
class Test(TestCase):
pass
| egcodes/haberbus | aristotle/tests/test_util.py | Python | gpl-3.0 | 63 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2010-2011
# Drakmail < [email protected] >
# NomerUNO < [email protected] >
# Platon Peacel☮ve <[email protected]>
# Elec.Lomy.RU <[email protected]>
# ADcomp <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from Queue import Queue
from subprocess import Popen
from debug import logINFO
devnull = open(os.path.devnull, 'w')
q = None
def start():
global q
q = Queue()
def stop():
while not q.empty():
q.get()
q.task_done()
q.join()
def check_programs():
programs = []
while not q.empty():
program = q.get()
if program.poll() == None:
programs.append(program)
q.task_done()
for program in programs:
q.put(program)
return True
def launch_command(cmd):
try:
p = Popen(cmd, stdout = devnull, stderr = devnull )
q.put(p)
except OSError, e:
logINFO("unable to execute a command: %s : %s" % (repr(cmd), repr(e) ))
| tectronics/snapfly | src/launcher.py | Python | gpl-3.0 | 1,632 |
#!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: azure_rm_publicipaddress
version_added: "2.1"
short_description: Manage Azure Public IP Addresses.
description:
- Create, update and delete a Public IP address. Allows setting and updating the address allocation method and
domain name label. Use the azure_rm_networkinterface module to associate a Public IP with a network interface.
options:
resource_group:
description:
- Name of resource group with which the Public IP is associated.
required: true
allocation_method:
description:
- Control whether the assigned Public IP remains permanently assigned to the object. If not
          set to 'Static', the IP address may change any time an associated virtual machine is power cycled.
choices:
- Dynamic
- Static
default: Dynamic
required: false
    domain_name:
description:
            - The customizable portion of the FQDN assigned to the public IP address. This is an explicit setting. If
no value is provided, any existing value will be removed on an existing public IP.
aliases:
- domain_name_label
required: false
default: null
name:
description:
- Name of the Public IP.
required: true
state:
description:
            - Assert the state of the Public IP. Use 'present' to create or update a public IP and
              'absent' to delete it.
default: present
choices:
- absent
- present
required: false
location:
description:
- Valid azure location. Defaults to location of the resource group.
default: resource_group location
required: false
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Create a public ip address
azure_rm_publicipaddress:
resource_group: testing
name: my_public_ip
allocation_method: Static
domain_name: foobar
- name: Delete public ip
azure_rm_publicipaddress:
resource_group: testing
name: my_public_ip
state: absent
'''
RETURN = '''
state:
description: Facts about the current state of the object.
returned: always
type: dict
    sample: {
"dns_settings": {},
"etag": "W/\"a5e56955-12df-445a-bda4-dc129d22c12f\"",
"idle_timeout_in_minutes": 4,
"ip_address": "52.160.103.93",
"location": "westus",
"name": "publicip002",
"provisioning_state": "Succeeded",
"public_ip_allocation_method": "Static",
"tags": {},
"type": "Microsoft.Network/publicIPAddresses"
}
'''
from ansible.module_utils.basic import *
from ansible.module_utils.azure_rm_common import *
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.network.models import PublicIPAddress, PublicIPAddressDnsSettings
except ImportError:
# This is handled in azure_rm_common
pass
NAME_PATTERN = re.compile(r"^[a-z][a-z0-9-]{1,61}[a-z0-9]$")
def pip_to_dict(pip):
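    # Flatten the Azure SDK PublicIPAddress object into a plain dict for the module's result payload.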
result = dict(
name=pip.name,
type=pip.type,
location=pip.location,
tags=pip.tags,
public_ip_allocation_method=pip.public_ip_allocation_method.value,
dns_settings=dict(),
ip_address=pip.ip_address,
idle_timeout_in_minutes=pip.idle_timeout_in_minutes,
provisioning_state=pip.provisioning_state,
etag=pip.etag
)
if pip.dns_settings:
result['dns_settings']['domain_name_label'] = pip.dns_settings.domain_name_label
result['dns_settings']['fqdn'] = pip.dns_settings.fqdn
result['dns_settings']['reverse_fqdn'] = pip.dns_settings.reverse_fqdn
return result
class AzureRMPublicIPAddress(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
location=dict(type='str'),
allocation_method=dict(type='str', default='Dynamic', choices=['Dynamic', 'Static']),
domain_name=dict(type='str', aliases=['domain_name_label']),
)
self.resource_group = None
self.name = None
self.location = None
self.state = None
self.tags = None
self.allocation_method = None
self.domain_name = None
self.results = dict(
changed=False,
state=dict()
)
super(AzureRMPublicIPAddress, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in self.module_arg_spec.keys() + ['tags']:
setattr(self, key, kwargs[key])
results = dict()
changed = False
pip = None
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
# Set default location
self.location = resource_group.location
if not NAME_PATTERN.match(self.name):
self.fail("Parameter error: name must begin with a letter or number, end with a letter or number "
"and contain at least one number.")
try:
self.log("Fetch public ip {0}".format(self.name))
pip = self.network_client.public_ip_addresses.get(self.resource_group, self.name)
self.check_provisioning_state(pip, self.state)
self.log("PIP {0} exists".format(self.name))
if self.state == 'present':
results = pip_to_dict(pip)
if self.domain_name != results['dns_settings'].get('domain_name_label'):
self.log('CHANGED: domain_name_label')
changed = True
                    results['dns_settings']['domain_name_label'] = self.domain_name
if self.allocation_method != results['public_ip_allocation_method']:
self.log("CHANGED: allocation_method")
changed = True
results['public_ip_allocation_method'] = self.allocation_method
update_tags, results['tags'] = self.update_tags(results['tags'])
if update_tags:
changed = True
elif self.state == 'absent':
self.log("CHANGED: public ip {0} exists but requested state is 'absent'".format(self.name))
changed = True
except CloudError:
self.log('Public ip {0} does not exist'.format(self.name))
if self.state == 'present':
self.log("CHANGED: pip {0} does not exist but requested state is 'present'".format(self.name))
changed = True
self.results['state'] = results
self.results['changed'] = changed
if self.check_mode:
return results
if changed:
if self.state == 'present':
if not pip:
self.log("Create new Public IP {0}".format(self.name))
pip = PublicIPAddress(
location=self.location,
public_ip_allocation_method=self.allocation_method,
)
if self.tags:
pip.tags = self.tags
if self.domain_name:
pip.dns_settings = PublicIPAddressDnsSettings(
domain_name_label=self.domain_name
)
else:
self.log("Update Public IP {0}".format(self.name))
pip = PublicIPAddress(
location=results['location'],
public_ip_allocation_method=results['public_ip_allocation_method'],
tags=results['tags']
)
if self.domain_name:
pip.dns_settings = PublicIPAddressDnsSettings(
domain_name_label=self.domain_name
)
self.results['state'] = self.create_or_update_pip(pip)
elif self.state == 'absent':
self.log('Delete public ip {0}'.format(self.name))
self.delete_pip()
return self.results
def create_or_update_pip(self, pip):
try:
poller = self.network_client.public_ip_addresses.create_or_update(self.resource_group, self.name, pip)
pip = self.get_poller_result(poller)
except Exception as exc:
self.fail("Error creating or updating {0} - {1}".format(self.name, str(exc)))
return pip_to_dict(pip)
def delete_pip(self):
try:
poller = self.network_client.public_ip_addresses.delete(self.resource_group, self.name)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting {0} - {1}".format(self.name, str(exc)))
# Delete returns nada. If we get here, assume that all is well.
self.results['state']['status'] = 'Deleted'
return True
def main():
AzureRMPublicIPAddress()
if __name__ == '__main__':
main()
| hlieberman/ansible-modules-core | cloud/azure/azure_rm_publicipaddress.py | Python | gpl-3.0 | 10,272 |
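# A small self-contained illustration (not part of the module above) of the
# NAME_PATTERN check used in exec_module(): names must start with a lowercase
# letter, may contain lowercase letters, digits and hyphens, and must end with
# a letter or digit.
import re
NAME_PATTERN = re.compile(r"^[a-z][a-z0-9-]{1,61}[a-z0-9]$")
assert NAME_PATTERN.match("public-ip-01")
assert NAME_PATTERN.match("PublicIP") is None        # uppercase is rejected
assert NAME_PATTERN.match("ip-ends-with-") is None   # trailing hyphen is rejected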
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Monkeypatch initialisation functions
"""
try:
from collections import OrderedDict
except ImportError: # pragma: no-cover
from ordereddict import OrderedDict # pylint:disable=import-error
from rebulk.match import Match
def monkeypatch_rebulk():
"""Monkeypatch rebulk classes"""
@property
def match_advanced(self):
"""
Build advanced dict from match
:param self:
:return:
"""
ret = OrderedDict()
ret['value'] = self.value
if self.raw:
ret['raw'] = self.raw
ret['start'] = self.start
ret['end'] = self.end
return ret
Match.advanced = match_advanced
| clinton-hall/nzbToMedia | libs/common/guessit/monkeypatch.py | Python | gpl-3.0 | 729 |
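# Illustrative sketch only: after monkeypatch_rebulk() has run, every rebulk
# Match exposes an ``advanced`` OrderedDict. The Match constructor arguments
# and import paths below are assumptions for illustration, not taken from the
# original source.
from rebulk.match import Match
from guessit.monkeypatch import monkeypatch_rebulk
monkeypatch_rebulk()
match = Match(0, 5, value="match")
print(match.advanced)  # OrderedDict with 'value', 'start', 'end' (and 'raw' when available)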
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.views.generic import View
from django.conf import settings
from geonode.base.enumerations import LINK_TYPES as _LT
# from geonode.base.models import Link
from geonode.utils import json_response
from geonode.geoserver import ows
LINK_TYPES = [L for L in _LT if L.startswith("OGC:")]
class OWSListView(View):
def get(self, request):
out = {'success': True}
data = []
out['data'] = data
# per-layer links
# for link in Link.objects.filter(link_type__in=LINK_TYPES): # .distinct('url'):
# data.append({'url': link.url, 'type': link.link_type})
data.append({'url': ows._wcs_get_capabilities(), 'type': 'OGC:WCS'})
data.append({'url': ows._wfs_get_capabilities(), 'type': 'OGC:WFS'})
data.append({'url': ows._wms_get_capabilities(), 'type': 'OGC:WMS'})
# catalogue from configuration
for catname, catconf in settings.CATALOGUE.items():
data.append({'url': catconf['URL'], 'type': 'OGC:CSW'})
# main site url
data.append({'url': settings.SITEURL, 'type': 'WWW:LINK'})
return json_response(out)
ows_endpoints = OWSListView.as_view()
| timlinux/geonode | geonode/contrib/ows_api/views.py | Python | gpl-3.0 | 2,017 |
VERSION = (0, 6, 0)
__version__ = '.'.join((str(x) for x in VERSION))
| jicksy/oneanddone_test | vendor-local/lib/python/jingo_minify/__init__.py | Python | mpl-2.0 | 70 |
import random, copy
def generate(data):
data['correct_answers']['x'] = 3
def grade(data):
raise Exception('deliberately broken grading function')
| PrairieLearn/PrairieLearn | testCourse/questions/brokenGrading/server.py | Python | agpl-3.0 | 156 |
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import res_partner
| rosenvladimirov/addons | partner_vat_search/models/__init__.py | Python | agpl-3.0 | 120 |
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 17 15:48:31 2015
@author: thomas.douenne
"""
# The goal is to describe the evolution of TICPE excise duty amounts since 1993
# Import functions specific to Openfisca Indirect Taxation
from openfisca_france_indirect_taxation.examples.utils_example import graph_builder_bar_list
from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_accises import \
get_accise_ticpe_majoree
# Look up the legislation parameters
liste = ['ticpe_gazole', 'ticpe_super9598', 'super_plombe_ticpe']
df_accises = get_accise_ticpe_majoree()
# Build the graphs
graph_builder_bar_list(df_accises['accise majoree sans plomb'], 1, 1)
graph_builder_bar_list(df_accises['accise majoree diesel'], 1, 1)
graph_builder_bar_list(df_accises['accise majoree super plombe'], 1, 1)
| benjello/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/transports/plot_legislation/plot_ticpe_accises.py | Python | agpl-3.0 | 865 |
"""SCons.Tool.gcc
Tool-specific initialization for gcc.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2017 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/gcc.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog"
from . import cc
import os
import re
import subprocess
import SCons.Util
compilers = ['gcc', 'cc']
def generate(env):
"""Add Builders and construction variables for gcc to an Environment."""
if 'CC' not in env:
env['CC'] = env.Detect(compilers) or compilers[0]
cc.generate(env)
if env['PLATFORM'] in ['cygwin', 'win32']:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
else:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
# determine compiler version
version = detect_version(env, env['CC'])
if version:
env['CCVERSION'] = version
def exists(env):
# is executable, and is a GNU compiler (or accepts '--version' at least)
return detect_version(env, env.Detect(env.get('CC', compilers)))
def detect_version(env, cc):
"""Return the version of the GNU compiler, or None if it is not a GNU compiler."""
cc = env.subst(cc)
if not cc:
return None
version = None
#pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],
pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],
stdin = 'devnull',
stderr = 'devnull',
stdout = subprocess.PIPE)
# -dumpversion was added in GCC 3.0. As long as we're supporting
# GCC versions older than that, we should use --version and a
# regular expression.
#line = pipe.stdout.read().strip()
#if line:
# version = line
line = SCons.Util.to_str(pipe.stdout.readline())
match = re.search(r'[0-9]+(\.[0-9]+)+', line)
if match:
version = match.group(0)
# Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:
# So continue with reading to let the child process actually terminate.
while SCons.Util.to_str(pipe.stdout.readline()):
pass
ret = pipe.wait()
if ret != 0:
return None
return version
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mapycz/mapnik | scons/scons-local-3.0.1/SCons/Tool/gcc.py | Python | lgpl-2.1 | 3,530 |
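# A small self-contained illustration (not part of the tool above) of the
# version regex used in detect_version(). The sample line mimics the first
# line of `gcc --version` output and is an assumption for illustration.
import re
line = "gcc (Ubuntu 9.4.0-1ubuntu1) 9.4.0"
match = re.search(r'[0-9]+(\.[0-9]+)+', line)
print(match.group(0))  # -> "9.4.0"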
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import re
from oslo_log import log as logging
import testtools
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest import exceptions
from tempest.scenario import manager
from tempest.services.network import resources as net_resources
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
Floating_IP_tuple = collections.namedtuple('Floating_IP_tuple',
['floating_ip', 'server'])
class TestNetworkBasicOps(manager.NetworkScenarioTest):
"""
This smoke test suite assumes that Nova has been configured to
    boot VMs with Neutron-managed networking, and attempts to
verify network connectivity as follows:
There are presumed to be two types of networks: tenant and
public. A tenant network may or may not be reachable from the
Tempest host. A public network is assumed to be reachable from
the Tempest host, and it should be possible to associate a public
('floating') IP address with a tenant ('fixed') IP address to
facilitate external connectivity to a potentially unroutable
tenant IP address.
This test suite can be configured to test network connectivity to
a VM via a tenant network, a public network, or both. If both
networking types are to be evaluated, tests that need to be
executed remotely on the VM (via ssh) will only be run against
one of the networks (to minimize test execution time).
Determine which types of networks to test as follows:
* Configure tenant network checks (via the
'tenant_networks_reachable' key) if the Tempest host should
have direct connectivity to tenant networks. This is likely to
be the case if Tempest is running on the same host as a
single-node devstack installation with IP namespaces disabled.
* Configure checks for a public network if a public network has
been configured prior to the test suite being run and if the
Tempest host should have connectivity to that public network.
Checking connectivity for a public network requires that a
value be provided for 'public_network_id'. A value can
optionally be provided for 'public_router_id' if tenants will
use a shared router to access a public network (as is likely to
be the case when IP namespaces are not enabled). If a value is
not provided for 'public_router_id', a router will be created
for each tenant and use the network identified by
'public_network_id' as its gateway.
"""
@classmethod
def skip_checks(cls):
super(TestNetworkBasicOps, cls).skip_checks()
if not (CONF.network.tenant_networks_reachable
or CONF.network.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
for ext in ['router', 'security-group']:
if not test.is_extension_enabled(ext, 'network'):
msg = "%s extension not enabled." % ext
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestNetworkBasicOps, cls).setup_credentials()
def setUp(self):
super(TestNetworkBasicOps, self).setUp()
self.keypairs = {}
self.servers = []
def _setup_network_and_servers(self, **kwargs):
boot_with_port = kwargs.pop('boot_with_port', False)
self.security_group = \
self._create_security_group(tenant_id=self.tenant_id)
self.network, self.subnet, self.router = self.create_networks(**kwargs)
self.check_networks()
self.ports = []
self.port_id = None
if boot_with_port:
# create a port on the network and boot with that
self.port_id = self._create_port(self.network['id']).id
self.ports.append({'port': self.port_id})
name = data_utils.rand_name('server-smoke')
server = self._create_server(name, self.network, self.port_id)
self._check_tenant_network_connectivity()
floating_ip = self.create_floating_ip(server)
self.floating_ip_tuple = Floating_IP_tuple(floating_ip, server)
def check_networks(self):
"""
        Checks that we see the newly created network/subnet/router by
        checking the result of list_[networks,routers,subnets]
"""
seen_nets = self._list_networks()
seen_names = [n['name'] for n in seen_nets]
seen_ids = [n['id'] for n in seen_nets]
self.assertIn(self.network.name, seen_names)
self.assertIn(self.network.id, seen_ids)
if self.subnet:
seen_subnets = self._list_subnets()
seen_net_ids = [n['network_id'] for n in seen_subnets]
seen_subnet_ids = [n['id'] for n in seen_subnets]
self.assertIn(self.network.id, seen_net_ids)
self.assertIn(self.subnet.id, seen_subnet_ids)
if self.router:
seen_routers = self._list_routers()
seen_router_ids = [n['id'] for n in seen_routers]
seen_router_names = [n['name'] for n in seen_routers]
self.assertIn(self.router.name,
seen_router_names)
self.assertIn(self.router.id,
seen_router_ids)
def _create_server(self, name, network, port_id=None):
keypair = self.create_keypair()
self.keypairs[keypair['name']] = keypair
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'networks': [
{'uuid': network.id},
],
'key_name': keypair['name'],
'security_groups': security_groups,
}
if port_id is not None:
create_kwargs['networks'][0]['port'] = port_id
server = self.create_server(name=name, create_kwargs=create_kwargs)
self.servers.append(server)
return server
def _get_server_key(self, server):
return self.keypairs[server['key_name']]['private_key']
def _check_tenant_network_connectivity(self):
ssh_login = CONF.compute.image_ssh_user
for server in self.servers:
# call the common method in the parent class
super(TestNetworkBasicOps, self).\
_check_tenant_network_connectivity(
server, ssh_login, self._get_server_key(server),
servers_for_debug=self.servers)
def check_public_network_connectivity(
self, should_connect=True, msg=None,
should_check_floating_ip_status=True):
"""Verifies connectivty to a VM via public network and floating IP,
and verifies floating IP has resource status is correct.
:param should_connect: bool. determines if connectivity check is
negative or positive.
:param msg: Failure message to add to Error message. Should describe
the place in the test scenario where the method was called,
to indicate the context of the failure
:param should_check_floating_ip_status: bool. should status of
floating_ip be checked or not
"""
ssh_login = CONF.compute.image_ssh_user
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = None
floatingip_status = 'DOWN'
if should_connect:
private_key = self._get_server_key(server)
floatingip_status = 'ACTIVE'
# Check FloatingIP Status before initiating a connection
if should_check_floating_ip_status:
self.check_floating_ip_status(floating_ip, floatingip_status)
# call the common method in the parent class
super(TestNetworkBasicOps, self).check_public_network_connectivity(
ip_address, ssh_login, private_key, should_connect, msg,
self.servers)
def _disassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
self._disassociate_floating_ip(floating_ip)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, None)
def _reassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
name = data_utils.rand_name('new_server-smoke')
# create a new server for the floating ip
server = self._create_server(name, self.network)
self._associate_floating_ip(floating_ip, server)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, server)
def _create_new_network(self, create_gateway=False):
self.new_net = self._create_network(tenant_id=self.tenant_id)
if create_gateway:
self.new_subnet = self._create_subnet(
network=self.new_net)
else:
self.new_subnet = self._create_subnet(
network=self.new_net,
gateway_ip=None)
def _hotplug_server(self):
old_floating_ip, server = self.floating_ip_tuple
ip_address = old_floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self.get_remote_client(ip_address,
private_key=private_key)
old_nic_list = self._get_server_nics(ssh_client)
# get a port from a list of one item
port_list = self._list_ports(device_id=server['id'])
self.assertEqual(1, len(port_list))
old_port = port_list[0]
interface = self.interface_client.create_interface(
server_id=server['id'],
net_id=self.new_net.id)['interfaceAttachment']
self.addCleanup(self.network_client.wait_for_resource_deletion,
'port',
interface['port_id'])
self.addCleanup(self.delete_wrapper,
self.interface_client.delete_interface,
server['id'], interface['port_id'])
def check_ports():
self.new_port_list = [port for port in
self._list_ports(device_id=server['id'])
if port['id'] != old_port['id']]
return len(self.new_port_list) == 1
if not test.call_until_true(check_ports, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException(
"No new port attached to the server in time (%s sec)! "
"Old port: %s. Number of new ports: %d" % (
CONF.network.build_timeout, old_port,
len(self.new_port_list)))
new_port = net_resources.DeletablePort(client=self.network_client,
**self.new_port_list[0])
def check_new_nic():
new_nic_list = self._get_server_nics(ssh_client)
self.diff_list = [n for n in new_nic_list if n not in old_nic_list]
return len(self.diff_list) == 1
if not test.call_until_true(check_new_nic, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException("Interface not visible on the "
"guest after %s sec"
% CONF.network.build_timeout)
num, new_nic = self.diff_list[0]
ssh_client.assign_static_ip(nic=new_nic,
addr=new_port.fixed_ips[0]['ip_address'])
ssh_client.turn_nic_on(nic=new_nic)
def _get_server_nics(self, ssh_client):
reg = re.compile(r'(?P<num>\d+): (?P<nic_name>\w+):')
ipatxt = ssh_client.get_ip_list()
return reg.findall(ipatxt)
def _check_network_internal_connectivity(self, network,
should_connect=True):
"""
via ssh check VM internal connectivity:
- ping internal gateway and DHCP port, implying in-tenant connectivity
pinging both, because L3 and DHCP agents might be on different nodes
"""
floating_ip, server = self.floating_ip_tuple
# get internal ports' ips:
# get all network ports in the new network
internal_ips = (p['fixed_ips'][0]['ip_address'] for p in
self._list_ports(tenant_id=server['tenant_id'],
network_id=network.id)
if p['device_owner'].startswith('network'))
self._check_server_connectivity(floating_ip,
internal_ips,
should_connect)
def _check_network_external_connectivity(self):
"""
ping public network default gateway to imply external connectivity
"""
if not CONF.network.public_network_id:
msg = 'public network not defined.'
LOG.info(msg)
return
# We ping the external IP from the instance using its floating IP
# which is always IPv4, so we must only test connectivity to
# external IPv4 IPs if the external network is dualstack.
v4_subnets = [s for s in self._list_subnets(
network_id=CONF.network.public_network_id) if s['ip_version'] == 4]
self.assertEqual(1, len(v4_subnets),
"Found %d IPv4 subnets" % len(v4_subnets))
external_ips = [v4_subnets[0]['gateway_ip']]
self._check_server_connectivity(self.floating_ip_tuple.floating_ip,
external_ips)
def _check_server_connectivity(self, floating_ip, address_list,
should_connect=True):
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(self.floating_ip_tuple.server)
ssh_source = self._ssh_to_server(ip_address, private_key)
for remote_ip in address_list:
if should_connect:
msg = ("Timed out waiting for %s to become "
"reachable") % remote_ip
else:
msg = "ip address %s is reachable" % remote_ip
try:
self.assertTrue(self._check_remote_connectivity
(ssh_source, remote_ip, should_connect),
msg)
except Exception:
LOG.exception("Unable to access {dest} via ssh to "
"floating-ip {src}".format(dest=remote_ip,
src=floating_ip))
raise
@test.attr(type='smoke')
@test.idempotent_id('f323b3ba-82f8-4db7-8ea6-6a895869ec49')
@test.services('compute', 'network')
def test_network_basic_ops(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address. This implies, but
does not guarantee (see the ssh check that follows), that the
VM has been assigned the correct IP address and has
connectivity to the Tempest host.
- the Tempest host can perform key-based authentication to an
ssh server hosted at the IP address. This check guarantees
that the IP address is associated with the target VM.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
- detach the floating-ip from the VM and verify that it becomes
unreachable
- associate detached floating ip to a new VM and verify connectivity.
VMs are created with unique keypair so connectivity also asserts that
floating IP is associated with the new VM instead of the old one
Verifies that floating IP status is updated correctly after each change
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._disassociate_floating_ips()
self.check_public_network_connectivity(should_connect=False,
msg="after disassociate "
"floating ip")
self._reassociate_floating_ips()
self.check_public_network_connectivity(should_connect=True,
msg="after re-associate "
"floating ip")
@test.idempotent_id('1546850e-fbaa-42f5-8b5f-03d8a6a95f15')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Baremetal relies on a shared physical network.')
@test.services('compute', 'network')
def test_connectivity_between_vms_on_different_networks(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
- Create another network on the same tenant with subnet, create
an VM on the new network.
- Ping the new VM from previous VM failed since the new network
was not attached to router yet.
- Attach the new network to the router, Ping the new VM from
previous VM succeed.
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._create_new_network(create_gateway=True)
name = data_utils.rand_name('server-smoke')
self._create_server(name, self.new_net)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=False)
self.new_subnet.add_to_router(self.router.id)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=True)
@test.idempotent_id('c5adff73-e961-41f1-b4a9-343614f18cfa')
@testtools.skipUnless(CONF.compute_feature_enabled.interface_attach,
'NIC hotplug not available')
@testtools.skipIf(CONF.network.port_vnic_type in ['direct', 'macvtap'],
'NIC hotplug not supported for '
'vnic_type direct or macvtap')
@test.services('compute', 'network')
def test_hotplug_nic(self):
"""
1. create a new network, with no gateway (to prevent overwriting VM's
gateway)
2. connect VM to new network
3. set static ip and bring new nic up
4. check VM can ping new network dhcp port
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._create_new_network()
self._hotplug_server()
self._check_network_internal_connectivity(network=self.new_net)
@test.idempotent_id('04b9fe4e-85e8-4aea-b937-ea93885ac59f')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Router state cannot be altered on a shared baremetal '
'network')
@test.services('compute', 'network')
def test_update_router_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of router to False
2. Check public connectivity after updating
admin_state_up attribute of router to False
3. Check public connectivity after updating
admin_state_up attribute of router to True
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of router to False")
self._update_router_admin_state(self.router, False)
# TODO(alokmaurya): Remove should_check_floating_ip_status=False check
# once bug 1396310 is fixed
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of router to False",
should_check_floating_ip_status=False)
self._update_router_admin_state(self.router, True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of router to True")
@test.idempotent_id('d8bb918e-e2df-48b2-97cd-b73c95450980')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'network isolation not available for baremetal nodes')
@testtools.skipUnless(CONF.scenario.dhcp_client,
"DHCP client is not available.")
@test.services('compute', 'network')
def test_subnet_details(self):
"""Tests that subnet's extra configuration details are affecting
the VMs. This test relies on non-shared, isolated tenant networks.
NOTE: Neutron subnets push data to servers via dhcp-agent, so any
update in subnet requires server to actively renew its DHCP lease.
1. Configure subnet with dns nameserver
2. retrieve the VM's configured dns and verify it matches the one
configured for the subnet.
3. update subnet's dns
4. retrieve the VM's configured dns and verify it matches the new one
configured for the subnet.
TODO(yfried): add host_routes
any resolution check would be testing either:
* l3 forwarding (tested in test_network_basic_ops)
* Name resolution of an external DNS nameserver - out of scope for
Tempest
"""
# this test check only updates (no actual resolution) so using
# arbitrary ip addresses as nameservers, instead of parsing CONF
initial_dns_server = '1.2.3.4'
alt_dns_server = '9.8.7.6'
# renewal should be immediate.
# Timeouts are suggested by salvatore-orlando in
# https://bugs.launchpad.net/neutron/+bug/1412325/comments/3
renew_delay = CONF.network.build_interval
renew_timeout = CONF.network.build_timeout
self._setup_network_and_servers(dns_nameservers=[initial_dns_server])
self.check_public_network_connectivity(should_connect=True)
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self._ssh_to_server(ip_address, private_key)
dns_servers = [initial_dns_server]
servers = ssh_client.get_dns_servers()
self.assertEqual(set(dns_servers), set(servers),
'Looking for servers: {trgt_serv}. '
'Retrieved DNS nameservers: {act_serv} '
'From host: {host}.'
.format(host=ssh_client.ssh_client.host,
act_serv=servers,
trgt_serv=dns_servers))
self.subnet.update(dns_nameservers=[alt_dns_server])
# asserts that Neutron DB has updated the nameservers
self.assertEqual([alt_dns_server], self.subnet.dns_nameservers,
"Failed to update subnet's nameservers")
def check_new_dns_server():
"""Server needs to renew its dhcp lease in order to get the new dns
definitions from subnet
NOTE(amuller): we are renewing the lease as part of the retry
because Neutron updates dnsmasq asynchronously after the
subnet-update API call returns.
"""
ssh_client.renew_lease(fixed_ip=floating_ip['fixed_ip_address'])
if ssh_client.get_dns_servers() != [alt_dns_server]:
LOG.debug("Failed to update DNS nameservers")
return False
return True
self.assertTrue(test.call_until_true(check_new_dns_server,
renew_timeout,
renew_delay),
msg="DHCP renewal failed to fetch "
"new DNS nameservers")
@test.idempotent_id('f5dfcc22-45fd-409f-954c-5bd500d7890b')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'admin_state of instance ports cannot be altered '
'for baremetal nodes')
@testtools.skipUnless(CONF.network_feature_enabled.port_admin_state_change,
"Changing a port's admin state is not supported "
"by the test environment")
@test.services('compute', 'network')
def test_update_instance_port_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of instance port to False
2. Check public connectivity after updating
admin_state_up attribute of instance port to False
3. Check public connectivity after updating
admin_state_up attribute of instance port to True
"""
self._setup_network_and_servers()
floating_ip, server = self.floating_ip_tuple
server_id = server['id']
port_id = self._list_ports(device_id=server_id)[0]['id']
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of instance port to False")
self.network_client.update_port(port_id, admin_state_up=False)
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of instance port to False",
should_check_floating_ip_status=False)
self.network_client.update_port(port_id, admin_state_up=True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of instance port to True")
@test.idempotent_id('759462e1-8535-46b0-ab3a-33aa45c55aaa')
@testtools.skipUnless(CONF.compute_feature_enabled.preserve_ports,
'Preserving ports on instance delete may not be '
'supported in the version of Nova being tested.')
@test.services('compute', 'network')
def test_preserve_preexisting_port(self):
"""Tests that a pre-existing port provided on server boot is not
deleted if the server is deleted.
Nova should unbind the port from the instance on delete if the port was
not created by Nova as part of the boot request.
"""
# Setup the network, create a port and boot the server from that port.
self._setup_network_and_servers(boot_with_port=True)
_, server = self.floating_ip_tuple
self.assertEqual(1, len(self.ports),
'There should only be one port created for '
'server %s.' % server['id'])
port_id = self.ports[0]['port']
self.assertIsNotNone(port_id,
'Server should have been created from a '
'pre-existing port.')
# Assert the port is bound to the server.
port_list = self._list_ports(device_id=server['id'],
network_id=self.network['id'])
self.assertEqual(1, len(port_list),
'There should only be one port created for '
'server %s.' % server['id'])
self.assertEqual(port_id, port_list[0]['id'])
# Delete the server.
self.servers_client.delete_server(server['id'])
waiters.wait_for_server_termination(self.servers_client, server['id'])
# Assert the port still exists on the network but is unbound from
# the deleted server.
port = self.network_client.show_port(port_id)['port']
self.assertEqual(self.network['id'], port['network_id'])
self.assertEqual('', port['device_id'])
self.assertEqual('', port['device_owner'])
@test.idempotent_id('2e788c46-fb3f-4ac9-8f82-0561555bea73')
@test.services('compute', 'network')
def test_router_rescheduling(self):
"""Tests that router can be removed from agent and add to a new agent.
1. Verify connectivity
2. Remove router from all l3-agents
3. Verify connectivity is down
4. Assign router to new l3-agent (or old one if no new agent is
available)
5. Verify connectivity
"""
# TODO(yfried): refactor this test to be used for other agents (dhcp)
# as well
list_hosts = (self.admin_manager.network_client.
list_l3_agents_hosting_router)
schedule_router = (self.admin_manager.network_client.
add_router_to_l3_agent)
unschedule_router = (self.admin_manager.network_client.
remove_router_from_l3_agent)
agent_list = set(a["id"] for a in
self._list_agents(agent_type="L3 agent"))
self._setup_network_and_servers()
# NOTE(kevinbenton): we have to use the admin credentials to check
# for the distributed flag because self.router only has a tenant view.
admin = self.admin_manager.network_client.show_router(self.router.id)
if admin['router'].get('distributed', False):
msg = "Rescheduling test does not apply to distributed routers."
raise self.skipException(msg)
self.check_public_network_connectivity(should_connect=True)
# remove resource from agents
hosting_agents = set(a["id"] for a in
list_hosts(self.router.id)['agents'])
no_migration = agent_list == hosting_agents
LOG.info("Router will be assigned to {mig} hosting agent".
format(mig="the same" if no_migration else "a new"))
for hosting_agent in hosting_agents:
unschedule_router(hosting_agent, self.router.id)
self.assertNotIn(hosting_agent,
[a["id"] for a in
list_hosts(self.router.id)['agents']],
'unscheduling router failed')
# verify resource is un-functional
self.check_public_network_connectivity(
should_connect=False,
msg='after router unscheduling',
should_check_floating_ip_status=False
)
# schedule resource to new agent
target_agent = list(hosting_agents if no_migration else
agent_list - hosting_agents)[0]
schedule_router(target_agent,
self.router['id'])
self.assertEqual(
target_agent,
list_hosts(self.router.id)['agents'][0]['id'],
"Router failed to reschedule. Hosting agent doesn't match "
"target agent")
# verify resource is functional
self.check_public_network_connectivity(
should_connect=True,
msg='After router rescheduling')
| flyingfish007/tempest | tempest/scenario/test_network_basic_ops.py | Python | apache-2.0 | 32,963 |
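# Illustration only (not part of the test suite above): the regex used in
# _get_server_nics() extracts interface numbers and names from `ip address`
# style output. The sample text is an assumption for illustration.
import re
reg = re.compile(r'(?P<num>\d+): (?P<nic_name>\w+):')
sample = "1: lo: <LOOPBACK,UP> mtu 65536\n2: eth0: <BROADCAST,MULTICAST,UP> mtu 1500"
print(reg.findall(sample))  # [('1', 'lo'), ('2', 'eth0')]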
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the moderator page."""
from core.controllers import base
from core.domain import acl_decorators
from core.domain import activity_domain
from core.domain import activity_services
from core.domain import email_manager
from core.domain import summary_services
import feconf
class ModeratorPage(base.BaseHandler):
"""The moderator page."""
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_template('pages/moderator/moderator.html')
class FeaturedActivitiesHandler(base.BaseHandler):
"""The moderator page handler for featured activities."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_json({
'featured_activity_references': [
activity_reference.to_dict() for activity_reference in
activity_services.get_featured_activity_references()
],
})
@acl_decorators.can_access_moderator_page
def post(self):
"""Handles POST requests."""
featured_activity_reference_dicts = self.payload.get(
'featured_activity_reference_dicts')
featured_activity_references = [
activity_domain.ActivityReference(
reference_dict['type'], reference_dict['id'])
for reference_dict in featured_activity_reference_dicts]
try:
summary_services.require_activities_to_be_public(
featured_activity_references)
except Exception as e:
raise self.InvalidInputException(e)
activity_services.update_featured_activity_references(
featured_activity_references)
self.render_json({})
class EmailDraftHandler(base.BaseHandler):
"""Provide default email templates for moderator emails."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_send_moderator_emails
def get(self, action):
"""Handles GET requests."""
self.render_json({
'draft_email_body': (
email_manager.get_draft_moderator_action_email(action)),
})
| himanshu-dixit/oppia | core/controllers/moderator.py | Python | apache-2.0 | 2,847 |
from zerver.lib.test_classes import WebhookTestCase
class PagerDutyHookTests(WebhookTestCase):
STREAM_NAME = 'pagerduty'
URL_TEMPLATE = "/api/v1/external/pagerduty?api_key={api_key}&stream={stream}"
FIXTURE_DIR_NAME = 'pagerduty'
def test_trigger(self) -> None:
expected_message = 'Incident [3](https://zulip-test.pagerduty.com/incidents/P140S4Y) triggered by [Test service](https://zulip-test.pagerduty.com/services/PIL5CUQ) (assigned to [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ)):\n\n``` quote\nfoo\n```'
self.send_and_test_stream_message('trigger', "Incident 3", expected_message)
def test_trigger_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) triggered by [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75) (assigned to [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV)):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('trigger_v2', 'Incident 33', expected_message)
def test_trigger_without_assignee_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) triggered by [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75) (assigned to nobody):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('trigger_without_assignee_v2', 'Incident 33', expected_message)
def test_unacknowledge(self) -> None:
expected_message = 'Incident [3](https://zulip-test.pagerduty.com/incidents/P140S4Y) unacknowledged by [Test service](https://zulip-test.pagerduty.com/services/PIL5CUQ) (assigned to [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ)):\n\n``` quote\nfoo\n```'
self.send_and_test_stream_message('unacknowledge', "Incident 3", expected_message)
def test_resolved(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) resolved by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\nIt is on fire\n```'
self.send_and_test_stream_message('resolved', "Incident 1", expected_message)
def test_resolved_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) resolved by [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('resolve_v2', 'Incident 33', expected_message)
def test_auto_resolved(self) -> None:
expected_message = 'Incident [2](https://zulip-test.pagerduty.com/incidents/PX7K9J2) resolved:\n\n``` quote\nnew\n```'
self.send_and_test_stream_message('auto_resolved', "Incident 2", expected_message)
def test_acknowledge(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) acknowledged by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\nIt is on fire\n```'
self.send_and_test_stream_message('acknowledge', "Incident 1", expected_message)
def test_acknowledge_without_trigger_summary_data(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) acknowledged by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\n\n```'
self.send_and_test_stream_message('acknowledge_without_trigger_summary_data',
"Incident 1", expected_message)
def test_acknowledge_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) acknowledged by [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('acknowledge_v2', 'Incident 33', expected_message)
def test_incident_assigned_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) assigned to [Wiley Jacobson](https://webdemo.pagerduty.com/users/PFBSJ2Z):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('assign_v2', 'Incident 33', expected_message)
def test_no_subject(self) -> None:
expected_message = 'Incident [48219](https://dropbox.pagerduty.com/incidents/PJKGZF9) resolved:\n\n``` quote\nmp_error_block_down_critical\u2119\u01b4\n```'
self.send_and_test_stream_message('mp_fail', "Incident 48219", expected_message)
| timabbott/zulip | zerver/webhooks/pagerduty/tests.py | Python | apache-2.0 | 4,489 |
# Copyright 2015 Hewlett-Packard Development Company, L.P.
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests.api import base
from neutron.tests.tempest import config
from neutron.tests.tempest import test
from tempest_lib.common.utils import data_utils
CONF = config.CONF
class SharedNetworksTest(base.BaseAdminNetworkTest):
@classmethod
def resource_setup(cls):
super(SharedNetworksTest, cls).resource_setup()
cls.shared_network = cls.create_shared_network()
@test.idempotent_id('6661d219-b96d-4597-ad10-55766ce4abf7')
def test_create_update_shared_network(self):
shared_network = self.create_shared_network()
net_id = shared_network['id']
self.assertEqual('ACTIVE', shared_network['status'])
self.assertIsNotNone(shared_network['id'])
self.assertTrue(self.shared_network['shared'])
new_name = "New_shared_network"
body = self.admin_client.update_network(net_id, name=new_name,
admin_state_up=False,
shared=False)
updated_net = body['network']
self.assertEqual(new_name, updated_net['name'])
self.assertFalse(updated_net['shared'])
self.assertFalse(updated_net['admin_state_up'])
@test.idempotent_id('9c31fabb-0181-464f-9ace-95144fe9ca77')
def test_create_port_shared_network_as_non_admin_tenant(self):
# create a port as non admin
body = self.client.create_port(network_id=self.shared_network['id'])
port = body['port']
self.addCleanup(self.admin_client.delete_port, port['id'])
# verify the tenant id of admin network and non admin port
self.assertNotEqual(self.shared_network['tenant_id'],
port['tenant_id'])
@test.idempotent_id('3e39c4a6-9caf-4710-88f1-d20073c6dd76')
def test_create_bulk_shared_network(self):
# Creates 2 networks in one request
net_nm = [data_utils.rand_name('network'),
data_utils.rand_name('network')]
body = self.admin_client.create_bulk_network(net_nm, shared=True)
created_networks = body['networks']
for net in created_networks:
self.addCleanup(self.admin_client.delete_network, net['id'])
self.assertIsNotNone(net['id'])
self.assertTrue(net['shared'])
def _list_shared_networks(self, user):
body = user.list_networks(shared=True)
networks_list = [net['id'] for net in body['networks']]
self.assertIn(self.shared_network['id'], networks_list)
self.assertTrue(self.shared_network['shared'])
@test.idempotent_id('a064a9fd-e02f-474a-8159-f828cd636a28')
def test_list_shared_networks(self):
# List the shared networks and confirm that
# shared network extension attribute is returned for those networks
# that are created as shared
self._list_shared_networks(self.admin_client)
self._list_shared_networks(self.client)
def _show_shared_network(self, user):
body = user.show_network(self.shared_network['id'])
show_shared_net = body['network']
self.assertEqual(self.shared_network['name'], show_shared_net['name'])
self.assertEqual(self.shared_network['id'], show_shared_net['id'])
self.assertTrue(show_shared_net['shared'])
@test.idempotent_id('e03c92a2-638d-4bfa-b50a-b1f66f087e58')
def test_show_shared_networks_attribute(self):
# Show a shared network and confirm that
# shared network extension attribute is returned.
self._show_shared_network(self.admin_client)
self._show_shared_network(self.client)
| pnavarro/neutron | neutron/tests/api/admin/test_shared_network_extension.py | Python | apache-2.0 | 4,322 |
import asyncio
from unittest import mock
import pytest
from waterbutler.core import utils
class TestAsyncRetry:
@pytest.mark.asyncio
async def test_returns_success(self):
mock_func = mock.Mock(return_value='Foo')
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 1
@pytest.mark.asyncio
async def test_retries_until(self):
mock_func = mock.Mock(side_effect=[Exception(), 'Foo'])
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 2
@pytest.mark.asyncio
async def test_retries_then_raises(self):
mock_func = mock.Mock(side_effect=Exception('Foo'))
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
with pytest.raises(Exception) as e:
coro = await retryable()
assert e.type == Exception
assert e.value.args == ('Foo',)
assert mock_func.call_count == 6
@pytest.mark.asyncio
async def test_retries_by_its_self(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 9
    @pytest.mark.asyncio
    async def test_docstring_survives(self):
async def mytest():
'''This is a docstring'''
pass
retryable = utils.async_retry(8, 0, raven=None)(mytest)
assert retryable.__doc__ == '''This is a docstring'''
@pytest.mark.asyncio
async def test_kwargs_work(self):
async def mytest(mack, *args, **kwargs):
mack()
assert args == ('test', 'Foo')
assert kwargs == {'test': 'Foo', 'baz': 'bam'}
return True
retryable = utils.async_retry(8, 0, raven=None)(mytest)
merk = mock.Mock(side_effect=[Exception(''), 5])
fut = retryable(merk, 'test', 'Foo', test='Foo', baz='bam')
assert await fut
assert merk.call_count == 2
@pytest.mark.asyncio
async def test_all_retry(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 18
| TomBaxter/waterbutler | tests/core/test_utils.py | Python | apache-2.0 | 2,451 |
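# A minimal usage sketch inferred from the tests above. The meaning of the
# positional arguments (retry count and delay) and the decorator-style usage
# are taken from how the tests call async_retry; treat the details as
# assumptions rather than documentation.
import asyncio
from waterbutler.core import utils
@utils.async_retry(5, 0, raven=None)
async def flaky():
    # Real work goes here; raised exceptions trigger up to 5 retries with no delay.
    return 'done'
async def main():
    result = await flaky()
    print(result)  # -> 'done'
asyncio.get_event_loop().run_until_complete(main())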
# -*- coding: utf-8 -*-
from __future__ import with_statement
import datetime
from cms.api import create_page, publish_page, add_plugin
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from cms.models import Page, Placeholder
from cms.models.pluginmodel import CMSPlugin, PluginModelBase
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.plugins.utils import get_plugins_for_page
from cms.plugins.file.models import File
from cms.plugins.inherit.models import InheritPagePlaceholder
from cms.plugins.link.forms import LinkForm
from cms.plugins.link.models import Link
from cms.plugins.picture.models import Picture
from cms.plugins.text.models import Text
from cms.plugins.text.utils import (plugin_tags_to_id_list, plugin_tags_to_admin_html)
from cms.plugins.twitter.models import TwitterRecentEntries
from cms.test_utils.project.pluginapp.models import Article, Section
from cms.test_utils.project.pluginapp.plugins.manytomany_rel.models import (
ArticlePluginModel)
from cms.test_utils.testcases import CMSTestCase, URL_CMS_PAGE, URL_CMS_PLUGIN_MOVE, \
URL_CMS_PAGE_ADD, URL_CMS_PLUGIN_ADD, URL_CMS_PLUGIN_EDIT, URL_CMS_PAGE_CHANGE, URL_CMS_PLUGIN_REMOVE, \
URL_CMS_PLUGIN_HISTORY_EDIT
from cms.sitemaps.cms_sitemap import CMSSitemap
from cms.test_utils.util.context_managers import SettingsOverride
from cms.utils.copy_plugins import copy_plugins_to
from django.utils import timezone
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management import call_command
from django.forms.widgets import Media
from django.test.testcases import TestCase
import os
class DumbFixturePlugin(CMSPluginBase):
model = CMSPlugin
name = "Dumb Test Plugin. It does nothing."
render_template = ""
admin_preview = False
allow_children = True
def render(self, context, instance, placeholder):
return context
class PluginsTestBaseCase(CMSTestCase):
def setUp(self):
self.super_user = User(username="test", is_staff=True, is_active=True, is_superuser=True)
self.super_user.set_password("test")
self.super_user.save()
self.slave = User(username="slave", is_staff=True, is_active=True, is_superuser=False)
self.slave.set_password("slave")
self.slave.save()
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
def tearDown(self):
self._login_context.__exit__(None, None, None)
def approve_page(self, page):
response = self.client.get(URL_CMS_PAGE + "%d/approve/" % page.pk)
self.assertRedirects(response, URL_CMS_PAGE)
# reload page
return self.reload_page(page)
def get_request(self, *args, **kwargs):
request = super(PluginsTestBaseCase, self).get_request(*args, **kwargs)
request.placeholder_media = Media()
return request
class PluginsTestCase(PluginsTestBaseCase):
def _create_text_plugin_on_page(self, page):
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
created_plugin_id = int(response.content)
self.assertEquals(created_plugin_id, CMSPlugin.objects.all()[0].pk)
return created_plugin_id
def _edit_text_plugin(self, plugin_id, text):
edit_url = "%s%s/" % (URL_CMS_PLUGIN_EDIT, plugin_id)
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": text
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.get(pk=plugin_id)
return txt
def test_add_edit_plugin(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
created_plugin_id = self._create_text_plugin_on_page(page)
# now edit the plugin
txt = self._edit_text_plugin(created_plugin_id, "Hello World")
self.assertEquals("Hello World", txt.body)
# edit body, but click cancel button
data = {
"body": "Hello World!!",
"_cancel": True,
}
edit_url = '%s%d/' % (URL_CMS_PLUGIN_EDIT, created_plugin_id)
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals("Hello World", txt.body)
def test_plugin_history_view(self):
"""
Test plugin history view
"""
import reversion
page_data = self.get_new_page_data()
# two versions created by simply creating the page
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
page_id = int(page.pk)
# page version 3
created_plugin_id = self._create_text_plugin_on_page(page)
# page version 4
txt = self._edit_text_plugin(created_plugin_id, "Hello Foo")
self.assertEquals("Hello Foo", txt.body)
# page version 5
txt = self._edit_text_plugin(created_plugin_id, "Hello Bar")
self.assertEquals("Hello Bar", txt.body)
versions = [v.pk for v in reversed(reversion.get_for_object(page))]
history_url = '%s%d/' % (
URL_CMS_PLUGIN_HISTORY_EDIT % (page_id, versions[-2]),
created_plugin_id)
response = self.client.get(history_url)
self.assertEquals(response.status_code, 200)
self.assertIn('Hello Foo', response.content)
def test_plugin_order(self):
"""
Test that plugin position is saved after creation
"""
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
# We check created objects and objects from the DB to be sure the position value
# has been saved correctly
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
db_plugin_1 = CMSPlugin.objects.get(pk=text_plugin_1.pk)
db_plugin_2 = CMSPlugin.objects.get(pk=text_plugin_2.pk)
with SettingsOverride(CMS_PERMISSION=False):
self.assertEqual(text_plugin_1.position, 1)
self.assertEqual(db_plugin_1.position, 1)
self.assertEqual(text_plugin_2.position, 2)
self.assertEqual(db_plugin_2.position, 2)
## Finally we render the placeholder to test the actual content
rendered_placeholder = ph_en.render(self.get_context(page_en.get_absolute_url()), None)
self.assertEquals(rendered_placeholder, "I'm the firstI'm the second")
def test_add_cancel_plugin(self):
"""
Test that you can cancel a new plugin before editing and
that the plugin is removed.
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now click cancel instead of editing
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": "Hello World",
"_cancel": True,
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(0, Text.objects.count())
def test_add_text_plugin_empty_tag(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": '<div class="someclass"></div><p>foo</p>'
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals('<div class="someclass"></div><p>foo</p>', txt.body)
def test_add_text_plugin_html_sanitizer(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": '<script>var bar="hacked"</script>'
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals('<script>var bar="hacked"</script>', txt.body)
def test_copy_plugins(self):
"""
Test that copying plugins works as expected.
"""
# create some objects
page_en = create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
page_de = create_page("CopyPluginTestPage (DE)", "nav_playground.html", "de")
ph_en = page_en.placeholders.get(slot="body")
ph_de = page_de.placeholders.get(slot="body")
# add the text plugin
text_plugin_en = add_plugin(ph_en, "TextPlugin", "en", body="Hello World")
self.assertEquals(text_plugin_en.pk, CMSPlugin.objects.all()[0].pk)
# add a *nested* link plugin
link_plugin_en = add_plugin(ph_en, "LinkPlugin", "en", target=text_plugin_en,
name="A Link", url="https://www.django-cms.org")
# adding the child link plugin above means the text plugin must be reloaded here.
text_plugin_en = self.reload(text_plugin_en)
# check the relations
self.assertEquals(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
# just sanity check that so far everything went well
self.assertEqual(CMSPlugin.objects.count(), 2)
# copy the plugins to the german placeholder
copy_plugins_to(ph_en.get_plugins(), ph_de, 'de')
self.assertEqual(ph_de.cmsplugin_set.filter(parent=None).count(), 1)
text_plugin_de = ph_de.cmsplugin_set.get(parent=None).get_plugin_instance()[0]
self.assertEqual(text_plugin_de.get_children().count(), 1)
link_plugin_de = text_plugin_de.get_children().get().get_plugin_instance()[0]
# check we have twice as many plugins as before
self.assertEqual(CMSPlugin.objects.count(), 4)
# check language plugins
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), 2)
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), 2)
text_plugin_en = self.reload(text_plugin_en)
link_plugin_en = self.reload(link_plugin_en)
# check the relations in english didn't change
self.assertEquals(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
self.assertEqual(link_plugin_de.name, link_plugin_en.name)
self.assertEqual(link_plugin_de.url, link_plugin_en.url)
self.assertEqual(text_plugin_de.body, text_plugin_en.body)
def test_remove_plugin_before_published(self):
"""
When removing a draft plugin we would expect the public copy of the plugin to also be removed
"""
# add a page
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
# delete the plugin
plugin_data = {
'plugin_id': int(response.content)
}
remove_url = URL_CMS_PLUGIN_REMOVE
response = self.client.post(remove_url, plugin_data)
self.assertEquals(response.status_code, 200)
# there should be no plugins
self.assertEquals(0, CMSPlugin.objects.all().count())
def test_remove_plugin_after_published(self):
# add a page
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
plugin_id = int(response.content)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 1)
# publish page
response = self.client.post(URL_CMS_PAGE + "%d/change-status/" % page.pk, {1: 1})
self.assertEqual(response.status_code, 200)
self.assertEquals(Page.objects.count(), 2)
# there should now be two plugins - 1 draft, 1 public
self.assertEquals(CMSPlugin.objects.all().count(), 2)
# delete the plugin
plugin_data = {
'plugin_id': plugin_id
}
remove_url = URL_CMS_PLUGIN_REMOVE
response = self.client.post(remove_url, plugin_data)
self.assertEquals(response.status_code, 200)
# there should be no plugins
self.assertEquals(CMSPlugin.objects.all().count(), 1)
self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=False).count(), 1)
def test_remove_plugin_not_associated_to_page(self):
"""
Test case for PlaceholderField
"""
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
ph = Placeholder(slot="subplugin")
ph.save()
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': ph.pk,
'parent': int(response.content)
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
# no longer allowed for security reasons
self.assertEqual(response.status_code, 404)
def test_register_plugin_twice_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
# The first time we register the plugin, it should work
plugin_pool.register_plugin(DumbFixturePlugin)
# Let's add it a second time. We should catch an exception
raised = False
try:
plugin_pool.register_plugin(DumbFixturePlugin)
except PluginAlreadyRegistered:
raised = True
self.assertTrue(raised)
# Let's also unregister the plugin now, and assert it's not in the
# pool anymore
plugin_pool.unregister_plugin(DumbFixturePlugin)
# Let's make sure we have the same number of plugins as before:
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_unregister_non_existing_plugin_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
raised = False
try:
# There should not be such a plugin registered if the other tests
# don't leak plugins
plugin_pool.unregister_plugin(DumbFixturePlugin)
except PluginNotRegistered:
raised = True
self.assertTrue(raised)
# Let's count, to make sure we didn't remove a plugin accidentally.
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_inheritplugin_media(self):
"""
Test case for InheritPagePlaceholder
"""
inheritfrompage = create_page('page to inherit from',
'nav_playground.html',
'en')
body = inheritfrompage.placeholders.get(slot="body")
plugin = TwitterRecentEntries(
plugin_type='TwitterRecentEntriesPlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
twitter_user='djangocms',
)
plugin.insert_at(None, position='last-child', save=True)
inheritfrompage.publish()
page = create_page('inherit from page',
'nav_playground.html',
'en',
published=True)
inherited_body = page.placeholders.get(slot="body")
inherit_plugin = InheritPagePlaceholder(
plugin_type='InheritPagePlaceholderPlugin',
placeholder=inherited_body,
position=1,
language=settings.LANGUAGE_CODE,
from_page=inheritfrompage,
from_language=settings.LANGUAGE_CODE)
inherit_plugin.insert_at(None, position='last-child', save=True)
page.publish()
self.client.logout()
response = self.client.get(page.get_absolute_url())
self.assertTrue('%scms/js/libs/jquery.tweet.js' % settings.STATIC_URL in response.content, response.content)
def test_inherit_plugin_with_empty_plugin(self):
inheritfrompage = create_page('page to inherit from',
'nav_playground.html',
'en', published=True)
body = inheritfrompage.placeholders.get(slot="body")
empty_plugin = CMSPlugin(
plugin_type='TextPlugin', # create an empty plugin
placeholder=body,
position=1,
language='en',
)
empty_plugin.insert_at(None, position='last-child', save=True)
other_page = create_page('other page', 'nav_playground.html', 'en', published=True)
inherited_body = other_page.placeholders.get(slot="body")
inherit_plugin = InheritPagePlaceholder(
plugin_type='InheritPagePlaceholderPlugin',
placeholder=inherited_body,
position=1,
language='en',
from_page=inheritfrompage,
from_language='en'
)
inherit_plugin.insert_at(None, position='last-child', save=True)
add_plugin(inherited_body, "TextPlugin", "en", body="foobar")
# this should not fail, even if there is an empty plugin
rendered = inherited_body.render(context=self.get_context(other_page.get_absolute_url()), width=200)
self.assertIn("foobar", rendered)
def test_render_textplugin(self):
# Setup
page = create_page("render test", "nav_playground.html", "en")
ph = page.placeholders.get(slot="body")
text_plugin = add_plugin(ph, "TextPlugin", "en", body="Hello World")
link_plugins = []
for i in range(0, 10):
link_plugins.append(add_plugin(ph, "LinkPlugin", "en",
target=text_plugin,
name="A Link %d" % i,
url="http://django-cms.org"))
text_plugin.text.body += '<img src="/static/cms/images/plugins/link.png" alt="Link - %s" id="plugin_obj_%d" title="Link - %s" />' % (
link_plugins[-1].name,
link_plugins[-1].pk,
link_plugins[-1].name,
)
text_plugin.save()
txt = text_plugin.text
ph = Placeholder.objects.get(pk=ph.pk)
with self.assertNumQueries(2):
# 1 query for the CMSPlugin objects,
# 1 query for each type of child object (1 in this case, all are Link plugins)
txt.body = plugin_tags_to_admin_html(
'\n'.join(["{{ plugin_object %d }}" % l.cmsplugin_ptr_id
for l in link_plugins]))
txt.save()
text_plugin = self.reload(text_plugin)
with self.assertNumQueries(2):
rendered = text_plugin.render_plugin(placeholder=ph)
for i in range(0, 10):
self.assertTrue('A Link %d' % i in rendered)
def test_copy_textplugin(self):
"""
Test that copying of textplugins replaces references to copied plugins
"""
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin_base.insert_at(None, position='last-child', save=False)
plugin = Text(body='')
plugin_base.set_base_attr(plugin)
plugin.save()
plugin_ref_1_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin_ref_1_base.insert_at(plugin_base, position='last-child', save=False)
plugin_ref_1 = Text(body='')
plugin_ref_1_base.set_base_attr(plugin_ref_1)
plugin_ref_1.save()
plugin_ref_2_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=2,
language=self.FIRST_LANG)
plugin_ref_2_base.insert_at(plugin_base, position='last-child', save=False)
plugin_ref_2 = Text(body='')
plugin_ref_2_base.set_base_attr(plugin_ref_2)
plugin_ref_2.save()
plugin.body = plugin_tags_to_admin_html(
' {{ plugin_object %s }} {{ plugin_object %s }} ' % (str(plugin_ref_1.pk), str(plugin_ref_2.pk)))
plugin.save()
page_data = self.get_new_page_data()
# create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEquals(CMSPlugin.objects.count(), 3)
self.assertEquals(Page.objects.all().count(), 1)
copy_data = {
'placeholder': placeholder.pk,
'language': self.SECOND_LANG,
'copy_from': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEquals(response.status_code, 200)
self.assertEqual(response.content.count('<li '), 3)
# assert copy success
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.count(), 6)
plugins = list(Text.objects.all())
new_plugin = plugins[-1]
idlist = sorted(plugin_tags_to_id_list(new_plugin.body))
expected = sorted([plugins[3].pk, plugins[4].pk])
self.assertEquals(idlist, expected)
def test_empty_plugin_is_ignored(self):
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.insert_at(None, position='last-child', save=True)
# this should not raise any errors, but just ignore the empty plugin
out = placeholder.render(self.get_context(), width=300)
self.assertFalse(len(out))
self.assertFalse(len(placeholder._en_plugins_cache))
def test_editing_plugin_changes_page_modification_time_in_sitemap(self):
now = timezone.now()
one_day_ago = now - datetime.timedelta(days=1)
page = create_page("page", "nav_playground.html", "en", published=True, publication_date=now)
page.creation_date = one_day_ago
page.changed_date = one_day_ago
plugin_id = self._create_text_plugin_on_page(page)
plugin = self._edit_text_plugin(plugin_id, "fnord")
actual_last_modification_time = CMSSitemap().lastmod(page)
self.assertEqual(plugin.changed_date - datetime.timedelta(microseconds=plugin.changed_date.microsecond),
actual_last_modification_time - datetime.timedelta(
microseconds=actual_last_modification_time.microsecond))
def test_moving_plugin_to_different_placeholder(self):
plugin_pool.register_plugin(DumbFixturePlugin)
page = create_page("page", "nav_playground.html", "en", published=True)
plugin_data = {
'plugin_type': 'DumbFixturePlugin',
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot='body').pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD % page.pk, plugin_data)
self.assertEquals(response.status_code, 200)
plugin_data['parent_id'] = int(response.content)
del plugin_data['placeholder']
response = self.client.post(URL_CMS_PLUGIN_ADD % page.pk, plugin_data)
self.assertEquals(response.status_code, 200)
post = {
'plugin_id': int(response.content),
'placeholder': 'right-column',
}
response = self.client.post(URL_CMS_PLUGIN_MOVE % page.pk, post)
self.assertEquals(response.status_code, 200)
from cms.plugins.utils import build_plugin_tree
build_plugin_tree(page.placeholders.get(slot='right-column').get_plugins_list())
plugin_pool.unregister_plugin(DumbFixturePlugin)
def test_get_plugins_for_page(self):
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm inside an existing placeholder.")
# This placeholder is not in the template.
ph_en_not_used = page_en.placeholders.create(slot="not_used")
text_plugin_2 = add_plugin(ph_en_not_used, "TextPlugin", "en", body="I'm inside a non-existent placeholder.")
page_plugins = get_plugins_for_page(None, page_en, page_en.get_title_obj_attribute('language'))
db_text_plugin_1 = page_plugins.get(pk=text_plugin_1.pk)
self.assertRaises(CMSPlugin.DoesNotExist, page_plugins.get, pk=text_plugin_2.pk)
self.assertEquals(db_text_plugin_1.pk, text_plugin_1.pk)
def test_is_last_in_placeholder(self):
"""
Tests that children plugins don't affect the is_last_in_placeholder plugin method.
"""
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
inner_text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first child of text_plugin_1")
text_plugin_1.cmsplugin_set.add(inner_text_plugin_1)
self.assertEquals(text_plugin_2.is_last_in_placeholder(), True)
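# These tests run collectstatic in setUp, so tearDown walks and empties
# STATIC_ROOT and MEDIA_ROOT before calling the parent teardown.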
class FileSystemPluginTests(PluginsTestBaseCase):
def setUp(self):
super(FileSystemPluginTests, self).setUp()
call_command('collectstatic', interactive=False, verbosity=0, link=True)
def tearDown(self):
for directory in [settings.STATIC_ROOT, settings.MEDIA_ROOT]:
for root, dirs, files in os.walk(directory, topdown=False):
# We need to walk() the directory tree since rmdir() does not allow
# removing non-empty directories...
for name in files:
# Start by killing all files we walked
os.remove(os.path.join(root, name))
for name in dirs:
# Now all directories we walked...
os.rmdir(os.path.join(root, name))
super(FileSystemPluginTests, self).tearDown()
def test_fileplugin_icon_uppercase(self):
page = create_page('testpage', 'nav_playground.html', 'en')
body = page.placeholders.get(slot="body")
plugin = File(
plugin_type='FilePlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
)
plugin.file.save("UPPERCASE.JPG", SimpleUploadedFile("UPPERCASE.jpg", "content"), False)
plugin.insert_at(None, position='last-child', save=True)
self.assertNotEquals(plugin.get_icon_url().find('jpg'), -1)
class PluginManyToManyTestCase(PluginsTestBaseCase):
def setUp(self):
self.super_user = User(username="test", is_staff=True, is_active=True, is_superuser=True)
self.super_user.set_password("test")
self.super_user.save()
self.slave = User(username="slave", is_staff=True, is_active=True, is_superuser=False)
self.slave.set_password("slave")
self.slave.save()
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
# create 3 sections
self.sections = []
self.section_pks = []
for i in range(3):
section = Section.objects.create(name="section %s" % i)
self.sections.append(section)
self.section_pks.append(section.pk)
self.section_count = len(self.sections)
# create 10 articles by section
for section in self.sections:
for j in range(10):
Article.objects.create(
title="article %s" % j,
section=section
)
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
def test_add_plugin_with_m2m(self):
# add a new text plugin
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
placeholder = page.placeholders.get(slot="body")
plugin_data = {
'plugin_type': "ArticlePlugin",
'language': self.FIRST_LANG,
'placeholder': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
plugin = ArticlePluginModel.objects.all()[0]
self.assertEquals(self.section_count, plugin.sections.count())
def test_add_plugin_with_m2m_and_publisher(self):
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
self.assertEqual(response.status_code, 302)
page = Page.objects.all()[0]
placeholder = page.placeholders.get(slot="body")
# add a plugin
plugin_data = {
'plugin_type': "ArticlePlugin",
'language': self.FIRST_LANG,
'placeholder': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(1, CMSPlugin.objects.all().count())
articles_plugin_pk = int(response.content)
self.assertEquals(articles_plugin_pk, CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
data = {
'title': "Articles Plugin 1",
'sections': self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(1, ArticlePluginModel.objects.count())
articles_plugin = ArticlePluginModel.objects.all()[0]
self.assertEquals(u'Articles Plugin 1', articles_plugin.title)
self.assertEquals(self.section_count, articles_plugin.sections.count())
# check publish box
page = publish_page(page, self.super_user)
# there should now be two plugins - 1 draft, 1 public
self.assertEquals(2, CMSPlugin.objects.all().count())
self.assertEquals(2, ArticlePluginModel.objects.all().count())
db_counts = [plugin.sections.count() for plugin in ArticlePluginModel.objects.all()]
expected = [self.section_count for i in range(len(db_counts))]
self.assertEqual(expected, db_counts)
def test_copy_plugin_with_m2m(self):
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = ArticlePluginModel(
plugin_type='ArticlePlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.insert_at(None, position='last-child', save=True)
edit_url = URL_CMS_PLUGIN_EDIT + str(plugin.pk) + "/"
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
self.assertEqual(ArticlePluginModel.objects.all()[0].sections.count(), self.section_count)
page_data = self.get_new_page_data()
# create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEquals(CMSPlugin.objects.count(), 1)
self.assertEquals(Page.objects.all().count(), 1)
copy_data = {
'placeholder': placeholder.pk,
'language': self.SECOND_LANG,
'copy_from': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEquals(response.status_code, 200)
self.assertEqual(response.content.count('<li '), 1)
# assert copy success
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.count(), 2)
db_counts = [plugin.sections.count() for plugin in ArticlePluginModel.objects.all()]
expected = [self.section_count for i in range(len(db_counts))]
self.assertEqual(expected, db_counts)
class PluginsMetaOptionsTests(TestCase):
''' TestCase set for ensuring that bugs like #992 are caught '''
# these plugins are inlined because, due to the nature of the #992
# ticket, we cannot actually import a single file with all the
# plugin variants in, because that calls __new__, at which point the
# error with splitted occurs.
def test_meta_options_as_defaults(self):
''' handling when a CMSPlugin meta options are computed defaults '''
# this plugin relies on the base CMSPlugin and Model classes to
# decide what the app_label and db_table should be
class TestPlugin(CMSPlugin):
pass
plugin = TestPlugin()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_as_declared_defaults(self):
''' handling when a CMSPlugin meta options are declared as per defaults '''
# here, we declare the db_table and app_label explicitly, but to the same
# values as would be computed, thus making sure it's not a problem to
# supply options.
class TestPlugin2(CMSPlugin):
class Meta:
db_table = 'cmsplugin_testplugin2'
app_label = 'tests'
plugin = TestPlugin2()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin2')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_custom_app_label(self):
''' make sure customised meta options on CMSPlugins don't break things '''
class TestPlugin3(CMSPlugin):
class Meta:
app_label = 'one_thing'
plugin = TestPlugin3()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin3') # because it's inlined
self.assertEqual(plugin._meta.app_label, 'one_thing')
def test_meta_options_custom_db_table(self):
''' make sure custom database table names are OK. '''
class TestPlugin4(CMSPlugin):
class Meta:
db_table = 'or_another'
plugin = TestPlugin4()
self.assertEqual(plugin._meta.db_table, 'or_another')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_custom_both(self):
''' We should be able to customise app_label and db_table together '''
class TestPlugin5(CMSPlugin):
class Meta:
app_label = 'one_thing'
db_table = 'or_another'
plugin = TestPlugin5()
self.assertEqual(plugin._meta.db_table, 'or_another')
self.assertEqual(plugin._meta.app_label, 'one_thing')
class LinkPluginTestCase(PluginsTestBaseCase):
def test_does_not_verify_existance_of_url(self):
form = LinkForm(
{'name': 'Linkname', 'url': 'http://www.nonexistant.test'})
self.assertTrue(form.is_valid())
def test_opens_in_same_window_by_default(self):
"""Could not figure out how to render this plugin
Checking only for the values in the model"""
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test'})
link = form.save()
self.assertEquals(link.target, '')
def test_open_in_blank_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_blank'})
link = form.save()
self.assertEquals(link.target, '_blank')
def test_open_in_parent_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_parent'})
link = form.save()
self.assertEquals(link.target, '_parent')
def test_open_in_top_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_top'})
link = form.save()
self.assertEquals(link.target, '_top')
def test_open_in_nothing_else(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': 'artificial'})
self.assertFalse(form.is_valid())
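# Tests that only instantiate plugin classes and never touch the database.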
class NoDatabasePluginTests(TestCase):
def test_render_meta_is_unique(self):
text = Text()
link = Link()
self.assertNotEqual(id(text._render_meta), id(link._render_meta))
def test_render_meta_does_not_leak(self):
text = Text()
link = Link()
text._render_meta.text_enabled = False
link._render_meta.text_enabled = False
self.assertFalse(text._render_meta.text_enabled)
self.assertFalse(link._render_meta.text_enabled)
link._render_meta.text_enabled = True
self.assertFalse(text._render_meta.text_enabled)
self.assertTrue(link._render_meta.text_enabled)
def test_db_table_hack(self):
# TODO: Django tests seem to leak models from test methods, somehow
# we should clear django.db.models.loading.app_cache in tearDown.
plugin_class = PluginModelBase('TestPlugin', (CMSPlugin,), {'__module__': 'cms.tests.plugins'})
self.assertEqual(plugin_class._meta.db_table, 'cmsplugin_testplugin')
def test_db_table_hack_with_mixin(self):
class LeftMixin: pass
class RightMixin: pass
plugin_class = PluginModelBase('TestPlugin2', (LeftMixin, CMSPlugin, RightMixin),
{'__module__': 'cms.tests.plugins'})
self.assertEqual(plugin_class._meta.db_table, 'cmsplugin_testplugin2')
class PicturePluginTests(PluginsTestBaseCase):
def test_link_or_page(self):
"""Test a validator: you can enter a url or a page_link, but not both."""
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
picture = Picture(url="test")
# Note: don't call full_clean as it will check ALL fields - including
# the image, which we haven't defined. Call clean() instead which
# just validates the url and page_link fields.
picture.clean()
picture.page_link = page
picture.url = None
picture.clean()
picture.url = "test"
self.assertRaises(ValidationError, picture.clean)
class SimplePluginTests(TestCase):
def test_simple_naming(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
self.assertEqual(MyPlugin.name, 'My Plugin')
def test_simple_context(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
plugin = MyPlugin(ArticlePluginModel, admin.site)
context = {}
out_context = plugin.render(context, 1, 2)
self.assertEqual(out_context['instance'], 1)
self.assertEqual(out_context['placeholder'], 2)
self.assertIs(out_context, context)
| mpetyx/palmdrop | venv/lib/python2.7/site-packages/cms/tests/plugins.py | Python | apache-2.0 | 46,414 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import atexit
import json
import logging
import os
import subprocess
import time
from nose.tools import assert_true, assert_false
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.paths import get_run_root
from desktop.lib.python_util import find_unused_port
from desktop.lib.security_util import get_localhost_name
from desktop.lib.test_utils import add_to_group, grant_access
from hadoop import pseudo_hdfs4
from hadoop.pseudo_hdfs4 import is_live_cluster, get_db_prefix
import beeswax.conf
from beeswax.server.dbms import get_query_server_config
from beeswax.server import dbms
HIVE_SERVER_TEST_PORT = find_unused_port()
_INITIALIZED = False
_SHARED_HIVE_SERVER_PROCESS = None
_SHARED_HIVE_SERVER = None
_SHARED_HIVE_SERVER_CLOSER = None
LOG = logging.getLogger(__name__)
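# Spawns the HiveServer2 binary as a subprocess, wiring the HADOOP_*/HIVE_*
# environment variables and the Hadoop classpath from the test cluster.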
def _start_server(cluster):
args = [beeswax.conf.HIVE_SERVER_BIN.get()]
env = cluster._mr2_env.copy()
hadoop_cp_proc = subprocess.Popen(args=[get_run_root('ext/hadoop/hadoop') + '/bin/hadoop', 'classpath'], env=env, cwd=cluster._tmpdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
hadoop_cp_proc.wait()
hadoop_cp = hadoop_cp_proc.stdout.read().strip()
env.update({
'HADOOP_HOME': get_run_root('ext/hadoop/hadoop'), # Used only by Hive for some reason
'HIVE_CONF_DIR': beeswax.conf.HIVE_CONF_DIR.get(),
'HIVE_SERVER2_THRIFT_PORT': str(HIVE_SERVER_TEST_PORT),
'HADOOP_MAPRED_HOME': get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce',
# Links created in the Jenkins script.
# If classes are missing when booting HS2, check here.
'AUX_CLASSPATH':
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/hdfs/hadoop-hdfs.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/lib/hadoop-auth.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/hadoop-common.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar'
,
'HADOOP_CLASSPATH': hadoop_cp,
})
if os.getenv("JAVA_HOME"):
env["JAVA_HOME"] = os.getenv("JAVA_HOME")
LOG.info("Executing %s, env %s, cwd %s" % (repr(args), repr(env), cluster._tmpdir))
return subprocess.Popen(args=args, env=env, cwd=cluster._tmpdir, stdin=subprocess.PIPE)
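# Returns the shared cluster and a closer callable; on first use it starts a mini
# HiveServer2 (skipped when testing against a live cluster) and polls open_session()
# for up to 30 seconds before giving up.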
def get_shared_beeswax_server(db_name='default'):
global _SHARED_HIVE_SERVER
global _SHARED_HIVE_SERVER_CLOSER
if _SHARED_HIVE_SERVER is None:
cluster = pseudo_hdfs4.shared_cluster()
if is_live_cluster():
def s():
pass
else:
s = _start_mini_hs2(cluster)
start = time.time()
started = False
sleep = 1
make_logged_in_client()
user = User.objects.get(username='test')
query_server = get_query_server_config()
db = dbms.get(user, query_server)
while not started and time.time() - start <= 30:
try:
db.open_session(user)
started = True
break
except Exception, e:
LOG.info('HiveServer2 server could not be found after: %s' % e)
time.sleep(sleep)
if not started:
raise Exception("Server took too long to come up.")
_SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER = cluster, s
return _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER
def _start_mini_hs2(cluster):
HIVE_CONF = cluster.hadoop_conf_dir
finish = (
beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF)
)
default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>hive.server2.enable.impersonation</name>
<value>false</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>%(querylog)s</value>
</property>
</configuration>
""" % {'root': cluster._tmpdir, 'querylog': cluster.log_dir + '/hive'}
file(HIVE_CONF + '/hive-site.xml', 'w').write(default_xml)
global _SHARED_HIVE_SERVER_PROCESS
if _SHARED_HIVE_SERVER_PROCESS is None:
p = _start_server(cluster)
LOG.info("started")
cluster.fs.do_as_superuser(cluster.fs.chmod, '/tmp', 01777)
_SHARED_HIVE_SERVER_PROCESS = p
def kill():
LOG.info("Killing server (pid %d)." % p.pid)
os.kill(p.pid, 9)
p.wait()
atexit.register(kill)
def s():
for f in finish:
f()
cluster.stop()
return s
def wait_for_query_to_finish(client, response, max=60.0):
# Takes an async API execute_query() response as input
start = time.time()
sleep_time = 0.05
if is_finished(response): # aka Has error at submission
return response
content = json.loads(response.content)
watch_url = content['watch_url']
response = client.get(watch_url, follow=True)
# Loop and check status
while not is_finished(response):
time.sleep(sleep_time)
sleep_time = min(1.0, sleep_time * 2) # Capped exponential
if (time.time() - start) > max:
message = "Query took too long! %d seconds" % (time.time() - start)
LOG.warning(message)
raise Exception(message)
response = client.get(watch_url, follow=True)
return response
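# A response counts as finished once its JSON payload contains an error or an
# explicit isSuccess/isFailure flag.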
def is_finished(response):
status = json.loads(response.content)
return 'error' in status or status.get('isSuccess') or status.get('isFailure')
def fetch_query_result_data(client, status_response, n=0, server_name='beeswax'):
# Takes a wait_for_query_to_finish() response as input
status = json.loads(status_response.content)
response = client.get("/%(server_name)s/results/%(id)s/%(n)s?format=json" % {'server_name': server_name, 'id': status.get('id'), 'n': n})
content = json.loads(response.content)
return content
def make_query(client, query, submission_type="Execute",
udfs=None, settings=None, resources=None,
wait=False, name=None, desc=None, local=True,
is_parameterized=True, max=60.0, database='default', email_notify=False, params=None, server_name='beeswax', **kwargs):
"""
Prepares arguments for the execute view.
If wait is True, waits for query to finish as well.
"""
if settings is None:
settings = []
if params is None:
params = []
if local:
# Tests run faster if not run against the real cluster.
settings.append(('mapreduce.framework.name', 'local'))
# Prepares arguments for the execute view.
parameters = {
'query-query': query,
'query-name': name if name else '',
'query-desc': desc if desc else '',
'query-is_parameterized': is_parameterized and "on",
'query-database': database,
'query-email_notify': email_notify and "on",
}
if submission_type == 'Execute':
parameters['button-submit'] = 'Whatever'
elif submission_type == 'Explain':
parameters['button-explain'] = 'Whatever'
elif submission_type == 'Save':
parameters['saveform-save'] = 'True'
if name:
parameters['saveform-name'] = name
if desc:
parameters['saveform-desc'] = desc
parameters["functions-next_form_id"] = str(len(udfs or []))
for i, udf_pair in enumerate(udfs or []):
name, klass = udf_pair
parameters["functions-%d-name" % i] = name
parameters["functions-%d-class_name" % i] = klass
parameters["functions-%d-_exists" % i] = 'True'
parameters["settings-next_form_id"] = str(len(settings))
for i, settings_pair in enumerate(settings or []):
key, value = settings_pair
parameters["settings-%d-key" % i] = str(key)
parameters["settings-%d-value" % i] = str(value)
parameters["settings-%d-_exists" % i] = 'True'
parameters["file_resources-next_form_id"] = str(len(resources or []))
for i, resources_pair in enumerate(resources or []):
type, path = resources_pair
parameters["file_resources-%d-type" % i] = str(type)
parameters["file_resources-%d-path" % i] = str(path)
parameters["file_resources-%d-_exists" % i] = 'True'
for name, value in params:
parameters["parameterization-%s" % name] = value
kwargs.setdefault('follow', True)
execute_url = reverse("%(server_name)s:api_execute" % {'server_name': server_name})
if submission_type == 'Explain':
execute_url += "?explain=true"
if submission_type == 'Save':
execute_url = reverse("%(server_name)s:api_save_design" % {'server_name': server_name})
response = client.post(execute_url, parameters, **kwargs)
if wait:
return wait_for_query_to_finish(client, response, max)
return response
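# Illustrative usage of the helpers above (sketch only; the query text is a
# placeholder, not part of the original module):
#   response = make_query(client, "SELECT * FROM test", wait=True)
#   content = fetch_query_result_data(client, response)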
def verify_history(client, fragment, design=None, reverse=False, server_name='beeswax'):
"""
Verify that the query fragment and/or design are in the query history.
If reverse is True, verify the opposite.
Return the size of the history; -1 if we fail to determine it.
"""
resp = client.get('/%(server_name)s/query_history' % {'server_name': server_name})
my_assert = reverse and assert_false or assert_true
my_assert(fragment in resp.content, resp.content)
if design:
my_assert(design in resp.content, resp.content)
if resp.context:
try:
return len(resp.context['page'].object_list)
except KeyError:
pass
LOG.warn('Cannot find history size. Response context clobbered')
return -1
class BeeswaxSampleProvider(object):
"""
Set up the test db and install sample data
"""
@classmethod
def setup_class(cls):
cls.db_name = get_db_prefix(name='hive')
cls.cluster, shutdown = get_shared_beeswax_server(cls.db_name)
cls.client = make_logged_in_client(username='test', is_superuser=False)
add_to_group('test')
grant_access("test", "test", "beeswax")
# Weird redirection to avoid binding nonsense.
cls.shutdown = [ shutdown ]
cls.init_beeswax_db()
@classmethod
def teardown_class(cls):
if is_live_cluster():
# Delete test DB and tables
client = make_logged_in_client()
user = User.objects.get(username='test')
query_server = get_query_server_config()
db = dbms.get(user, query_server)
for db_name in [cls.db_name, '%s_other' % cls.db_name]:
databases = db.get_databases()
if db_name in databases:
tables = db.get_tables(database=db_name)
for table in tables:
make_query(client, 'DROP TABLE IF EXISTS `%(db)s`.`%(table)s`' % {'db': db_name, 'table': table}, wait=True)
make_query(client, 'DROP VIEW IF EXISTS `%(db)s`.`myview`' % {'db': db_name}, wait=True)
make_query(client, 'DROP DATABASE IF EXISTS %(db)s' % {'db': db_name}, wait=True)
# Check the cleanup
databases = db.get_databases()
assert_false(db_name in databases)
@classmethod
def init_beeswax_db(cls):
"""
Install the common test tables (only once)
"""
global _INITIALIZED
if _INITIALIZED:
return
make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s' % {'db': cls.db_name}, wait=True)
make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s_other' % {'db': cls.db_name}, wait=True)
data_file = cls.cluster.fs_prefix + u'/beeswax/sample_data_échantillon_%d.tsv'
# Create a "test_partitions" table.
CREATE_PARTITIONED_TABLE = """
CREATE TABLE `%(db)s`.`test_partitions` (foo INT, bar STRING)
PARTITIONED BY (baz STRING, boom STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n'
""" % {'db': cls.db_name}
make_query(cls.client, CREATE_PARTITIONED_TABLE, wait=True)
cls._make_data_file(data_file % 1)
LOAD_DATA = """
LOAD DATA INPATH '%(data_file)s'
OVERWRITE INTO TABLE `%(db)s`.`test_partitions`
PARTITION (baz='baz_one', boom='boom_two')
""" % {'db': cls.db_name, 'data_file': data_file % 1}
make_query(cls.client, LOAD_DATA, wait=True, local=False)
# Insert additional partition data into "test_partitions" table
ADD_PARTITION = """
ALTER TABLE `%(db)s`.`test_partitions` ADD PARTITION(baz='baz_foo', boom='boom_bar') LOCATION '%(fs_prefix)s/baz_foo/boom_bar'
""" % {'db': cls.db_name, 'fs_prefix': cls.cluster.fs_prefix}
make_query(cls.client, ADD_PARTITION, wait=True, local=False)
# Create a bunch of other tables
CREATE_TABLE = """
CREATE TABLE `%(db)s`.`%(name)s` (foo INT, bar STRING)
COMMENT "%(comment)s"
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n'
"""
# Create a "test" table.
table_info = {'db': cls.db_name, 'name': 'test', 'comment': 'Test table'}
cls._make_data_file(data_file % 2)
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 2)
if is_live_cluster():
LOG.warn('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
else:
# Create a "test_utf8" table.
table_info = {'db': cls.db_name, 'name': 'test_utf8', 'comment': cls.get_i18n_table_comment()}
cls._make_i18n_data_file(data_file % 3, 'utf-8')
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 3)
# Create a "test_latin1" table.
table_info = {'db': cls.db_name, 'name': 'test_latin1', 'comment': cls.get_i18n_table_comment()}
cls._make_i18n_data_file(data_file % 4, 'latin1')
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 4)
# Create a "myview" view.
make_query(cls.client, "CREATE VIEW `%(db)s`.`myview` (foo, bar) as SELECT * FROM `%(db)s`.`test`" % {'db': cls.db_name}, wait=True)
_INITIALIZED = True
@staticmethod
def get_i18n_table_comment():
return u'en-hello pt-Olá ch-你好 ko-안녕 ru-Здравствуйте'
@classmethod
def _make_table(cls, table_name, create_ddl, filename):
make_query(cls.client, create_ddl, wait=True, database=cls.db_name)
LOAD_DATA = """
LOAD DATA INPATH '%(filename)s' OVERWRITE INTO TABLE `%(db)s`.`%(table_name)s`
""" % {'filename': filename, 'table_name': table_name, 'db': cls.db_name}
make_query(cls.client, LOAD_DATA, wait=True, local=False, database=cls.db_name)
@classmethod
def _make_data_file(cls, filename):
"""
Create data to be loaded into tables.
Data contains two columns of:
<num> 0x<hex_num>
where <num> goes from 0 to 255 inclusive.
"""
cls.cluster.fs.setuser(cls.cluster.superuser)
f = cls.cluster.fs.open(filename, "w")
for x in xrange(256):
f.write("%d\t0x%x\n" % (x, x))
f.close()
@classmethod
def _make_i18n_data_file(cls, filename, encoding):
"""
Create i18n data to be loaded into tables.
Data contains two columns of:
<num> <unichr(num)>
where <num> goes from 0 to 255 inclusive.
"""
cls.cluster.fs.setuser(cls.cluster.superuser)
f = cls.cluster.fs.open(filename, "w")
for x in xrange(256):
f.write("%d\t%s\n" % (x, unichr(x).encode(encoding)))
f.close()
@classmethod
def _make_custom_data_file(cls, filename, data):
f = cls.cluster.fs.open(filename, "w")
for x in data:
f.write("%s\n" % x)
f.close()
| rahul67/hue | apps/beeswax/src/beeswax/test_base.py | Python | apache-2.0 | 16,313 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-12-09 21:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('osf', '0025_preprintprovider_social_instagram'),
]
operations = [
migrations.AddField(
model_name='preprintservice',
name='license',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='osf.NodeLicenseRecord'),
),
]
| mluo613/osf.io | osf/migrations/0026_preprintservice_license.py | Python | apache-2.0 | 583 |
import numpy as np
import pytest
from numpy.testing import assert_allclose
try:
import scipy
except ImportError:
HAS_SCIPY = False
else:
HAS_SCIPY = True
import astropy.units as u
from astropy.timeseries.periodograms.lombscargle import LombScargle
from astropy.timeseries.periodograms.lombscargle._statistics import (fap_single, inv_fap_single,
METHODS)
from astropy.timeseries.periodograms.lombscargle.utils import convert_normalization, compute_chi2_ref
METHOD_KWDS = dict(bootstrap={'n_bootstraps': 20, 'random_seed': 42})
NORMALIZATIONS = ['standard', 'psd', 'log', 'model']
def make_data(N=100, period=1, theta=[10, 2, 3], dy=1, rseed=0, units=False):
"""Generate some data for testing"""
rng = np.random.RandomState(rseed)
t = 5 * period * rng.rand(N)
omega = 2 * np.pi / period
y = theta[0] + theta[1] * np.sin(omega * t) + theta[2] * np.cos(omega * t)
dy = dy * (0.5 + rng.rand(N))
y += dy * rng.randn(N)
fmax = 5
if units:
return t * u.day, y * u.mag, dy * u.mag, fmax / u.day
else:
return t, y, dy, fmax
def null_data(N=1000, dy=1, rseed=0, units=False):
"""Generate null hypothesis data"""
rng = np.random.RandomState(rseed)
t = 100 * rng.rand(N)
dy = 0.5 * dy * (1 + rng.rand(N))
y = dy * rng.randn(N)
fmax = 40
if units:
return t * u.day, y * u.mag, dy * u.mag, fmax / u.day
else:
return t, y, dy, fmax
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('units', [False, True])
def test_distribution(normalization, with_errors, units):
t, y, dy, fmax = null_data(units=units)
if not with_errors:
dy = None
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
z = np.linspace(0, power.max(), 1000)
# Test that pdf and cdf are consistent
dz = z[1] - z[0]
z_mid = z[:-1] + 0.5 * dz
pdf = ls.distribution(z_mid)
cdf = ls.distribution(z, cumulative=True)
if isinstance(dz, u.Quantity):
dz = dz.value
assert_allclose(pdf, np.diff(cdf) / dz, rtol=1E-5, atol=1E-8)
# psd normalization without specified errors produces bad results
if not (normalization == 'psd' and not with_errors):
# Test that observed power is distributed according to the theoretical pdf
hist, bins = np.histogram(power, 30, density=True)
midpoints = 0.5 * (bins[1:] + bins[:-1])
pdf = ls.distribution(midpoints)
assert_allclose(hist, pdf, rtol=0.05, atol=0.05 * pdf[0])
@pytest.mark.parametrize('N', [10, 100, 1000])
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_inverse_single(N, normalization):
fap = np.linspace(0, 1, 11)
z = inv_fap_single(fap, N, normalization)
fap_out = fap_single(z, N, normalization)
assert_allclose(fap, fap_out)
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('units', [False, True])
def test_inverse_bootstrap(normalization, use_errs, units):
t, y, dy, fmax = null_data(units=units)
if not use_errs:
dy = None
fap = np.linspace(0, 1, 11)
method = 'bootstrap'
method_kwds = METHOD_KWDS['bootstrap']
ls = LombScargle(t, y, dy, normalization=normalization)
z = ls.false_alarm_level(fap, maximum_frequency=fmax,
method=method, method_kwds=method_kwds)
fap_out = ls.false_alarm_probability(z, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
# atol = 1 / n_bootstraps
assert_allclose(fap, fap_out, atol=0.05)
@pytest.mark.parametrize('method', sorted(set(METHODS) - {'bootstrap'}))
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('N', [10, 100, 1000])
@pytest.mark.parametrize('units', [False, True])
def test_inverses(method, normalization, use_errs, N, units, T=5):
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
t, y, dy, fmax = make_data(N, rseed=543, units=units)
if not use_errs:
dy = None
method_kwds = METHOD_KWDS.get(method, None)
fap = np.logspace(-10, 0, 11)
ls = LombScargle(t, y, dy, normalization=normalization)
z = ls.false_alarm_level(fap, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
fap_out = ls.false_alarm_probability(z, maximum_frequency=fmax,
method=method,
method_kwds=method_kwds)
assert_allclose(fap, fap_out)
@pytest.mark.parametrize('method', sorted(METHODS))
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
@pytest.mark.parametrize('units', [False, True])
def test_false_alarm_smoketest(method, normalization, units):
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
kwds = METHOD_KWDS.get(method, None)
t, y, dy, fmax = make_data(units=units)
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
Z = np.linspace(power.min(), power.max(), 30)
fap = ls.false_alarm_probability(Z, maximum_frequency=fmax,
method=method, method_kwds=kwds)
assert len(fap) == len(Z)
if method != 'davies':
assert np.all(fap <= 1)
assert np.all(fap[:-1] >= fap[1:]) # monotonically decreasing
@pytest.mark.parametrize('method', sorted(METHODS))
@pytest.mark.parametrize('use_errs', [True, False])
@pytest.mark.parametrize('normalization', sorted(set(NORMALIZATIONS) - {'psd'}))
@pytest.mark.parametrize('units', [False, True])
def test_false_alarm_equivalence(method, normalization, use_errs, units):
# Note: the PSD normalization is not equivalent to the others, in that it
# depends on the absolute errors rather than relative errors. Because the
# scaling contributes to the distribution, it cannot be converted directly
# from any of the three normalized versions.
if not HAS_SCIPY and method in ['baluev', 'davies']:
pytest.skip("SciPy required")
kwds = METHOD_KWDS.get(method, None)
t, y, dy, fmax = make_data(units=units)
if not use_errs:
dy = None
ls = LombScargle(t, y, dy, normalization=normalization)
freq, power = ls.autopower(maximum_frequency=fmax)
Z = np.linspace(power.min(), power.max(), 30)
fap = ls.false_alarm_probability(Z, maximum_frequency=fmax,
method=method, method_kwds=kwds)
# Compute the equivalent Z values in the standard normalization
# and check that the FAP is consistent
Z_std = convert_normalization(Z, len(t),
from_normalization=normalization,
to_normalization='standard',
chi2_ref=compute_chi2_ref(y, dy))
ls = LombScargle(t, y, dy, normalization='standard')
fap_std = ls.false_alarm_probability(Z_std, maximum_frequency=fmax,
method=method, method_kwds=kwds)
assert_allclose(fap, fap_std, rtol=0.1)
| MSeifert04/astropy | astropy/timeseries/periodograms/lombscargle/tests/test_statistics.py | Python | bsd-3-clause | 7,519 |
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
LEADER = 1
ROUTER1 = 2
ROUTER2 = 3
ED = 4
SED = 5
MTDS = [ED, SED]
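# Topology implied by the whitelist configuration in setUp() below
# (each link is a pair of mutual whitelist entries):
#
#   ROUTER2 ---- LEADER ---- ROUTER1 ---- ED
#                                  \----- SED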
class Cert_5_6_9_NetworkDataForwarding(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,6):
self.nodes[i] = node.Node(i, (i in MTDS))
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER2].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER1].set_panid(0xface)
self.nodes[ROUTER1].set_mode('rsdn')
self.nodes[ROUTER1].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[ED].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[SED].get_addr64())
self.nodes[ROUTER1].enable_whitelist()
self.nodes[ROUTER1].set_router_selection_jitter(1)
self.nodes[ROUTER2].set_panid(0xface)
self.nodes[ROUTER2].set_mode('rsdn')
self.nodes[ROUTER2].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER2].enable_whitelist()
self.nodes[ROUTER2].set_router_selection_jitter(1)
self.nodes[ED].set_panid(0xface)
self.nodes[ED].set_mode('rsn')
self.nodes[ED].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[ED].enable_whitelist()
self.nodes[SED].set_panid(0xface)
self.nodes[SED].set_mode('s')
self.nodes[SED].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[SED].enable_whitelist()
self.nodes[SED].set_timeout(3)
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER1].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
self.nodes[ROUTER2].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
self.nodes[ED].start()
time.sleep(5)
self.assertEqual(self.nodes[ED].get_state(), 'child')
self.nodes[SED].start()
time.sleep(5)
self.assertEqual(self.nodes[SED].get_state(), 'child')
self.nodes[LEADER].add_prefix('2001:2:0:1::/64', 'paros', 'med')
self.nodes[LEADER].add_route('2001:2:0:2::/64', 'med')
self.nodes[LEADER].register_netdata()
time.sleep(10)
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'low')
self.nodes[ROUTER2].add_route('2001:2:0:2::/64', 'high')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2001:2:0:2::1'))
self.assertFalse(self.nodes[SED].ping('2007::1'))
self.nodes[ROUTER2].remove_prefix('2001:2:0:1::/64')
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'high')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2007::1'))
self.nodes[ROUTER2].remove_prefix('2001:2:0:1::/64')
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'med')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2007::1'))
if __name__ == '__main__':
unittest.main()
| vaas-krish/openthread | tests/scripts/thread-cert/Cert_5_6_09_NetworkDataForwarding.py | Python | bsd-3-clause | 5,202 |
from rlpy.Representations import IndependentDiscretization
from rlpy.Domains import GridWorld, InfiniteTrackCartPole
import numpy as np
from rlpy.Tools import __rlpy_location__
import os
def test_number_of_cells():
""" Ensure create appropriate # of cells (despite ``discretization``) """
mapDir = os.path.join(__rlpy_location__, "Domains", "GridWorldMaps")
mapname=os.path.join(mapDir, "4x5.txt") # expect 4*5 = 20 states
domain = GridWorld(mapname=mapname)
rep = IndependentDiscretization(domain, discretization=100)
assert rep.features_num == 9
rep = IndependentDiscretization(domain, discretization=5)
assert rep.features_num == 9
def test_phi_cells():
""" Ensure correct features are activated for corresponding state """
mapDir = os.path.join(__rlpy_location__, "Domains", "GridWorldMaps")
mapname=os.path.join(mapDir, "4x5.txt") # expect 4*5 = 20 states
domain = GridWorld(mapname=mapname)
rep = IndependentDiscretization(domain)
for r in np.arange(4):
for c in np.arange(5):
phiVec = rep.phi(np.array([r,c]), terminal=False)
assert sum(phiVec) == 2 # 1 for each dimension
assert phiVec[r] == 1 # correct row activated
assert phiVec[4+c] == 1 # correct col activated
def test_continuous_discr():
""" Ensure correct discretization in continuous state spaces """
# NOTE - if possible, test a domain with mixed discr/continuous
domain = InfiniteTrackCartPole.InfTrackCartPole() #2 continuous dims
rep = IndependentDiscretization(domain, discretization=20)
assert rep.features_num == 40
rep = IndependentDiscretization(domain, discretization=50)
assert rep.features_num == 100 | imanolarrieta/RL | tests/test_representations/test_IndependentDiscretization.py | Python | bsd-3-clause | 1,739 |
"""
Test cases for the template loaders
Note: This test requires setuptools!
"""
from django.conf import settings
if __name__ == '__main__':
settings.configure()
import unittest
import sys
import pkg_resources
import imp
import StringIO
import os.path
from django.template import TemplateDoesNotExist
from django.template.loaders.eggs import load_template_source as lts_egg
# Mock classes and objects for pkg_resources functions.
class MockProvider(pkg_resources.NullProvider):
def __init__(self, module):
pkg_resources.NullProvider.__init__(self, module)
self.module = module
def _has(self, path):
return path in self.module._resources
def _isdir(self,path):
return False
def get_resource_stream(self, manager, resource_name):
return self.module._resources[resource_name]
def _get(self, path):
return self.module._resources[path].read()
class MockLoader(object):
pass
def create_egg(name, resources):
"""
Creates a mock egg with a list of resources.
name: The name of the module.
    resources: A dictionary of resources. Keys are the resource names and values are the file-like data.
"""
egg = imp.new_module(name)
egg.__loader__ = MockLoader()
egg._resources = resources
sys.modules[name] = egg
class EggLoader(unittest.TestCase):
def setUp(self):
pkg_resources._provider_factories[MockLoader] = MockProvider
self.empty_egg = create_egg("egg_empty", {})
self.egg_1 = create_egg("egg_1", {
os.path.normcase('templates/y.html') : StringIO.StringIO("y"),
os.path.normcase('templates/x.txt') : StringIO.StringIO("x"),
})
self._old_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = []
def tearDown(self):
settings.INSTALLED_APPS = self._old_installed_apps
def test_empty(self):
"Loading any template on an empty egg should fail"
settings.INSTALLED_APPS = ['egg_empty']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_non_existing(self):
"Template loading fails if the template is not in the egg"
settings.INSTALLED_APPS = ['egg_1']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_existing(self):
"A template can be loaded from an egg"
settings.INSTALLED_APPS = ['egg_1']
contents, template_name = lts_egg("y.html")
self.assertEqual(contents, "y")
self.assertEqual(template_name, "egg:egg_1:templates/y.html")
def test_not_installed(self):
"Loading an existent template from an egg not included in INSTALLED_APPS should fail"
settings.INSTALLED_APPS = []
self.assertRaises(TemplateDoesNotExist, lts_egg, "y.html")
if __name__ == "__main__":
unittest.main()
| grangier/django-11599 | tests/regressiontests/templates/loaders.py | Python | bsd-3-clause | 2,859 |
# All of the other examples directly embed the Javascript and CSS code for
# Bokeh's client-side runtime into the HTML. This leads to the HTML files
# being rather large. An alternative is to ask Bokeh to produce HTML that
# has a relative link to the Bokeh Javascript and CSS. This is easy to
# do; you just pass in a few extra arguments to the output_file() command.
import numpy as np
from bokeh.plotting import *
N = 100
x = np.linspace(0, 4*np.pi, N)
y = np.sin(x)
output_file("relative_paths.html", title="Relative path example", mode="relative")
scatter(x,y, color="#FF00FF", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
show()
# By default, the URLs for the Javascript and CSS will be relative to
# the current directory, i.e. the directory in which the HTML file is
# generated. You can provide a different "root" directory from which
# the relative paths will be computed:
#
# output_file("scatter.html", title="scatter.py example",
#             mode="relative", rootdir="some/other/path")
| the13fools/Bokeh_Examples | plotting/file/relative_paths.py | Python | bsd-3-clause | 1,023 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Look if qt is installed, and try out all builders.
"""
import os
import sys
import TestSCons
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
test.Qt_dummy_installation()
QTDIR=os.environ['QTDIR']
test.write('SConstruct', """\
import os
dummy_env = Environment()
ENV = dummy_env['ENV']
try:
PATH=ARGUMENTS['PATH']
if 'PATH' in ENV:
ENV_PATH = PATH + os.pathsep + ENV['PATH']
else:
Exit(0) # this is certainly a weird system :-)
except KeyError:
ENV_PATH=ENV.get('PATH', '')
env = Environment(tools=['default','qt'],
ENV={'PATH':ENV_PATH,
'PATHEXT':os.environ.get('PATHEXT'),
'HOME':os.getcwd(),
'SystemRoot':ENV.get('SystemRoot')},
# moc / uic want to write stuff in ~/.qt
CXXFILESUFFIX=".cpp")
conf = env.Configure()
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
conf.env['QT_LIB'] = 'qt-mt'
if not conf.CheckLib(env.subst("$QT_LIB"), autoadd=0):
Exit(0)
env = conf.Finish()
VariantDir('bld', '.')
env.Program('bld/test_realqt', ['bld/mocFromCpp.cpp',
'bld/mocFromH.cpp',
'bld/anUiFile.ui',
'bld/main.cpp'])
""")
test.write('mocFromCpp.h', """\
void mocFromCpp();
""")
test.write('mocFromCpp.cpp', """\
#include <qobject.h>
#include "mocFromCpp.h"
class MyClass1 : public QObject {
Q_OBJECT
public:
MyClass1() : QObject() {};
public slots:
void myslot() {};
};
void mocFromCpp() {
MyClass1 myclass;
}
#include "mocFromCpp.moc"
""")
test.write('mocFromH.h', """\
#include <qobject.h>
class MyClass2 : public QObject {
Q_OBJECT;
public:
MyClass2();
public slots:
void myslot();
};
void mocFromH();
""")
test.write('mocFromH.cpp', """\
#include "mocFromH.h"
MyClass2::MyClass2() : QObject() {}
void MyClass2::myslot() {}
void mocFromH() {
MyClass2 myclass;
}
""")
test.write('anUiFile.ui', """\
<!DOCTYPE UI><UI>
<class>MyWidget</class>
<widget>
<class>QWidget</class>
<property name="name">
<cstring>MyWidget</cstring>
</property>
<property name="caption">
<string>MyWidget</string>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">anUiFile.ui.h</include>
</includes>
<slots>
<slot>testSlot()</slot>
</slots>
<layoutdefaults spacing="6" margin="11"/>
</UI>
""")
test.write('anUiFile.ui.h', r"""
#include <stdio.h>
#if QT_VERSION >= 0x030100
void MyWidget::testSlot()
{
printf("Hello World\n");
}
#endif
""")
test.write('main.cpp', r"""
#include <qapp.h>
#include "mocFromCpp.h"
#include "mocFromH.h"
#include "anUiFile.h"
#include <stdio.h>
int main(int argc, char **argv) {
QApplication app(argc, argv);
mocFromCpp();
mocFromH();
MyWidget mywidget;
#if QT_VERSION >= 0x030100
mywidget.testSlot();
#else
printf("Hello World\n");
#endif
return 0;
}
""")
test.run(arguments="bld/test_realqt" + TestSCons._exe)
test.run(program=test.workpath("bld", "test_realqt"),
stdout=None,
status=None,
stderr=None)
if test.stdout() != "Hello World\n" or test.stderr() != '' or test.status:
sys.stdout.write(test.stdout())
sys.stderr.write(test.stderr())
# The test might be run on a system that doesn't have an X server
# running, or may be run by an ID that can't connect to the server.
# If so, then print whatever it showed us (which is in and of itself
# an indication that it built correctly) but don't fail the test.
expect = 'cannot connect to X server'
test.fail_test(test.stdout())
test.fail_test(test.stderr().find(expect) == -1)
if test.status != 1 and (test.status>>8) != 1:
sys.stdout.write('test_realqt returned status %s\n' % test.status)
test.fail_test()
QTDIR = os.environ['QTDIR']
PATH = os.environ['PATH']
os.environ['QTDIR']=''
os.environ['PATH']='.'
test.run(stderr=None, arguments="-c bld/test_realqt" + TestSCons._exe)
expect1 = "scons: warning: Could not detect qt, using empty QTDIR"
expect2 = "scons: warning: Could not detect qt, using moc executable as a hint"
test.fail_test(test.stderr().find(expect1) == -1 and
test.stderr().find(expect2) == -1)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| timj/scons | test/QT/installed.py | Python | mit | 5,726 |
"""Support functions for working with wheel files.
"""
from __future__ import absolute_import
import logging
from email.parser import Parser
from zipfile import ZipFile
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import DistInfoDistribution
from pip._vendor.six import PY2, ensure_str
from pip._internal.exceptions import UnsupportedWheel
from pip._internal.utils.pkg_resources import DictMetadata
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from email.message import Message
from typing import Dict, Tuple
from pip._vendor.pkg_resources import Distribution
if PY2:
from zipfile import BadZipfile as BadZipFile
else:
from zipfile import BadZipFile
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelMetadata(DictMetadata):
"""Metadata provider that maps metadata decoding exceptions to our
internal exception type.
"""
def __init__(self, metadata, wheel_name):
# type: (Dict[str, bytes], str) -> None
super(WheelMetadata, self).__init__(metadata)
self._wheel_name = wheel_name
def get_metadata(self, name):
# type: (str) -> str
try:
return super(WheelMetadata, self).get_metadata(name)
except UnicodeDecodeError as e:
# Augment the default error with the origin of the file.
raise UnsupportedWheel(
"Error decoding metadata for {}: {}".format(
self._wheel_name, e
)
)
def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
# type: (ZipFile, str, str) -> Distribution
"""Get a pkg_resources distribution given a wheel.
:raises UnsupportedWheel: on any errors
"""
info_dir, _ = parse_wheel(wheel_zip, name)
metadata_files = [
p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir))
]
metadata_text = {} # type: Dict[str, bytes]
for path in metadata_files:
# If a flag is set, namelist entries may be unicode in Python 2.
# We coerce them to native str type to match the types used in the rest
# of the code. This cannot fail because unicode can always be encoded
# with UTF-8.
full_path = ensure_str(path)
_, metadata_name = full_path.split("/", 1)
try:
metadata_text[metadata_name] = read_wheel_metadata_file(
wheel_zip, full_path
)
except UnsupportedWheel as e:
raise UnsupportedWheel(
"{} has an invalid wheel, {}".format(name, str(e))
)
metadata = WheelMetadata(metadata_text, location)
return DistInfoDistribution(
location=location, metadata=metadata, project_name=name
)
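# Illustrative usage sketch (not part of the original module; the wheel path
# and project name below are hypothetical):
#
#   with ZipFile("example-1.0-py3-none-any.whl") as zf:
#       dist = pkg_resources_distribution_for_wheel(zf, "example", "example-1.0-py3-none-any.whl")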
def parse_wheel(wheel_zip, name):
# type: (ZipFile, str) -> Tuple[str, Message]
"""Extract information from the provided wheel, ensuring it meets basic
standards.
Returns the name of the .dist-info directory and the parsed WHEEL metadata.
"""
try:
info_dir = wheel_dist_info_dir(wheel_zip, name)
metadata = wheel_metadata(wheel_zip, info_dir)
version = wheel_version(metadata)
except UnsupportedWheel as e:
raise UnsupportedWheel(
"{} has an invalid wheel, {}".format(name, str(e))
)
check_compatibility(version, name)
return info_dir, metadata
def wheel_dist_info_dir(source, name):
# type: (ZipFile, str) -> str
"""Returns the name of the contained .dist-info directory.
Raises AssertionError or UnsupportedWheel if not found, >1 found, or
it doesn't match the provided name.
"""
# Zip file path separators must be /
subdirs = list(set(p.split("/")[0] for p in source.namelist()))
info_dirs = [s for s in subdirs if s.endswith('.dist-info')]
if not info_dirs:
raise UnsupportedWheel(".dist-info directory not found")
if len(info_dirs) > 1:
raise UnsupportedWheel(
"multiple .dist-info directories found: {}".format(
", ".join(info_dirs)
)
)
info_dir = info_dirs[0]
info_dir_name = canonicalize_name(info_dir)
canonical_name = canonicalize_name(name)
if not info_dir_name.startswith(canonical_name):
raise UnsupportedWheel(
".dist-info directory {!r} does not start with {!r}".format(
info_dir, canonical_name
)
)
# Zip file paths can be unicode or str depending on the zip entry flags,
# so normalize it.
return ensure_str(info_dir)
def read_wheel_metadata_file(source, path):
# type: (ZipFile, str) -> bytes
try:
return source.read(path)
# BadZipFile for general corruption, KeyError for missing entry,
# and RuntimeError for password-protected files
except (BadZipFile, KeyError, RuntimeError) as e:
raise UnsupportedWheel(
"could not read {!r} file: {!r}".format(path, e)
)
def wheel_metadata(source, dist_info_dir):
# type: (ZipFile, str) -> Message
"""Return the WHEEL metadata of an extracted wheel, if possible.
Otherwise, raise UnsupportedWheel.
"""
path = "{}/WHEEL".format(dist_info_dir)
# Zip file path separators must be /
wheel_contents = read_wheel_metadata_file(source, path)
try:
wheel_text = ensure_str(wheel_contents)
except UnicodeDecodeError as e:
raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e))
# FeedParser (used by Parser) does not raise any exceptions. The returned
# message may have .defects populated, but for backwards-compatibility we
# currently ignore them.
return Parser().parsestr(wheel_text)
def wheel_version(wheel_data):
# type: (Message) -> Tuple[int, ...]
"""Given WHEEL metadata, return the parsed Wheel-Version.
Otherwise, raise UnsupportedWheel.
"""
version_text = wheel_data["Wheel-Version"]
if version_text is None:
raise UnsupportedWheel("WHEEL is missing Wheel-Version")
version = version_text.strip()
try:
return tuple(map(int, version.split('.')))
except ValueError:
raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version))
def check_compatibility(version, name):
# type: (Tuple[int, ...], str) -> None
"""Raises errors or warns if called with an incompatible Wheel-Version.
    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1), and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
version: a 2-tuple representing a Wheel-Version (Major, Minor)
name: name of wheel or package to raise exception about
:raises UnsupportedWheel: when an incompatible Wheel-Version is given
"""
if version[0] > VERSION_COMPATIBLE[0]:
raise UnsupportedWheel(
"%s's Wheel-Version (%s) is not compatible with this version "
"of pip" % (name, '.'.join(map(str, version)))
)
elif version > VERSION_COMPATIBLE:
logger.warning(
'Installing from a newer Wheel-Version (%s)',
'.'.join(map(str, version)),
)
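# Behaviour sketch for check_compatibility (illustrative, with
# VERSION_COMPATIBLE == (1, 0)):
#   check_compatibility((1, 0), "foo")  # returns quietly: fully compatible
#   check_compatibility((1, 2), "foo")  # logs a warning: newer minor version
#   check_compatibility((2, 0), "foo")  # raises UnsupportedWheel: newer major series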
| xavfernandez/pip | src/pip/_internal/utils/wheel.py | Python | mit | 7,302 |
def task_compute():
def comp():
return {'x':5,'y':10, 'z': 20}
return {'actions': [(comp,)]}
def show_getargs(values):
print(values)
def task_args_dict():
return {'actions': [show_getargs],
'getargs': {'values': ('compute', None)},
'verbosity': 2,
}
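# The getargs entry ('compute', None) passes the whole dict returned by the
# `compute` task, so show_getargs prints {'x': 5, 'y': 10, 'z': 20}.
# Illustrative invocation: doit -f getargs_dict.py args_dict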
| agustinhenze/doit.debian | doc/tutorial/getargs_dict.py | Python | mit | 301 |
"""
Demo platform for the cover component.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
from homeassistant.components.cover import CoverDevice
from homeassistant.helpers.event import track_utc_time_change
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Demo covers."""
add_devices([
DemoCover(hass, 'Kitchen Window'),
DemoCover(hass, 'Hall Window', 10),
DemoCover(hass, 'Living Room Window', 70, 50),
])
class DemoCover(CoverDevice):
"""Representation of a demo cover."""
# pylint: disable=no-self-use, too-many-instance-attributes
def __init__(self, hass, name, position=None, tilt_position=None):
"""Initialize the cover."""
self.hass = hass
self._name = name
self._position = position
self._set_position = None
self._set_tilt_position = None
self._tilt_position = tilt_position
self._closing = True
self._closing_tilt = True
self._unsub_listener_cover = None
self._unsub_listener_cover_tilt = None
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo cover."""
return False
@property
def current_cover_position(self):
"""Return the current position of the cover."""
return self._position
@property
def current_cover_tilt_position(self):
"""Return the current tilt position of the cover."""
return self._tilt_position
@property
def is_closed(self):
"""Return if the cover is closed."""
if self._position is not None:
if self.current_cover_position > 0:
return False
else:
return True
else:
return None
def close_cover(self, **kwargs):
"""Close the cover."""
if self._position in (0, None):
return
self._listen_cover()
self._closing = True
def close_cover_tilt(self, **kwargs):
"""Close the cover tilt."""
if self._tilt_position in (0, None):
return
self._listen_cover_tilt()
self._closing_tilt = True
def open_cover(self, **kwargs):
"""Open the cover."""
if self._position in (100, None):
return
self._listen_cover()
self._closing = False
def open_cover_tilt(self, **kwargs):
"""Open the cover tilt."""
if self._tilt_position in (100, None):
return
self._listen_cover_tilt()
self._closing_tilt = False
def set_cover_position(self, position, **kwargs):
"""Move the cover to a specific position."""
self._set_position = round(position, -1)
if self._position == position:
return
self._listen_cover()
self._closing = position < self._position
def set_cover_tilt_position(self, tilt_position, **kwargs):
"""Move the cover til to a specific position."""
self._set_tilt_position = round(tilt_position, -1)
if self._tilt_position == tilt_position:
return
self._listen_cover_tilt()
self._closing_tilt = tilt_position < self._tilt_position
def stop_cover(self, **kwargs):
"""Stop the cover."""
if self._position is None:
return
if self._unsub_listener_cover is not None:
self._unsub_listener_cover()
self._unsub_listener_cover = None
self._set_position = None
def stop_cover_tilt(self, **kwargs):
"""Stop the cover tilt."""
if self._tilt_position is None:
return
if self._unsub_listener_cover_tilt is not None:
self._unsub_listener_cover_tilt()
self._unsub_listener_cover_tilt = None
self._set_tilt_position = None
def _listen_cover(self):
"""Listen for changes in cover."""
if self._unsub_listener_cover is None:
self._unsub_listener_cover = track_utc_time_change(
self.hass, self._time_changed_cover)
def _time_changed_cover(self, now):
"""Track time changes."""
if self._closing:
self._position -= 10
else:
self._position += 10
if self._position in (100, 0, self._set_position):
self.stop_cover()
self.update_ha_state()
def _listen_cover_tilt(self):
"""Listen for changes in cover tilt."""
if self._unsub_listener_cover_tilt is None:
self._unsub_listener_cover_tilt = track_utc_time_change(
self.hass, self._time_changed_cover_tilt)
def _time_changed_cover_tilt(self, now):
"""Track time changes."""
if self._closing_tilt:
self._tilt_position -= 10
else:
self._tilt_position += 10
if self._tilt_position in (100, 0, self._set_tilt_position):
self.stop_cover_tilt()
self.update_ha_state()
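# Illustrative configuration.yaml snippet to load this demo platform (an
# assumption for demonstration purposes, not part of this module):
#
#   cover:
#     - platform: demo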
| Smart-Torvy/torvy-home-assistant | homeassistant/components/cover/demo.py | Python | mit | 5,159 |
#!/usr/bin/env python3
from python_utility.command_process import CommandProcess
def main():
process = CommandProcess(
arguments=[
'flake8',
'--exclude', '.git,.idea,.tox',
'--verbose',
'--max-complexity', '5'
],
)
process.print_output()
if __name__ == '__main__':
main()
| FunTimeCoding/jenkins-job-manager | script/python/flake8.py | Python | mit | 357 |
# -*- coding: utf-8 -*-
from qiniu import config
from qiniu import http
class PersistentFop(object):
"""持久化处理类
该类用于主动触发异步持久化操作,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/fop/pfop/pfop.html
Attributes:
auth: 账号管理密钥对,Auth对象
bucket: 操作资源所在空间
pipeline: 多媒体处理队列,详见 https://portal.qiniu.com/mps/pipeline
notify_url: 持久化处理结果通知URL
"""
def __init__(self, auth, bucket, pipeline=None, notify_url=None):
"""初始化持久化处理类"""
self.auth = auth
self.bucket = bucket
self.pipeline = pipeline
self.notify_url = notify_url
def execute(self, key, fops, force=None):
"""执行持久化处理:
Args:
key: 待处理的源文件
fops: 处理详细操作,规格详见 http://developer.qiniu.com/docs/v6/api/reference/fop/
force: 强制执行持久化处理开关
Returns:
一个dict变量,返回持久化处理的persistentId,类似{"persistentId": 5476bedf7823de4068253bae};
一个ResponseInfo对象
"""
ops = ';'.join(fops)
data = {'bucket': self.bucket, 'key': key, 'fops': ops}
if self.pipeline:
data['pipeline'] = self.pipeline
if self.notify_url:
data['notifyURL'] = self.notify_url
if force == 1:
data['force'] = 1
url = 'http://{0}/pfop'.format(config.get_default('default_api_host'))
return http._post_with_auth(url, data, self.auth)
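# Illustrative usage sketch (bucket, pipeline and key names are hypothetical):
#
#   from qiniu import Auth
#   q = Auth(access_key, secret_key)
#   pfop = PersistentFop(q, 'my-bucket', pipeline='my-pipeline')
#   ops = ['avthumb/mp4/s/640x360/vb/1.25m']
#   ret, info = pfop.execute('source.flv', ops, force=1)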
| hotpoor-for-Liwei/hj_hackathon_201607 | vendor/qiniu/services/processing/pfop.py | Python | mit | 1,697 |
test = "test of the localtime() function"
import time
times = [ 0, 100000, int (time.time()) ]
filedata = """
{$
for (i, %(times)s) {
locals { v : localtime(i) }
print ("${v[0]} ${v[1]} ${v[2]} ${v[3]} ${v[4]} ");
}
$}
""" % { "times" : times }
# in publand, localtime(0) should give the time now
times[0] = int (time.time())
outcome_v = []
for i in times:
lt = time.localtime (i)
outcome_v += [ lt.tm_year, lt.tm_mon, lt.tm_mday, lt.tm_hour, lt.tm_min ]
outcome = " ".join ([ str (i) for i in outcome_v ])
| OkCupid/okws | test/regtest/cases/98.py | Python | gpl-2.0 | 542 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_release_1_1_0']
def info():
return "New bibsched status (schSTATUS) table"
def do_upgrade():
run_sql("""CREATE TABLE IF NOT EXISTS schSTATUS (
name varchar(50),
value mediumblob,
PRIMARY KEY (name)
) ENGINE=MyISAM
""")
def estimate():
return 1
| SamiHiltunen/invenio-upgrader | invenio_upgrader/upgrades/invenio_2013_06_24_new_bibsched_status_table.py | Python | gpl-2.0 | 1,112 |
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: aci_firmware_group_node
short_description: This module adds and removes nodes from the firmware group
version_added: "2.8"
description:
    - This module adds/deletes a node to the firmware group. This module assigns one node at a time.
options:
group:
description:
- This is the name of the firmware group
type: str
required: true
node:
description:
- The node to be added to the firmware group - the value equals the NodeID
type: str
required: true
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
default: present
choices: [ absent, present, query ]
name_alias:
version_added: '2.10'
description:
- The alias for the current object. This relates to the nameAlias field in ACI.
type: str
extends_documentation_fragment:
- aci
author:
- Steven Gerhart (@sgerhart)
'''
EXAMPLES = '''
- name: add firmware group node
aci_firmware_group_node:
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
validate_certs: no
group: testingfwgrp
node: 1001
state: present
- name: Remove firmware group node
aci_firmware_group_node:
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
validate_certs: no
group: testingfwgrp
node: 1001
state: absent
'''
RETURN = '''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
import json
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
group=dict(type='str', aliases=['group']), # Not required for querying all objects
node=dict(type='str', aliases=['node']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
name_alias=dict(type='str'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['node', 'group']],
['state', 'present', ['node', 'group']],
],
)
state = module.params.get('state')
group = module.params.get('group')
node = module.params.get('node')
name_alias = module.params.get('name_alias')
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='firmwareFwGrp',
aci_rn='fabric/fwgrp-{0}'.format(group),
target_filter={'name': group},
module_object=group,
),
subclass_1=dict(
aci_class='fabricNodeBlk',
aci_rn='nodeblk-blk{0}-{0}'.format(node),
target_filter={'name': node},
module_object=node,
),
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='fabricNodeBlk',
class_config=dict(
from_=node,
to_=node,
nameAlias=name_alias,
),
)
aci.get_diff(aci_class='fabricNodeBlk')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| roadmapper/ansible | lib/ansible/modules/network/aci/aci_firmware_group_node.py | Python | gpl-3.0 | 6,497 |
# -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
class SockshareCom(DeadHoster):
__name__ = "SockshareCom"
__type__ = "hoster"
__version__ = "0.05"
__pattern__ = r'http://(?:www\.)?sockshare\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
__description__ = """Sockshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("jeix", "[email protected]"),
("stickell", "[email protected]"),
("Walter Purcaro", "[email protected]")]
getInfo = create_getInfo(SockshareCom)
| sebdelsol/pyload | module/plugins/hoster/SockshareCom.py | Python | gpl-3.0 | 613 |
def load_config(default_values, user_values):
    """Merge ``user_values`` over ``default_values`` and return the result.
    Nested dicts are merged key by key; keys missing from the user dict, or
    set to an empty string there, fall back to the default value.
    """
if user_values is None:
return default_values
config = {}
for k, v in user_values.items():
if k in default_values:
if isinstance(v, dict):
cloned = user_values[k].copy()
for key, value in default_values[k].items():
if key is not None and key not in user_values[k] \
or user_values[k][key] == '':
cloned[key] = value
config[k] = cloned
else:
config[k] = v
else:
config[k] = v
for k, v in default_values.items():
if k not in config:
config[k] = v
return config
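# Worked example (illustrative):
#   load_config({'a': 1, 'b': {'x': 1, 'y': 2}}, {'b': {'x': 9}})
#   -> {'a': 1, 'b': {'x': 9, 'y': 2}}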
def import_class(full_path):
    """Import and return a class given its fully qualified dotted path."""
    path_split = full_path.split('.')
    path = ".".join(path_split[:-1])
    klass = path_split[-1:]
    # __import__ expects a list of name strings in fromlist, so pass the
    # single-element list holding the class name directly.
    mod = __import__(path, fromlist=klass)
    return getattr(mod, klass[0])
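# Illustrative usage: import_class('collections.OrderedDict') returns the
# OrderedDict class from the collections module.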
| CoderBotOrg/coderbotsrv | server/lib/engineauth/utils.py | Python | gpl-3.0 | 948 |
from datetime import timedelta
import json
from django.utils import timezone
import factory
from factory.django import DjangoModelFactory
from opaque_keys.edx.locator import CourseLocator
from ..models import CourseOverview
class CourseOverviewFactory(DjangoModelFactory):
class Meta(object):
model = CourseOverview
django_get_or_create = ('id', )
exclude = ('run', )
version = CourseOverview.VERSION
pre_requisite_courses = []
org = 'edX'
run = factory.Sequence('2012_Fall_{}'.format)
@factory.lazy_attribute
def _pre_requisite_courses_json(self):
return json.dumps(self.pre_requisite_courses)
@factory.lazy_attribute
def _location(self):
return self.id.make_usage_key('course', 'course')
@factory.lazy_attribute
def id(self):
return CourseLocator(self.org, 'toy', self.run)
@factory.lazy_attribute
def display_name(self):
return "{} Course".format(self.id)
@factory.lazy_attribute
def start(self):
return timezone.now()
@factory.lazy_attribute
def end(self):
return timezone.now() + timedelta(30)
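# Illustrative test usage (org and run values are hypothetical):
#   overview = CourseOverviewFactory(org='TestX', run='2017_Fall')
#   # overview.id is then CourseLocator('TestX', 'toy', '2017_Fall')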
| cpennington/edx-platform | openedx/core/djangoapps/content/course_overviews/tests/factories.py | Python | agpl-3.0 | 1,152 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import six
from sys import platform
import locale
import os.path
from pelican.tests.support import unittest, get_settings
from pelican.contents import Page, Article, Static, URLWrapper, Author, Category
from pelican.settings import DEFAULT_CONFIG
from pelican.utils import path_to_url, truncate_html_words, SafeDatetime, posix_join
from pelican.signals import content_object_init
from jinja2.utils import generate_lorem_ipsum
# generate one paragraph, enclosed with <p>
TEST_CONTENT = str(generate_lorem_ipsum(n=1))
TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
class TestPage(unittest.TestCase):
def setUp(self):
super(TestPage, self).setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, str('C'))
self.page_kwargs = {
'content': TEST_CONTENT,
'context': {
'localsiteurl': '',
},
'metadata': {
'summary': TEST_SUMMARY,
'title': 'foo bar',
'author': Author('Blogger', DEFAULT_CONFIG),
},
'source_path': '/path/to/file/foo.ext'
}
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_use_args(self):
# Creating a page with arguments passed to the constructor should use
# them to initialise object's attributes.
metadata = {'foo': 'bar', 'foobar': 'baz', 'title': 'foobar', }
page = Page(TEST_CONTENT, metadata=metadata,
context={'localsiteurl': ''})
for key, value in metadata.items():
self.assertTrue(hasattr(page, key))
self.assertEqual(value, getattr(page, key))
self.assertEqual(page.content, TEST_CONTENT)
def test_mandatory_properties(self):
# If the title is not set, must throw an exception.
page = Page('content')
with self.assertRaises(NameError):
page.check_properties()
page = Page('content', metadata={'title': 'foobar'})
page.check_properties()
def test_summary_from_metadata(self):
# If a :summary: metadata is given, it should be used
page = Page(**self.page_kwargs)
self.assertEqual(page.summary, TEST_SUMMARY)
def test_summary_max_length(self):
# If a :SUMMARY_MAX_LENGTH: is set, and there is no other summary,
# generated summary should not exceed the given length.
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
page_kwargs['settings'] = settings
del page_kwargs['metadata']['summary']
settings['SUMMARY_MAX_LENGTH'] = None
page = Page(**page_kwargs)
self.assertEqual(page.summary, TEST_CONTENT)
settings['SUMMARY_MAX_LENGTH'] = 10
page = Page(**page_kwargs)
self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10))
settings['SUMMARY_MAX_LENGTH'] = 0
page = Page(**page_kwargs)
self.assertEqual(page.summary, '')
def test_slug(self):
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
page_kwargs['settings'] = settings
settings['SLUGIFY_SOURCE'] = "title"
page = Page(**page_kwargs)
self.assertEqual(page.slug, 'foo-bar')
settings['SLUGIFY_SOURCE'] = "basename"
page = Page(**page_kwargs)
self.assertEqual(page.slug, 'foo')
def test_defaultlang(self):
# If no lang is given, default to the default one.
page = Page(**self.page_kwargs)
self.assertEqual(page.lang, DEFAULT_CONFIG['DEFAULT_LANG'])
# it is possible to specify the lang in the metadata infos
self.page_kwargs['metadata'].update({'lang': 'fr', })
page = Page(**self.page_kwargs)
self.assertEqual(page.lang, 'fr')
def test_save_as(self):
# If a lang is not the default lang, save_as should be set
# accordingly.
# if a title is defined, save_as should be set
page = Page(**self.page_kwargs)
self.assertEqual(page.save_as, "pages/foo-bar.html")
# if a language is defined, save_as should include it accordingly
self.page_kwargs['metadata'].update({'lang': 'fr', })
page = Page(**self.page_kwargs)
self.assertEqual(page.save_as, "pages/foo-bar-fr.html")
def test_metadata_url_format(self):
# Arbitrary metadata should be passed through url_format()
page = Page(**self.page_kwargs)
self.assertIn('summary', page.url_format.keys())
page.metadata['directory'] = 'test-dir'
page.settings = get_settings(PAGE_SAVE_AS='{directory}/{slug}')
self.assertEqual(page.save_as, 'test-dir/foo-bar')
def test_datetime(self):
# If DATETIME is set to a tuple, it should be used to override LOCALE
dt = SafeDatetime(2015, 9, 13)
page_kwargs = self._copy_page_kwargs()
# set its date to dt
page_kwargs['metadata']['date'] = dt
page = Page(**page_kwargs)
# page.locale_date is a unicode string in both python2 and python3
dt_date = dt.strftime(DEFAULT_CONFIG['DEFAULT_DATE_FORMAT'])
# dt_date is a byte string in python2, and a unicode string in python3
# Let's make sure it is a unicode string (relies on python 3.3 supporting the u prefix)
if type(dt_date) != type(u''):
# python2:
dt_date = unicode(dt_date, 'utf8')
self.assertEqual(page.locale_date, dt_date )
page_kwargs['settings'] = get_settings()
# I doubt this can work on all platforms ...
if platform == "win32":
locale = 'jpn'
else:
locale = 'ja_JP.utf8'
page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale,
'%Y-%m-%d(%a)')}
page_kwargs['metadata']['lang'] = 'jp'
import locale as locale_module
try:
page = Page(**page_kwargs)
self.assertEqual(page.locale_date, '2015-09-13(\u65e5)')
except locale_module.Error:
# The constructor of ``Page`` will try to set the locale to
# ``ja_JP.utf8``. But this attempt will failed when there is no
# such locale in the system. You can see which locales there are
# in your system with ``locale -a`` command.
#
# Until we find some other method to test this functionality, we
# will simply skip this test.
unittest.skip("There is no locale %s in this system." % locale)
def test_template(self):
# Pages default to page, metadata overwrites
default_page = Page(**self.page_kwargs)
self.assertEqual('page', default_page.template)
page_kwargs = self._copy_page_kwargs()
page_kwargs['metadata']['template'] = 'custom'
custom_page = Page(**page_kwargs)
self.assertEqual('custom', custom_page.template)
def _copy_page_kwargs(self):
# make a deep copy of page_kwargs
page_kwargs = dict([(key, self.page_kwargs[key]) for key in
self.page_kwargs])
for key in page_kwargs:
if not isinstance(page_kwargs[key], dict):
break
page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey])
for subkey in page_kwargs[key]])
return page_kwargs
def test_signal(self):
        # Make sure the content_object_init signal is sent when a Page is created.
def receiver_test_function(sender, instance):
pass
content_object_init.connect(receiver_test_function, sender=Page)
Page(**self.page_kwargs)
self.assertTrue(content_object_init.has_receivers_for(Page))
def test_get_content(self):
# Test that the content is updated with the relative links to
# filenames, tags and categories.
settings = get_settings()
args = self.page_kwargs.copy()
args['settings'] = settings
# Tag
args['content'] = ('A simple test, with a '
'<a href="|tag|tagname">link</a>')
page = Page(**args)
content = page.get_content('http://notmyidea.org')
self.assertEqual(
content,
('A simple test, with a '
'<a href="http://notmyidea.org/tag/tagname.html">link</a>'))
# Category
args['content'] = ('A simple test, with a '
'<a href="|category|category">link</a>')
page = Page(**args)
content = page.get_content('http://notmyidea.org')
self.assertEqual(
content,
('A simple test, with a '
'<a href="http://notmyidea.org/category/category.html">link</a>'))
def test_intrasite_link(self):
# type does not take unicode in PY2 and bytes in PY3, which in
# combination with unicode literals leads to following insane line:
cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle'
article = type(cls_name, (object,), {'url': 'article.html'})
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {'article.rst': article}
# Classic intrasite link via filename
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html">link</a>'
)
# fragment
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst#section-2">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html#section-2">link</a>'
)
# query
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html'
'?utm_whatever=234&highlight=word">link</a>'
)
# combination
args['content'] = (
'A simple test, with a '
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word#section-2">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article.html'
'?utm_whatever=234&highlight=word#section-2">link</a>'
)
def test_intrasite_link_more(self):
# type does not take unicode in PY2 and bytes in PY3, which in
# combination with unicode literals leads to following insane line:
cls_name = '_DummyAsset' if six.PY3 else b'_DummyAsset'
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {
'images/poster.jpg': type(cls_name, (object,), {'url': 'images/poster.jpg'}),
'assets/video.mp4': type(cls_name, (object,), {'url': 'assets/video.mp4'}),
'images/graph.svg': type(cls_name, (object,), {'url': 'images/graph.svg'}),
'reference.rst': type(cls_name, (object,), {'url': 'reference.html'}),
}
# video.poster
args['content'] = (
'There is a video with poster '
'<video controls poster="{filename}/images/poster.jpg">'
'<source src="|filename|/assets/video.mp4" type="video/mp4">'
'</video>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a video with poster '
'<video controls poster="http://notmyidea.org/images/poster.jpg">'
'<source src="http://notmyidea.org/assets/video.mp4" type="video/mp4">'
'</video>'
)
# object.data
args['content'] = (
'There is a svg object '
'<object data="{filename}/images/graph.svg" type="image/svg+xml"></object>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a svg object '
'<object data="http://notmyidea.org/images/graph.svg" type="image/svg+xml"></object>'
)
# blockquote.cite
args['content'] = (
'There is a blockquote with cite attribute '
'<blockquote cite="{filename}reference.rst">blah blah</blockquote>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'There is a blockquote with cite attribute '
'<blockquote cite="http://notmyidea.org/reference.html">blah blah</blockquote>'
)
def test_intrasite_link_markdown_spaces(self):
# Markdown introduces %20 instead of spaces, this tests that
# we support markdown doing this.
cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle'
article = type(cls_name, (object,), {'url': 'article-spaces.html'})
args = self.page_kwargs.copy()
args['settings'] = get_settings()
args['source_path'] = 'content'
args['context']['filenames'] = {'article spaces.rst': article}
# An intrasite link via filename with %20 as a space
args['content'] = (
'A simple test, with a '
'<a href="|filename|article%20spaces.rst">link</a>'
)
content = Page(**args).get_content('http://notmyidea.org')
self.assertEqual(
content,
'A simple test, with a '
'<a href="http://notmyidea.org/article-spaces.html">link</a>'
)
def test_multiple_authors(self):
"""Test article with multiple authors."""
args = self.page_kwargs.copy()
content = Page(**args)
assert content.authors == [content.author]
args['metadata'].pop('author')
args['metadata']['authors'] = [Author('First Author', DEFAULT_CONFIG),
Author('Second Author', DEFAULT_CONFIG)]
content = Page(**args)
assert content.authors
assert content.author == content.authors[0]
class TestArticle(TestPage):
def test_template(self):
# Articles default to article, metadata overwrites
default_article = Article(**self.page_kwargs)
self.assertEqual('article', default_article.template)
article_kwargs = self._copy_page_kwargs()
article_kwargs['metadata']['template'] = 'custom'
custom_article = Article(**article_kwargs)
self.assertEqual('custom', custom_article.template)
def test_slugify_category_author(self):
settings = get_settings()
settings['SLUG_SUBSTITUTIONS'] = [ ('C#', 'csharp') ]
settings['ARTICLE_URL'] = '{author}/{category}/{slug}/'
settings['ARTICLE_SAVE_AS'] = '{author}/{category}/{slug}/index.html'
article_kwargs = self._copy_page_kwargs()
article_kwargs['metadata']['author'] = Author("O'Brien", settings)
article_kwargs['metadata']['category'] = Category('C# & stuff', settings)
article_kwargs['metadata']['title'] = 'fnord'
article_kwargs['settings'] = settings
article = Article(**article_kwargs)
self.assertEqual(article.url, 'obrien/csharp-stuff/fnord/')
self.assertEqual(article.save_as, 'obrien/csharp-stuff/fnord/index.html')
class TestStatic(unittest.TestCase):
def setUp(self):
self.settings = get_settings(
STATIC_SAVE_AS='{path}',
STATIC_URL='{path}',
PAGE_SAVE_AS=os.path.join('outpages', '{slug}.html'),
PAGE_URL='outpages/{slug}.html')
self.context = self.settings.copy()
self.static = Static(content=None, metadata={}, settings=self.settings,
source_path=posix_join('dir', 'foo.jpg'), context=self.context)
self.context['filenames'] = {self.static.source_path: self.static}
def tearDown(self):
pass
def test_attach_to_same_dir(self):
"""attach_to() overrides a static file's save_as and url.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_parent_dir(self):
"""attach_to() preserves dirs inside the linking document dir.
"""
page = Page(content="fake page", metadata={'title': 'fakepage'},
settings=self.settings, source_path='fakepage.md')
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_other_dir(self):
"""attach_to() ignores dirs outside the linking document dir.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'))
self.static.attach_to(page)
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_ignores_subsequent_calls(self):
"""attach_to() does nothing when called a second time.
"""
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
otherdir_settings = self.settings.copy()
otherdir_settings.update(dict(
PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'),
PAGE_URL='otherpages/{slug}.html'))
otherdir_page = Page(content="other page",
metadata={'title': 'otherpage'}, settings=otherdir_settings,
source_path=os.path.join('dir', 'otherpage.md'))
self.static.attach_to(otherdir_page)
otherdir_save_as = os.path.join('otherpages', 'foo.jpg')
self.assertNotEqual(self.static.save_as, otherdir_save_as)
self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as))
def test_attach_to_does_nothing_after_save_as_referenced(self):
"""attach_to() does nothing if the save_as was already referenced.
        (For example, by a {filename} link in a document processed earlier.)
"""
original_save_as = self.static.save_as
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
self.assertEqual(self.static.save_as, original_save_as)
self.assertEqual(self.static.url, path_to_url(original_save_as))
def test_attach_to_does_nothing_after_url_referenced(self):
"""attach_to() does nothing if the url was already referenced.
(For example, by a {filename} link an a document processed earlier.)
"""
original_url = self.static.url
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
self.static.attach_to(page)
self.assertEqual(self.static.save_as, self.static.source_path)
self.assertEqual(self.static.url, original_url)
def test_attach_to_does_not_override_an_override(self):
"""attach_to() does not override paths that were overridden elsewhere.
(For example, by the user with EXTRA_PATH_METADATA)
"""
customstatic = Static(content=None,
metadata=dict(save_as='customfoo.jpg', url='customfoo.jpg'),
settings=self.settings,
source_path=os.path.join('dir', 'foo.jpg'),
context=self.settings.copy())
page = Page(content="fake page",
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'fakepage.md'))
customstatic.attach_to(page)
self.assertEqual(customstatic.save_as, 'customfoo.jpg')
self.assertEqual(customstatic.url, 'customfoo.jpg')
def test_attach_link_syntax(self):
"""{attach} link syntax triggers output path override & url replacement.
"""
html = '<a href="{attach}../foo.jpg">link</a>'
page = Page(content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html,
"{attach} link syntax did not trigger URL replacement.")
expected_save_as = os.path.join('outpages', 'foo.jpg')
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_tag_link_syntax(self):
"{tag} link syntax triggers url replacement."
html = '<a href="{tag}foo">link</a>'
page = Page(
content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html)
def test_category_link_syntax(self):
"{category} link syntax triggers url replacement."
html = '<a href="{category}foo">link</a>'
page = Page(content=html,
metadata={'title': 'fakepage'}, settings=self.settings,
source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
context=self.context)
content = page.get_content('')
self.assertNotEqual(content, html)
class TestURLWrapper(unittest.TestCase):
def test_comparisons(self):
# URLWrappers are sorted by name
wrapper_a = URLWrapper(name='first', settings={})
wrapper_b = URLWrapper(name='last', settings={})
self.assertFalse(wrapper_a > wrapper_b)
self.assertFalse(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertTrue(wrapper_a < wrapper_b)
wrapper_b.name = 'first'
self.assertFalse(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertTrue(wrapper_a == wrapper_b)
self.assertFalse(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
wrapper_a.name = 'last'
self.assertTrue(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertFalse(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
| goerz/pelican | pelican/tests/test_contents.py | Python | agpl-3.0 | 24,186 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedules', '0005_auto_20171010_1722'),
]
operations = [
migrations.CreateModel(
name='ScheduleExperience',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('experience_type', models.PositiveSmallIntegerField(default=0, choices=[(0, b'Recurring Nudge and Upgrade Reminder'), (1, b'Course Updates')])),
('schedule', models.OneToOneField(related_name='experience', to='schedules.Schedule')),
],
),
]
| lduarte1991/edx-platform | openedx/core/djangoapps/schedules/migrations/0006_scheduleexperience.py | Python | agpl-3.0 | 750 |
""" Tests for OAuth Dispatch python API module. """
import unittest
from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase
from oauth2_provider.models import AccessToken
from common.djangoapps.student.tests.factories import UserFactory
OAUTH_PROVIDER_ENABLED = settings.FEATURES.get('ENABLE_OAUTH2_PROVIDER')
if OAUTH_PROVIDER_ENABLED:
from openedx.core.djangoapps.oauth_dispatch import api
from openedx.core.djangoapps.oauth_dispatch.adapters import DOTAdapter
from openedx.core.djangoapps.oauth_dispatch.tests.constants import DUMMY_REDIRECT_URL
EXPECTED_DEFAULT_EXPIRES_IN = 36000
@unittest.skipUnless(OAUTH_PROVIDER_ENABLED, 'OAuth2 not enabled')
class TestOAuthDispatchAPI(TestCase):
""" Tests for oauth_dispatch's api.py module. """
def setUp(self):
super().setUp()
self.adapter = DOTAdapter()
self.user = UserFactory()
self.client = self.adapter.create_public_client(
name='public app',
user=self.user,
redirect_uri=DUMMY_REDIRECT_URL,
client_id='public-client-id',
)
def _assert_stored_token(self, stored_token_value, expected_token_user, expected_client):
stored_access_token = AccessToken.objects.get(token=stored_token_value)
assert stored_access_token.user.id == expected_token_user.id
assert stored_access_token.application.client_id == expected_client.client_id
assert stored_access_token.application.user.id == expected_client.user.id
def test_create_token_success(self):
token = api.create_dot_access_token(HttpRequest(), self.user, self.client)
assert token['access_token']
assert token['refresh_token']
self.assertDictContainsSubset(
{
'token_type': 'Bearer',
'expires_in': EXPECTED_DEFAULT_EXPIRES_IN,
'scope': '',
},
token,
)
self._assert_stored_token(token['access_token'], self.user, self.client)
def test_create_token_another_user(self):
another_user = UserFactory()
token = api.create_dot_access_token(HttpRequest(), another_user, self.client)
self._assert_stored_token(token['access_token'], another_user, self.client)
def test_create_token_overrides(self):
expires_in = 4800
token = api.create_dot_access_token(
HttpRequest(), self.user, self.client, expires_in=expires_in, scopes=['profile'],
)
self.assertDictContainsSubset({'scope': 'profile'}, token)
self.assertDictContainsSubset({'expires_in': expires_in}, token)
| eduNEXT/edx-platform | openedx/core/djangoapps/oauth_dispatch/tests/test_api.py | Python | agpl-3.0 | 2,669 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPyani(PythonPackage):
"""pyani is a Python3 module that provides support for calculating
average nucleotide identity (ANI) and related measures for whole genome
comparisons, and rendering relevant graphical summary output. Where
available, it takes advantage of multicore systems, and can integrate
with SGE/OGE-type job schedulers for the sequence comparisons."""
homepage = "http://widdowquinn.github.io/pyani"
url = "https://pypi.io/packages/source/p/pyani/pyani-0.2.7.tar.gz"
version('0.2.7', '239ba630d375a81c35b7c60fb9bec6fa')
version('0.2.6', 'd5524b9a3c62c36063ed474ea95785c9')
depends_on('[email protected]:')
depends_on('py-setuptools', type='build')
depends_on('py-matplotlib', type=('build', 'run'))
depends_on('py-seaborn', type=('build', 'run'))
# Required for ANI analysis
depends_on('py-biopython', type=('build', 'run'))
depends_on('py-pandas', type=('build', 'run'))
depends_on('py-scipy', type=('build', 'run'))
# Required for ANIb analysis
depends_on('blast-plus~python', type='run')
# Required for ANIm analysis
depends_on('mummer', type='run')
| EmreAtes/spack | var/spack/repos/builtin/packages/py-pyani/package.py | Python | lgpl-2.1 | 2,432 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Editres(AutotoolsPackage):
"""Dynamic resource editor for X Toolkit applications."""
homepage = "http://cgit.freedesktop.org/xorg/app/editres"
url = "https://www.x.org/archive/individual/app/editres-1.0.6.tar.gz"
version('1.0.6', '310c504347ca499874593ac96e935353')
depends_on('libxaw')
depends_on('libx11')
depends_on('libxt')
depends_on('libxmu')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
| EmreAtes/spack | var/spack/repos/builtin/packages/editres/package.py | Python | lgpl-2.1 | 1,739 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The I{builder} module provides a wsdl/xsd defined types factory
"""
from logging import getLogger
from suds import *
from suds.sudsobject import Factory
log = getLogger(__name__)
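# Illustrative usage (not part of the original module): given a schema name
# resolver, Builder(resolver).build('SomeTypeName') looks the type up in the
# schema and returns a suds object pre-populated with its child elements and
# required attributes ('SomeTypeName' is a hypothetical schema type name here).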
class Builder:
""" Builder used to construct an object for types defined in the schema """
def __init__(self, resolver):
"""
@param resolver: A schema object name resolver.
@type resolver: L{resolver.Resolver}
"""
self.resolver = resolver
def build(self, name):
""" build a an object for the specified typename as defined in the schema """
if isinstance(name, str):
type = self.resolver.find(name)
if type is None:
raise TypeNotFound(name)
else:
type = name
cls = type.name
if type.mixed():
data = Factory.property(cls)
else:
data = Factory.object(cls)
resolved = type.resolve()
md = data.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
history = []
self.add_attributes(data, resolved)
for child, ancestry in type.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
return data
def process(self, data, type, history):
""" process the specified type then process its children """
if type in history:
return
if type.enum():
return
history.append(type)
resolved = type.resolve()
value = None
if type.unbounded():
value = []
else:
if len(resolved) > 0:
if resolved.mixed():
value = Factory.property(resolved.name)
md = value.__metadata__
md.sxtype = resolved
else:
value = Factory.object(resolved.name)
md = value.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
setattr(data, type.name, value)
if value is not None:
data = value
if not isinstance(data, list):
self.add_attributes(data, resolved)
for child, ancestry in resolved.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
def add_attributes(self, data, type):
""" add required attributes """
for attr, ancestry in type.attributes():
name = '_%s' % attr.name
value = attr.get_default()
setattr(data, name, value)
def skip_child(self, child, ancestry):
""" get whether or not to skip the specified child """
if child.any(): return True
for x in ancestry:
if x.choice():
return True
return False
def ordering(self, type):
""" get the ordering """
result = []
for child, ancestry in type.resolve():
name = child.name
if child.name is None:
continue
if child.isattr():
name = '_%s' % child.name
result.append(name)
return result
| USGM/suds | suds/builder.py | Python | lgpl-3.0 | 4,213 |
# coding: utf-8
from __future__ import unicode_literals
import re
import calendar
import datetime
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
HEADRequest,
unified_strdate,
strip_jsonp,
int_or_none,
float_or_none,
determine_ext,
remove_end,
unescapeHTML,
)
class ORFTVthekIE(InfoExtractor):
IE_NAME = 'orf:tvthek'
IE_DESC = 'ORF TVthek'
_VALID_URL = r'https?://tvthek\.orf\.at/(?:[^/]+/)+(?P<id>\d+)'
_TESTS = [{
'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389',
'playlist': [{
'md5': '2942210346ed779588f428a92db88712',
'info_dict': {
'id': '8896777',
'ext': 'mp4',
'title': 'Aufgetischt: Mit der Steirischen Tafelrunde',
'description': 'md5:c1272f0245537812d4e36419c207b67d',
'duration': 2668,
'upload_date': '20141208',
},
}],
'skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256',
'info_dict': {
'id': '7982259',
'ext': 'mp4',
'title': 'Best of Ingrid Thurnher',
'upload_date': '20140527',
'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im Jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".',
},
'params': {
'skip_download': True, # rtsp downloads
},
'_skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Fluechtlingskrise/10463081/Heimat-Fremde-Heimat/13879132/Senioren-betreuen-Migrantenkinder/13879141',
'skip_download': True,
}, {
'url': 'http://tvthek.orf.at/profile/Universum/35429',
'skip_download': True,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
data_jsb = self._parse_json(
self._search_regex(
r'<div[^>]+class=(["\']).*?VideoPlaylist.*?\1[^>]+data-jsb=(["\'])(?P<json>.+?)\2',
webpage, 'playlist', group='json'),
playlist_id, transform_source=unescapeHTML)['playlist']['videos']
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
entries = []
for sd in data_jsb:
video_id, title = sd.get('id'), sd.get('title')
if not video_id or not title:
continue
video_id = compat_str(video_id)
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['sources']]
# Check for geoblocking.
# There is a property is_geoprotection, but that's always false
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._check_formats(formats, video_id)
self._sort_formats(formats)
subtitles = {}
for sub in sd.get('subtitles', []):
sub_src = sub.get('src')
if not sub_src:
continue
subtitles.setdefault(sub.get('lang', 'de-AT'), []).append({
'url': sub_src,
})
upload_date = unified_strdate(sd.get('created_date'))
entries.append({
'_type': 'video',
'id': video_id,
'title': title,
'formats': formats,
'subtitles': subtitles,
'description': sd.get('description'),
'duration': int_or_none(sd.get('duration_in_seconds')),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
class ORFOE1IE(InfoExtractor):
IE_NAME = 'orf:oe1'
IE_DESC = 'Radio Österreich 1'
_VALID_URL = r'https?://oe1\.orf\.at/(?:programm/|konsole\?.*?\btrack_id=)(?P<id>[0-9]+)'
# Audios on ORF radio are only available for 7 days, so we can't add tests.
_TESTS = [{
'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211',
'only_matching': True,
}, {
'url': 'http://oe1.orf.at/konsole?show=ondemand&track_id=443608&load_day=/programm/konsole/tag/20160726',
'only_matching': True,
}]
def _real_extract(self, url):
show_id = self._match_id(url)
data = self._download_json(
'http://oe1.orf.at/programm/%s/konsole' % show_id,
show_id
)
timestamp = datetime.datetime.strptime('%s %s' % (
data['item']['day_label'],
data['item']['time']
), '%d.%m.%Y %H:%M')
unix_timestamp = calendar.timegm(timestamp.utctimetuple())
return {
'id': show_id,
'title': data['item']['title'],
'url': data['item']['url_stream'],
'ext': 'mp3',
'description': data['item'].get('info'),
'timestamp': unix_timestamp
}
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
_VALID_URL = r'https?://fm4\.orf\.at/(?:7tage/?#|player/)(?P<date>[0-9]+)/(?P<show>\w+)'
_TEST = {
'url': 'http://fm4.orf.at/player/20160110/IS/',
'md5': '01e736e8f1cef7e13246e880a59ad298',
'info_dict': {
'id': '2016-01-10_2100_tl_54_7DaysSun13_11244',
'ext': 'mp3',
'title': 'Im Sumpf',
'description': 'md5:384c543f866c4e422a55f66a62d669cd',
'duration': 7173,
'timestamp': 1452456073,
'upload_date': '20160110',
},
'skip': 'Live streams on FM4 got deleted soon',
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
show_date = mobj.group('date')
show_id = mobj.group('show')
data = self._download_json(
'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id),
show_id
)
def extract_entry_dict(info, title, subtitle):
return {
'id': info['loopStreamId'].replace('.mp3', ''),
'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'],
'title': title,
'description': subtitle,
'duration': (info['end'] - info['start']) / 1000,
'timestamp': info['start'] / 1000,
'ext': 'mp3'
}
entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']]
return {
'_type': 'playlist',
'id': show_id,
'title': data['title'],
'description': data['subtitle'],
'entries': entries
}
class ORFIPTVIE(InfoExtractor):
IE_NAME = 'orf:iptv'
IE_DESC = 'iptv.ORF.at'
_VALID_URL = r'https?://iptv\.orf\.at/(?:#/)?stories/(?P<id>\d+)'
_TEST = {
'url': 'http://iptv.orf.at/stories/2275236/',
'md5': 'c8b22af4718a4b4af58342529453e3e5',
'info_dict': {
'id': '350612',
'ext': 'flv',
'title': 'Weitere Evakuierungen um Vulkan Calbuco',
'description': 'md5:d689c959bdbcf04efeddedbf2299d633',
'duration': 68.197,
'thumbnail': 're:^https?://.*\.jpg$',
'upload_date': '20150425',
},
}
def _real_extract(self, url):
story_id = self._match_id(url)
webpage = self._download_webpage(
'http://iptv.orf.at/stories/%s' % story_id, story_id)
video_id = self._search_regex(
r'data-video(?:id)?="(\d+)"', webpage, 'video id')
data = self._download_json(
'http://bits.orf.at/filehandler/static-api/json/current/data.json?file=%s' % video_id,
video_id)[0]
duration = float_or_none(data['duration'], 1000)
video = data['sources']['default']
load_balancer_url = video['loadBalancerUrl']
abr = int_or_none(video.get('audioBitrate'))
vbr = int_or_none(video.get('bitrate'))
fps = int_or_none(video.get('videoFps'))
width = int_or_none(video.get('videoWidth'))
height = int_or_none(video.get('videoHeight'))
thumbnail = video.get('preview')
rendition = self._download_json(
load_balancer_url, video_id, transform_source=strip_jsonp)
f = {
'abr': abr,
'vbr': vbr,
'fps': fps,
'width': width,
'height': height,
}
formats = []
for format_id, format_url in rendition['redirect'].items():
if format_id == 'rtmp':
ff = f.copy()
ff.update({
'url': format_url,
'format_id': format_id,
})
formats.append(ff)
elif determine_ext(format_url) == 'f4m':
formats.extend(self._extract_f4m_formats(
format_url, video_id, f4m_id=format_id))
elif determine_ext(format_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', m3u8_id=format_id))
else:
continue
self._sort_formats(formats)
title = remove_end(self._og_search_title(webpage), ' - iptv.ORF.at')
description = self._og_search_description(webpage)
upload_date = unified_strdate(self._html_search_meta(
'dc.date', webpage, 'upload date'))
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'thumbnail': thumbnail,
'upload_date': upload_date,
'formats': formats,
}
| TRox1972/youtube-dl | youtube_dl/extractor/orf.py | Python | unlicense | 11,297 |
import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER COMMITTEE ID NUMBER', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'ORGANIZATION NAME', 'number': '4'},
{'name': 'INDIVIDUAL LAST NAME', 'number': '5'},
{'name': 'INDIVIDUAL FIRST NAME', 'number': '6'},
{'name': 'INDIVIDUAL MIDDLE NAME', 'number': '7'},
{'name': 'INDIVIDUAL PREFIX', 'number': '8'},
{'name': 'INDIVIDUAL SUFFIX', 'number': '9'},
{'name': 'CHANGE OF ADDRESS', 'number': '10'},
{'name': 'STREET 1', 'number': '11'},
{'name': 'STREET 2', 'number': '12'},
{'name': 'CITY', 'number': '13'},
{'name': 'STATE', 'number': '14'},
{'name': 'ZIP', 'number': '15'},
{'name': 'INDIVIDUAL EMPLOYER', 'number': '16'},
{'name': 'INDIVIDUAL OCCUPATION', 'number': '17'},
{'name': 'COVERAGE FROM DATE', 'number': '18'},
{'name': 'COVERAGE THROUGH DATE', 'number': '19'},
{'name': 'DATE OF PUBLIC DISTRIBUTION', 'number': '20'},
{'name': 'COMMUNICATION TITLE', 'number': '21'},
{'name': 'FILER CODE', 'number': '22'},
{'name': 'FILER CODE DESCRIPTION', 'number': '23'},
{'name': 'SEGREGATED BANK ACCOUNT', 'number': '24'},
{'name': 'CUSTODIAN LAST NAME', 'number': '25'},
{'name': 'CUSTODIAN FIRST NAME', 'number': '26'},
{'name': 'CUSTODIAN MIDDLE NAME', 'number': '27'},
{'name': 'CUSTODIAN PREFIX', 'number': '28'},
{'name': 'CUSTODIAN SUFFIX', 'number': '29'},
{'name': 'CUSTODIAN STREET 1', 'number': '30'},
{'name': 'CUSTODIAN STREET 2', 'number': '31'},
{'name': 'CUSTODIAN CITY', 'number': '32'},
{'name': 'CUSTODIAN STATE', 'number': '33'},
{'name': 'CUSTODIAN ZIP', 'number': '34'},
{'name': 'CUSTODIAN EMPLOYER', 'number': '35'},
{'name': 'CUSTODIAN OCCUPATION', 'number': '36'},
{'name': 'TOTAL DONATIONS THIS STATEMENT', 'number': '37-9.'},
{'name': 'TOTAL DISB./OBLIG. THIS STATEMENT', 'number': '38-10.'},
{'name': 'PERSON COMPLETING LAST NAME', 'number': '39'},
{'name': 'PERSON COMPLETING FIRST NAME', 'number': '40'},
{'name': 'PERSON COMPLETING MIDDLE NAME', 'number': '41'},
{'name': 'PERSON COMPLETING PREFIX', 'number': '42'},
{'name': 'PERSON COMPLETING SUFFIX', 'number': '43'},
{'name': 'DATE SIGNED', 'number': '44'},
]
self.fields_names = self.hash_names(self.fields)
| h4ck3rm1k3/FEC-Field-Documentation | fec/version/v8_0/F9.py | Python | unlicense | 2,852 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import copy
import datetime
import errno
import functools
import hashlib
import hmac
import inspect
import logging as std_logging
import os
import pyclbr
import random
import re
import shutil
import socket
import struct
import sys
import tempfile
import time
from xml.sax import saxutils
import eventlet
import netaddr
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_context import context as common_context
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from six.moves import range
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
notify_decorator = 'nova.notifications.notify_decorator'
monkey_patch_opts = [
cfg.BoolOpt('monkey_patch',
default=False,
help='Whether to log monkey patching'),
cfg.ListOpt('monkey_patch_modules',
default=[
'nova.api.ec2.cloud:%s' % (notify_decorator),
'nova.compute.api:%s' % (notify_decorator)
],
help='List of modules/decorators to monkey patch'),
]
utils_opts = [
cfg.IntOpt('password_length',
default=12,
help='Length of generated instance admin passwords'),
cfg.StrOpt('instance_usage_audit_period',
default='month',
help='Time period to generate instance usages for. '
'Time period must be hour, day, month or year'),
cfg.BoolOpt('use_rootwrap_daemon', default=False,
help="Start and use a daemon that can run the commands that "
"need to be run with root privileges. This option is "
"usually enabled on nodes that run nova compute "
"processes"),
cfg.StrOpt('rootwrap_config',
default="/etc/nova/rootwrap.conf",
help='Path to the rootwrap configuration file to use for '
'running commands as root'),
cfg.StrOpt('tempdir',
help='Explicitly specify the temporary working directory'),
]
workarounds_opts = [
cfg.BoolOpt('disable_rootwrap',
default=False,
help='This option allows a fallback to sudo for performance '
'reasons. For example see '
'https://bugs.launchpad.net/nova/+bug/1415106'),
cfg.BoolOpt('disable_libvirt_livesnapshot',
default=True,
help='When using libvirt 1.2.2 live snapshots fail '
'intermittently under load. This config option provides '
'a mechanism to enable live snapshot while this is '
'resolved. See '
'https://bugs.launchpad.net/nova/+bug/1334398'),
cfg.BoolOpt('destroy_after_evacuate',
default=True,
deprecated_for_removal=True,
help='DEPRECATED: Whether to destroy '
'instances on startup when we suspect '
'they have previously been evacuated. This can result in '
'data loss if undesired. See '
'https://launchpad.net/bugs/1419785'),
cfg.BoolOpt('handle_virt_lifecycle_events',
default=True,
help="Whether or not to handle events raised from the compute "
"driver's 'emit_event' method. These are lifecycle "
"events raised from compute drivers that implement the "
"method. An example of a lifecycle event is an instance "
"starting or stopping. If the instance is going through "
"task state changes due to an API operation, like "
"resize, the events are ignored. However, this is an "
"advanced feature which allows the hypervisor to signal "
"to the compute service that an unexpected state change "
"has occurred in an instance and the instance can be "
"shutdown automatically - which can inherently race in "
"reboot operations or when the compute service or host "
"is rebooted, either planned or due to an unexpected "
"outage. Care should be taken when using this and "
"sync_power_state_interval is negative since then if any "
"instances are out of sync between the hypervisor and "
"the Nova database they will have to be synchronized "
"manually. See https://bugs.launchpad.net/bugs/1444630"),
]
""" The workarounds_opts group is for very specific reasons.
If you're:
- Working around an issue in a system tool (e.g. libvirt or qemu) where the
fix is in flight/discussed in that community.
- The tool can be/is fixed in some distributions and rather than patch the
code those distributions can trivially set a config option to get the
"correct" behavior.
Then this is a good place for your workaround.
.. warning::
Please use with care! Document the BugID that your workaround is paired with.
"""
CONF = cfg.CONF
CONF.register_opts(monkey_patch_opts)
CONF.register_opts(utils_opts)
CONF.import_opt('network_api_class', 'nova.network')
CONF.register_opts(workarounds_opts, group='workarounds')
LOG = logging.getLogger(__name__)
# used in limits
TIME_UNITS = {
'SECOND': 1,
'MINUTE': 60,
'HOUR': 3600,
'DAY': 86400
}
_IS_NEUTRON = None
synchronized = lockutils.synchronized_with_prefix('nova-')
SM_IMAGE_PROP_PREFIX = "image_"
SM_INHERITABLE_KEYS = (
'min_ram', 'min_disk', 'disk_format', 'container_format',
)
# Keys which hold large structured data that won't fit in the
# size constraints of the system_metadata table, so we avoid
# storing and/or loading them.
SM_SKIP_KEYS = (
# Legacy names
'mappings', 'block_device_mapping',
# Modern names
'img_mappings', 'img_block_device_mapping',
)
# Image attributes which Cinder stores in volume image metadata
# as regular properties
VIM_IMAGE_ATTRIBUTES = (
'image_id', 'image_name', 'size', 'checksum',
'container_format', 'disk_format', 'min_ram', 'min_disk',
)
_FILE_CACHE = {}
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns Boolean indicating whether the vpn_server is listening.
Basic packet structure is below.
Client packet (14 bytes)::
0 1 8 9 13
+-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
0 1 8 9 13 14 21 2225
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest were 0 in testing
"""
# NOTE(tonyb) session_id isn't used for a real VPN connection so using a
# cryptographically weak value is fine.
if session_id is None:
session_id = random.randint(0, 0xffffffffffffffff)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = struct.pack('!BQxxxxx', 0x38, session_id)
sock.sendto(data, (address, port))
sock.settimeout(timeout)
try:
received = sock.recv(2048)
except socket.timeout:
return False
finally:
sock.close()
fmt = '!BQxxxxxQxxxx'
if len(received) != struct.calcsize(fmt):
LOG.warning(_LW('Expected to receive %(exp)s bytes, '
'but actually %(act)s'),
dict(exp=struct.calcsize(fmt), act=len(received)))
return False
(identifier, server_sess, client_sess) = struct.unpack(fmt, received)
return (identifier == 0x40 and client_sess == session_id)
def get_root_helper():
if CONF.workarounds.disable_rootwrap:
cmd = 'sudo'
else:
cmd = 'sudo nova-rootwrap %s' % CONF.rootwrap_config
return cmd
def _get_rootwrap_helper():
if CONF.use_rootwrap_daemon:
return RootwrapDaemonHelper(CONF.rootwrap_config)
else:
return RootwrapProcessHelper()
class RootwrapProcessHelper(object):
def trycmd(self, *cmd, **kwargs):
kwargs['root_helper'] = get_root_helper()
return processutils.trycmd(*cmd, **kwargs)
def execute(self, *cmd, **kwargs):
kwargs['root_helper'] = get_root_helper()
return processutils.execute(*cmd, **kwargs)
class RootwrapDaemonHelper(RootwrapProcessHelper):
_clients = {}
@synchronized('daemon-client-lock')
def _get_client(cls, rootwrap_config):
try:
return cls._clients[rootwrap_config]
except KeyError:
from oslo_rootwrap import client
new_client = client.Client([
"sudo", "nova-rootwrap-daemon", rootwrap_config])
cls._clients[rootwrap_config] = new_client
return new_client
def __init__(self, rootwrap_config):
self.client = self._get_client(rootwrap_config)
def trycmd(self, *args, **kwargs):
discard_warnings = kwargs.pop('discard_warnings', False)
try:
out, err = self.execute(*args, **kwargs)
failed = False
except processutils.ProcessExecutionError as exn:
out, err = '', six.text_type(exn)
failed = True
if not failed and discard_warnings and err:
# Handle commands that output to stderr but otherwise succeed
err = ''
return out, err
def execute(self, *cmd, **kwargs):
# NOTE(dims): This method is to provide compatibility with the
# processutils.execute interface. So that calling daemon or direct
# rootwrap to honor the same set of flags in kwargs and to ensure
# that we don't regress any current behavior.
cmd = [str(c) for c in cmd]
loglevel = kwargs.pop('loglevel', std_logging.DEBUG)
log_errors = kwargs.pop('log_errors', None)
process_input = kwargs.pop('process_input', None)
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
sanitized_cmd = strutils.mask_password(' '.join(cmd))
LOG.info(_LI('Executing RootwrapDaemonHelper.execute '
'cmd=[%(cmd)r] kwargs=[%(kwargs)r]'),
{'cmd': sanitized_cmd, 'kwargs': kwargs})
while attempts > 0:
attempts -= 1
try:
start_time = time.time()
LOG.log(loglevel, _('Running cmd (subprocess): %s'),
sanitized_cmd)
(returncode, out, err) = self.client.execute(
cmd, process_input)
end_time = time.time() - start_time
LOG.log(loglevel,
'CMD "%(sanitized_cmd)s" returned: %(return_code)s '
'in %(end_time)0.3fs',
{'sanitized_cmd': sanitized_cmd,
'return_code': returncode,
'end_time': end_time})
if not ignore_exit_code and returncode not in check_exit_code:
out = strutils.mask_password(out)
err = strutils.mask_password(err)
raise processutils.ProcessExecutionError(
exit_code=returncode,
stdout=out,
stderr=err,
cmd=sanitized_cmd)
return (out, err)
except processutils.ProcessExecutionError as err:
# if we want to always log the errors or if this is
# the final attempt that failed and we want to log that.
if log_errors == processutils.LOG_ALL_ERRORS or (
log_errors == processutils.LOG_FINAL_ERROR and
not attempts):
format = _('%(desc)r\ncommand: %(cmd)r\n'
'exit code: %(code)r\nstdout: %(stdout)r\n'
'stderr: %(stderr)r')
LOG.log(loglevel, format, {"desc": err.description,
"cmd": err.cmd,
"code": err.exit_code,
"stdout": err.stdout,
"stderr": err.stderr})
if not attempts:
LOG.log(loglevel, _('%r failed. Not Retrying.'),
sanitized_cmd)
raise
else:
LOG.log(loglevel, _('%r failed. Retrying.'),
sanitized_cmd)
if delay_on_retry:
time.sleep(random.randint(20, 200) / 100.0)
def execute(*cmd, **kwargs):
"""Convenience wrapper around oslo's execute() method."""
if 'run_as_root' in kwargs and kwargs.get('run_as_root'):
if CONF.use_rootwrap_daemon:
return RootwrapDaemonHelper(CONF.rootwrap_config).execute(
*cmd, **kwargs)
else:
return RootwrapProcessHelper().execute(*cmd, **kwargs)
return processutils.execute(*cmd, **kwargs)
def ssh_execute(dest, *cmd, **kwargs):
"""Convenience wrapper to execute ssh command."""
ssh_cmd = ['ssh', '-o', 'BatchMode=yes']
ssh_cmd.append(dest)
ssh_cmd.extend(cmd)
return execute(*ssh_cmd, **kwargs)
def trycmd(*args, **kwargs):
"""Convenience wrapper around oslo's trycmd() method."""
if kwargs.get('run_as_root', False):
if CONF.use_rootwrap_daemon:
return RootwrapDaemonHelper(CONF.rootwrap_config).trycmd(
*args, **kwargs)
else:
return RootwrapProcessHelper().trycmd(*args, **kwargs)
return processutils.trycmd(*args, **kwargs)
def novadir():
import nova
return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
def generate_uid(topic, size=8):
characters = '01234567890abcdefghijklmnopqrstuvwxyz'
choices = [random.choice(characters) for _x in range(size)]
return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789', # Removed: 0,1
'ABCDEFGHJKLMNPQRSTUVWXYZ', # Removed: I, O
'abcdefghijkmnopqrstuvwxyz') # Removed: l
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789', # Removed: 0, 1
'ABCDEFGHJKLMNPQRSTUVWXYZ') # Removed: I, O
def last_completed_audit_period(unit=None, before=None):
"""This method gives you the most recently *completed* audit period.
arguments:
units: string, one of 'hour', 'day', 'month', 'year'
Periods normally begin at the beginning (UTC) of the
period unit (So a 'day' period begins at midnight UTC,
a 'month' unit on the 1st, a 'year' on Jan, 1)
unit string may be appended with an optional offset
like so: 'day@18' This will begin the period at 18:00
UTC. 'month@15' starts a monthly period on the 15th,
and year@3 begins a yearly one on March 1st.
before: Give the audit period most recently completed before
<timestamp>. Defaults to now.
returns: 2 tuple of datetimes (begin, end)
The begin timestamp of this audit period is the same as the
end of the previous.
"""
if not unit:
unit = CONF.instance_usage_audit_period
offset = 0
if '@' in unit:
unit, offset = unit.split("@", 1)
offset = int(offset)
if before is not None:
rightnow = before
else:
rightnow = timeutils.utcnow()
if unit not in ('month', 'day', 'year', 'hour'):
raise ValueError('Time period must be hour, day, month or year')
if unit == 'month':
if offset == 0:
offset = 1
end = datetime.datetime(day=offset,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
year = rightnow.year
if 1 >= rightnow.month:
year -= 1
month = 12 + (rightnow.month - 1)
else:
month = rightnow.month - 1
end = datetime.datetime(day=offset,
month=month,
year=year)
year = end.year
if 1 >= end.month:
year -= 1
month = 12 + (end.month - 1)
else:
month = end.month - 1
begin = datetime.datetime(day=offset, month=month, year=year)
elif unit == 'year':
if offset == 0:
offset = 1
end = datetime.datetime(day=1, month=offset, year=rightnow.year)
if end >= rightnow:
end = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 2)
else:
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
elif unit == 'day':
end = datetime.datetime(hour=offset,
day=rightnow.day,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
elif unit == 'hour':
end = rightnow.replace(minute=offset, second=0, microsecond=0)
if end >= rightnow:
end = end - datetime.timedelta(hours=1)
begin = end - datetime.timedelta(hours=1)
return (begin, end)
def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
"""Generate a random password from the supplied symbol groups.
At least one symbol from each group will be included. Unpredictable
results if length is less than the number of symbol groups.
Believed to be reasonably secure (with a reasonable password length!)
"""
if length is None:
length = CONF.password_length
r = random.SystemRandom()
# NOTE(jerdfelt): Some password policies require at least one character
# from each group of symbols, so start off with one random character
# from each symbol group
password = [r.choice(s) for s in symbolgroups]
# If length < len(symbolgroups), the leading characters will only
# be from the first length groups. Try our best to not be predictable
# by shuffling and then truncating.
r.shuffle(password)
password = password[:length]
length -= len(password)
# then fill with random characters from all symbol groups
symbols = ''.join(symbolgroups)
password.extend([r.choice(symbols) for _i in range(length)])
# finally shuffle to ensure first x characters aren't from a
# predictable group
r.shuffle(password)
return ''.join(password)
def get_my_linklocal(interface):
try:
if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
condition = '\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
links = [re.search(condition, x) for x in if_str[0].split('\n')]
address = [w.group(1) for w in links if w is not None]
if address[0] is not None:
return address[0]
else:
msg = _('Link Local address is not found.:%s') % if_str
raise exception.NovaException(msg)
except Exception as ex:
msg = _("Couldn't get Link Local IP of %(interface)s"
" :%(ex)s") % {'interface': interface, 'ex': ex}
raise exception.NovaException(msg)
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML.
"""
return saxutils.escape(value, {'"': '"', "'": '''})
def utf8(value):
"""Try to turn a string into utf-8 if possible.
Code is directly from the utf8 function in
http://github.com/facebook/tornado/blob/master/tornado/escape.py
"""
if isinstance(value, six.text_type):
return value.encode('utf-8')
assert isinstance(value, str)
return value
def check_isinstance(obj, cls):
"""Checks that obj is of type cls, and lets PyLint infer types."""
if isinstance(obj, cls):
return obj
raise Exception(_('Expected object of type: %s') % (str(cls)))
def parse_server_string(server_str):
"""Parses the given server_string and returns a tuple of host and port.
If it's not a combination of host part and port, the port element
is an empty string. If the input is invalid expression, return a tuple of
two empty strings.
"""
try:
# First of all, exclude pure IPv6 address (w/o port).
if netaddr.valid_ipv6(server_str):
return (server_str, '')
# Next, check if this is IPv6 address with a port number combination.
if server_str.find("]:") != -1:
(address, port) = server_str.replace('[', '', 1).split(']:')
return (address, port)
# Third, check if this is a combination of an address and a port
if server_str.find(':') == -1:
return (server_str, '')
# This must be a combination of an address and a port
(address, port) = server_str.split(':')
return (address, port)
except (ValueError, netaddr.AddrFormatError):
LOG.error(_LE('Invalid server_string: %s'), server_str)
return ('', '')
def is_valid_ipv6_cidr(address):
try:
netaddr.IPNetwork(address, version=6).cidr
return True
except (TypeError, netaddr.AddrFormatError):
return False
def get_shortened_ipv6(address):
addr = netaddr.IPAddress(address, version=6)
return str(addr.ipv6())
def get_shortened_ipv6_cidr(address):
net = netaddr.IPNetwork(address, version=6)
return str(net.cidr)
def is_valid_cidr(address):
"""Check if address is valid
The provided address can be a IPv6 or a IPv4
CIDR address.
"""
try:
# Validate the correct CIDR Address
netaddr.IPNetwork(address)
except netaddr.AddrFormatError:
return False
# Prior validation partially verify /xx part
# Verify it here
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
return False
return True
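# Illustrative examples:
#   is_valid_cidr('10.0.0.0/24')    -> True
#   is_valid_cidr('10.0.0.0')       -> False (no prefix length)
#   get_ip_version('2001:db8::/32') -> 'IPv6'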
def get_ip_version(network):
"""Returns the IP version of a network (IPv4 or IPv6).
Raises AddrFormatError if invalid network.
"""
if netaddr.IPNetwork(network).version == 6:
return "IPv6"
elif netaddr.IPNetwork(network).version == 4:
return "IPv4"
def safe_ip_format(ip):
"""Transform ip string to "safe" format.
Will return ipv4 addresses unchanged, but will nest ipv6 addresses
inside square brackets.
"""
try:
if netaddr.IPAddress(ip).version == 6:
return '[%s]' % ip
except (TypeError, netaddr.AddrFormatError): # hostname
pass
# it's IPv4 or hostname
return ip
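# Illustrative examples:
#   safe_ip_format('10.0.0.1') -> '10.0.0.1'
#   safe_ip_format('::1')      -> '[::1]'
#   safe_ip_format('myhost')   -> 'myhost'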
def monkey_patch():
"""If the CONF.monkey_patch set as True,
this function patches a decorator
for all functions in specified modules.
You can set decorators for each modules
using CONF.monkey_patch_modules.
The format is "Module path:Decorator function".
Example:
'nova.api.ec2.cloud:nova.notifications.notify_decorator'
Parameters of the decorator is as follows.
(See nova.notifications.notify_decorator)
name - name of the function
function - object of the function
"""
    # If CONF.monkey_patch is not True, this function does nothing.
if not CONF.monkey_patch:
return
if six.PY3:
def is_method(obj):
# Unbound methods became regular functions on Python 3
return inspect.ismethod(obj) or inspect.isfunction(obj)
else:
is_method = inspect.ismethod
# Get list of modules and decorators
for module_and_decorator in CONF.monkey_patch_modules:
module, decorator_name = module_and_decorator.split(':')
# import decorator function
decorator = importutils.import_class(decorator_name)
__import__(module)
# Retrieve module information using pyclbr
module_data = pyclbr.readmodule_ex(module)
for key, value in module_data.items():
# set the decorator for the class methods
if isinstance(value, pyclbr.Class):
clz = importutils.import_class("%s.%s" % (module, key))
for method, func in inspect.getmembers(clz, is_method):
setattr(clz, method,
decorator("%s.%s.%s" % (module, key, method), func))
# set the decorator for the function
if isinstance(value, pyclbr.Function):
func = importutils.import_class("%s.%s" % (module, key))
setattr(sys.modules[module], key,
decorator("%s.%s" % (module, key), func))
def convert_to_list_dict(lst, label):
"""Convert a value or list into a list of dicts."""
if not lst:
return None
if not isinstance(lst, list):
lst = [lst]
return [{label: x} for x in lst]
def make_dev_path(dev, partition=None, base='/dev'):
"""Return a path to a particular device.
>>> make_dev_path('xvdc')
/dev/xvdc
>>> make_dev_path('xvdc', 1)
/dev/xvdc1
"""
path = os.path.join(base, dev)
if partition:
path += str(partition)
return path
def sanitize_hostname(hostname, default_name=None):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs except
the length of hostname.
Window, Linux, and Dnsmasq has different limitation:
Windows: 255 (net_bios limits to 15, but window will truncate it)
Linux: 64
Dnsmasq: 63
Due to nova-network will leverage dnsmasq to set hostname, so we chose
63.
"""
def truncate_hostname(name):
if len(name) > 63:
LOG.warning(_LW("Hostname %(hostname)s is longer than 63, "
"truncate it to %(truncated_name)s"),
{'hostname': name, 'truncated_name': name[:63]})
return name[:63]
if isinstance(hostname, six.text_type):
# Remove characters outside the Unicode range U+0000-U+00FF
hostname = hostname.encode('latin-1', 'ignore')
if six.PY3:
hostname = hostname.decode('latin-1')
hostname = re.sub('[ _]', '-', hostname)
hostname = re.sub('[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
# NOTE(eliqiao): set hostname to default_display_name to avoid
# empty hostname
if hostname == "" and default_name is not None:
return truncate_hostname(default_name)
return truncate_hostname(hostname)
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
"""Temporarily set the attr on a particular object to a given value then
revert when finished.
One use of this is to temporarily set the read_deleted flag on a context
object:
with temporary_mutation(context, read_deleted="yes"):
do_something_that_needed_deleted_objects()
"""
def is_dict_like(thing):
return hasattr(thing, 'has_key')
def get(thing, attr, default):
if is_dict_like(thing):
return thing.get(attr, default)
else:
return getattr(thing, attr, default)
def set_value(thing, attr, val):
if is_dict_like(thing):
thing[attr] = val
else:
setattr(thing, attr, val)
def delete(thing, attr):
if is_dict_like(thing):
del thing[attr]
else:
delattr(thing, attr)
NOT_PRESENT = object()
old_values = {}
for attr, new_value in kwargs.items():
old_values[attr] = get(obj, attr, NOT_PRESENT)
set_value(obj, attr, new_value)
try:
yield
finally:
for attr, old_value in old_values.items():
if old_value is NOT_PRESENT:
delete(obj, attr)
else:
set_value(obj, attr, old_value)
def generate_mac_address():
"""Generate an Ethernet MAC address."""
# NOTE(vish): We would prefer to use 0xfe here to ensure that linux
# bridge mac addresses don't change, but it appears to
# conflict with libvirt, so we use the next highest octet
# that has the unicast and locally administered bits set
# properly: 0xfa.
# Discussion: https://bugs.launchpad.net/nova/+bug/921838
mac = [0xfa, 0x16, 0x3e,
random.randint(0x00, 0xff),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff)]
return ':'.join(map(lambda x: "%02x" % x, mac))
def read_file_as_root(file_path):
"""Secure helper to read file as root."""
try:
out, _err = execute('cat', file_path, run_as_root=True)
return out
except processutils.ProcessExecutionError:
raise exception.FileNotFound(file_path=file_path)
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
"""Temporarily chown a path.
:param owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
execute('chown', orig_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
argdict = kwargs.copy()
if 'dir' not in argdict:
argdict['dir'] = CONF.tempdir
tmpdir = tempfile.mkdtemp(**argdict)
try:
yield tmpdir
finally:
try:
shutil.rmtree(tmpdir)
except OSError as e:
LOG.error(_LE('Could not remove tmpdir: %s'), e)
def walk_class_hierarchy(clazz, encountered=None):
"""Walk class hierarchy, yielding most derived classes first."""
if not encountered:
encountered = []
for subclass in clazz.__subclasses__():
if subclass not in encountered:
encountered.append(subclass)
# drill down to leaves first
for subsubclass in walk_class_hierarchy(subclass, encountered):
yield subsubclass
yield subclass
class UndoManager(object):
"""Provides a mechanism to facilitate rolling back a series of actions
when an exception is raised.
"""
def __init__(self):
self.undo_stack = []
def undo_with(self, undo_func):
self.undo_stack.append(undo_func)
def _rollback(self):
for undo_func in reversed(self.undo_stack):
undo_func()
def rollback_and_reraise(self, msg=None, **kwargs):
"""Rollback a series of actions then re-raise the exception.
.. note:: (sirp) This should only be called within an
exception handler.
"""
with excutils.save_and_reraise_exception():
if msg:
LOG.exception(msg, **kwargs)
self._rollback()
def mkfs(fs, path, label=None, run_as_root=False):
"""Format a file or block device
:param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4'
'btrfs', etc.)
:param path: Path to file or block device to format
:param label: Volume label to use
"""
if fs == 'swap':
args = ['mkswap']
else:
args = ['mkfs', '-t', fs]
    # add -F to force non-interactive execution on non-block devices.
if fs in ('ext3', 'ext4', 'ntfs'):
args.extend(['-F'])
if label:
if fs in ('msdos', 'vfat'):
label_opt = '-n'
else:
label_opt = '-L'
args.extend([label_opt, label])
args.append(path)
execute(*args, run_as_root=run_as_root)
def last_bytes(file_like_object, num):
"""Return num bytes from the end of the file, and remaining byte count.
:param file_like_object: The file to read
:param num: The number of bytes to return
:returns (data, remaining)
"""
try:
file_like_object.seek(-num, os.SEEK_END)
except IOError as e:
# seek() fails with EINVAL when trying to go before the start of the
# file. It means that num is larger than the file size, so just
# go to the start.
if e.errno == errno.EINVAL:
file_like_object.seek(0, os.SEEK_SET)
else:
raise
remaining = file_like_object.tell()
return (file_like_object.read(), remaining)
def metadata_to_dict(metadata, filter_deleted=False):
result = {}
for item in metadata:
if not filter_deleted and item.get('deleted'):
continue
result[item['key']] = item['value']
return result
def dict_to_metadata(metadata):
result = []
for key, value in six.iteritems(metadata):
result.append(dict(key=key, value=value))
return result
def instance_meta(instance):
if isinstance(instance['metadata'], dict):
return instance['metadata']
else:
return metadata_to_dict(instance['metadata'])
def instance_sys_meta(instance):
if not instance.get('system_metadata'):
return {}
if isinstance(instance['system_metadata'], dict):
return instance['system_metadata']
else:
return metadata_to_dict(instance['system_metadata'],
filter_deleted=True)
def get_wrapped_function(function):
"""Get the method at the bottom of a stack of decorators."""
if not hasattr(function, '__closure__') or not function.__closure__:
return function
def _get_wrapped_function(function):
if not hasattr(function, '__closure__') or not function.__closure__:
return None
for closure in function.__closure__:
func = closure.cell_contents
deeper_func = _get_wrapped_function(func)
if deeper_func:
return deeper_func
elif hasattr(closure.cell_contents, '__call__'):
return closure.cell_contents
return _get_wrapped_function(function)
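# Descriptive note: expects_func_args('context', 'instance') wraps a decorator
# and verifies (via get_wrapped_function) that the function ultimately being
# decorated actually accepts the named arguments, raising TypeError at
# decoration time otherwise (functions taking *args/**kwargs are let through).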
def expects_func_args(*args):
def _decorator_checker(dec):
@functools.wraps(dec)
def _decorator(f):
base_f = get_wrapped_function(f)
arg_names, a, kw, _default = inspect.getargspec(base_f)
if a or kw or set(args) <= set(arg_names):
# NOTE (ndipanov): We can't really tell if correct stuff will
# be passed if it's a function with *args or **kwargs so
# we still carry on and hope for the best
return dec(f)
else:
raise TypeError("Decorated function %(f_name)s does not "
"have the arguments expected by the "
"decorator %(d_name)s" %
{'f_name': base_f.__name__,
'd_name': dec.__name__})
return _decorator
return _decorator_checker
class ExceptionHelper(object):
"""Class to wrap another and translate the ClientExceptions raised by its
function calls to the actual ones.
"""
def __init__(self, target):
self._target = target
def __getattr__(self, name):
func = getattr(self._target, name)
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except messaging.ExpectedException as e:
raise (e.exc_info[1], None, e.exc_info[2])
return wrapper
def check_string_length(value, name=None, min_length=0, max_length=None):
"""Check the length of specified string
:param value: the value of the string
:param name: the name of the string
:param min_length: the min_length of the string
:param max_length: the max_length of the string
"""
if not isinstance(value, six.string_types):
if name is None:
msg = _("The input is not a string or unicode")
else:
msg = _("%s is not a string or unicode") % name
raise exception.InvalidInput(message=msg)
if name is None:
name = value
if len(value) < min_length:
msg = _("%(name)s has a minimum character requirement of "
"%(min_length)s.") % {'name': name, 'min_length': min_length}
raise exception.InvalidInput(message=msg)
if max_length and len(value) > max_length:
msg = _("%(name)s has more than %(max_length)s "
"characters.") % {'name': name, 'max_length': max_length}
raise exception.InvalidInput(message=msg)
def validate_integer(value, name, min_value=None, max_value=None):
"""Make sure that value is a valid integer, potentially within range."""
try:
value = int(str(value))
except (ValueError, UnicodeEncodeError):
msg = _('%(value_name)s must be an integer')
raise exception.InvalidInput(reason=(
msg % {'value_name': name}))
if min_value is not None:
if value < min_value:
msg = _('%(value_name)s must be >= %(min_value)d')
raise exception.InvalidInput(
reason=(msg % {'value_name': name,
'min_value': min_value}))
if max_value is not None:
if value > max_value:
msg = _('%(value_name)s must be <= %(max_value)d')
raise exception.InvalidInput(
reason=(
msg % {'value_name': name,
'max_value': max_value})
)
return value
def spawn(func, *args, **kwargs):
"""Passthrough method for eventlet.spawn.
This utility exists so that it can be stubbed for testing without
interfering with the service spawns.
It will also grab the context from the threadlocal store and add it to
the store on the new thread. This allows for continuity in logging the
context when using this method to spawn a new thread.
"""
_context = common_context.get_current()
@functools.wraps(func)
def context_wrapper(*args, **kwargs):
# NOTE: If update_store is not called after spawn it won't be
# available for the logger to pull from threadlocal storage.
if _context is not None:
_context.update_store()
return func(*args, **kwargs)
return eventlet.spawn(context_wrapper, *args, **kwargs)
def spawn_n(func, *args, **kwargs):
"""Passthrough method for eventlet.spawn_n.
This utility exists so that it can be stubbed for testing without
interfering with the service spawns.
It will also grab the context from the threadlocal store and add it to
the store on the new thread. This allows for continuity in logging the
context when using this method to spawn a new thread.
"""
_context = common_context.get_current()
@functools.wraps(func)
def context_wrapper(*args, **kwargs):
# NOTE: If update_store is not called after spawn_n it won't be
# available for the logger to pull from threadlocal storage.
if _context is not None:
_context.update_store()
func(*args, **kwargs)
eventlet.spawn_n(context_wrapper, *args, **kwargs)
def is_none_string(val):
"""Check if a string represents a None value.
"""
if not isinstance(val, six.string_types):
return False
return val.lower() == 'none'
def convert_version_to_int(version):
try:
if isinstance(version, six.string_types):
version = convert_version_to_tuple(version)
if isinstance(version, tuple):
return six.moves.reduce(lambda x, y: (x * 1000) + y, version)
except Exception:
msg = _("Hypervisor version %s is invalid.") % version
raise exception.NovaException(msg)
def convert_version_to_str(version_int):
version_numbers = []
factor = 1000
while version_int != 0:
version_number = version_int - (version_int // factor * factor)
version_numbers.insert(0, str(version_number))
version_int = version_int // factor
return six.moves.reduce(lambda x, y: "%s.%s" % (x, y), version_numbers)
def convert_version_to_tuple(version_str):
return tuple(int(part) for part in version_str.split('.'))
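# Illustrative round-trip (assuming each version component is < 1000):
#   convert_version_to_tuple('1.2.3') -> (1, 2, 3)
#   convert_version_to_int('1.2.3')   -> 1002003
#   convert_version_to_str(1002003)   -> '1.2.3'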
def is_neutron():
global _IS_NEUTRON
if _IS_NEUTRON is not None:
return _IS_NEUTRON
try:
# compatibility with Folsom/Grizzly configs
cls_name = CONF.network_api_class
if cls_name == 'nova.network.quantumv2.api.API':
cls_name = 'nova.network.neutronv2.api.API'
from nova.network.neutronv2 import api as neutron_api
_IS_NEUTRON = issubclass(importutils.import_class(cls_name),
neutron_api.API)
except ImportError:
_IS_NEUTRON = False
return _IS_NEUTRON
def is_auto_disk_config_disabled(auto_disk_config_raw):
auto_disk_config_disabled = False
if auto_disk_config_raw is not None:
adc_lowered = auto_disk_config_raw.strip().lower()
if adc_lowered == "disabled":
auto_disk_config_disabled = True
return auto_disk_config_disabled
def get_auto_disk_config_from_instance(instance=None, sys_meta=None):
if sys_meta is None:
sys_meta = instance_sys_meta(instance)
return sys_meta.get("image_auto_disk_config")
def get_auto_disk_config_from_image_props(image_properties):
return image_properties.get("auto_disk_config")
def get_system_metadata_from_image(image_meta, flavor=None):
system_meta = {}
prefix_format = SM_IMAGE_PROP_PREFIX + '%s'
for key, value in six.iteritems(image_meta.get('properties', {})):
if key in SM_SKIP_KEYS:
continue
new_value = safe_truncate(six.text_type(value), 255)
system_meta[prefix_format % key] = new_value
for key in SM_INHERITABLE_KEYS:
value = image_meta.get(key)
if key == 'min_disk' and flavor:
if image_meta.get('disk_format') == 'vhd':
value = flavor['root_gb']
else:
value = max(value, flavor['root_gb'])
if value is None:
continue
system_meta[prefix_format % key] = value
return system_meta
def get_image_from_system_metadata(system_meta):
image_meta = {}
properties = {}
if not isinstance(system_meta, dict):
system_meta = metadata_to_dict(system_meta, filter_deleted=True)
for key, value in six.iteritems(system_meta):
if value is None:
continue
# NOTE(xqueralt): Not sure this has to inherit all the properties or
# just the ones we need. Leaving it for now to keep the old behaviour.
if key.startswith(SM_IMAGE_PROP_PREFIX):
key = key[len(SM_IMAGE_PROP_PREFIX):]
if key in SM_SKIP_KEYS:
continue
if key in SM_INHERITABLE_KEYS:
image_meta[key] = value
else:
properties[key] = value
image_meta['properties'] = properties
return image_meta
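# Illustrative round trip between the two helpers above, using the
# SM_IMAGE_PROP_PREFIX defined earlier in this module (the image values are
# made up for the sketch):
#
#   image_meta = {'min_ram': 512, 'min_disk': 1,
#                 'properties': {'os_distro': 'ubuntu'}}
#   sys_meta = get_system_metadata_from_image(image_meta)
#   # -> {'image_min_ram': 512, 'image_min_disk': 1,
#   #     'image_os_distro': u'ubuntu'}
#   get_image_from_system_metadata(sys_meta)
#   # -> {'min_ram': 512, 'min_disk': 1, 'properties': {'os_distro': u'ubuntu'}}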
def get_image_metadata_from_volume(volume):
properties = copy.copy(volume.get('volume_image_metadata', {}))
image_meta = {'properties': properties}
# Volume size is no longer related to the original image size,
# so we take it from the volume directly. Cinder creates
# volumes in Gb increments, and stores size in Gb, whereas
# glance reports size in bytes. As we're returning glance
# metadata here, we need to convert it.
image_meta['size'] = volume.get('size', 0) * units.Gi
# NOTE(yjiang5): restore the basic attributes
# NOTE(mdbooth): These values come from volume_glance_metadata
# in cinder. This is a simple key/value table, and all values
# are strings. We need to convert them to ints to avoid
# unexpected type errors.
for attr in VIM_IMAGE_ATTRIBUTES:
val = properties.pop(attr, None)
if attr in ('min_ram', 'min_disk'):
image_meta[attr] = int(val or 0)
# NOTE(yjiang5): Always set the image status as 'active'
    # and rely on the subsequent volume_api.check_attach() call to
    # verify it. This hack should be harmless with that check.
image_meta['status'] = 'active'
return image_meta
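# Worked example of the size conversion above: Cinder stores the size in GiB
# while Glance metadata uses bytes (the volume dict is made up for the
# sketch):
#
#   volume = {'size': 2, 'volume_image_metadata': {'min_ram': '512'}}
#   meta = get_image_metadata_from_volume(volume)
#   # meta['size']   == 2 * units.Gi == 2147483648
#   # meta['status'] == 'active'
#   # attributes named in VIM_IMAGE_ATTRIBUTES are popped from the properties,
#   # with min_ram/min_disk coerced from Cinder's strings to ints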
def get_hash_str(base_str):
"""Returns string that represents MD5 hash of base_str (in hex format).
If base_str is a Unicode string, encode it to UTF-8.
"""
if isinstance(base_str, six.text_type):
base_str = base_str.encode('utf-8')
return hashlib.md5(base_str).hexdigest()
if hasattr(hmac, 'compare_digest'):
constant_time_compare = hmac.compare_digest
else:
def constant_time_compare(first, second):
"""Returns True if both string inputs are equal, otherwise False.
This function should take a constant amount of time regardless of
how many characters in the strings match.
"""
if len(first) != len(second):
return False
result = 0
for x, y in zip(first, second):
result |= ord(x) ^ ord(y)
return result == 0
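# Illustrative use of the two helpers above: comparing a stored digest against
# a freshly computed one without leaking timing information
# (user_supplied_value and grant_access are placeholder names):
#
#   stored = get_hash_str(u'sw\u00e4p-token')       # 32-char md5 hex digest
#   candidate = get_hash_str(user_supplied_value)
#   if constant_time_compare(stored, candidate):
#       grant_access()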
def filter_and_format_resource_metadata(resource_type, resource_list,
search_filts, metadata_type=None):
"""Get all metadata for a list of resources after filtering.
Search_filts is a list of dictionaries, where the values in the dictionary
can be string or regex string, or a list of strings/regex strings.
Let's call a dict a 'filter block' and an item in the dict
a 'filter'. A tag is returned if it matches ALL the filters in
    a filter block. If more than one value is specified for a filter, a tag
    is returned if it matches AT LEAST ONE value of the filter. If more than
    one filter block is specified, the tag must match ALL the filter blocks.
For example:
search_filts = [{'key': ['key1', 'key2'], 'value': 'val1'},
{'value': 'val2'}]
The filter translates to 'match any tag for which':
((key=key1 AND value=val1) OR (key=key2 AND value=val1)) AND
(value=val2)
This example filter will never match a tag.
:param resource_type: The resource type as a string, e.g. 'instance'
:param resource_list: List of resource objects
:param search_filts: Filters to filter metadata to be returned. Can be
        a dict (e.g. {'key': 'env', 'value': 'prod'}), or a list of dicts
        (e.g. [{'key': 'env'}, {'value': 'beta'}]). Note that the values
of the dict can be regular expressions.
:param metadata_type: Provided to search for a specific metadata type
(e.g. 'system_metadata')
:returns: List of dicts where each dict is of the form {'key':
'somekey', 'value': 'somevalue', 'instance_id':
'some-instance-uuid-aaa'} if resource_type is 'instance'.
"""
if isinstance(search_filts, dict):
search_filts = [search_filts]
def _get_id(resource):
if resource_type == 'instance':
return resource.get('uuid')
def _match_any(pattern_list, string):
if isinstance(pattern_list, str):
pattern_list = [pattern_list]
return any([re.match(pattern, string)
for pattern in pattern_list])
def _filter_metadata(resource, search_filt, input_metadata):
ids = search_filt.get('resource_id', [])
keys_filter = search_filt.get('key', [])
values_filter = search_filt.get('value', [])
output_metadata = {}
if ids and _get_id(resource) not in ids:
return {}
for k, v in six.iteritems(input_metadata):
# Both keys and value defined -- AND
if (keys_filter and values_filter and
not _match_any(keys_filter, k) and
not _match_any(values_filter, v)):
continue
# Only keys or value is defined
elif ((keys_filter and not _match_any(keys_filter, k)) or
(values_filter and not _match_any(values_filter, v))):
continue
output_metadata[k] = v
return output_metadata
formatted_metadata_list = []
for res in resource_list:
if resource_type == 'instance':
# NOTE(rushiagr): metadata_type should be 'metadata' or
# 'system_metadata' if resource_type is instance. Defaulting to
# 'metadata' if not specified.
if metadata_type is None:
metadata_type = 'metadata'
metadata = res.get(metadata_type, {})
for filt in search_filts:
# By chaining the input to the output, the filters are
# ANDed together
metadata = _filter_metadata(res, filt, metadata)
for (k, v) in metadata.items():
formatted_metadata_list.append({'key': k, 'value': v,
'%s_id' % resource_type: _get_id(res)})
return formatted_metadata_list
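# Illustrative call matching the docstring above (the instance dicts and their
# metadata are made up for the sketch):
#
#   instances = [{'uuid': 'uuid-1', 'metadata': {'env': 'prod', 'tier': 'web'}},
#                {'uuid': 'uuid-2', 'metadata': {'env': 'dev'}}]
#   filter_and_format_resource_metadata(
#       'instance', instances, search_filts=[{'key': 'env', 'value': 'prod'}])
#   # -> [{'key': 'env', 'value': 'prod', 'instance_id': 'uuid-1'}]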
def safe_truncate(value, length):
"""Safely truncates unicode strings such that their encoded length is
no greater than the length provided.
"""
b_value = encodeutils.safe_encode(value)[:length]
# NOTE(chaochin) UTF-8 character byte size varies from 1 to 6. If
# truncating a long byte string to 255, the last character may be
# cut in the middle, so that UnicodeDecodeError will occur when
# converting it back to unicode.
decode_ok = False
while not decode_ok:
try:
u_value = encodeutils.safe_decode(b_value)
decode_ok = True
except UnicodeDecodeError:
b_value = b_value[:-1]
return u_value
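# Worked example for safe_truncate(), assuming a UTF-8 locale: a string of
# two-byte characters is cut so that the *encoded* form fits without splitting
# a character in half.
#
#   s = u'\u00e9' * 200                  # each character encodes to 2 bytes
#   t = safe_truncate(s, 255)
#   len(t)                               # -> 127 characters
#   len(t.encode('utf-8'))               # -> 254 bytes (<= 255)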
def read_cached_file(filename, force_reload=False):
"""Read from a file if it has been modified.
:param force_reload: Whether to reload the file.
    :returns: A tuple of (reloaded, data), where `reloaded` indicates
              whether the file was re-read and `data` is the cached
              file contents.
"""
global _FILE_CACHE
if force_reload:
delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
cache_info = _FILE_CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
LOG.debug("Reloading cached file %s", filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
reloaded = True
return (reloaded, cache_info['data'])
def delete_cached_file(filename):
"""Delete cached file if present.
:param filename: filename to delete
"""
global _FILE_CACHE
if filename in _FILE_CACHE:
del _FILE_CACHE[filename]
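# Illustrative use of the file cache above (the path is a placeholder):
#
#   reloaded, data = read_cached_file('/etc/nova/policy.json')  # reloaded True
#   reloaded, data = read_cached_file('/etc/nova/policy.json')  # unchanged
#                                                               # mtime: False
#   delete_cached_file('/etc/nova/policy.json')                 # drop the entry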
| Francis-Liu/animated-broccoli | nova/utils.py | Python | apache-2.0 | 53,116 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to bridge `Distribution`s and `tf.contrib.learn.estimator` APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators.head import _compute_weighted_loss
from tensorflow.contrib.learn.python.learn.estimators.head import _RegressionHead
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
__all__ = [
"estimator_head_distribution_regression",
]
def estimator_head_distribution_regression(make_distribution_fn,
label_dimension=1,
logits_dimension=None,
label_name=None,
weight_column_name=None,
enable_centered_bias=False,
head_name=None):
"""Creates a `Head` for regression under a generic distribution.
Args:
make_distribution_fn: Python `callable` which returns a `tf.Distribution`
instance created using only logits.
label_dimension: Number of regression labels per example. This is the size
of the last dimension of the labels `Tensor` (typically, this has shape
`[batch_size, label_dimension]`).
logits_dimension: Number of logits per example. This is the size of the last
dimension of the logits `Tensor` (typically, this has shape
`[batch_size, logits_dimension]`).
Default value: `label_dimension`.
label_name: Python `str`, name of the key in label `dict`. Can be `None` if
label is a `Tensor` (single headed models).
weight_column_name: Python `str` defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
enable_centered_bias: Python `bool`. If `True`, estimator will learn a
centered bias variable for each class. Rest of the model structure learns
the residual after centered bias.
head_name: Python `str`, name of the head. Predictions, summary and metrics
keys are suffixed by `"/" + head_name` and the default variable scope is
`head_name`.
Returns:
An instance of `Head` for generic regression.
"""
return _DistributionRegressionHead(
make_distribution_fn=make_distribution_fn,
label_dimension=label_dimension,
logits_dimension=logits_dimension,
label_name=label_name,
weight_column_name=weight_column_name,
enable_centered_bias=enable_centered_bias,
head_name=head_name)
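# Illustrative construction (a minimal sketch; the Normal distribution import
# is an assumption and not part of this module):
#
#   from tensorflow.contrib import distributions as tfd
#
#   head = estimator_head_distribution_regression(
#       make_distribution_fn=lambda logits: tfd.Normal(loc=logits, scale=1.),
#       label_dimension=1)
#
# Maximizing the resulting Normal log-likelihood with fixed scale is then
# equivalent, up to a constant, to minimizing mean squared error.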
class _DistributionRegressionHead(_RegressionHead):
"""Creates a _RegressionHead instance from an arbitrary `Distribution`."""
def __init__(self,
make_distribution_fn,
label_dimension,
logits_dimension=None,
label_name=None,
weight_column_name=None,
enable_centered_bias=False,
head_name=None):
"""`Head` for regression.
Args:
make_distribution_fn: Python `callable` which returns a `tf.Distribution`
instance created using only logits.
label_dimension: Number of regression labels per example. This is the
size of the last dimension of the labels `Tensor` (typically, this has
shape `[batch_size, label_dimension]`).
logits_dimension: Number of logits per example. This is the size of the
last dimension of the logits `Tensor` (typically, this has shape
`[batch_size, logits_dimension]`).
Default value: `label_dimension`.
label_name: Python `str`, name of the key in label `dict`. Can be `None`
if label is a tensor (single headed models).
weight_column_name: Python `str` defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
enable_centered_bias: Python `bool`. If `True`, estimator will learn a
centered bias variable for each class. Rest of the model structure
learns the residual after centered bias.
head_name: Python `str`, name of the head. Predictions, summary and
metrics keys are suffixed by `"/" + head_name` and the default variable
scope is `head_name`.
Raises:
TypeError: if `make_distribution_fn` is not `callable`.
"""
if not callable(make_distribution_fn):
raise TypeError("`make_distribution_fn` must be a callable function.")
self._distributions = {}
self._make_distribution_fn = make_distribution_fn
def static_value(x):
"""Returns the static value of a `Tensor` or `None`."""
return tensor_util.constant_value(ops.convert_to_tensor(x))
def concat_vectors(*args):
"""Concatenates input vectors, statically if possible."""
args_ = [static_value(x) for x in args]
if any(vec is None for vec in args_):
return array_ops.concat(args, axis=0)
return [val for vec in args_ for val in vec]
def loss_fn(labels, logits, weights=None):
"""Returns the loss of using `logits` to predict `labels`."""
d = self.distribution(logits)
labels_batch_shape = labels.shape.with_rank_at_least(1)[:-1]
labels_batch_shape = (
labels_batch_shape.as_list() if labels_batch_shape.is_fully_defined()
else array_ops.shape(labels)[:-1])
labels = array_ops.reshape(
labels,
shape=concat_vectors(labels_batch_shape, d.event_shape_tensor()))
return _compute_weighted_loss(
loss_unweighted=-d.log_prob(labels),
weight=weights)
def link_fn(logits):
"""Returns the inverse link function at `logits`."""
# Note: What the API calls a "link function" is really the inverse-link
# function, i.e., the "mean".
d = self.distribution(logits)
return d.mean()
super(_DistributionRegressionHead, self).__init__(
label_dimension=label_dimension,
loss_fn=loss_fn,
link_fn=link_fn,
logits_dimension=logits_dimension,
label_name=label_name,
weight_column_name=weight_column_name,
enable_centered_bias=enable_centered_bias,
head_name=head_name)
@property
def distributions(self):
"""Returns all distributions created by `DistributionRegressionHead`."""
return self._distributions
def distribution(self, logits, name=None):
"""Retrieves a distribution instance, parameterized by `logits`.
Args:
logits: `float`-like `Tensor` representing the parameters of the
underlying distribution.
      name: The Python `str` name to give to this op.
Default value: "distribution".
Returns:
distribution: `tf.Distribution` instance parameterized by `logits`.
"""
with ops.name_scope(name, "distribution", [logits]):
d = self._distributions.get(logits, None)
if d is None:
d = self._make_distribution_fn(logits)
self._distributions[logits] = d
return d
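# Illustrative note on the caching above: calling distribution() twice with
# the same logits tensor returns the identical Distribution instance.
#
#   d1 = head.distribution(logits)
#   d2 = head.distribution(logits)
#   d1 is d2   # -> True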
| allenlavoie/tensorflow | tensorflow/contrib/distributions/python/ops/estimator.py | Python | apache-2.0 | 7,908 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from migrate import ForeignKeyConstraint
from oslo_log import log as logging
from sqlalchemy import Boolean, BigInteger, Column, DateTime, Enum, Float
from sqlalchemy import dialects
from sqlalchemy import ForeignKey, Index, Integer, MetaData, String, Table
from sqlalchemy import Text
from sqlalchemy.types import NullType
from nova.i18n import _LE
LOG = logging.getLogger(__name__)
# Note on the autoincrement flag: this is defaulted for primary key columns
# of integral type, so is no longer set explicitly in such cases.
# NOTE(dprince): This wrapper allows us to easily match the Folsom MySQL
# Schema. In Folsom we created tables as latin1 and converted them to utf8
# later. This conversion causes some of the Text columns on MySQL to get
# created as mediumtext instead of just text.
def MediumText():
return Text().with_variant(dialects.mysql.MEDIUMTEXT(), 'mysql')
def Inet():
return String(length=43).with_variant(dialects.postgresql.INET(),
'postgresql')
def InetSmall():
return String(length=39).with_variant(dialects.postgresql.INET(),
'postgresql')
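# Illustrative note on the helpers above: with_variant() keeps a generic type
# for most backends and swaps in a dialect-specific one where available.
#
#   MediumText()   # TEXT everywhere, MEDIUMTEXT on MySQL
#   Inet()         # VARCHAR(43) everywhere, INET on PostgreSQL
#   InetSmall()    # VARCHAR(39) everywhere, INET on PostgreSQL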
def _create_shadow_tables(migrate_engine):
meta = MetaData(migrate_engine)
meta.reflect(migrate_engine)
table_names = list(meta.tables.keys())
meta.bind = migrate_engine
for table_name in table_names:
table = Table(table_name, meta, autoload=True)
columns = []
for column in table.columns:
column_copy = None
# NOTE(boris-42): BigInteger is not supported by sqlite, so
# after copy it will have NullType, other
# types that are used in Nova are supported by
# sqlite.
if isinstance(column.type, NullType):
column_copy = Column(column.name, BigInteger(), default=0)
if table_name == 'instances' and column.name == 'locked_by':
enum = Enum('owner', 'admin',
name='shadow_instances0locked_by')
column_copy = Column(column.name, enum)
else:
column_copy = column.copy()
columns.append(column_copy)
shadow_table_name = 'shadow_' + table_name
shadow_table = Table(shadow_table_name, meta, *columns,
mysql_engine='InnoDB')
try:
shadow_table.create()
except Exception:
LOG.info(repr(shadow_table))
LOG.exception(_LE('Exception while creating table.'))
raise
# NOTE(dprince): we add these here so our schema contains dump tables
# which were added in migration 209 (in Havana). We can drop these in
# Icehouse: https://bugs.launchpad.net/nova/+bug/1266538
def _create_dump_tables(migrate_engine):
meta = MetaData(migrate_engine)
meta.reflect(migrate_engine)
table_names = ['compute_node_stats', 'compute_nodes', 'instance_actions',
'instance_actions_events', 'instance_faults', 'migrations']
for table_name in table_names:
table = Table(table_name, meta, autoload=True)
dump_table_name = 'dump_' + table.name
columns = []
for column in table.columns:
# NOTE(dprince): The dump_ tables were originally created from an
# earlier schema version so we don't want to add the pci_stats
# column so that schema diffs are exactly the same.
if column.name == 'pci_stats':
continue
else:
columns.append(column.copy())
table_dump = Table(dump_table_name, meta, *columns,
mysql_engine='InnoDB')
table_dump.create()
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
agent_builds = Table('agent_builds', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('hypervisor', String(length=255)),
Column('os', String(length=255)),
Column('architecture', String(length=255)),
Column('version', String(length=255)),
Column('url', String(length=255)),
Column('md5hash', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_hosts = Table('aggregate_hosts', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_metadata = Table('aggregate_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregates = Table('aggregates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
block_device_mapping = Table('block_device_mapping', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('device_name', String(length=255), nullable=True),
Column('delete_on_termination', Boolean),
Column('snapshot_id', String(length=36), nullable=True),
Column('volume_id', String(length=36), nullable=True),
Column('volume_size', Integer),
Column('no_device', Boolean),
Column('connection_info', MediumText()),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
Column('source_type', String(length=255), nullable=True),
Column('destination_type', String(length=255), nullable=True),
Column('guest_format', String(length=255), nullable=True),
Column('device_type', String(length=255), nullable=True),
Column('disk_bus', String(length=255), nullable=True),
Column('boot_index', Integer),
Column('image_id', String(length=36), nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
bw_usage_cache = Table('bw_usage_cache', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('start_period', DateTime, nullable=False),
Column('last_refreshed', DateTime),
Column('bw_in', BigInteger),
Column('bw_out', BigInteger),
Column('mac', String(length=255)),
Column('uuid', String(length=36)),
Column('last_ctr_in', BigInteger()),
Column('last_ctr_out', BigInteger()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
cells = Table('cells', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('api_url', String(length=255)),
Column('weight_offset', Float),
Column('weight_scale', Float),
Column('name', String(length=255)),
Column('is_parent', Boolean),
Column('deleted', Integer),
Column('transport_url', String(length=255), nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
certificates = Table('certificates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('file_name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_node_stats = Table('compute_node_stats', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('compute_node_id', Integer, nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_nodes = Table('compute_nodes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('service_id', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('local_gb', Integer, nullable=False),
Column('vcpus_used', Integer, nullable=False),
Column('memory_mb_used', Integer, nullable=False),
Column('local_gb_used', Integer, nullable=False),
Column('hypervisor_type', MediumText(), nullable=False),
Column('hypervisor_version', Integer, nullable=False),
Column('cpu_info', MediumText(), nullable=False),
Column('disk_available_least', Integer),
Column('free_ram_mb', Integer),
Column('free_disk_gb', Integer),
Column('current_workload', Integer),
Column('running_vms', Integer),
Column('hypervisor_hostname', String(length=255)),
Column('deleted', Integer),
Column('host_ip', InetSmall()),
Column('supported_instances', Text),
Column('pci_stats', Text, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
console_pools = Table('console_pools', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('username', String(length=255)),
Column('password', String(length=255)),
Column('console_type', String(length=255)),
Column('public_hostname', String(length=255)),
Column('host', String(length=255)),
Column('compute_host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)]
consoles_instance_uuid_column_args.append(
ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
consoles = Table('consoles', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_name', String(length=255)),
Column('password', String(length=255)),
Column('port', Integer),
Column('pool_id', Integer, ForeignKey('console_pools.id')),
Column(*consoles_instance_uuid_column_args),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
dns_domains = Table('dns_domains', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Boolean),
Column('domain', String(length=255), primary_key=True, nullable=False),
Column('scope', String(length=255)),
Column('availability_zone', String(length=255)),
Column('project_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
fixed_ips = Table('fixed_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('network_id', Integer),
Column('allocated', Boolean),
Column('leased', Boolean),
Column('reserved', Boolean),
Column('virtual_interface_id', Integer),
Column('host', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
floating_ips = Table('floating_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('fixed_ip_id', Integer),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('auto_assigned', Boolean),
Column('pool', String(length=255)),
Column('interface', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_faults = Table('instance_faults', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36)),
Column('code', Integer, nullable=False),
Column('message', String(length=255)),
Column('details', MediumText()),
Column('host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_id_mappings = Table('instance_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_info_caches = Table('instance_info_caches', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('network_info', MediumText()),
Column('instance_uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
groups = Table('instance_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('uuid', String(length=36), nullable=False),
Column('name', String(length=255)),
UniqueConstraint('uuid', 'deleted',
name='uniq_instance_groups0uuid0deleted'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_metadata = Table('instance_group_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_policy = Table('instance_group_policy', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('policy', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_member = Table('instance_group_member', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_id', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_metadata = Table('instance_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_system_metadata = Table('instance_system_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36), nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_extra_specs = Table('instance_type_extra_specs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, ForeignKey('instance_types.id'),
nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_projects = Table('instance_type_projects', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_types = Table('instance_types', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('name', String(length=255)),
Column('id', Integer, primary_key=True, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('swap', Integer, nullable=False),
Column('vcpu_weight', Integer),
Column('flavorid', String(length=255)),
Column('rxtx_factor', Float),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('disabled', Boolean),
Column('is_public', Boolean),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
inst_lock_enum = Enum('owner', 'admin', name='instances0locked_by')
instances = Table('instances', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('internal_id', Integer),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('image_ref', String(length=255)),
Column('kernel_id', String(length=255)),
Column('ramdisk_id', String(length=255)),
Column('launch_index', Integer),
Column('key_name', String(length=255)),
Column('key_data', MediumText()),
Column('power_state', Integer),
Column('vm_state', String(length=255)),
Column('memory_mb', Integer),
Column('vcpus', Integer),
Column('hostname', String(length=255)),
Column('host', String(length=255)),
Column('user_data', MediumText()),
Column('reservation_id', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('availability_zone', String(length=255)),
Column('locked', Boolean),
Column('os_type', String(length=255)),
Column('launched_on', MediumText()),
Column('instance_type_id', Integer),
Column('vm_mode', String(length=255)),
Column('uuid', String(length=36)),
Column('architecture', String(length=255)),
Column('root_device_name', String(length=255)),
Column('access_ip_v4', InetSmall()),
Column('access_ip_v6', InetSmall()),
Column('config_drive', String(length=255)),
Column('task_state', String(length=255)),
Column('default_ephemeral_device', String(length=255)),
Column('default_swap_device', String(length=255)),
Column('progress', Integer),
Column('auto_disk_config', Boolean),
Column('shutdown_terminate', Boolean),
Column('disable_terminate', Boolean),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('cell_name', String(length=255)),
Column('node', String(length=255)),
Column('deleted', Integer),
Column('locked_by', inst_lock_enum),
Column('cleaned', Integer, default=0),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_actions = Table('instance_actions', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('action', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('request_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('message', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_actions_events = Table('instance_actions_events', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('event', String(length=255)),
Column('action_id', Integer, ForeignKey('instance_actions.id')),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('result', String(length=255)),
Column('traceback', Text),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
iscsi_targets = Table('iscsi_targets', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('target_num', Integer),
Column('host', String(length=255)),
Column('volume_id', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
key_pairs = Table('key_pairs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('user_id', String(length=255)),
Column('fingerprint', String(length=255)),
Column('public_key', MediumText()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
migrations = Table('migrations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('source_compute', String(length=255)),
Column('dest_compute', String(length=255)),
Column('dest_host', String(length=255)),
Column('status', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('old_instance_type_id', Integer),
Column('new_instance_type_id', Integer),
Column('source_node', String(length=255)),
Column('dest_node', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
networks = Table('networks', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('injected', Boolean),
Column('cidr', Inet()),
Column('netmask', InetSmall()),
Column('bridge', String(length=255)),
Column('gateway', InetSmall()),
Column('broadcast', InetSmall()),
Column('dns1', InetSmall()),
Column('vlan', Integer),
Column('vpn_public_address', InetSmall()),
Column('vpn_public_port', Integer),
Column('vpn_private_address', InetSmall()),
Column('dhcp_start', InetSmall()),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('cidr_v6', Inet()),
Column('gateway_v6', InetSmall()),
Column('label', String(length=255)),
Column('netmask_v6', InetSmall()),
Column('bridge_interface', String(length=255)),
Column('multi_host', Boolean),
Column('dns2', InetSmall()),
Column('uuid', String(length=36)),
Column('priority', Integer),
Column('rxtx_base', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
pci_devices_uc_name = 'uniq_pci_devices0compute_node_id0address0deleted'
pci_devices = Table('pci_devices', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('deleted', Integer, default=0, nullable=False),
Column('id', Integer, primary_key=True),
Column('compute_node_id', Integer, nullable=False),
Column('address', String(12), nullable=False),
Column('product_id', String(4)),
Column('vendor_id', String(4)),
Column('dev_type', String(8)),
Column('dev_id', String(255)),
Column('label', String(255), nullable=False),
Column('status', String(36), nullable=False),
Column('extra_info', Text, nullable=True),
Column('instance_uuid', String(36), nullable=True),
Index('ix_pci_devices_compute_node_id_deleted',
'compute_node_id', 'deleted'),
Index('ix_pci_devices_instance_uuid_deleted',
'instance_uuid', 'deleted'),
UniqueConstraint('compute_node_id',
'address', 'deleted',
name=pci_devices_uc_name),
mysql_engine='InnoDB',
mysql_charset='utf8')
provider_fw_rules = Table('provider_fw_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_classes = Table('quota_classes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('class_name', String(length=255)),
Column('resource', String(length=255)),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_usages = Table('quota_usages', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('in_use', Integer, nullable=False),
Column('reserved', Integer, nullable=False),
Column('until_refresh', Integer),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quotas = Table('quotas', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('project_id', String(length=255)),
Column('resource', String(length=255), nullable=False),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted"
project_user_quotas = Table('project_user_quotas', meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('user_id',
String(length=255),
nullable=False),
Column('project_id',
String(length=255),
nullable=False),
Column('resource',
String(length=255),
nullable=False),
Column('hard_limit', Integer, nullable=True),
UniqueConstraint('user_id', 'project_id', 'resource',
'deleted', name=uniq_name),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
reservations = Table('reservations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('usage_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('delta', Integer, nullable=False),
Column('expire', DateTime),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
s3_images = Table('s3_images', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_instance_association = \
Table('security_group_instance_association', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('security_group_id', Integer),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_rules = Table('security_group_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('parent_group_id', Integer, ForeignKey('security_groups.id')),
Column('protocol', String(length=255)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('group_id', Integer, ForeignKey('security_groups.id')),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_groups = Table('security_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('description', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_default_rules = Table('security_group_default_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer, default=0),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
services = Table('services', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('binary', String(length=255)),
Column('topic', String(length=255)),
Column('report_count', Integer, nullable=False),
Column('disabled', Boolean),
Column('deleted', Integer),
Column('disabled_reason', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshot_id_mappings = Table('snapshot_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshots = Table('snapshots', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('volume_id', String(length=36), nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('status', String(length=255)),
Column('progress', String(length=255)),
Column('volume_size', Integer),
Column('scheduled_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
task_log = Table('task_log', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('task_name', String(length=255), nullable=False),
Column('state', String(length=255), nullable=False),
Column('host', String(length=255), nullable=False),
Column('period_beginning', DateTime, nullable=False),
Column('period_ending', DateTime, nullable=False),
Column('message', String(length=255), nullable=False),
Column('task_items', Integer),
Column('errors', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
virtual_interfaces = Table('virtual_interfaces', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', String(length=255)),
Column('network_id', Integer),
Column('uuid', String(length=36)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_id_mappings = Table('volume_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volumes = Table('volumes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('ec2_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('size', Integer),
Column('availability_zone', String(length=255)),
Column('mountpoint', String(length=255)),
Column('status', String(length=255)),
Column('attach_status', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('provider_location', String(length=256)),
Column('provider_auth', String(length=256)),
Column('snapshot_id', String(length=36)),
Column('volume_type_id', Integer),
Column('instance_uuid', String(length=36)),
Column('attach_time', DateTime),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_usage_cache = Table('volume_usage_cache', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('id', Integer(), primary_key=True, nullable=False),
Column('volume_id', String(36), nullable=False),
Column('tot_last_refreshed', DateTime(timezone=False)),
Column('tot_reads', BigInteger(), default=0),
Column('tot_read_bytes', BigInteger(), default=0),
Column('tot_writes', BigInteger(), default=0),
Column('tot_write_bytes', BigInteger(), default=0),
Column('curr_last_refreshed', DateTime(timezone=False)),
Column('curr_reads', BigInteger(), default=0),
Column('curr_read_bytes', BigInteger(), default=0),
Column('curr_writes', BigInteger(), default=0),
Column('curr_write_bytes', BigInteger(), default=0),
Column('deleted', Integer),
Column("instance_uuid", String(length=36)),
Column("project_id", String(length=36)),
Column("user_id", String(length=36)),
Column("availability_zone", String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instances.create()
Index('project_id', instances.c.project_id).create()
Index('uuid', instances.c.uuid, unique=True).create()
# create all tables
tables = [aggregates, console_pools, instance_types,
security_groups, snapshots, volumes,
              # child tables and the remaining tables come later
agent_builds, aggregate_hosts, aggregate_metadata,
block_device_mapping, bw_usage_cache, cells,
certificates, compute_node_stats, compute_nodes, consoles,
dns_domains, fixed_ips, floating_ips,
instance_faults, instance_id_mappings, instance_info_caches,
instance_metadata, instance_system_metadata,
instance_type_extra_specs, instance_type_projects,
instance_actions, instance_actions_events,
groups, group_metadata, group_policy, group_member,
iscsi_targets, key_pairs, migrations, networks,
pci_devices, provider_fw_rules, quota_classes, quota_usages,
quotas, project_user_quotas,
reservations, s3_images, security_group_instance_association,
security_group_rules, security_group_default_rules,
services, snapshot_id_mappings, task_log,
virtual_interfaces,
volume_id_mappings,
volume_usage_cache]
for table in tables:
try:
table.create()
except Exception:
LOG.info(repr(table))
LOG.exception(_LE('Exception while creating table.'))
raise
# task log unique constraint
task_log_uc = "uniq_task_log0task_name0host0period_beginning0period_ending"
task_log_cols = ('task_name', 'host', 'period_beginning', 'period_ending')
uc = UniqueConstraint(*task_log_cols, table=task_log, name=task_log_uc)
uc.create()
# networks unique constraint
UniqueConstraint('vlan', 'deleted', table=networks,
name='uniq_networks0vlan0deleted').create()
# instance_type_name constraint
UniqueConstraint('name', 'deleted', table=instance_types,
name='uniq_instance_types0name0deleted').create()
# flavorid unique constraint
UniqueConstraint('flavorid', 'deleted', table=instance_types,
name='uniq_instance_types0flavorid0deleted').create()
    # keypair constraint
UniqueConstraint('user_id', 'name', 'deleted', table=key_pairs,
name='uniq_key_pairs0user_id0name0deleted').create()
# instance_type_projects constraint
inst_type_uc_name = 'uniq_instance_type_projects0instance_type_id0' + \
'project_id0deleted'
UniqueConstraint('instance_type_id', 'project_id', 'deleted',
table=instance_type_projects,
name=inst_type_uc_name).create()
# floating_ips unique constraint
UniqueConstraint('address', 'deleted',
table=floating_ips,
name='uniq_floating_ips0address0deleted').create()
# instance_info_caches
UniqueConstraint('instance_uuid',
table=instance_info_caches,
name='uniq_instance_info_caches0instance_uuid').create()
UniqueConstraint('address', 'deleted',
table=virtual_interfaces,
name='uniq_virtual_interfaces0address0deleted').create()
# cells
UniqueConstraint('name', 'deleted',
table=cells,
name='uniq_cells0name0deleted').create()
# security_groups
uc = UniqueConstraint('project_id', 'name', 'deleted',
table=security_groups,
name='uniq_security_groups0project_id0name0deleted')
uc.create()
# quotas
UniqueConstraint('project_id', 'resource', 'deleted',
table=quotas,
name='uniq_quotas0project_id0resource0deleted').create()
# fixed_ips
UniqueConstraint('address', 'deleted',
table=fixed_ips,
name='uniq_fixed_ips0address0deleted').create()
# services
UniqueConstraint('host', 'topic', 'deleted',
table=services,
name='uniq_services0host0topic0deleted').create()
UniqueConstraint('host', 'binary', 'deleted',
table=services,
name='uniq_services0host0binary0deleted').create()
# agent_builds
uc_name = 'uniq_agent_builds0hypervisor0os0architecture0deleted'
UniqueConstraint('hypervisor', 'os', 'architecture', 'deleted',
table=agent_builds,
name=uc_name).create()
uc_name = 'uniq_console_pools0host0console_type0compute_host0deleted'
UniqueConstraint('host', 'console_type', 'compute_host', 'deleted',
table=console_pools,
name=uc_name).create()
uc_name = 'uniq_aggregate_hosts0host0aggregate_id0deleted'
UniqueConstraint('host', 'aggregate_id', 'deleted',
table=aggregate_hosts,
name=uc_name).create()
uc_name = 'uniq_aggregate_metadata0aggregate_id0key0deleted'
UniqueConstraint('aggregate_id', 'key', 'deleted',
table=aggregate_metadata,
name=uc_name).create()
uc_name = 'uniq_instance_type_extra_specs0instance_type_id0key0deleted'
UniqueConstraint('instance_type_id', 'key', 'deleted',
table=instance_type_extra_specs,
name=uc_name).create()
# created first (to preserve ordering for schema diffs)
mysql_pre_indexes = [
Index('instance_type_id', instance_type_projects.c.instance_type_id),
Index('project_id', dns_domains.c.project_id),
Index('fixed_ip_id', floating_ips.c.fixed_ip_id),
Index('network_id', virtual_interfaces.c.network_id),
Index('network_id', fixed_ips.c.network_id),
Index('fixed_ips_virtual_interface_id_fkey',
fixed_ips.c.virtual_interface_id),
Index('address', fixed_ips.c.address),
Index('fixed_ips_instance_uuid_fkey', fixed_ips.c.instance_uuid),
Index('instance_uuid', instance_system_metadata.c.instance_uuid),
Index('iscsi_targets_volume_id_fkey', iscsi_targets.c.volume_id),
Index('snapshot_id', block_device_mapping.c.snapshot_id),
Index('usage_id', reservations.c.usage_id),
Index('virtual_interfaces_instance_uuid_fkey',
virtual_interfaces.c.instance_uuid),
Index('volume_id', block_device_mapping.c.volume_id),
Index('security_group_id',
security_group_instance_association.c.security_group_id),
]
# Common indexes (indexes we apply to all databases)
# NOTE: order specific for MySQL diff support
common_indexes = [
# aggregate_metadata
Index('aggregate_metadata_key_idx', aggregate_metadata.c.key),
# agent_builds
Index('agent_builds_hypervisor_os_arch_idx',
agent_builds.c.hypervisor,
agent_builds.c.os,
agent_builds.c.architecture),
# block_device_mapping
Index('block_device_mapping_instance_uuid_idx',
block_device_mapping.c.instance_uuid),
Index('block_device_mapping_instance_uuid_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
# NOTE(dprince): This is now a duplicate index on MySQL and needs to
# be removed there. We leave it here so the Index ordering
# matches on schema diffs (for MySQL).
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839
Index(
'block_device_mapping_instance_uuid_virtual_name_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
Index('block_device_mapping_instance_uuid_volume_id_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.volume_id),
# bw_usage_cache
Index('bw_usage_cache_uuid_start_period_idx',
bw_usage_cache.c.uuid, bw_usage_cache.c.start_period),
Index('certificates_project_id_deleted_idx',
certificates.c.project_id, certificates.c.deleted),
Index('certificates_user_id_deleted_idx', certificates.c.user_id,
certificates.c.deleted),
# compute_node_stats
Index('ix_compute_node_stats_compute_node_id',
compute_node_stats.c.compute_node_id),
Index('compute_node_stats_node_id_and_deleted_idx',
compute_node_stats.c.compute_node_id,
compute_node_stats.c.deleted),
# consoles
Index('consoles_instance_uuid_idx', consoles.c.instance_uuid),
# dns_domains
Index('dns_domains_domain_deleted_idx',
dns_domains.c.domain, dns_domains.c.deleted),
# fixed_ips
Index('fixed_ips_host_idx', fixed_ips.c.host),
Index('fixed_ips_network_id_host_deleted_idx', fixed_ips.c.network_id,
fixed_ips.c.host, fixed_ips.c.deleted),
Index('fixed_ips_address_reserved_network_id_deleted_idx',
fixed_ips.c.address, fixed_ips.c.reserved,
fixed_ips.c.network_id, fixed_ips.c.deleted),
Index('fixed_ips_deleted_allocated_idx', fixed_ips.c.address,
fixed_ips.c.deleted, fixed_ips.c.allocated),
# floating_ips
Index('floating_ips_host_idx', floating_ips.c.host),
Index('floating_ips_project_id_idx', floating_ips.c.project_id),
Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
floating_ips.c.pool, floating_ips.c.deleted,
floating_ips.c.fixed_ip_id, floating_ips.c.project_id),
# group_member
Index('instance_group_member_instance_idx',
group_member.c.instance_id),
# group_metadata
Index('instance_group_metadata_key_idx', group_metadata.c.key),
# group_policy
Index('instance_group_policy_policy_idx', group_policy.c.policy),
# instances
Index('instances_reservation_id_idx',
instances.c.reservation_id),
Index('instances_terminated_at_launched_at_idx',
instances.c.terminated_at,
instances.c.launched_at),
Index('instances_task_state_updated_at_idx',
instances.c.task_state,
instances.c.updated_at),
Index('instances_host_deleted_idx', instances.c.host,
instances.c.deleted),
Index('instances_uuid_deleted_idx', instances.c.uuid,
instances.c.deleted),
Index('instances_host_node_deleted_idx', instances.c.host,
instances.c.node, instances.c.deleted),
Index('instances_host_deleted_cleaned_idx',
instances.c.host, instances.c.deleted,
instances.c.cleaned),
# instance_actions
Index('instance_uuid_idx', instance_actions.c.instance_uuid),
Index('request_id_idx', instance_actions.c.request_id),
# instance_faults
Index('instance_faults_host_idx', instance_faults.c.host),
Index('instance_faults_instance_uuid_deleted_created_at_idx',
instance_faults.c.instance_uuid, instance_faults.c.deleted,
instance_faults.c.created_at),
# instance_id_mappings
Index('ix_instance_id_mappings_uuid', instance_id_mappings.c.uuid),
# instance_metadata
Index('instance_metadata_instance_uuid_idx',
instance_metadata.c.instance_uuid),
# instance_type_extra_specs
Index('instance_type_extra_specs_instance_type_id_key_idx',
instance_type_extra_specs.c.instance_type_id,
instance_type_extra_specs.c.key),
# iscsi_targets
Index('iscsi_targets_host_idx', iscsi_targets.c.host),
Index('iscsi_targets_host_volume_id_deleted_idx',
iscsi_targets.c.host, iscsi_targets.c.volume_id,
iscsi_targets.c.deleted),
# migrations
Index('migrations_by_host_nodes_and_status_idx',
migrations.c.deleted, migrations.c.source_compute,
migrations.c.dest_compute, migrations.c.source_node,
migrations.c.dest_node, migrations.c.status),
Index('migrations_instance_uuid_and_status_idx',
migrations.c.deleted, migrations.c.instance_uuid,
migrations.c.status),
# networks
Index('networks_host_idx', networks.c.host),
Index('networks_cidr_v6_idx', networks.c.cidr_v6),
Index('networks_bridge_deleted_idx', networks.c.bridge,
networks.c.deleted),
Index('networks_project_id_deleted_idx', networks.c.project_id,
networks.c.deleted),
Index('networks_uuid_project_id_deleted_idx',
networks.c.uuid, networks.c.project_id, networks.c.deleted),
Index('networks_vlan_deleted_idx', networks.c.vlan,
networks.c.deleted),
# project_user_quotas
Index('project_user_quotas_project_id_deleted_idx',
project_user_quotas.c.project_id,
project_user_quotas.c.deleted),
Index('project_user_quotas_user_id_deleted_idx',
project_user_quotas.c.user_id, project_user_quotas.c.deleted),
# reservations
Index('ix_reservations_project_id', reservations.c.project_id),
Index('ix_reservations_user_id_deleted',
reservations.c.user_id, reservations.c.deleted),
Index('reservations_uuid_idx', reservations.c.uuid),
# security_group_instance_association
Index('security_group_instance_association_instance_uuid_idx',
security_group_instance_association.c.instance_uuid),
# task_log
Index('ix_task_log_period_beginning', task_log.c.period_beginning),
Index('ix_task_log_host', task_log.c.host),
Index('ix_task_log_period_ending', task_log.c.period_ending),
# quota_classes
Index('ix_quota_classes_class_name', quota_classes.c.class_name),
# quota_usages
Index('ix_quota_usages_project_id', quota_usages.c.project_id),
Index('ix_quota_usages_user_id_deleted',
quota_usages.c.user_id, quota_usages.c.deleted),
# volumes
Index('volumes_instance_uuid_idx', volumes.c.instance_uuid),
]
# MySQL specific indexes
if migrate_engine.name == 'mysql':
for index in mysql_pre_indexes:
index.create(migrate_engine)
# mysql-specific index by leftmost 100 chars. (mysql gets angry if the
# index key length is too long.)
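        # (Background, stated as an assumption for context: InnoDB caps the
        # index key prefix at 767 bytes by default, so indexing five full
        # string columns at once would exceed it; limiting each column to its
        # first 100 characters keeps the composite key under that cap.)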
sql = ("create index migrations_by_host_nodes_and_status_idx ON "
"migrations (deleted, source_compute(100), dest_compute(100), "
"source_node(100), dest_node(100), status)")
migrate_engine.execute(sql)
# PostgreSQL specific indexes
if migrate_engine.name == 'postgresql':
Index('address', fixed_ips.c.address).create()
# NOTE(dprince): PostgreSQL doesn't allow duplicate indexes
# so we skip creation of select indexes (so schemas match exactly).
POSTGRES_INDEX_SKIPS = [
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839
'block_device_mapping_instance_uuid_virtual_name_device_name_idx'
]
MYSQL_INDEX_SKIPS = [
# we create this one manually for MySQL above
'migrations_by_host_nodes_and_status_idx'
]
for index in common_indexes:
if ((migrate_engine.name == 'postgresql' and
index.name in POSTGRES_INDEX_SKIPS) or
(migrate_engine.name == 'mysql' and
index.name in MYSQL_INDEX_SKIPS)):
continue
else:
index.create(migrate_engine)
    Index('project_id', dns_domains.c.project_id).drop()
# Common foreign keys
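    # Each entry below is [[constrained_columns], [referred_columns], name];
    # the explicit name is only used on MySQL (see the loop further down).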
fkeys = [
[[instance_type_projects.c.instance_type_id],
[instance_types.c.id],
'instance_type_projects_ibfk_1'],
[[iscsi_targets.c.volume_id],
[volumes.c.id],
'iscsi_targets_volume_id_fkey'],
[[reservations.c.usage_id],
[quota_usages.c.id],
'reservations_ibfk_1'],
[[security_group_instance_association.c.security_group_id],
[security_groups.c.id],
'security_group_instance_association_ibfk_1'],
[[compute_node_stats.c.compute_node_id],
[compute_nodes.c.id],
'fk_compute_node_stats_compute_node_id'],
[[compute_nodes.c.service_id],
[services.c.id],
'fk_compute_nodes_service_id'],
]
secgroup_instance_association_instance_uuid_fkey = (
'security_group_instance_association_instance_uuid_fkey')
fkeys.extend(
[
[[fixed_ips.c.instance_uuid],
[instances.c.uuid],
'fixed_ips_instance_uuid_fkey'],
[[block_device_mapping.c.instance_uuid],
[instances.c.uuid],
'block_device_mapping_instance_uuid_fkey'],
[[instance_info_caches.c.instance_uuid],
[instances.c.uuid],
'instance_info_caches_instance_uuid_fkey'],
[[instance_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_metadata_instance_uuid_fkey'],
[[instance_system_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_system_metadata_ibfk_1'],
[[security_group_instance_association.c.instance_uuid],
[instances.c.uuid],
secgroup_instance_association_instance_uuid_fkey],
[[virtual_interfaces.c.instance_uuid],
[instances.c.uuid],
'virtual_interfaces_instance_uuid_fkey'],
[[instance_actions.c.instance_uuid],
[instances.c.uuid],
'fk_instance_actions_instance_uuid'],
[[instance_faults.c.instance_uuid],
[instances.c.uuid],
'fk_instance_faults_instance_uuid'],
[[migrations.c.instance_uuid],
[instances.c.uuid],
'fk_migrations_instance_uuid']
])
for fkey_pair in fkeys:
        if migrate_engine.name == 'mysql':
# For MySQL we name our fkeys explicitly
# so they match Havana
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1],
name=fkey_pair[2])
fkey.create()
elif migrate_engine.name == 'postgresql':
# PostgreSQL names things like it wants (correct and compatible!)
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1])
fkey.create()
if migrate_engine.name == 'mysql':
# In Folsom we explicitly converted migrate_version to UTF8.
migrate_engine.execute(
'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
# Set default DB charset to UTF8.
migrate_engine.execute(
'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' %
migrate_engine.url.database)
_create_shadow_tables(migrate_engine)
_create_dump_tables(migrate_engine)
| hanlind/nova | nova/db/sqlalchemy/migrate_repo/versions/216_havana.py | Python | apache-2.0 | 62,236 |
"""
Support for Vera sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.vera/
"""
import logging
from homeassistant.const import (
TEMP_CELSIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.components.vera import (
VERA_CONTROLLER, VERA_DEVICES, VeraDevice)
DEPENDENCIES = ['vera']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Perform the setup for Vera controller devices."""
add_devices(
VeraSensor(device, VERA_CONTROLLER)
for device in VERA_DEVICES['sensor'])
class VeraSensor(VeraDevice, Entity):
"""Representation of a Vera Sensor."""
def __init__(self, vera_device, controller):
"""Initialize the sensor."""
self.current_value = None
self._temperature_units = None
VeraDevice.__init__(self, vera_device, controller)
self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
@property
def state(self):
"""Return the name of the sensor."""
return self.current_value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self.vera_device.category == "Temperature Sensor":
return self._temperature_units
elif self.vera_device.category == "Light Sensor":
return 'lux'
elif self.vera_device.category == "Humidity Sensor":
return '%'
def update(self):
"""Update the state."""
if self.vera_device.category == "Temperature Sensor":
self.current_value = self.vera_device.temperature
vera_temp_units = (
self.vera_device.vera_controller.temperature_units)
if vera_temp_units == 'F':
self._temperature_units = TEMP_FAHRENHEIT
else:
self._temperature_units = TEMP_CELSIUS
elif self.vera_device.category == "Light Sensor":
self.current_value = self.vera_device.light
elif self.vera_device.category == "Humidity Sensor":
self.current_value = self.vera_device.humidity
elif self.vera_device.category == "Sensor":
tripped = self.vera_device.is_tripped
self.current_value = 'Tripped' if tripped else 'Not Tripped'
else:
self.current_value = 'Unknown'
| morphis/home-assistant | homeassistant/components/sensor/vera.py | Python | apache-2.0 | 2,532 |
from __future__ import print_function
from pysb.simulator import ScipyOdeSimulator
from tutorial_a import model
t = [0, 10, 20, 30, 40, 50, 60]
simulator = ScipyOdeSimulator(model, tspan=t)
simresult = simulator.run()
print(simresult.species)
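# simresult.species holds the integrated trajectories of every model species
# at the requested time points; observable trajectories should be available
# from the same SimulationResult object as well (e.g. simresult.observables).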
| LoLab-VU/pysb | pysb/examples/run_tutorial_a.py | Python | bsd-2-clause | 244 |
#!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for testing ganeti.tools.node_daemon_setup"""
import unittest
from ganeti import errors
from ganeti import constants
from ganeti.tools import node_daemon_setup
import testutils
_SetupError = node_daemon_setup.SetupError
class TestVerifySsconf(unittest.TestCase):
def testNoSsconf(self):
self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf,
{}, NotImplemented, _verify_fn=NotImplemented)
for items in [None, {}]:
self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, {
constants.NDS_SSCONF: items,
}, NotImplemented, _verify_fn=NotImplemented)
def _Check(self, names):
self.assertEqual(frozenset(names), frozenset([
constants.SS_CLUSTER_NAME,
constants.SS_INSTANCE_LIST,
]))
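  # _Check stands in for the verification callback passed as _verify_fn below:
  # it asserts that VerifySsconf requests exactly these two ssconf keys.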
def testSuccess(self):
ssdata = {
constants.SS_CLUSTER_NAME: "cluster.example.com",
constants.SS_INSTANCE_LIST: [],
}
result = node_daemon_setup.VerifySsconf({
constants.NDS_SSCONF: ssdata,
}, "cluster.example.com", _verify_fn=self._Check)
self.assertEqual(result, ssdata)
self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, {
constants.NDS_SSCONF: ssdata,
}, "wrong.example.com", _verify_fn=self._Check)
def testInvalidKey(self):
self.assertRaises(errors.GenericError, node_daemon_setup.VerifySsconf, {
constants.NDS_SSCONF: {
"no-valid-ssconf-key": "value",
},
}, NotImplemented)
if __name__ == "__main__":
testutils.GanetiTestProgram()
| bitemyapp/ganeti | test/py/ganeti.tools.node_daemon_setup_unittest.py | Python | bsd-2-clause | 2,901 |
#-*- coding: ISO-8859-1 -*-
# pysqlite2/test/factory.py: tests for the various factories in pysqlite
#
# Copyright (C) 2005-2007 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import sqlite3 as sqlite
class MyConnection(sqlite.Connection):
def __init__(self, *args, **kwargs):
sqlite.Connection.__init__(self, *args, **kwargs)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
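# Illustrative use of a row factory at the connection level (the tests below
# instead attach it to a cursor through MyCursor):
#   con = sqlite.connect(":memory:")
#   con.row_factory = dict_factory
#   con.execute("select 1 as a").fetchone()   # -> {'a': 1}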
class MyCursor(sqlite.Cursor):
def __init__(self, *args, **kwargs):
sqlite.Cursor.__init__(self, *args, **kwargs)
self.row_factory = dict_factory
class ConnectionFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:", factory=MyConnection)
def tearDown(self):
self.con.close()
def CheckIsInstance(self):
self.assertTrue(isinstance(self.con,
MyConnection),
"connection is not instance of MyConnection")
class CursorFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def tearDown(self):
self.con.close()
def CheckIsInstance(self):
cur = self.con.cursor(factory=MyCursor)
self.assertTrue(isinstance(cur,
MyCursor),
"cursor is not instance of MyCursor")
class RowFactoryTestsBackwardsCompat(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckIsProducedByFactory(self):
cur = self.con.cursor(factory=MyCursor)
cur.execute("select 4+5 as foo")
row = cur.fetchone()
self.assertTrue(isinstance(row,
dict),
"row is not instance of dict")
cur.close()
def tearDown(self):
self.con.close()
class RowFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckCustomFactory(self):
self.con.row_factory = lambda cur, row: list(row)
row = self.con.execute("select 1, 2").fetchone()
self.assertTrue(isinstance(row,
list),
"row is not instance of list")
def CheckSqliteRowIndex(self):
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
self.assertTrue(isinstance(row,
sqlite.Row),
"row is not instance of sqlite.Row")
col1, col2 = row["a"], row["b"]
self.assertTrue(col1 == 1, "by name: wrong result for column 'a'")
        self.assertTrue(col2 == 2, "by name: wrong result for column 'b'")
col1, col2 = row["A"], row["B"]
self.assertTrue(col1 == 1, "by name: wrong result for column 'A'")
self.assertTrue(col2 == 2, "by name: wrong result for column 'B'")
col1, col2 = row[0], row[1]
self.assertTrue(col1 == 1, "by index: wrong result for column 0")
self.assertTrue(col2 == 2, "by index: wrong result for column 1")
def CheckSqliteRowIter(self):
"""Checks if the row object is iterable"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
for col in row:
pass
def CheckSqliteRowAsTuple(self):
"""Checks if the row object can be converted to a tuple"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
t = tuple(row)
def CheckSqliteRowAsDict(self):
"""Checks if the row object can be correctly converted to a dictionary"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
d = dict(row)
self.assertEqual(d["a"], row["a"])
self.assertEqual(d["b"], row["b"])
def CheckSqliteRowHashCmp(self):
"""Checks if the row object compares and hashes correctly"""
self.con.row_factory = sqlite.Row
row_1 = self.con.execute("select 1 as a, 2 as b").fetchone()
row_2 = self.con.execute("select 1 as a, 2 as b").fetchone()
row_3 = self.con.execute("select 1 as a, 3 as b").fetchone()
self.assertTrue(row_1 == row_1)
self.assertTrue(row_1 == row_2)
self.assertTrue(row_2 != row_3)
self.assertFalse(row_1 != row_1)
self.assertFalse(row_1 != row_2)
self.assertFalse(row_2 == row_3)
self.assertEqual(row_1, row_2)
self.assertEqual(hash(row_1), hash(row_2))
self.assertNotEqual(row_1, row_3)
self.assertNotEqual(hash(row_1), hash(row_3))
def tearDown(self):
self.con.close()
class TextFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckUnicode(self):
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertTrue(type(row[0]) == str, "type of row[0] must be unicode")
def CheckString(self):
self.con.text_factory = bytes
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertTrue(type(row[0]) == bytes, "type of row[0] must be bytes")
self.assertTrue(row[0] == austria.encode("utf-8"), "column must equal original data in UTF-8")
def CheckCustom(self):
self.con.text_factory = lambda x: str(x, "utf-8", "ignore")
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertTrue(type(row[0]) == str, "type of row[0] must be unicode")
self.assertTrue(row[0].endswith("reich"), "column must contain original data")
def CheckOptimizedUnicode(self):
self.con.text_factory = sqlite.OptimizedUnicode
austria = "Österreich"
germany = "Deutchland"
a_row = self.con.execute("select ?", (austria,)).fetchone()
d_row = self.con.execute("select ?", (germany,)).fetchone()
self.assertTrue(type(a_row[0]) == str, "type of non-ASCII row must be str")
self.assertTrue(type(d_row[0]) == str, "type of ASCII-only row must be str")
def tearDown(self):
self.con.close()
class TextFactoryTestsWithEmbeddedZeroBytes(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.execute("create table test (value text)")
self.con.execute("insert into test (value) values (?)", ("a\x00b",))
def CheckString(self):
# text_factory defaults to str
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), str)
self.assertEqual(row[0], "a\x00b")
def CheckBytes(self):
self.con.text_factory = bytes
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytes)
self.assertEqual(row[0], b"a\x00b")
def CheckBytearray(self):
self.con.text_factory = bytearray
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytearray)
self.assertEqual(row[0], b"a\x00b")
def CheckCustom(self):
# A custom factory should receive a bytes argument
self.con.text_factory = lambda x: x
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytes)
self.assertEqual(row[0], b"a\x00b")
def tearDown(self):
self.con.close()
def suite():
connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check")
cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check")
row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check")
row_suite = unittest.makeSuite(RowFactoryTests, "Check")
text_suite = unittest.makeSuite(TextFactoryTests, "Check")
text_zero_bytes_suite = unittest.makeSuite(TextFactoryTestsWithEmbeddedZeroBytes, "Check")
return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite, text_zero_bytes_suite))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| wdv4758h/ZipPy | lib-python/3/sqlite3/test/factory.py | Python | bsd-3-clause | 9,209 |
# -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django.contrib.gis.db import models
class AOIManager(models.GeoManager):
def add_filters(self, **kwargs):
"""
Returns the queryset with new filters
"""
return super(AOIManager, self).get_query_set().filter(**kwargs)
def unassigned(self):
"""
Returns unassigned AOIs.
"""
return self.add_filters(status='Unassigned')
def assigned(self):
"""
Returns assigned AOIs.
"""
return self.add_filters(status='Assigned')
def in_work(self):
"""
Returns AOIs in work.
"""
return self.add_filters(status='In Work')
def submitted(self):
"""
Returns submitted AOIs.
"""
return self.add_filters(status='Submitted')
def completed(self):
"""
Returns completed AOIs.
"""
return self.add_filters(status='Completed')
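# Illustrative usage (assumes an AOI model declares "objects = AOIManager()"):
#   AOI.objects.unassigned()  # AOIs whose status is 'Unassigned'
#   AOI.objects.in_work()     # AOIs whose status is 'In Work'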
| stephenrjones/geoq | geoq/core/managers.py | Python | mit | 1,155 |
"""
Support for EnOcean binary sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.enocean/
"""
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
BinarySensorDevice, PLATFORM_SCHEMA, SENSOR_CLASSES_SCHEMA)
from homeassistant.components import enocean
from homeassistant.const import (CONF_NAME, CONF_ID, CONF_SENSOR_CLASS)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['enocean']
DEFAULT_NAME = 'EnOcean binary sensor'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SENSOR_CLASS, default=None): SENSOR_CLASSES_SCHEMA,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Binary Sensor platform fo EnOcean."""
dev_id = config.get(CONF_ID)
devname = config.get(CONF_NAME)
sensor_class = config.get(CONF_SENSOR_CLASS)
add_devices([EnOceanBinarySensor(dev_id, devname, sensor_class)])
class EnOceanBinarySensor(enocean.EnOceanDevice, BinarySensorDevice):
"""Representation of EnOcean binary sensors such as wall switches."""
def __init__(self, dev_id, devname, sensor_class):
"""Initialize the EnOcean binary sensor."""
enocean.EnOceanDevice.__init__(self)
self.stype = "listener"
self.dev_id = dev_id
self.which = -1
self.onoff = -1
self.devname = devname
self._sensor_class = sensor_class
@property
def name(self):
"""The default name for the binary sensor."""
return self.devname
@property
def sensor_class(self):
"""Return the class of this sensor."""
return self._sensor_class
def value_changed(self, value, value2):
"""Fire an event with the data that have changed.
This method is called when there is an incoming packet associated
with this platform.
"""
self.update_ha_state()
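        # The second data byte appears to encode which button changed and its
        # on/off state; the branches below read it as 0x70/0x50 -> button 0
        # off/on and 0x30/0x10 -> button 1 off/on (interpretation inferred
        # from this handler, not from the EnOcean specification).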
if value2 == 0x70:
self.which = 0
self.onoff = 0
elif value2 == 0x50:
self.which = 0
self.onoff = 1
elif value2 == 0x30:
self.which = 1
self.onoff = 0
elif value2 == 0x10:
self.which = 1
self.onoff = 1
self.hass.bus.fire('button_pressed', {"id": self.dev_id,
'pushed': value,
'which': self.which,
'onoff': self.onoff})
| xifle/home-assistant | homeassistant/components/binary_sensor/enocean.py | Python | mit | 2,747 |
'''Package for Banded Min Hash based Similarity Calculations'''
from min_hash import *
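# Re-export the public names of the bundled min_hash module so callers can
# import them straight from this package (e.g. from data_hacking.min_hash).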
| ClickSecurity/data_hacking | data_hacking/min_hash/__init__.py | Python | mit | 87 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2011 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = '0.5.2'
__all__ = ['Guess', 'Language',
'guess_file_info', 'guess_video_info',
'guess_movie_info', 'guess_episode_info']
# Do python3 detection before importing any other module, to be sure that
# it will then always be available
# with code from http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/
import sys
if sys.version_info[0] >= 3:
PY3 = True
unicode_text_type = str
native_text_type = str
base_text_type = str
def u(x):
return str(x)
def s(x):
return x
class UnicodeMixin(object):
__str__ = lambda x: x.__unicode__()
import binascii
def to_hex(x):
return binascii.hexlify(x).decode('utf-8')
else:
PY3 = False
__all__ = [ str(s) for s in __all__ ] # fix imports for python2
unicode_text_type = unicode
native_text_type = str
base_text_type = basestring
def u(x):
if isinstance(x, str):
return x.decode('utf-8')
return unicode(x)
def s(x):
if isinstance(x, unicode):
return x.encode('utf-8')
if isinstance(x, list):
return [ s(y) for y in x ]
if isinstance(x, tuple):
return tuple(s(y) for y in x)
if isinstance(x, dict):
return dict((s(key), s(value)) for key, value in x.items())
return x
class UnicodeMixin(object):
__str__ = lambda x: unicode(x).encode('utf-8')
def to_hex(x):
return x.encode('hex')
from guessit.guess import Guess, merge_all
from guessit.language import Language
from guessit.matcher import IterativeMatcher
import logging
log = logging.getLogger(__name__)
class NullHandler(logging.Handler):
def emit(self, record):
pass
# let's be a nicely behaving library
h = NullHandler()
log.addHandler(h)
def guess_file_info(filename, filetype, info=None):
"""info can contain the names of the various plugins, such as 'filename' to
detect filename info, or 'hash_md5' to get the md5 hash of the file.
>>> guess_file_info('tests/dummy.srt', 'autodetect', info = ['hash_md5', 'hash_sha1'])
{'hash_md5': 'e781de9b94ba2753a8e2945b2c0a123d', 'hash_sha1': 'bfd18e2f4e5d59775c2bc14d80f56971891ed620'}
"""
result = []
hashers = []
if info is None:
info = ['filename']
if isinstance(info, base_text_type):
info = [info]
for infotype in info:
if infotype == 'filename':
m = IterativeMatcher(filename, filetype=filetype)
result.append(m.matched())
elif infotype == 'hash_mpc':
from guessit.hash_mpc import hash_file
try:
result.append(Guess({'hash_mpc': hash_file(filename)},
confidence=1.0))
except Exception as e:
log.warning('Could not compute MPC-style hash because: %s' % e)
elif infotype == 'hash_ed2k':
from guessit.hash_ed2k import hash_file
try:
result.append(Guess({'hash_ed2k': hash_file(filename)},
confidence=1.0))
except Exception as e:
log.warning('Could not compute ed2k hash because: %s' % e)
elif infotype.startswith('hash_'):
import hashlib
hashname = infotype[5:]
try:
hasher = getattr(hashlib, hashname)()
hashers.append((infotype, hasher))
except AttributeError:
log.warning('Could not compute %s hash because it is not available from python\'s hashlib module' % hashname)
else:
log.warning('Invalid infotype: %s' % infotype)
# do all the hashes now, but on a single pass
if hashers:
try:
blocksize = 8192
hasherobjs = dict(hashers).values()
with open(filename, 'rb') as f:
chunk = f.read(blocksize)
while chunk:
for hasher in hasherobjs:
hasher.update(chunk)
chunk = f.read(blocksize)
for infotype, hasher in hashers:
result.append(Guess({infotype: hasher.hexdigest()},
confidence=1.0))
except Exception as e:
log.warning('Could not compute hash because: %s' % e)
result = merge_all(result)
# last minute adjustments
    # if country is in the guessed properties, append it to the series name
if 'country' in result:
result['series'] += ' (%s)' % result['country'].alpha2.upper()
return result
def guess_video_info(filename, info=None):
return guess_file_info(filename, 'autodetect', info)
def guess_movie_info(filename, info=None):
return guess_file_info(filename, 'movie', info)
def guess_episode_info(filename, info=None):
return guess_file_info(filename, 'episode', info)
| nabsboss/CouchPotatoServer | libs/guessit/__init__.py | Python | gpl-3.0 | 5,753 |
# voronoi.py - functions for computing the Voronoi partition of a graph
#
# Copyright 2016-2018 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Functions for computing the Voronoi cells of a graph."""
import networkx as nx
from networkx.utils import groups
__all__ = ['voronoi_cells']
def voronoi_cells(G, center_nodes, weight='weight'):
"""Returns the Voronoi cells centered at `center_nodes` with respect
to the shortest-path distance metric.
If *C* is a set of nodes in the graph and *c* is an element of *C*,
the *Voronoi cell* centered at a node *c* is the set of all nodes
*v* that are closer to *c* than to any other center node in *C* with
respect to the shortest-path distance metric. [1]_
For directed graphs, this will compute the "outward" Voronoi cells,
as defined in [1]_, in which distance is measured from the center
nodes to the target node. For the "inward" Voronoi cells, use the
:meth:`DiGraph.reverse` method to reverse the orientation of the
edges before invoking this function on the directed graph.
Parameters
----------
G : NetworkX graph
center_nodes : set
A nonempty set of nodes in the graph `G` that represent the
center of the Voronoi cells.
weight : string or function
The edge attribute (or an arbitrary function) representing the
weight of an edge. This keyword argument is as described in the
documentation for :func:`~networkx.multi_source_dijkstra_path`,
for example.
Returns
-------
dictionary
A mapping from center node to set of all nodes in the graph
closer to that center node than to any other center node. The
keys of the dictionary are the element of `center_nodes`, and
the values of the dictionary form a partition of the nodes of
`G`.
Examples
--------
To get only the partition of the graph induced by the Voronoi cells,
take the collection of all values in the returned dictionary::
>>> G = nx.path_graph(6)
>>> center_nodes = {0, 3}
>>> cells = nx.voronoi_cells(G, center_nodes)
>>> partition = set(map(frozenset, cells.values()))
>>> sorted(map(sorted, partition))
[[0, 1], [2, 3, 4, 5]]
Raises
------
ValueError
If `center_nodes` is empty.
References
----------
.. [1] Erwig, Martin. (2000),
"The graph Voronoi diagram with applications."
*Networks*, 36: 156--163.
<dx.doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L>
"""
# Determine the shortest paths from any one of the center nodes to
# every node in the graph.
#
# This raises `ValueError` if `center_nodes` is an empty set.
paths = nx.multi_source_dijkstra_path(G, center_nodes, weight=weight)
# Determine the center node from which the shortest path originates.
nearest = {v: p[0] for v, p in paths.items()}
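    # (Each path returned by multi_source_dijkstra_path starts at the source
    # node it was grown from, so p[0] is the center node nearest to v.)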
# Get the mapping from center node to all nodes closer to it than to
# any other center node.
cells = groups(nearest)
# We collect all unreachable nodes under a special key, if there are any.
unreachable = set(G) - set(nearest)
if unreachable:
cells['unreachable'] = unreachable
return cells
| kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/networkx/algorithms/voronoi.py | Python | gpl-3.0 | 3,399 |