repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
bretttegart/treadmill | tests/services/network_service_test.py | 1 | 25230 | """Unit test for network_service - Treadmill Network configuration service.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import shutil
import tempfile
import unittest
# Disable W0611: Unused import
import tests.treadmill_test_skip_windows # pylint: disable=W0611
import mock
import treadmill
from treadmill import subproc
from treadmill import services
from treadmill.services import network_service
class NetworkServiceTest(unittest.TestCase):
"""Unit tests for the network service implementation.
"""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
@mock.patch('treadmill.netdev.dev_mtu', mock.Mock())
@mock.patch('treadmill.netdev.dev_speed', mock.Mock())
@mock.patch('treadmill.services.network_service._device_ip', mock.Mock())
def test_init(self):
"""Test Network service constructor.
"""
network_service.NetworkResourceService(
ext_device='eth42',
)
treadmill.netdev.dev_mtu.assert_called_with('eth42')
treadmill.netdev.dev_speed.assert_called_with('eth42')
@mock.patch('treadmill.netdev.addr_add', mock.Mock())
@mock.patch('treadmill.netdev.bridge_addif', mock.Mock())
@mock.patch('treadmill.netdev.bridge_create', mock.Mock())
@mock.patch('treadmill.netdev.bridge_delete', mock.Mock())
@mock.patch('treadmill.netdev.bridge_setfd', mock.Mock())
@mock.patch('treadmill.netdev.dev_conf_route_localnet_set', mock.Mock())
@mock.patch('treadmill.netdev.dev_mac',
mock.Mock(return_value='11:22:33:44:55:66'))
@mock.patch('treadmill.netdev.dev_mtu', mock.Mock())
@mock.patch('treadmill.netdev.link_add_veth', mock.Mock())
@mock.patch('treadmill.netdev.link_del_veth', mock.Mock())
@mock.patch('treadmill.netdev.link_set_addr', mock.Mock())
@mock.patch('treadmill.netdev.link_set_down', mock.Mock())
@mock.patch('treadmill.netdev.link_set_mtu', mock.Mock())
@mock.patch('treadmill.netdev.link_set_up', mock.Mock())
@mock.patch('treadmill.services.network_service._device_ip', mock.Mock())
def test__bridge_initialize(self):
"""Test Network service bridge initialization.
"""
# Access to a protected member _bridge_initialize
# pylint: disable=W0212
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc._bridge_initialize()
treadmill.netdev.link_set_down.assert_has_calls(
[
mock.call('tm0'),
mock.call('br0'),
]
)
treadmill.netdev.link_del_veth.assert_called_with('tm0')
treadmill.netdev.bridge_delete.assert_has_calls(
[
mock.call('tm0'), # FIXME(boysson): For legacy setup cleanup
mock.call('br0'),
]
)
treadmill.netdev.bridge_create.assert_called_with('br0')
treadmill.netdev.bridge_setfd.assert_called_with('br0', 0)
# Setup the TM link with the right MTU
treadmill.netdev.link_add_veth.assert_called_with('tm0', 'tm1')
treadmill.netdev.link_set_mtu.assert_has_calls(
[
mock.call('tm0', 9000),
mock.call('tm1', 9000),
]
)
treadmill.netdev.link_set_mtu.assert_called_with('tm1', 9000)
# Make sure the bridge's address is fixed
treadmill.netdev.dev_mac.assert_called_with('tm1')
treadmill.netdev.link_set_addr.assert_called_with('br0', '11:22:33:44:55:66')
# Add one end of the link to the bridge
treadmill.netdev.bridge_addif.assert_called_with('br0', 'tm1')
# Everything is brought up
treadmill.netdev.link_set_up.assert_has_calls(
[
mock.call('br0'),
mock.call('tm1'),
mock.call('tm0'),
]
)
# And the TM interface has the right IP
treadmill.netdev.addr_add.assert_called_with(
devname='tm0', addr='192.168.254.254/16',
)
treadmill.netdev.dev_conf_route_localnet_set.assert_called_with(
'tm0', True
)
@mock.patch('treadmill.iptables.create_set', mock.Mock())
@mock.patch('treadmill.netdev.bridge_brif',
mock.Mock(return_value=['foo', 'bar']))
@mock.patch('treadmill.netdev.bridge_setfd', mock.Mock())
@mock.patch('treadmill.netdev.dev_conf_route_localnet_set', mock.Mock())
@mock.patch('treadmill.netdev.dev_mtu', mock.Mock())
@mock.patch('treadmill.netdev.link_set_up', mock.Mock())
@mock.patch('treadmill.services.network_service._device_info', mock.Mock())
@mock.patch('treadmill.services.network_service.NetworkResourceService.'
'_bridge_initialize', mock.Mock())
@mock.patch('treadmill.services.network_service._device_ip', mock.Mock())
@mock.patch('treadmill.vipfile.VipMgr', autospec=True)
def test_initialize_quick(self, mock_vipmgr):
"""Test service initialization (quick restart).
"""
# Access to a protected member _device_info of a client class
# pylint: disable=W0212
treadmill.services.network_service._device_info.side_effect = \
lambda dev: {'alias': 'reqid_%s' % dev}
mock_vipmgr_inst = mock_vipmgr.return_value
mock_vipmgr_inst.list.return_value = [
('192.168.1.2', 'reqid_foo'),
('192.168.43.10', 'reqid_bar'),
('192.168.8.9', 'reqid_baz'),
]
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc.initialize(self.root)
mock_vipmgr.assert_called_with(
mock.ANY,
svc._service_rsrc_dir
)
mock_vipmgr_inst.garbage_collect.assert_not_called()
treadmill.iptables.create_set.assert_has_calls(
[
mock.call(treadmill.iptables.SET_PROD_CONTAINERS,
family='inet', set_type='hash:ip',
hashsize=1024, maxelem=65536),
mock.call(treadmill.iptables.SET_NONPROD_CONTAINERS,
family='inet', set_type='hash:ip',
hashsize=1024, maxelem=65536),
],
any_order=True
)
treadmill.netdev.link_set_up.assert_has_calls(
[
mock.call('tm0'),
mock.call('tm1'),
mock.call('br0'),
]
)
# Re-init is not called
svc._bridge_initialize.assert_not_called()
mock_vipmgr_inst.initialize.assert_not_called()
treadmill.netdev.bridge_setfd.assert_called_with('br0', 0)
treadmill.netdev.dev_conf_route_localnet_set.assert_called_with('tm0', True)
treadmill.netdev.dev_mtu.assert_called_with('br0')
treadmill.netdev.bridge_brif.assert_called_with('br0')
treadmill.services.network_service._device_info.assert_has_calls(
[
mock.call('foo'),
mock.call('bar'),
]
)
mock_vipmgr_inst.free.assert_not_called()
self.assertEqual(
svc._devices,
{
'reqid_foo': {
'alias': 'reqid_foo',
'ip': '192.168.1.2',
'stale': True,
},
'reqid_bar': {
'alias': 'reqid_bar',
'ip': '192.168.43.10',
'stale': True,
},
'reqid_baz': {
# No device, so no 'alias'
'ip': '192.168.8.9',
'stale': True,
},
},
'All devices must be unified with their IP and marked stale'
)
@mock.patch('treadmill.iptables.create_set', mock.Mock())
@mock.patch('treadmill.netdev.bridge_brif', mock.Mock(return_value=[]))
@mock.patch('treadmill.netdev.bridge_setfd', mock.Mock())
@mock.patch('treadmill.netdev.dev_conf_route_localnet_set', mock.Mock())
@mock.patch('treadmill.netdev.dev_mtu', mock.Mock())
@mock.patch('treadmill.netdev.link_set_up', mock.Mock())
@mock.patch('treadmill.services.network_service._device_info', mock.Mock())
@mock.patch('treadmill.services.network_service._device_ip', mock.Mock())
@mock.patch('treadmill.services.network_service.NetworkResourceService.'
'_bridge_initialize', mock.Mock())
@mock.patch('treadmill.vipfile.VipMgr', autospec=True)
def test_initialize(self, mock_vipmgr):
"""Test service initialization.
"""
# Access to a protected member _device_info of a client class
# pylint: disable=W0212
treadmill.services.network_service._device_info.side_effect = \
lambda dev: {'alias': 'reqid_%s' % dev}
treadmill.netdev.link_set_up.side_effect = [
subproc.CalledProcessError('any', 'how'),
None,
]
mock_vipmgr_inst = mock_vipmgr.return_value
mock_vipmgr_inst.list.return_value = []
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc.initialize(self.root)
mock_vipmgr.assert_called_with(
mock.ANY,
svc._service_rsrc_dir
)
mock_vipmgr_inst.garbage_collect.assert_not_called()
treadmill.iptables.create_set.assert_has_calls(
[
mock.call(treadmill.iptables.SET_PROD_CONTAINERS,
family='inet', set_type='hash:ip',
hashsize=1024, maxelem=65536),
mock.call(treadmill.iptables.SET_NONPROD_CONTAINERS,
family='inet', set_type='hash:ip',
hashsize=1024, maxelem=65536),
],
any_order=True
)
treadmill.netdev.link_set_up.assert_called_with('tm0')
svc._bridge_initialize.assert_called()
mock_vipmgr_inst.initialize.assert_not_called()
treadmill.netdev.bridge_setfd.assert_called_with('br0', 0)
treadmill.netdev.dev_mtu.assert_called_with('br0')
treadmill.netdev.dev_conf_route_localnet_set.assert_called_with('tm0', True)
treadmill.netdev.bridge_brif.assert_called_with('br0')
treadmill.services.network_service._device_info.assert_not_called()
mock_vipmgr_inst.free.assert_not_called()
self.assertEqual(
svc._devices,
{}
)
@mock.patch('treadmill.netdev.dev_mtu', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.dev_speed', mock.Mock(set_spec=True))
@mock.patch('treadmill.iptables.atomic_set', mock.Mock(set_spec=True))
@mock.patch('treadmill.vipfile.VipMgr', autospec=True)
@mock.patch('treadmill.services.network_service._device_ip',
mock.Mock(set_spec=True))
@mock.patch('treadmill.services.network_service.'
'NetworkResourceService.on_delete_request',
mock.Mock(set_spec=True))
def test_synchronize(self, mock_vipmgr):
"""Test service synchronize.
"""
# Access to a protected member _device_info of a client class
# pylint: disable=W0212
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc._vips = mock_vipmgr('/mock', '/mock')
svc._devices = {
'reqid_foo': {
'alias': 'reqid_foo',
'device': '0000000ID5678.0',
'environment': 'prod',
'ip': '192.168.1.2',
},
'reqid_bar': {
# Device but no request, no environment
'alias': 'reqid_bar',
'device': '0000000ID1234.0',
'ip': '192.168.43.10',
'stale': True,
},
'reqid_baz': {
# No device, so no 'alias', 'device'.
'ip': '192.168.8.9',
'stale': True,
},
}
def _mock_delete(rsrc_id):
svc._devices.pop(rsrc_id, None)
svc.on_delete_request.side_effect = _mock_delete
svc.synchronize()
svc.on_delete_request.assert_has_calls(
[
mock.call('reqid_bar'),
mock.call('reqid_baz'),
],
any_order=True
)
treadmill.iptables.atomic_set.assert_has_calls(
[
mock.call(
treadmill.iptables.SET_PROD_CONTAINERS,
{'192.168.1.2'},
set_type='hash:ip', family='inet',
hashsize=1024, maxelem=65536,
),
mock.call(
treadmill.iptables.SET_NONPROD_CONTAINERS,
set(),
set_type='hash:ip', family='inet',
hashsize=1024, maxelem=65536,
),
],
any_order=True
)
res = svc.report_status()
self.assertEqual(
res,
{
'bridge_dev': 'br0',
'bridge_mtu': treadmill.netdev.dev_mtu.return_value,
'devices':
{
'reqid_foo':
{
'alias': 'reqid_foo',
'device': '0000000ID5678.0',
'environment': 'prod',
'ip': '192.168.1.2',
}
},
'external_device': 'eth42',
'external_ip': network_service._device_ip.return_value,
'external_mtu': 9000,
'external_speed': 10000,
'internal_device': 'tm0',
'internal_ip': '192.168.254.254'
}
)
@mock.patch('treadmill.services.network_service._device_ip', mock.Mock())
def test_event_handlers(self):
"""Test event_handlers request.
"""
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
self.assertEqual(
svc.event_handlers(),
[]
)
@mock.patch('treadmill.services.network_service._device_ip',
mock.Mock(return_value='a.b.c.d'))
def test_report_status(self):
"""Test service status reporting.
"""
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
status = svc.report_status()
self.assertEqual(
status,
{
'bridge_dev': 'br0',
'bridge_mtu': 0,
'internal_device': 'tm0',
'internal_ip': '192.168.254.254',
'devices': {},
'external_mtu': 9000,
'external_speed': 10000,
'external_ip': 'a.b.c.d',
'external_device': 'eth42',
}
)
@mock.patch('treadmill.netdev.addr_add', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.bridge_addif', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_add_veth', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_alias', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_mtu', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_up', mock.Mock(set_spec=True))
@mock.patch('treadmill.services.network_service._device_info',
autospec=True)
@mock.patch('treadmill.services.network_service._device_ip',
mock.Mock(set_spec=True, return_value='1.2.3.4'))
@mock.patch('treadmill.services.network_service._add_mark_rule',
mock.Mock(set_spec=True))
def test_on_create_request(self, mock_devinfo):
"""Test processing of a network create request.
"""
# Access to a protected member _devices
# pylint: disable=W0212
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc._vips = mock.Mock()
mockip = svc._vips.alloc.return_value
request = {
'environment': 'dev',
}
request_id = 'myproid.test-0-ID1234'
mock_devinfo.return_value = {'test': 'me'}
network = svc.on_create_request(request_id, request)
svc._vips.alloc.assert_called_with(request_id)
treadmill.netdev.link_add_veth.assert_called_with(
'0000000ID1234.0', '0000000ID1234.1',
)
treadmill.netdev.link_set_mtu.assert_has_calls(
[
mock.call('0000000ID1234.0', 9000),
mock.call('0000000ID1234.1', 9000),
]
)
treadmill.netdev.link_set_alias.assert_has_calls(
[
mock.call('0000000ID1234.0', request_id),
mock.call('0000000ID1234.1', request_id),
]
)
treadmill.netdev.bridge_addif.assert_called_with(
'br0', '0000000ID1234.0'
)
treadmill.netdev.link_set_up.assert_called_with(
'0000000ID1234.0',
)
mock_devinfo.assert_called_with('0000000ID1234.0')
self.assertEqual(
network,
{
'gateway': '192.168.254.254',
'veth': '0000000ID1234.1',
'vip': mockip,
'external_ip': '1.2.3.4',
}
)
self.assertEqual(
svc._devices,
{
request_id: {
'environment': 'dev',
'ip': mockip,
'test': 'me',
}
}
)
network_service._add_mark_rule.assert_called_with(
mockip, 'dev'
)
@mock.patch('treadmill.netdev.addr_add', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.bridge_addif', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_add_veth', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_alias', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_mtu', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_set_up', mock.Mock(set_spec=True))
@mock.patch('treadmill.services.network_service._device_info',
autospec=True)
@mock.patch('treadmill.services.network_service._device_ip',
mock.Mock(set_spec=True, return_value='1.2.3.4'))
@mock.patch('treadmill.services.network_service._add_mark_rule',
mock.Mock(set_spec=True))
def test_on_create_request_existing(self, mock_devinfo):
"""Test processing of a network create request when the device exists
(restarts).
"""
# Access to a protected member _devices
# pylint: disable=W0212
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc._vips = mock.Mock()
request = {
'environment': 'dev',
}
request_id = 'myproid.test-0-ID1234'
# Fake the existing device left over from a previous run (restart case)
svc._devices = {
request_id: {
'ip': 'old_ip',
'device': '0000000ID1234.0',
},
}
mock_devinfo.return_value = {'test': 'me'}
network = svc.on_create_request(request_id, request)
svc._vips.alloc.assert_not_called()
treadmill.netdev.link_add_veth.assert_not_called()
treadmill.netdev.link_set_mtu.assert_not_called()
treadmill.netdev.link_set_alias.assert_not_called()
treadmill.netdev.bridge_addif.assert_not_called()
treadmill.netdev.link_set_up.assert_not_called()
mock_devinfo.assert_called_with('0000000ID1234.0')
network_service._add_mark_rule.assert_called_with(
'old_ip', 'dev'
)
self.assertEqual(
network,
{
'gateway': '192.168.254.254',
'veth': '0000000ID1234.1',
'vip': 'old_ip',
'external_ip': '1.2.3.4',
}
)
self.assertEqual(
svc._devices,
{
request_id: {
'environment': 'dev',
'ip': 'old_ip',
'test': 'me',
}
}
)
@mock.patch('treadmill.netdev.dev_state', mock.Mock(set_spec=True))
@mock.patch('treadmill.netdev.link_del_veth', mock.Mock(set_spec=True))
@mock.patch('treadmill.vipfile.VipMgr', autospec=True)
@mock.patch('treadmill.services.network_service._device_ip',
mock.Mock(set_spec=True))
@mock.patch('treadmill.services.network_service._delete_mark_rule',
mock.Mock(set_spec=True))
def test_on_delete_request(self, mock_vipmgr):
"""Test processing of a localdisk delete request.
"""
# Access to a protected member
# pylint: disable=W0212
svc = network_service.NetworkResourceService(
ext_device='eth42',
ext_speed=10000,
ext_mtu=9000,
)
svc._vips = mock_vipmgr(mock.ANY, mock.ANY)
request_id = 'myproid.test-0-ID1234'
svc._devices[request_id] = {
'ip': 'test_ip',
'environment': 'test_env',
}
svc.on_delete_request(request_id)
treadmill.netdev.dev_state.assert_called_with(
'0000000ID1234.0'
)
treadmill.netdev.link_del_veth.assert_called_with(
'0000000ID1234.0'
)
network_service._delete_mark_rule.assert_called_with(
'test_ip', 'test_env'
)
svc._vips.free.assert_called_with(
request_id, 'test_ip',
)
@mock.patch('treadmill.iptables.add_ip_set', mock.Mock())
@mock.patch('treadmill.iptables.test_ip_set',
mock.Mock(return_value=False))
def test__add_mark_rule(self):
"""Test mark rule addition
"""
# Disable protected-access: test accesses protected members.
# pylint: disable=protected-access
# Called with the NONPROD interface
network_service._add_mark_rule('2.2.2.2', 'dev')
treadmill.iptables.add_ip_set.assert_called_with(
treadmill.iptables.SET_NONPROD_CONTAINERS, '2.2.2.2'
)
treadmill.iptables.test_ip_set.assert_called_with(
treadmill.iptables.SET_PROD_CONTAINERS, '2.2.2.2'
)
treadmill.iptables.add_ip_set.reset_mock()
treadmill.iptables.test_ip_set.reset_mock()
# Called with the PROD interface
network_service._add_mark_rule('3.3.3.3', 'prod')
treadmill.iptables.add_ip_set.assert_called_with(
treadmill.iptables.SET_PROD_CONTAINERS, '3.3.3.3'
)
treadmill.iptables.test_ip_set.assert_called_with(
treadmill.iptables.SET_NONPROD_CONTAINERS, '3.3.3.3'
)
@mock.patch('treadmill.iptables.add_ip_set', mock.Mock())
@mock.patch('treadmill.iptables.test_ip_set',
mock.Mock(return_value=True))
def test__add_mark_rule_dup(self):
"""Test mark rule addition (integrity error).
"""
# Access to a protected member _device_info of a client class
# pylint: disable=W0212
self.assertRaises(
Exception,
network_service._add_mark_rule,
'2.2.2.2', 'dev'
)
@mock.patch('treadmill.iptables.rm_ip_set', mock.Mock())
def test__delete_mark_rule(self):
"""Test mark rule deletion.
"""
# Disable protected-access: test accesses protected members.
# pylint: disable=protected-access
# Called with the NONPROD interface
network_service._delete_mark_rule('2.2.2.2', 'dev')
treadmill.iptables.rm_ip_set.assert_called_with(
treadmill.iptables.SET_NONPROD_CONTAINERS, '2.2.2.2'
)
treadmill.iptables.rm_ip_set.reset_mock()
# Called with the PROD interface
network_service._delete_mark_rule('4.4.4.4', 'prod')
treadmill.iptables.rm_ip_set.assert_called_with(
treadmill.iptables.SET_PROD_CONTAINERS, '4.4.4.4'
)
def test_load(self):
"""Test loading service using alias."""
# pylint: disable=W0212
self.assertEqual(
network_service.NetworkResourceService,
services.ResourceService(self.root, 'network')._load_impl()
)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
windedge/odoomrp-wip | stock_lock_lot/wizard/wiz_lock_lot.py | 18 | 1358 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api
class WizLockLot(models.TransientModel):
_name = 'wiz.lock.lot'
@api.multi
def action_lock_lots(self):
lot_obj = self.env['stock.production.lot']
active_ids = self._context['active_ids']
lot_obj.browse(active_ids).button_lock()
@api.multi
def action_unlock_lots(self):
lot_obj = self.env['stock.production.lot']
active_ids = self._context['active_ids']
lot_obj.browse(active_ids).button_unlock()
| agpl-3.0 |
anderspetersson/django-bigbrother | bigbrother/graphs.py | 1 | 1117 | from bigbrother.core import get_module_by_slug
from bigbrother.models import ModuleStat
from django.db.models import Avg
from datetime import datetime, timedelta
import qsstats
class Graph():
stopdate = datetime.utcnow()
showpoints = False
def get_graph_data(self, slug, *args, **kwargs):
module = get_module_by_slug(slug)()
q = ModuleStat.objects.filter(modulename=slug)
qs = qsstats.QuerySetStats(q, 'added', module.get_aggregate_function() or Avg('value'))
data = qs.time_series(self.startdate, self.stopdate, interval=self.interval)
return data
class LineGraph(Graph):
type = 'line'
showpoints = True
class BarGraph(Graph):
type = 'bar'
class LastWeekGraph(LineGraph):
name = 'Last Week'
interval = 'days'
startdate = datetime.utcnow() - timedelta(days=7)
class LastMonthGraph(LineGraph):
name = 'Last Month'
interval = 'days'
startdate = datetime.utcnow() - timedelta(days=30)
class LastYearGraph(LineGraph):
name = 'Last Year'
interval = 'weeks'
startdate = datetime.utcnow() - timedelta(days=365)
| mit |
martinbuc/missionplanner | Lib/site-packages/numpy/f2py/tests/test_array_from_pyobj.py | 59 | 20418 | import unittest
import os
import sys
import copy
import nose
from numpy.testing import *
from numpy import array, alltrue, ndarray, asarray, can_cast,zeros, dtype
from numpy.core.multiarray import typeinfo
import util
wrap = None
def setup():
"""
Build the required testing extension module
"""
global wrap
# Check compiler availability first
if not util.has_c_compiler():
raise nose.SkipTest("No C compiler available")
if wrap is None:
config_code = """
config.add_extension('test_array_from_pyobj_ext',
sources=['wrapmodule.c', 'fortranobject.c'],
define_macros=[])
"""
d = os.path.dirname(__file__)
src = [os.path.join(d, 'src', 'array_from_pyobj', 'wrapmodule.c'),
os.path.join(d, '..', 'src', 'fortranobject.c'),
os.path.join(d, '..', 'src', 'fortranobject.h')]
wrap = util.build_module_distutils(src, config_code,
'test_array_from_pyobj_ext')
def flags_info(arr):
flags = wrap.array_attrs(arr)[6]
return flags2names(flags)
def flags2names(flags):
info = []
for flagname in ['CONTIGUOUS','FORTRAN','OWNDATA','ENSURECOPY',
'ENSUREARRAY','ALIGNED','NOTSWAPPED','WRITEABLE',
'UPDATEIFCOPY','BEHAVED','BEHAVED_RO',
'CARRAY','FARRAY'
]:
if abs(flags) & getattr(wrap,flagname):
info.append(flagname)
return info
class Intent:
def __init__(self,intent_list=[]):
self.intent_list = intent_list[:]
flags = 0
for i in intent_list:
if i=='optional':
flags |= wrap.F2PY_OPTIONAL
else:
flags |= getattr(wrap,'F2PY_INTENT_'+i.upper())
self.flags = flags
def __getattr__(self,name):
name = name.lower()
if name=='in_': name='in'
return self.__class__(self.intent_list+[name])
def __str__(self):
return 'intent(%s)' % (','.join(self.intent_list))
def __repr__(self):
return 'Intent(%r)' % (self.intent_list)
def is_intent(self,*names):
for name in names:
if name not in self.intent_list:
return False
return True
def is_intent_exact(self,*names):
return len(self.intent_list)==len(names) and self.is_intent(*names)
intent = Intent()
class Type(object):
_type_names = ['BOOL','BYTE','UBYTE','SHORT','USHORT','INT','UINT',
'LONG','ULONG','LONGLONG','ULONGLONG',
'FLOAT','DOUBLE','LONGDOUBLE','CFLOAT','CDOUBLE',
'CLONGDOUBLE']
_type_cache = {}
_cast_dict = {'BOOL':['BOOL']}
_cast_dict['BYTE'] = _cast_dict['BOOL'] + ['BYTE']
_cast_dict['UBYTE'] = _cast_dict['BOOL'] + ['UBYTE']
_cast_dict['BYTE'] = ['BYTE']
_cast_dict['UBYTE'] = ['UBYTE']
_cast_dict['SHORT'] = _cast_dict['BYTE'] + ['UBYTE','SHORT']
_cast_dict['USHORT'] = _cast_dict['UBYTE'] + ['BYTE','USHORT']
_cast_dict['INT'] = _cast_dict['SHORT'] + ['USHORT','INT']
_cast_dict['UINT'] = _cast_dict['USHORT'] + ['SHORT','UINT']
_cast_dict['LONG'] = _cast_dict['INT'] + ['LONG']
_cast_dict['ULONG'] = _cast_dict['UINT'] + ['ULONG']
_cast_dict['LONGLONG'] = _cast_dict['LONG'] + ['LONGLONG']
_cast_dict['ULONGLONG'] = _cast_dict['ULONG'] + ['ULONGLONG']
_cast_dict['FLOAT'] = _cast_dict['SHORT'] + ['USHORT','FLOAT']
_cast_dict['DOUBLE'] = _cast_dict['INT'] + ['UINT','FLOAT','DOUBLE']
_cast_dict['LONGDOUBLE'] = _cast_dict['LONG'] + ['ULONG','FLOAT','DOUBLE','LONGDOUBLE']
_cast_dict['CFLOAT'] = _cast_dict['FLOAT'] + ['CFLOAT']
_cast_dict['CDOUBLE'] = _cast_dict['DOUBLE'] + ['CFLOAT','CDOUBLE']
_cast_dict['CLONGDOUBLE'] = _cast_dict['LONGDOUBLE'] + ['CFLOAT','CDOUBLE','CLONGDOUBLE']
def __new__(cls,name):
if isinstance(name,dtype):
dtype0 = name
name = None
for n,i in typeinfo.items():
if isinstance(i,tuple) and dtype0.type is i[-1]:
name = n
break
obj = cls._type_cache.get(name.upper(),None)
if obj is not None:
return obj
obj = object.__new__(cls)
obj._init(name)
cls._type_cache[name.upper()] = obj
return obj
def _init(self,name):
self.NAME = name.upper()
self.type_num = getattr(wrap,'PyArray_'+self.NAME)
assert_equal(self.type_num,typeinfo[self.NAME][1])
self.dtype = typeinfo[self.NAME][-1]
self.elsize = typeinfo[self.NAME][2] / 8
self.dtypechar = typeinfo[self.NAME][0]
def cast_types(self):
return map(self.__class__,self._cast_dict[self.NAME])
def all_types(self):
return map(self.__class__,self._type_names)
def smaller_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if typeinfo[name][3]<bits:
types.append(Type(name))
return types
def equal_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if name==self.NAME: continue
if typeinfo[name][3]==bits:
types.append(Type(name))
return types
def larger_types(self):
bits = typeinfo[self.NAME][3]
types = []
for name in self._type_names:
if typeinfo[name][3]>bits:
types.append(Type(name))
return types
class Array:
def __init__(self,typ,dims,intent,obj):
self.type = typ
self.dims = dims
self.intent = intent
self.obj_copy = copy.deepcopy(obj)
self.obj = obj
# arr.dtypechar may be different from typ.dtypechar
self.arr = wrap.call(typ.type_num,dims,intent.flags,obj)
self.arr_attr = wrap.array_attrs(self.arr)
if len(dims)>1:
if self.intent.is_intent('c'):
assert intent.flags & wrap.F2PY_INTENT_C
assert not self.arr.flags['FORTRAN'],`self.arr.flags,obj.flags`
assert self.arr.flags['CONTIGUOUS']
assert not self.arr_attr[6] & wrap.FORTRAN
else:
assert not intent.flags & wrap.F2PY_INTENT_C
assert self.arr.flags['FORTRAN']
assert not self.arr.flags['CONTIGUOUS']
assert self.arr_attr[6] & wrap.FORTRAN
if obj is None:
self.pyarr = None
self.pyarr_attr = None
return
if intent.is_intent('cache'):
assert isinstance(obj,ndarray),`type(obj)`
self.pyarr = array(obj).reshape(*dims).copy()
else:
self.pyarr = array(array(obj,
dtype = typ.dtypechar).reshape(*dims),
order=self.intent.is_intent('c') and 'C' or 'F')
assert self.pyarr.dtype == typ, \
`self.pyarr.dtype,typ`
assert self.pyarr.flags['OWNDATA'], (obj, intent)
self.pyarr_attr = wrap.array_attrs(self.pyarr)
if len(dims)>1:
if self.intent.is_intent('c'):
assert not self.pyarr.flags['FORTRAN']
assert self.pyarr.flags['CONTIGUOUS']
assert not self.pyarr_attr[6] & wrap.FORTRAN
else:
assert self.pyarr.flags['FORTRAN']
assert not self.pyarr.flags['CONTIGUOUS']
assert self.pyarr_attr[6] & wrap.FORTRAN
assert self.arr_attr[1]==self.pyarr_attr[1] # nd
assert self.arr_attr[2]==self.pyarr_attr[2] # dimensions
if self.arr_attr[1]<=1:
assert self.arr_attr[3]==self.pyarr_attr[3],\
`self.arr_attr[3],self.pyarr_attr[3],self.arr.tostring(),self.pyarr.tostring()` # strides
assert self.arr_attr[5][-2:]==self.pyarr_attr[5][-2:],\
`self.arr_attr[5],self.pyarr_attr[5]` # descr
assert self.arr_attr[6]==self.pyarr_attr[6],\
`self.arr_attr[6],self.pyarr_attr[6],flags2names(0*self.arr_attr[6]-self.pyarr_attr[6]),flags2names(self.arr_attr[6]),intent` # flags
if intent.is_intent('cache'):
assert self.arr_attr[5][3]>=self.type.elsize,\
`self.arr_attr[5][3],self.type.elsize`
else:
assert self.arr_attr[5][3]==self.type.elsize,\
`self.arr_attr[5][3],self.type.elsize`
assert self.arr_equal(self.pyarr,self.arr)
if isinstance(self.obj,ndarray):
if typ.elsize==Type(obj.dtype).elsize:
if not intent.is_intent('copy') and self.arr_attr[1]<=1:
assert self.has_shared_memory()
def arr_equal(self,arr1,arr2):
if arr1.shape != arr2.shape:
return False
s = arr1==arr2
return alltrue(s.flatten())
def __str__(self):
return str(self.arr)
def has_shared_memory(self):
"""Check that created array shares data with input array.
"""
if self.obj is self.arr:
return True
if not isinstance(self.obj,ndarray):
return False
obj_attr = wrap.array_attrs(self.obj)
return obj_attr[0]==self.arr_attr[0]
##################################################
class test_intent(unittest.TestCase):
def test_in_out(self):
assert_equal(str(intent.in_.out),'intent(in,out)')
assert intent.in_.c.is_intent('c')
assert not intent.in_.c.is_intent_exact('c')
assert intent.in_.c.is_intent_exact('c','in')
assert intent.in_.c.is_intent_exact('in','c')
assert not intent.in_.is_intent('c')
class _test_shared_memory:
num2seq = [1,2]
num23seq = [[1,2,3],[4,5,6]]
def test_in_from_2seq(self):
a = self.array([2],intent.in_,self.num2seq)
assert not a.has_shared_memory()
def test_in_from_2casttype(self):
for t in self.type.cast_types():
obj = array(self.num2seq,dtype=t.dtype)
a = self.array([len(self.num2seq)],intent.in_,obj)
if t.elsize==self.type.elsize:
assert a.has_shared_memory(),`self.type.dtype,t.dtype`
else:
assert not a.has_shared_memory(),`t.dtype`
def test_inout_2seq(self):
obj = array(self.num2seq,dtype=self.type.dtype)
a = self.array([len(self.num2seq)],intent.inout,obj)
assert a.has_shared_memory()
try:
a = self.array([2],intent.in_.inout,self.num2seq)
except TypeError,msg:
if not str(msg).startswith('failed to initialize intent(inout|inplace|cache) array'):
raise
else:
raise SystemError,'intent(inout) should have failed on sequence'
def test_f_inout_23seq(self):
obj = array(self.num23seq,dtype=self.type.dtype,order='F')
shape = (len(self.num23seq),len(self.num23seq[0]))
a = self.array(shape,intent.in_.inout,obj)
assert a.has_shared_memory()
obj = array(self.num23seq,dtype=self.type.dtype,order='C')
shape = (len(self.num23seq),len(self.num23seq[0]))
try:
a = self.array(shape,intent.in_.inout,obj)
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(inout) array'):
raise
else:
raise SystemError,'intent(inout) should have failed on improper array'
def test_c_inout_23seq(self):
obj = array(self.num23seq,dtype=self.type.dtype)
shape = (len(self.num23seq),len(self.num23seq[0]))
a = self.array(shape,intent.in_.c.inout,obj)
assert a.has_shared_memory()
def test_in_copy_from_2casttype(self):
for t in self.type.cast_types():
obj = array(self.num2seq,dtype=t.dtype)
a = self.array([len(self.num2seq)],intent.in_.copy,obj)
assert not a.has_shared_memory(),`t.dtype`
def test_c_in_from_23seq(self):
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,self.num23seq)
assert not a.has_shared_memory()
def test_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,obj)
assert not a.has_shared_memory(),`t.dtype`
def test_f_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype,order='F')
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_,obj)
if t.elsize==self.type.elsize:
assert a.has_shared_memory(),`t.dtype`
else:
assert not a.has_shared_memory(),`t.dtype`
def test_c_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.c,obj)
if t.elsize==self.type.elsize:
assert a.has_shared_memory(),`t.dtype`
else:
assert not a.has_shared_memory(),`t.dtype`
def test_f_copy_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype,order='F')
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.copy,obj)
assert not a.has_shared_memory(),`t.dtype`
def test_c_copy_in_from_23casttype(self):
for t in self.type.cast_types():
obj = array(self.num23seq,dtype=t.dtype)
a = self.array([len(self.num23seq),len(self.num23seq[0])],
intent.in_.c.copy,obj)
assert not a.has_shared_memory(),`t.dtype`
def test_in_cache_from_2casttype(self):
for t in self.type.all_types():
if t.elsize != self.type.elsize:
continue
obj = array(self.num2seq,dtype=t.dtype)
shape = (len(self.num2seq),)
a = self.array(shape,intent.in_.c.cache,obj)
assert a.has_shared_memory(),`t.dtype`
a = self.array(shape,intent.in_.cache,obj)
assert a.has_shared_memory(),`t.dtype`
obj = array(self.num2seq,dtype=t.dtype,order='F')
a = self.array(shape,intent.in_.c.cache,obj)
assert a.has_shared_memory(),`t.dtype`
a = self.array(shape,intent.in_.cache,obj)
assert a.has_shared_memory(),`t.dtype`
try:
a = self.array(shape,intent.in_.cache,obj[::-1])
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(cache) array'):
raise
else:
raise SystemError,'intent(cache) should have failed on multisegmented array'
def test_in_cache_from_2casttype_failure(self):
for t in self.type.all_types():
if t.elsize >= self.type.elsize:
continue
obj = array(self.num2seq,dtype=t.dtype)
shape = (len(self.num2seq),)
try:
a = self.array(shape,intent.in_.cache,obj)
except ValueError,msg:
if not str(msg).startswith('failed to initialize intent(cache) array'):
raise
else:
raise SystemError,'intent(cache) should have failed on smaller array'
def test_cache_hidden(self):
shape = (2,)
a = self.array(shape,intent.cache.hide,None)
assert a.arr.shape==shape
shape = (2,3)
a = self.array(shape,intent.cache.hide,None)
assert a.arr.shape==shape
shape = (-1,3)
try:
a = self.array(shape,intent.cache.hide,None)
except ValueError,msg:
if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
raise
else:
raise SystemError,'intent(cache) should have failed on undefined dimensions'
def test_hidden(self):
shape = (2,)
a = self.array(shape,intent.hide,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
shape = (2,3)
a = self.array(shape,intent.hide,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
assert a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS']
shape = (2,3)
a = self.array(shape,intent.c.hide,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
assert not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS']
shape = (-1,3)
try:
a = self.array(shape,intent.hide,None)
except ValueError,msg:
if not str(msg).startswith('failed to create intent(cache|hide)|optional array'):
raise
else:
raise SystemError,'intent(hide) should have failed on undefined dimensions'
def test_optional_none(self):
shape = (2,)
a = self.array(shape,intent.optional,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
shape = (2,3)
a = self.array(shape,intent.optional,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
assert a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS']
shape = (2,3)
a = self.array(shape,intent.c.optional,None)
assert a.arr.shape==shape
assert a.arr_equal(a.arr,zeros(shape,dtype=self.type.dtype))
assert not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS']
def test_optional_from_2seq(self):
obj = self.num2seq
shape = (len(obj),)
a = self.array(shape,intent.optional,obj)
assert a.arr.shape==shape
assert not a.has_shared_memory()
def test_optional_from_23seq(self):
obj = self.num23seq
shape = (len(obj),len(obj[0]))
a = self.array(shape,intent.optional,obj)
assert a.arr.shape==shape
assert not a.has_shared_memory()
a = self.array(shape,intent.optional.c,obj)
assert a.arr.shape==shape
assert not a.has_shared_memory()
def test_inplace(self):
obj = array(self.num23seq,dtype=self.type.dtype)
assert not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS']
shape = obj.shape
a = self.array(shape,intent.inplace,obj)
assert obj[1][2]==a.arr[1][2],`obj,a.arr`
a.arr[1][2]=54
assert obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`
assert a.arr is obj
assert obj.flags['FORTRAN'] # obj attributes are changed inplace!
assert not obj.flags['CONTIGUOUS']
def test_inplace_from_casttype(self):
for t in self.type.cast_types():
if t is self.type:
continue
obj = array(self.num23seq,dtype=t.dtype)
assert obj.dtype.type==t.dtype
assert obj.dtype.type is not self.type.dtype
assert not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS']
shape = obj.shape
a = self.array(shape,intent.inplace,obj)
assert obj[1][2]==a.arr[1][2],`obj,a.arr`
a.arr[1][2]=54
assert obj[1][2]==a.arr[1][2]==array(54,dtype=self.type.dtype),`obj,a.arr`
assert a.arr is obj
assert obj.flags['FORTRAN'] # obj attributes are changed inplace!
assert not obj.flags['CONTIGUOUS']
assert obj.dtype.type is self.type.dtype # obj type is changed inplace!
for t in Type._type_names:
exec '''\
class test_%s_gen(unittest.TestCase,
_test_shared_memory
):
def setUp(self):
self.type = Type(%r)
array = lambda self,dims,intent,obj: Array(Type(%r),dims,intent,obj)
''' % (t,t,t)
if __name__ == "__main__":
import nose
nose.runmodule()
| gpl-3.0 |
openstack/python-designateclient | designateclient/tests/test_utils.py | 1 | 2478 | # Copyright (c) 2015 Thales Services SAS
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import uuid
from designateclient import exceptions
from designateclient.tests import base
from designateclient import utils
LIST_MOCK_RESPONSE = [
{'id': '13579bdf-0000-0000-abcd-000000000001', 'name': 'abcd'},
{'id': '13579bdf-0000-0000-baba-000000000001', 'name': 'baba'},
{'id': '13579bdf-0000-0000-baba-000000000002', 'name': 'baba'},
]
class UtilsTestCase(base.TestCase):
def _find_resourceid_by_name_or_id(self, name_or_id, by_name=False):
resource_client = mock.Mock()
resource_client.list.return_value = LIST_MOCK_RESPONSE
resourceid = utils.find_resourceid_by_name_or_id(
resource_client, name_or_id)
self.assertEqual(by_name, resource_client.list.called)
return resourceid
def test_find_resourceid_with_hyphen_uuid(self):
expected = str(uuid.uuid4())
observed = self._find_resourceid_by_name_or_id(expected)
self.assertEqual(expected, observed)
def test_find_resourceid_with_nonhyphen_uuid(self):
expected = str(uuid.uuid4())
fakeid = expected.replace('-', '')
observed = self._find_resourceid_by_name_or_id(fakeid)
self.assertEqual(expected, observed)
def test_find_resourceid_with_unique_resource(self):
observed = self._find_resourceid_by_name_or_id('abcd', by_name=True)
self.assertEqual('13579bdf-0000-0000-abcd-000000000001', observed)
def test_find_resourceid_with_nonexistent_resource(self):
self.assertRaises(exceptions.ResourceNotFound,
self._find_resourceid_by_name_or_id,
'taz', by_name=True)
def test_find_resourceid_with_multiple_resources(self):
self.assertRaises(exceptions.NoUniqueMatch,
self._find_resourceid_by_name_or_id,
'baba', by_name=True)
| apache-2.0 |
Peratham/tweater | py/nltk/compat.py | 4 | 3139 | # Natural Language Toolkit: Compatibility Functions
#
# Copyright (C) 2001-2011 NLTK Project
# Author: Steven Bird <[email protected]>
# Edward Loper <[email protected]>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
Backwards compatibility with previous versions of Python.
This module provides backwards compatibility by defining
functions and classes that were not available in earlier versions of
Python. Intended usage:
>>> from nltk.compat import *
Currently, NLTK requires Python 2.4 or later.
"""
######################################################################
# New in Python 2.5
######################################################################
# ElementTree
try:
from xml.etree import ElementTree
except ImportError:
from nltk.etree import ElementTree
# collections.defaultdict
# originally contributed by Yoav Goldberg <[email protected]>
# new version by Jason Kirtland from Python cookbook.
# <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/523034>
try:
from collections import defaultdict
except ImportError:
class defaultdict(dict):
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not hasattr(default_factory, '__call__')):
raise TypeError('first argument must be callable')
dict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.iteritems()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
return 'defaultdict(%s, %s)' % (self.default_factory,
dict.__repr__(self))
# [XX] to make pickle happy in python 2.4:
import collections
collections.defaultdict = defaultdict
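# Illustrative note (not in the original source): registering the fallback as
# ``collections.defaultdict`` is presumably what lets pickles that reference
# that qualified name resolve on interpreters lacking the built-in class. A
# minimal round-trip check, valid wherever either the patch or the real class
# is available:
#
#     import pickle
#     d = collections.defaultdict(int)
#     d['x'] += 1
#     assert pickle.loads(pickle.dumps(d))['x'] == 1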
# all, any
try:
all([True])
all = all
except NameError:
def all(iterable):
for i in iterable:
if not i:
return False
else:
return True
try:
any([True])
any = any
except NameError:
def any(iterable):
for i in iterable:
if i:
return True
else:
return False
__all__ = ['ElementTree', 'defaultdict', 'all', 'any']
| gpl-3.0 |
mathjazz/pontoon | pontoon/api/tests/test_schema.py | 2 | 8648 | import sys
from itertools import product
import pytest
from pontoon.base.models import Project, ProjectLocale
from pontoon.test.factories import ProjectFactory
@pytest.fixture
def setup_excepthook():
# graphql-core's ExecutionContext.report_error uses sys.excepthook to
# print error stack traces. According to Python docs this hooks can be
# safely customized:
#
# The handling of such top-level exceptions can be customized by
# assigning another three-argument function to sys.excepthook.
#
# Cf. https://docs.python.org/2/library/sys.html#sys.excepthook
excepthook_orig = sys.excepthook
sys.excepthook = lambda *x: None
yield
sys.excepthook = excepthook_orig
@pytest.mark.django_db
def test_projects(client):
body = {
"query": """{
projects(includeSystem: true) {
name
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
ProjectFactory.create(visibility=Project.Visibility.PRIVATE)
assert response.status_code == 200
assert response.json() == {
"data": {
"projects": [
{"name": "Pontoon Intro"},
{"name": "Terminology"},
{"name": "Tutorial"},
]
}
}
@pytest.fixture()
def regular_projects(locale_a):
return ProjectFactory.create_batch(3, visibility=Project.Visibility.PUBLIC) + list(
Project.objects.filter(slug__in=["terminology"])
)
@pytest.fixture()
def disabled_projects(locale_a):
return ProjectFactory.create_batch(3, disabled=True)
@pytest.fixture()
def system_projects(locale_a):
return ProjectFactory.create_batch(3, system_project=True) + list(
Project.objects.filter(slug__in=["pontoon-intro", "tutorial"])
)
@pytest.fixture()
def private_projects():
return ProjectFactory.create_batch(3, visibility=Project.Visibility.PRIVATE)
@pytest.mark.django_db
@pytest.mark.parametrize(
"include_disabled,include_system,is_admin",
# Produces a product with all possible project filters combinations
product(*([[True, False]] * 3)),
)
def test_project_filters(
include_disabled,
include_system,
is_admin,
regular_projects,
disabled_projects,
system_projects,
private_projects,
client,
admin,
):
expected_projects = set(
regular_projects
+ (disabled_projects if include_disabled else [])
+ (system_projects if include_system else [])
+ (private_projects if is_admin else [])
)
body = {
"query": """{{
projects(includeDisabled: {include_disabled}, includeSystem: {include_system}) {{
slug,
disabled,
systemProject,
visibility
}}
}}""".format(
include_disabled=str(include_disabled).lower(),
include_system=str(include_system).lower(),
)
}
if is_admin:
client.force_login(admin)
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert response.json() == {
"data": {
"projects": [
{
"slug": p.slug,
"visibility": p.visibility,
"systemProject": p.system_project,
"disabled": p.disabled,
}
for p in sorted(expected_projects, key=lambda p: p.pk)
]
}
}
@pytest.mark.django_db
def test_project_localizations(client):
body = {
"query": """{
project(slug: "pontoon-intro") {
localizations {
locale {
name
}
}
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert response.json() == {
"data": {"project": {"localizations": [{"locale": {"name": "English"}}]}}
}
@pytest.mark.django_db
@pytest.mark.parametrize(
"include_disabled,include_system,is_admin",
# Produces a product with all possible filters combinations
product(*([[True, False]] * 3)),
)
def test_localization_filters(
include_disabled,
include_system,
is_admin,
locale_a,
regular_projects,
disabled_projects,
system_projects,
private_projects,
client,
admin,
):
expected_projects = set(
regular_projects
+ (disabled_projects if include_disabled else [])
+ (system_projects if include_system else [])
+ (private_projects if is_admin else [])
)
ProjectLocale.objects.bulk_create(
[
ProjectLocale(project=p, locale=locale_a)
for p in expected_projects
if p.slug not in ("pontoon-intro", "tutorial", "terminology")
]
)
body = {
"query": """{{
locale (code: \"{locale_code}\") {{
localizations(includeDisabled: {include_disabled}, includeSystem: {include_system}) {{
project {{
slug,
disabled,
systemProject,
visibility
}}
}}
}}
}}""".format(
locale_code=locale_a.code,
include_disabled=str(include_disabled).lower(),
include_system=str(include_system).lower(),
)
}
if is_admin:
client.force_login(admin)
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert response.json() == {
"data": {
"locale": {
"localizations": [
{
"project": {
"slug": p.slug,
"visibility": p.visibility,
"systemProject": p.system_project,
"disabled": p.disabled,
}
}
for p in sorted(
expected_projects,
key=lambda p: p.project_locale.filter(locale=locale_a)[0].pk,
)
]
}
}
}
@pytest.mark.django_db
def test_projects_localizations_cyclic(client):
body = {
"query": """{
projects {
localizations {
locale {
localizations {
totalStrings
}
}
}
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert b"Cyclic queries are forbidden" in response.content
@pytest.mark.django_db
def test_project_localizations_cyclic(client):
body = {
"query": """{
project(slug: "pontoon-intro") {
localizations {
locale {
localizations {
totalStrings
}
}
}
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert b"Cyclic queries are forbidden" in response.content
@pytest.mark.django_db
def test_locales_localizations_cyclic(client):
body = {
"query": """{
locales {
localizations {
project {
localizations {
totalStrings
}
}
}
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert b"Cyclic queries are forbidden" in response.content
@pytest.mark.django_db
def test_locale_localizations_cyclic(client):
body = {
"query": """{
locale(code: "en-US") {
localizations {
project {
localizations {
totalStrings
}
}
}
}
}"""
}
response = client.get("/graphql", body, HTTP_ACCEPT="application/json")
assert response.status_code == 200
assert b"Cyclic queries are forbidden" in response.content
| bsd-3-clause |
pgmillon/ansible | lib/ansible/modules/network/fortios/fortios_wireless_controller_setting.py | 19 | 14023 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_wireless_controller_setting
short_description: VDOM wireless controller configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify wireless_controller feature and setting category.
Examples include all parameters and values; these need to be adjusted to data sources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
wireless_controller_setting:
description:
- VDOM wireless controller configuration.
default: null
suboptions:
account-id:
description:
- FortiCloud customer account ID.
country:
description:
- Country or region in which the FortiGate is located. The country determines the 802.11 bands and channels that are available.
choices:
- NA
- AL
- DZ
- AO
- AR
- AM
- AU
- AT
- AZ
- BH
- BD
- BB
- BY
- BE
- BZ
- BO
- BA
- BR
- BN
- BG
- KH
- CL
- CN
- CO
- CR
- HR
- CY
- CZ
- DK
- DO
- EC
- EG
- SV
- EE
- FI
- FR
- GE
- DE
- GR
- GL
- GD
- GU
- GT
- HT
- HN
- HK
- HU
- IS
- IN
- ID
- IR
- IE
- IL
- IT
- JM
- JO
- KZ
- KE
- KP
- KR
- KW
- LV
- LB
- LI
- LT
- LU
- MO
- MK
- MY
- MT
- MX
- MC
- MA
- MZ
- MM
- NP
- NL
- AN
- AW
- NZ
- NO
- OM
- PK
- PA
- PG
- PY
- PE
- PH
- PL
- PT
- PR
- QA
- RO
- RU
- RW
- SA
- RS
- ME
- SG
- SK
- SI
- ZA
- ES
- LK
- SE
- SD
- CH
- SY
- TW
- TZ
- TH
- TT
- TN
- TR
- AE
- UA
- GB
- US
- PS
- UY
- UZ
- VE
- VN
- YE
- ZB
- ZW
- JP
- CA
duplicate-ssid:
description:
- Enable/disable allowing Virtual Access Points (VAPs) to use the same SSID name in the same VDOM.
choices:
- enable
- disable
fapc-compatibility:
description:
- Enable/disable FAP-C series compatibility.
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: VDOM wireless controller configuration.
fortios_wireless_controller_setting:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
wireless_controller_setting:
account-id: "<your_own_value>"
country: "NA"
duplicate-ssid: "enable"
fapc-compatibility: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_wireless_controller_setting_data(json):
option_list = ['account-id', 'country', 'duplicate-ssid',
'fapc-compatibility']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def flatten_multilists_attributes(data):
multilist_attrs = []
for attr in multilist_attrs:
try:
path = "data['" + "']['".join(elem for elem in attr) + "']"
current_val = eval(path)
flattened_val = ' '.join(elem for elem in current_val)
exec(path + '= flattened_val')
except BaseException:
pass
return data
def wireless_controller_setting(data, fos):
vdom = data['vdom']
wireless_controller_setting_data = data['wireless_controller_setting']
flattened_data = flatten_multilists_attributes(wireless_controller_setting_data)
filtered_data = filter_wireless_controller_setting_data(flattened_data)
return fos.set('wireless-controller',
'setting',
data=filtered_data,
vdom=vdom)
def fortios_wireless_controller(data, fos):
login(data, fos)
if data['wireless_controller_setting']:
resp = wireless_controller_setting(data, fos)
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"wireless_controller_setting": {
"required": False, "type": "dict",
"options": {
"account-id": {"required": False, "type": "str"},
"country": {"required": False, "type": "str",
"choices": ["NA", "AL", "DZ",
"AO", "AR", "AM",
"AU", "AT", "AZ",
"BH", "BD", "BB",
"BY", "BE", "BZ",
"BO", "BA", "BR",
"BN", "BG", "KH",
"CL", "CN", "CO",
"CR", "HR", "CY",
"CZ", "DK", "DO",
"EC", "EG", "SV",
"EE", "FI", "FR",
"GE", "DE", "GR",
"GL", "GD", "GU",
"GT", "HT", "HN",
"HK", "HU", "IS",
"IN", "ID", "IR",
"IE", "IL", "IT",
"JM", "JO", "KZ",
"KE", "KP", "KR",
"KW", "LV", "LB",
"LI", "LT", "LU",
"MO", "MK", "MY",
"MT", "MX", "MC",
"MA", "MZ", "MM",
"NP", "NL", "AN",
"AW", "NZ", "NO",
"OM", "PK", "PA",
"PG", "PY", "PE",
"PH", "PL", "PT",
"PR", "QA", "RO",
"RU", "RW", "SA",
"RS", "ME", "SG",
"SK", "SI", "ZA",
"ES", "LK", "SE",
"SD", "CH", "SY",
"TW", "TZ", "TH",
"TT", "TN", "TR",
"AE", "UA", "GB",
"US", "PS", "UY",
"UZ", "VE", "VN",
"YE", "ZB", "ZW",
"JP", "CA"]},
"duplicate-ssid": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"fapc-compatibility": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
is_error, has_changed, result = fortios_wireless_controller(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
lalanza808/lalanza808.github.io | vendor/bundle/ruby/2.0.0/gems/pygments.rb-0.6.3/vendor/pygments-main/pygments/lexers/rdf.py | 52 | 3814 | # -*- coding: utf-8 -*-
"""
pygments.lexers.rdf
~~~~~~~~~~~~~~~~~~~
Lexers for semantic web and RDF query languages and markup.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, \
Whitespace, Name, Literal, Comment, Text
__all__ = ['SparqlLexer']
class SparqlLexer(RegexLexer):
"""
Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
.. versionadded:: 2.0
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.rq', '*.sparql']
mimetypes = ['application/sparql-query']
flags = re.IGNORECASE
tokens = {
'root': [
(r'\s+', Whitespace),
(r'(select|construct|describe|ask|where|filter|group\s+by|minus|'
r'distinct|reduced|from named|from|order\s+by|limit|'
r'offset|bindings|load|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
r'using named|using|graph|default|named|all|optional|service|'
r'silent|bind|union|not in|in|as|a)', Keyword),
(r'(prefix|base)(\s+)([a-z][a-z\d_\-]*)(\s*)(\:)',
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
Punctuation)),
(r'\?[a-z_][a-z\d_]*', Name.Variable),
(r'<[^>]+>', Name.Label),
(r'([a-z][a-z\d_\-]*)(\:)([a-z][a-z\d_\-]*)',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
(r'(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
r'isliteral|isnumeric|regex|substr|replace|exists|not exists|'
r'count|sum|min|max|avg|sample|group_concat|separator)\b',
Name.Function),
(r'(true|false)', Literal),
(r'[+\-]?\d*\.\d+', Number.Float),
            (r'[+\-]?\d*(?:\.\d+)?[eE][+\-]?\d+', Number.Float),
(r'[+\-]?\d+', Number.Integer),
(r'(\|\||&&|=|\*|\-|\+|/)', Operator),
(r'[(){}.;,:^]', Punctuation),
(r'#[^\n]+', Comment),
(r'"""', String, 'triple-double-quoted-string'),
(r'"', String, 'single-double-quoted-string'),
(r"'''", String, 'triple-single-quoted-string'),
(r"'", String, 'single-single-quoted-string'),
],
'triple-double-quoted-string': [
(r'"""', String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-double-quoted-string': [
(r'"', String, 'end-of-string'),
(r'[^"\\\n]+', String),
(r'\\', String, 'string-escape'),
],
'triple-single-quoted-string': [
(r"'''", String, 'end-of-string'),
(r'[^\\]+', String),
(r'\\', String, 'string-escape'),
],
'single-single-quoted-string': [
(r"'", String, 'end-of-string'),
(r"[^'\\\n]+", String),
(r'\\', String, 'string-escape'),
],
'string-escape': [
(r'.', String, '#pop'),
],
'end-of-string': [
            (r'(@)([a-z]+(?:-[a-z0-9]+)*)',
bygroups(Operator, Name.Function), '#pop:2'),
(r'\^\^', Operator, '#pop:2'),
default('#pop:2'),
],
}
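# Usage sketch (assumed, not part of the original file): the lexer plugs into
# the standard pygments API like any other lexer, e.g.
#
#   from pygments import highlight
#   from pygments.formatters import HtmlFormatter
#   html = highlight('SELECT ?s WHERE { ?s ?p ?o }', SparqlLexer(), HtmlFormatter())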
| mit |
hvy/chainer | chainer/optimizers/ada_delta.py | 3 | 3514 | import numpy
import chainer
from chainer.backends import cuda
from chainer import optimizer
from chainer import types
if types.TYPE_CHECKING:
import typing_extensions as tpe
class AdaDeltaHyperparameter(tpe.Protocol):
"""Protocol class for hyperparameter of Zeiler's ADADELTA.
This is only for PEP 544 compliant static type checkers.
"""
rho = None # type: float
eps = None # type: float
_default_hyperparam = optimizer.Hyperparameter() # type: AdaDeltaHyperparameter # NOQA
_default_hyperparam.rho = 0.95
_default_hyperparam.eps = 1e-6
class AdaDeltaRule(optimizer.UpdateRule):
"""Update rule of Zeiler's ADADELTA.
See :class:`~chainer.optimizers.AdaDelta` for the default values of the
hyperparameters.
Args:
parent_hyperparam (~chainer.optimizer.Hyperparameter): Hyperparameter
that provides the default values.
rho (float): Exponential decay rate of the first and second order
moments.
eps (float): Small value for the numerical stability.
"""
is_elementwise = True
_kernel = None
def __init__(self, parent_hyperparam=None, rho=None, eps=None):
super(AdaDeltaRule, self).__init__(
parent_hyperparam or _default_hyperparam)
if rho is not None:
self.hyperparam.rho = rho
if eps is not None:
self.hyperparam.eps = eps
def init_state(self, param):
with chainer.using_device(param.device):
xp = param.device.xp
self.state['msg'] = xp.zeros_like(param.data)
self.state['msdx'] = xp.zeros_like(param.data)
def update_core_cpu(self, param):
grad = param.grad
if grad is None:
return
msg, msdx = self.state['msg'], self.state['msdx']
rho = self.hyperparam.rho
eps = self.hyperparam.eps
msg *= rho
msg += (1 - rho) * grad * grad
dx = numpy.sqrt((msdx + eps) / (msg + eps)) * grad
msdx *= rho
msdx += (1 - rho) * dx * dx
param.data -= dx
def update_core_gpu(self, param):
grad = param.grad
if grad is None:
return
if AdaDeltaRule._kernel is None:
AdaDeltaRule._kernel = cuda.elementwise(
'T grad, T one_minus_rho, T eps',
'T param, T msg, T msdx',
'''msg = msg + one_minus_rho * (grad * grad - msg);
T dx = sqrt((msdx + eps) / (msg + eps)) * grad;
msdx += one_minus_rho * (dx * dx - msdx);
param -= dx;''',
'adadelta')
AdaDeltaRule._kernel(
grad, 1 - self.hyperparam.rho, self.hyperparam.eps, param.data,
self.state['msg'], self.state['msdx'])
class AdaDelta(optimizer.GradientMethod):
"""Zeiler's ADADELTA.
See: http://www.matthewzeiler.com/pubs/googleTR2012/googleTR2012.pdf
Args:
rho (float): Exponential decay rate of the first and second order
moments.
eps (float): Small value for the numerical stability.
"""
def __init__(self, rho=_default_hyperparam.rho,
eps=_default_hyperparam.eps):
super(AdaDelta, self).__init__()
self.hyperparam.rho = rho
self.hyperparam.eps = eps
rho = optimizer.HyperparameterProxy('rho')
eps = optimizer.HyperparameterProxy('eps')
def create_update_rule(self):
return AdaDeltaRule(self.hyperparam)
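# Usage sketch (assumed, not part of the original file): the optimizer is set
# up like any other chainer GradientMethod, e.g. for a chainer.Link `model`:
#
#   optimizer = AdaDelta(rho=0.95, eps=1e-6)
#   optimizer.setup(model)
#   model.cleargrads()
#   loss = compute_loss(model)   # hypothetical loss function
#   loss.backward()
#   optimizer.update()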
| mit |
erjohnso/ansible | lib/ansible/modules/network/cloudvision/cv_server_provision.py | 49 | 24405 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cv_server_provision
version_added: "2.4"
author: "EOS+ CS ([email protected]) (@mharista)"
short_description:
Provision server port by applying or removing template configuration to an
Arista CloudVision Portal configlet that is applied to a switch.
description:
- This module allows a server team to provision server network ports for
new servers without having to access Arista CVP or asking the network team
to do it for them. Provide the information for connecting to CVP, switch
rack, port the new server is connected to, optional vlan, and an action
and the module will apply the configuration to the switch port via CVP.
Actions are add (applies template config to port),
remove (defaults the interface config) and
show (returns the current port config).
options:
host:
description:
- The hostname or IP address of the CVP node being connected to.
required: true
port:
description:
- The port number to use when making API calls to the CVP node. This
will default to the default port for the specified protocol. Port 80
for http and port 443 for https.
default: None
protocol:
description:
- The protocol to use when making API calls to CVP. CVP defaults to https
and newer versions of CVP no longer support http.
default: https
choices: [https, http]
username:
description:
- The user that will be used to connect to CVP for making API calls.
required: true
password:
description:
- The password of the user that will be used to connect to CVP for API
calls.
required: true
server_name:
description:
      - The hostname or identifier for the server that is having its switch
port provisioned.
required: true
switch_name:
description:
      - The hostname of the switch that is being configured for the server being
provisioned.
required: true
switch_port:
description:
- The physical port number on the switch that the new server is
connected to.
required: true
port_vlan:
description:
- The vlan that should be applied to the port for this server.
This parameter is dependent on a proper template that supports single
vlan provisioning with it. If a port vlan is specified by the template
specified does not support this the module will exit out with no
changes. If a template is specified that requires a port vlan but no
port vlan is specified the module will exit out with no changes.
default: None
template:
description:
- A path to a Jinja formatted template file that contains the
configuration block that will be applied to the specified switch port.
This template will have variable fields replaced by the module before
being applied to the switch configuration.
required: true
action:
description:
- The action for the module to take. The actions are add, which applies
the specified template config to port, remove, which defaults the
specified interface configuration, and show, which will return the
current port configuration with no changes.
default: show
choices: [show, add, remove]
auto_run:
description:
- Flag that determines whether or not the module will execute the CVP
task spawned as a result of changes to a switch configlet. When an
add or remove action is taken which results in a change to a switch
configlet, CVP will spawn a task that needs to be executed for the
configuration to be applied to the switch. If this option is True then
the module will determined the task number created by the configuration
change, execute it and wait for the task to complete. If the option
is False then the task will remain in the Pending state in CVP for
a network administrator to review and execute.
default: False
type: bool
notes:
requirements: [Jinja2, cvprac >= 0.7.0]
'''
EXAMPLES = '''
- name: Get current configuration for interface Ethernet2
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: template_file.j2
action: show
- name: Remove existing configuration from interface Ethernet2. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: template_file.j2
action: remove
auto_run: True
- name: Add template configuration to interface Ethernet2. No VLAN. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: single_attached_trunk.j2
action: add
auto_run: True
- name: Add template with VLAN configuration to interface Ethernet2. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
port_vlan: 22
template: single_attached_vlan.j2
action: add
auto_run: True
'''
RETURN = '''
changed:
description: Signifies if a change was made to the configlet
returned: success
type: bool
sample: true
currentConfigBlock:
description: The current config block for the user specified interface
returned: when action = show
type: string
sample: |
interface Ethernet4
!
newConfigBlock:
description: The new config block for the user specified interface
returned: when action = add or remove
type: string
sample: |
interface Ethernet3
description example
no switchport
!
oldConfigBlock:
description: The current config block for the user specified interface
before any changes are made
returned: when action = add or remove
type: string
sample: |
interface Ethernet3
!
fullConfig:
description: The full config of the configlet after being updated
returned: when action = add or remove
type: string
sample: |
!
interface Ethernet3
!
interface Ethernet4
!
updateConfigletResponse:
description: Response returned from CVP when configlet update is triggered
returned: when action = add or remove and configuration changes
type: string
sample: "Configlet veos1-server successfully updated and task initiated."
portConfigurable:
description: Signifies if the user specified port has an entry in the
configlet that Ansible has access to
returned: success
type: bool
sample: true
switchConfigurable:
description: Signifies if the user specified switch has a configlet
applied to it that CVP is allowed to edit
returned: success
type: bool
sample: true
switchInfo:
description: Information from CVP describing the switch being configured
returned: success
type: dictionary
sample: {"architecture": "i386",
"bootupTimeStamp": 1491264298.21,
"complianceCode": "0000",
"complianceIndication": "NONE",
"deviceInfo": "Registered",
"deviceStatus": "Registered",
"fqdn": "veos1",
"hardwareRevision": "",
"internalBuildId": "12-12",
"internalVersion": "4.17.1F-11111.4171F",
"ipAddress": "192.168.1.20",
"isDANZEnabled": "no",
"isMLAGEnabled": "no",
"key": "00:50:56:5d:e5:e0",
"lastSyncUp": 1496432895799,
"memFree": 472976,
"memTotal": 1893460,
"modelName": "vEOS",
"parentContainerId": "container_13_5776759195930",
"serialNumber": "",
"systemMacAddress": "00:50:56:5d:e5:e0",
"taskIdList": [],
"tempAction": null,
"type": "netelement",
"unAuthorized": false,
"version": "4.17.1F",
"ztpMode": "false"}
taskCompleted:
description: Signifies if the task created and executed has completed successfully
returned: when action = add or remove, and auto_run = true,
and configuration changes
type: bool
sample: true
taskCreated:
description: Signifies if a task was created due to configlet changes
returned: when action = add or remove, and auto_run = true or false,
and configuration changes
type: bool
sample: true
taskExecuted:
description: Signifies if the automation executed the spawned task
returned: when action = add or remove, and auto_run = true,
and configuration changes
type: bool
sample: true
taskId:
description: The task ID created by CVP because of changes to configlet
returned: when action = add or remove, and auto_run = true or false,
and configuration changes
type: string
sample: "500"
'''
import re
import time
from ansible.module_utils.basic import AnsibleModule
try:
import jinja2
from jinja2 import meta
HAS_JINJA2 = True
except ImportError:
HAS_JINJA2 = False
try:
from cvprac.cvp_client import CvpClient
from cvprac.cvp_client_errors import CvpLoginError, CvpApiError
HAS_CVPRAC = True
except ImportError:
HAS_CVPRAC = False
def connect(module):
''' Connects to CVP device using user provided credentials from playbook.
:param module: Ansible module with parameters and client connection.
:return: CvpClient object with connection instantiated.
'''
client = CvpClient()
try:
client.connect([module.params['host']],
module.params['username'],
module.params['password'],
protocol=module.params['protocol'],
port=module.params['port'])
except CvpLoginError as e:
module.fail_json(msg=str(e))
return client
def switch_info(module):
''' Get dictionary of switch info from CVP.
:param module: Ansible module with parameters and client connection.
:return: Dict of switch info from CVP or exit with failure if no
info for device is found.
'''
switch_name = module.params['switch_name']
switch_info = module.client.api.get_device_by_name(switch_name)
if not switch_info:
module.fail_json(msg=str("Device with name '%s' does not exist."
% switch_name))
return switch_info
def switch_in_compliance(module, sw_info):
''' Check if switch is currently in compliance.
:param module: Ansible module with parameters and client connection.
:param sw_info: Dict of switch info.
:return: Nothing or exit with failure if device is not in compliance.
'''
compliance = module.client.api.check_compliance(sw_info['key'],
sw_info['type'])
if compliance['complianceCode'] != '0000':
module.fail_json(msg=str('Switch %s is not in compliance. Returned'
' compliance code %s.'
% (sw_info['fqdn'],
compliance['complianceCode'])))
def server_configurable_configlet(module, sw_info):
''' Check CVP that the user specified switch has a configlet assigned to
it that Ansible is allowed to edit.
:param module: Ansible module with parameters and client connection.
:param sw_info: Dict of switch info.
:return: Dict of configlet information or None.
'''
configurable_configlet = None
configlet_name = module.params['switch_name'] + '-server'
switch_configlets = module.client.api.get_configlets_by_device_id(
sw_info['key'])
for configlet in switch_configlets:
if configlet['name'] == configlet_name:
configurable_configlet = configlet
return configurable_configlet
def port_configurable(module, configlet):
''' Check configlet if the user specified port has a configuration entry
in the configlet to determine if Ansible is allowed to configure the
port on this switch.
:param module: Ansible module with parameters and client connection.
:param configlet: Dict of configlet info.
:return: True or False.
'''
configurable = False
regex = r'^interface Ethernet%s' % module.params['switch_port']
for config_line in configlet['config'].split('\n'):
if re.match(regex, config_line):
configurable = True
return configurable
def configlet_action(module, configlet):
''' Take appropriate action based on current state of device and user
requested action.
Return current config block for specified port if action is show.
If action is add or remove make the appropriate changes to the
configlet and return the associated information.
:param module: Ansible module with parameters and client connection.
:param configlet: Dict of configlet info.
:return: Dict of information to updated results with.
'''
result = dict()
existing_config = current_config(module, configlet['config'])
if module.params['action'] == 'show':
result['currentConfigBlock'] = existing_config
return result
elif module.params['action'] == 'add':
result['newConfigBlock'] = config_from_template(module)
elif module.params['action'] == 'remove':
result['newConfigBlock'] = ('interface Ethernet%s\n!'
% module.params['switch_port'])
result['oldConfigBlock'] = existing_config
result['fullConfig'] = updated_configlet_content(module,
configlet['config'],
result['newConfigBlock'])
resp = module.client.api.update_configlet(result['fullConfig'],
configlet['key'],
configlet['name'])
if 'data' in resp:
result['updateConfigletResponse'] = resp['data']
if 'task' in resp['data']:
result['changed'] = True
result['taskCreated'] = True
return result
def current_config(module, config):
''' Parse the full port configuration for the user specified port out of
the full configlet configuration and return as a string.
:param module: Ansible module with parameters and client connection.
:param config: Full config to parse specific port config from.
:return: String of current config block for user specified port.
'''
regex = r'^interface Ethernet%s' % module.params['switch_port']
match = re.search(regex, config, re.M)
if not match:
module.fail_json(msg=str('interface section not found - %s'
% config))
block_start, line_end = match.regs[0]
match = re.search(r'!', config[line_end:], re.M)
if not match:
return config[block_start:]
_, block_end = match.regs[0]
block_end = line_end + block_end
return config[block_start:block_end]
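# Worked example (illustrative, not part of the original module): with
# switch_port == '3' and a configlet containing
#
#   interface Ethernet3
#      description server A
#   !
#   interface Ethernet4
#   !
#
# current_config() returns the block from 'interface Ethernet3' up to and
# including the first '!' that follows it.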
def valid_template(port, template):
''' Test if the user provided Jinja template is valid.
:param port: User specified port.
:param template: Contents of Jinja template.
:return: True or False
'''
valid = True
regex = r'^interface Ethernet%s' % port
match = re.match(regex, template, re.M)
if not match:
valid = False
return valid
def config_from_template(module):
''' Load the Jinja template and apply user provided parameters in necessary
places. Fail if template is not found. Fail if rendered template does
not reference the correct port. Fail if the template requires a VLAN
but the user did not provide one with the port_vlan parameter.
:param module: Ansible module with parameters and client connection.
:return: String of Jinja template rendered with parameters or exit with
failure.
'''
template_loader = jinja2.FileSystemLoader('./templates')
env = jinja2.Environment(loader=template_loader,
undefined=jinja2.DebugUndefined)
template = env.get_template(module.params['template'])
if not template:
module.fail_json(msg=str('Could not find template - %s'
% module.params['template']))
data = {'switch_port': module.params['switch_port'],
'server_name': module.params['server_name']}
temp_source = env.loader.get_source(env, module.params['template'])[0]
parsed_content = env.parse(temp_source)
temp_vars = list(meta.find_undeclared_variables(parsed_content))
if 'port_vlan' in temp_vars:
if module.params['port_vlan']:
data['port_vlan'] = module.params['port_vlan']
else:
module.fail_json(msg=str('Template %s requires a vlan. Please'
' re-run with vlan number provided.'
% module.params['template']))
template = template.render(data)
if not valid_template(module.params['switch_port'], template):
module.fail_json(msg=str('Template content does not configure proper'
' interface - %s' % template))
return template
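# Hypothetical template sketch (assumption, not part of the original module):
# a minimal single_attached_vlan.j2 that satisfies valid_template() and the
# port_vlan check above could look like:
#
#   interface Ethernet{{ switch_port }}
#      description {{ server_name }}
#      switchport access vlan {{ port_vlan }}
#   !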
def updated_configlet_content(module, existing_config, new_config):
''' Update the configlet configuration with the new section for the port
specified by the user.
:param module: Ansible module with parameters and client connection.
:param existing_config: String of current configlet configuration.
:param new_config: String of configuration for user specified port to
replace in the existing config.
:return: String of the full updated configuration.
'''
regex = r'^interface Ethernet%s' % module.params['switch_port']
match = re.search(regex, existing_config, re.M)
if not match:
module.fail_json(msg=str('interface section not found - %s'
% existing_config))
block_start, line_end = match.regs[0]
updated_config = existing_config[:block_start] + new_config
match = re.search(r'!\n', existing_config[line_end:], re.M)
if match:
_, block_end = match.regs[0]
block_end = line_end + block_end
updated_config += '\n%s' % existing_config[block_end:]
return updated_config
def configlet_update_task(module):
''' Poll device info of switch from CVP up to three times to see if the
configlet updates have spawned a task. It sometimes takes a second for
the task to be spawned after configlet updates. If a task is found
return the task ID. Otherwise return None.
:param module: Ansible module with parameters and client connection.
:return: Task ID or None.
'''
for num in range(3):
device_info = switch_info(module)
if (('taskIdList' in device_info) and
(len(device_info['taskIdList']) > 0)):
for task in device_info['taskIdList']:
if ('Configlet Assign' in task['description'] and
task['data']['WORKFLOW_ACTION'] == 'Configlet Push'):
return task['workOrderId']
time.sleep(1)
return None
def wait_for_task_completion(module, task):
''' Poll CVP for the executed task to complete. There is currently no
timeout. Exits with failure if task status is Failed or Cancelled.
:param module: Ansible module with parameters and client connection.
:param task: Task ID to poll for completion.
:return: True or exit with failure if task is cancelled or fails.
'''
task_complete = False
while not task_complete:
task_info = module.client.api.get_task_by_id(task)
task_status = task_info['workOrderUserDefinedStatus']
if task_status == 'Completed':
return True
elif task_status in ['Failed', 'Cancelled']:
module.fail_json(msg=str('Task %s has reported status %s. Please'
' consult the CVP admins for more'
' information.' % (task, task_status)))
time.sleep(2)
def main():
""" main entry point for module execution
"""
argument_spec = dict(
host=dict(required=True),
port=dict(required=False, default=None),
protocol=dict(default='https', choices=['http', 'https']),
username=dict(required=True),
password=dict(required=True, no_log=True),
server_name=dict(required=True),
switch_name=dict(required=True),
switch_port=dict(required=True),
port_vlan=dict(required=False, default=None),
        template=dict(required=True),
action=dict(default='show', choices=['show', 'add', 'remove']),
auto_run=dict(type='bool', default=False))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=False)
if not HAS_JINJA2:
module.fail_json(msg='The Jinja2 python module is required.')
if not HAS_CVPRAC:
module.fail_json(msg='The cvprac python module is required.')
result = dict(changed=False)
module.client = connect(module)
try:
result['switchInfo'] = switch_info(module)
if module.params['action'] in ['add', 'remove']:
switch_in_compliance(module, result['switchInfo'])
switch_configlet = server_configurable_configlet(module,
result['switchInfo'])
if not switch_configlet:
module.fail_json(msg=str('Switch %s has no configurable server'
' ports.' % module.params['switch_name']))
result['switchConfigurable'] = True
if not port_configurable(module, switch_configlet):
module.fail_json(msg=str('Port %s is not configurable as a server'
' port on switch %s.'
% (module.params['switch_port'],
module.params['switch_name'])))
result['portConfigurable'] = True
result['taskCreated'] = False
result['taskExecuted'] = False
result['taskCompleted'] = False
result.update(configlet_action(module, switch_configlet))
if module.params['auto_run'] and module.params['action'] != 'show':
task_id = configlet_update_task(module)
if task_id:
result['taskId'] = task_id
note = ('Update config on %s with %s action from Ansible.'
% (module.params['switch_name'],
module.params['action']))
module.client.api.add_note_to_task(task_id, note)
module.client.api.execute_task(task_id)
result['taskExecuted'] = True
task_completed = wait_for_task_completion(module, task_id)
if task_completed:
result['taskCompleted'] = True
else:
result['taskCreated'] = False
except CvpApiError as e:
module.fail_json(msg=str(e))
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
resamsel/dbmanagr | src/tests/model/test_table.py | 2 | 1843 | # -*- coding: utf-8 -*-
#
# Copyright © 2014 René Samselnig
#
# This file is part of Database Navigator.
#
# Database Navigator is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Database Navigator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Database Navigator. If not, see <http://www.gnu.org/licenses/>.
#
from tests.testcase import DbTestCase
from dbmanagr.model import table
class TableTestCase(DbTestCase):
def test_autocomplete_(self):
"""Tests the table.autocomplete_ method"""
con = DbTestCase.connection
user = con.table('user')
id = user.column('id')
self.assertEqual(
'dbmanagr-c.sqlite/user/?[BLOB]',
user.autocomplete_(id, buffer('Blub', 0, 4)))
def test_subtitle(self):
"""Tests the table.subitle method"""
con = DbTestCase.connection
user = con.table('user')
self.assertEqual(
'Table',
user.subtitle())
user.owner = 'me'
user.size = '123 kB'
self.assertEqual(
'Owner: me (123 kB)',
user.subtitle())
def test_table(self):
"""Tests the table.Table class"""
con = DbTestCase.connection
user = con.table('user')
self.assertEqual(
len(user.columns()),
len(table.Table(name=user.name, columns=user.columns()).columns())
)
| gpl-3.0 |
yangming85/lettuce | tests/integration/lib/Django-1.2.5/django/db/models/related.py | 81 | 2269 | class BoundRelatedObject(object):
def __init__(self, related_object, field_mapping, original):
self.relation = related_object
self.field_mappings = field_mapping[related_object.name]
def template_name(self):
raise NotImplementedError
def __repr__(self):
return repr(self.__dict__)
class RelatedObject(object):
def __init__(self, parent_model, model, field):
self.parent_model = parent_model
self.model = model
self.opts = model._meta
self.field = field
self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name)
self.var_name = self.opts.object_name.lower()
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
# Defer to the actual field definition for db prep
return self.field.get_db_prep_lookup(lookup_type, value,
connection=connection, prepared=prepared)
def editable_fields(self):
"Get the fields in this class that should be edited inline."
return [f for f in self.opts.fields + self.opts.many_to_many if f.editable and f != self.field]
def __repr__(self):
return "<RelatedObject: %s related to %s>" % (self.name, self.field.name)
def bind(self, field_mapping, original, bound_related_object_class=BoundRelatedObject):
return bound_related_object_class(self, field_mapping, original)
def get_accessor_name(self):
# This method encapsulates the logic that decides what name to give an
# accessor descriptor that retrieves related many-to-one or
# many-to-many objects. It uses the lower-cased object_name + "_set",
# but this can be overridden with the "related_name" option.
if self.field.rel.multiple:
# If this is a symmetrical m2m relation on self, there is no reverse accessor.
if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model:
return None
return self.field.rel.related_name or (self.opts.object_name.lower() + '_set')
else:
return self.field.rel.related_name or (self.opts.object_name.lower())
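        # Example (editorial annotation, not in the original source): for a
        # ForeignKey from Comment to Article with no related_name, the reverse
        # accessor on Article is "comment_set"; for a OneToOneField it is
        # simply "comment".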
def get_cache_name(self):
return "_%s_cache" % self.get_accessor_name()
| gpl-3.0 |
broferek/ansible | lib/ansible/modules/cloud/google/gcp_spanner_instance.py | 13 | 13293 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_spanner_instance
description:
- An isolated set of Cloud Spanner resources on which databases can be hosted.
short_description: Creates a GCP Instance
version_added: '2.7'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
type: str
name:
description:
- A unique identifier for the instance, which cannot be changed after the instance
is created. The name must be between 6 and 30 characters in length.
required: true
type: str
config:
description:
- The name of the instance's configuration (similar but not quite the same as
      a region) which defines the geographic placement and replication of
your databases in this instance. It determines where your data is stored. Values
are typically of the form `regional-europe-west1` , `us-central` etc.
- In order to obtain a valid list please consult the [Configuration section of
the docs](U(https://cloud.google.com/spanner/docs/instances)).
required: true
type: str
display_name:
description:
- The descriptive name for this instance as it appears in UIs. Must be unique
per project and between 4 and 30 characters in length.
required: true
type: str
node_count:
description:
- The number of nodes allocated to this instance.
required: false
default: '1'
type: int
labels:
description:
- 'An object containing a list of "key": value pairs.'
- 'Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.'
required: false
type: dict
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- 'API Reference: U(https://cloud.google.com/spanner/docs/reference/rest/v1/projects.instances)'
- 'Official Documentation: U(https://cloud.google.com/spanner/)'
- for authentication, you can set service_account_file using the C(gcp_service_account_file)
env variable.
- for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: create a instance
gcp_spanner_instance:
name: testinstance
display_name: My Spanner Instance
node_count: 2
labels:
cost_center: ti-1700004
config: regional-us-central1
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
name:
description:
- A unique identifier for the instance, which cannot be changed after the instance
is created. The name must be between 6 and 30 characters in length.
returned: success
type: str
config:
description:
- The name of the instance's configuration (similar but not quite the same as a
    region) which defines the geographic placement and replication of your
databases in this instance. It determines where your data is stored. Values are
typically of the form `regional-europe-west1` , `us-central` etc.
- In order to obtain a valid list please consult the [Configuration section of the
docs](U(https://cloud.google.com/spanner/docs/instances)).
returned: success
type: str
displayName:
description:
- The descriptive name for this instance as it appears in UIs. Must be unique per
project and between 4 and 30 characters in length.
returned: success
type: str
nodeCount:
description:
- The number of nodes allocated to this instance.
returned: success
type: int
labels:
description:
- 'An object containing a list of "key": value pairs.'
- 'Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.'
returned: success
type: dict
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, type='str'),
config=dict(required=True, type='str'),
display_name=dict(required=True, type='str'),
node_count=dict(default=1, type='int'),
labels=dict(type='dict'),
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/spanner.admin']
state = module.params['state']
fetch = fetch_resource(module, self_link(module))
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
update(module, self_link(module))
fetch = fetch_resource(module, self_link(module))
changed = True
else:
delete(module, self_link(module))
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module))
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link):
auth = GcpSession(module, 'spanner')
return wait_for_operation(module, auth.post(link, resource_to_create(module)))
def update(module, link):
module.fail_json(msg="Spanner objects can't be updated to ensure data safety")
def delete(module, link):
auth = GcpSession(module, 'spanner')
return return_if_object(module, auth.delete(link))
def resource_to_request(module):
request = {
u'name': module.params.get('name'),
u'config': module.params.get('config'),
u'displayName': module.params.get('display_name'),
u'nodeCount': module.params.get('node_count'),
u'labels': module.params.get('labels'),
}
return_vals = {}
for k, v in request.items():
if v or v is False:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, allow_not_found=True):
auth = GcpSession(module, 'spanner')
return return_if_object(module, auth.get(link), allow_not_found)
def self_link(module):
return "https://spanner.googleapis.com/v1/projects/{project}/instances/{name}".format(**module.params)
def collection(module):
return "https://spanner.googleapis.com/v1/projects/{project}/instances".format(**module.params)
def return_if_object(module, response, allow_not_found=False):
# If not found, return nothing.
if allow_not_found and response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError):
module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
result = decode_response(result, module)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
request = decode_response(request, module)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'name': module.params.get('name'),
u'config': response.get(u'config'),
u'displayName': response.get(u'displayName'),
u'nodeCount': response.get(u'nodeCount'),
u'labels': response.get(u'labels'),
}
def async_op_url(module, extra_data=None):
if extra_data is None:
extra_data = {}
url = "https://spanner.googleapis.com/v1/{op_id}"
combined = extra_data.copy()
combined.update(module.params)
return url.format(**combined)
def wait_for_operation(module, response):
op_result = return_if_object(module, response)
if op_result is None:
return {}
status = navigate_hash(op_result, ['done'])
wait_done = wait_for_completion(status, op_result, module)
raise_if_errors(wait_done, ['error'], module)
return navigate_hash(wait_done, ['response'])
def wait_for_completion(status, op_result, module):
op_id = navigate_hash(op_result, ['name'])
op_uri = async_op_url(module, {'op_id': op_id})
while not status:
raise_if_errors(op_result, ['error'], module)
time.sleep(1.0)
op_result = fetch_resource(module, op_uri, False)
status = navigate_hash(op_result, ['done'])
return op_result
def raise_if_errors(response, err_path, module):
errors = navigate_hash(response, err_path)
if errors is not None:
module.fail_json(msg=errors)
def resource_to_create(module):
instance = resource_to_request(module)
instance['name'] = "projects/{0}/instances/{1}".format(module.params['project'], module.params['name'])
instance['config'] = "projects/{0}/instanceConfigs/{1}".format(module.params['project'], instance['config'])
return {'instanceId': module.params['name'], 'instance': instance}
def resource_to_update(module):
instance = resource_to_request(module)
instance['name'] = "projects/{0}/instances/{1}".format(module.params['project'], module.params['name'])
instance['config'] = "projects/{0}/instanceConfigs/{1}".format(module.params['project'], instance['config'])
return {'instance': instance, 'fieldMask': "'name' ,'config' ,'displayName' ,'nodeCount' ,'labels'"}
def decode_response(response, module):
if not response:
return response
if '/operations/' in response['name']:
return response
response['name'] = response['name'].split('/')[-1]
response['config'] = response['config'].split('/')[-1]
return response
if __name__ == '__main__':
main()
| gpl-3.0 |
fvincenzo/mbed-os | tools/settings.py | 10 | 3231 | """
mbed SDK
Copyright (c) 2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os import getenv
from os.path import join, abspath, dirname, exists
import logging
ROOT = abspath(join(dirname(__file__), ".."))
##############################################################################
# Toolchains and Build System Settings
##############################################################################
BUILD_DIR = abspath(join(ROOT, ".build"))
# ARM Compiler 5
ARM_PATH = "C:/Keil_v5/ARM/ARMCC"
# GCC ARM
GCC_ARM_PATH = ""
# GCC CodeRed
GCC_CR_PATH = "C:/code_red/RedSuite_4.2.0_349/redsuite/Tools/bin"
# IAR
IAR_PATH = "C:/Program Files (x86)/IAR Systems/Embedded Workbench 7.3/arm"
# Goanna static analyser. Please overload it in mbed_settings.py
GOANNA_PATH = "c:/Program Files (x86)/RedLizards/Goanna Central 3.2.3/bin"
# cppcheck path (command) and output message format
CPPCHECK_CMD = ["cppcheck", "--enable=all"]
CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
BUILD_OPTIONS = []
# mbed.org username
MBED_ORG_USER = ""
CLI_COLOR_MAP = {
"warning": "yellow",
"error" : "red"
}
##############################################################################
# User Settings (file)
##############################################################################
try:
# Allow to overwrite the default settings without the need to edit the
# settings file stored in the repository
from mbed_settings import *
except ImportError:
pass
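# Example override sketch (assumption, not part of the original file): a local
# mbed_settings.py on the Python path (typically the mbed-os root) can redefine
# any of the paths above, e.g.
#
#   ARM_PATH = "C:/Program Files/ARM_Compiler_5.06u3/bin"
#   GCC_ARM_PATH = "C:/gcc-arm-none-eabi/bin"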
##############################################################################
# User Settings (env vars)
##############################################################################
_ENV_PATHS = ['ARM_PATH', 'GCC_ARM_PATH', 'GCC_CR_PATH', 'IAR_PATH']
for _n in _ENV_PATHS:
if getenv('MBED_'+_n):
if exists(getenv('MBED_'+_n)):
globals()[_n] = getenv('MBED_'+_n)
else:
print "WARNING: MBED_%s set as environment variable but doesn't exist" % _n
##############################################################################
# Test System Settings
##############################################################################
SERVER_PORT = 59432
SERVER_ADDRESS = "10.2.200.94"
LOCALHOST = "10.2.200.94"
MUTs = {
"1" : {"mcu": "LPC1768",
"port":"COM41", "disk":'E:\\',
"peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
},
"2": {"mcu": "LPC11U24",
"port":"COM42", "disk":'F:\\',
"peripherals": ["TMP102", "digital_loop", "port_loop", "SD"]
},
"3" : {"mcu": "KL25Z",
"port":"COM43", "disk":'G:\\',
"peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
},
}
| apache-2.0 |
mugwizaleon/PCRasterMapstacks | pcrastermapstackvisualisation.py | 1 | 17920 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
PcrasterMapstackVisualisation
A QGIS plugin
PCRaster Mapstack visualisation
-------------------
begin : 2014-06-28
copyright : (C) 2014 by Leon
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import qgis.utils
# Initialize Qt resources from file resources.py
import resources_rc
# Import the code for the dialog
from pcrastermapstackvisualisationdialog import PcrasterMapstackVisualisationDialog
from Animationdialog import AnimationDialog
from TSSvisualizationdialog import TSSVisualizationDialog
# Import modules
import os.path
import os, glob
import time
import sys
import string
class PcrasterMapstackVisualisation:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value("locale/userLocale")[0:2]
localePath = os.path.join(self.plugin_dir, 'i18n', 'pcrastermapstackvisualisation_{}.qm'.format(locale))
if os.path.exists(localePath):
self.translator = QTranslator()
self.translator.load(localePath)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
# Create the dialog (after translation) and keep reference
self.dlg = PcrasterMapstackVisualisationDialog()
self.dlg2 = AnimationDialog()
self.dlg3 = TSSVisualizationDialog()
# Mapstack series visualization
QObject.connect( self.dlg.ui.pushButton_7, SIGNAL( "clicked()" ), self.DisplayTSSnames)
QObject.connect( self.dlg.ui.pushButton_6, SIGNAL( "clicked()" ), self.TSSgraphs)
QObject.connect( self.dlg.ui.btnBaseDir_3, SIGNAL( "clicked()" ), self.selectDir ) #link the button to the function of selecting the directory
        QObject.connect( self.dlg.ui.btnBaseDir_3, SIGNAL( "clicked()" ), self.loadMapStackCoreName ) #link the button to the function of loading the mapstack core names
QObject.connect( self.dlg.ui.pushButton_5, SIGNAL( "clicked()" ), self.actionStart)
QObject.connect( self.dlg2.ui.pushButton_2, SIGNAL( "clicked()" ), self.ActionAnim)
QObject.connect( self.dlg2.ui.pushButton_3, SIGNAL( "clicked()" ), self.actionNext)
QObject.connect( self.dlg2.ui.pushButton, SIGNAL( "clicked()" ), self.actionPrevious)
QObject.connect( self.dlg2.ui.pushButton_4, SIGNAL( "clicked()" ), self.actionStart)
QObject.connect( self.dlg2.ui.pushButton_5, SIGNAL( "clicked()" ), self.actionLast)
QObject.connect(self.dlg.ui.comboBox, SIGNAL("currentIndexChanged (const QString&)"), self.changelist) #Change the list of mapstacks
#Close dialogs widgets
QObject.connect( self.dlg.ui.pushButton, SIGNAL( "clicked()" ), self.close1)
QObject.connect( self.dlg3.ui.pushButton, SIGNAL( "clicked()" ), self.close2)
QObject.connect( self.dlg2.ui.pushButton_6, SIGNAL( "clicked()" ), self.close3)
def initGui(self):
# Create action that will start plugin configuration
self.action = QAction(
QIcon(":/plugins/pcrastermapstackvisualisation/Myicon.png"),
u"Mapstacks_visualisation", self.iface.mainWindow())
# connect the action to the run method
self.action.triggered.connect(self.run)
# Add toolbar button and menu item
self.iface.addToolBarIcon(self.action)
self.iface.addPluginToMenu(u"&PCRaster Mapstacks Viewer", self.action)
self.iface.addPluginToRasterMenu(u"&PCRaster Mapstacks Viewer", self.action)
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu(u"&PCRaster Time series Viewer", self.action)
self.iface.removeToolBarIcon(self.action)
# run method that performs all the real work
def run(self):
# show the dialog
self.dlg.show()
# Run the dialog event loop
result = self.dlg.exec_()
# See if OK was pressed
def close1(self):
self.dlg.close()
def TSSview(self):
self.dlg3.move(10, 300)
self.dlg3.show()# show the dialog
def close2(self):
self.dlg3.close()
self.dlg.show()
def AnimationDlg (self):
self.dlg2.move(200, 200)
self.dlg2.show()# show the dialog
def close3(self):
self.dlg2.close()
self.dlg.show()
# Selecting the directory containg files
def selectDir( self ):
self.dlg.hide()
settings = QSettings()
path = QFileDialog.getExistingDirectory( self.iface.mainWindow(), "Select a directory")
if path: self.dlg.ui.txtBaseDir2_5.setText( path )
self.dlg.show()
def actionRemove(self):
layers = self.iface.legendInterface().layers()
layer = qgis.utils.iface.activeLayer()
self.PrincipalLayer = layer.name()
for layer in layers :
if layer.name() == self.PrincipalLayer : pass
else : self.iface.legendInterface().moveLayer( layer, 0 )
self.iface.legendInterface().removeGroup(0)
def AddLayer(self, input):
layerPath = os.path.join(self.dataDir, input)
fileInfo = QFileInfo(layerPath)
baseName = fileInfo.baseName()
layer = QgsRasterLayer(layerPath, baseName)
uri = os.path.join(self.dataDir, 'MyFile.qml')
layer.loadNamedStyle(uri)
QgsMapLayerRegistry.instance().addMapLayer(layer)
def loadFiles(self, filename):
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
file_list = glob.glob(filename)
for index in file_list:
list = index.split(".")
if (len(list) < 2) :
file_list.remove(index)
for index in file_list:
if index.endswith(".tss"):
file_list.remove(index)
for index in file_list:
if index.endswith(".xml") or index.endswith(".aux.xml") :
file_list.remove(index)
for index in file_list:
if index.endswith(".tss"):
file_list.remove(index)
file_list.sort()
return file_list
def loadMapStackCoreName(self):
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
files= os.listdir(self.dataDir)
self.dlg.ui.comboBox.clear()
self.dlg.ui.comboBox_2.clear()
MyList=[]
MyList2 =[]
MyList3 = []
for index in files:
list = index.split(".")
if (len(list)==2) and (len(list[0])== 8) and (len(list[1])== 3) and (list[1].isdigit()):
MyList.append(index)
if index.endswith(".tss"):
MyList3.append(index)
for index in MyList:
list = index.split(".")
words = list[0].replace("0", "")
MyList2.append(words)
FinalList = []
for i in MyList2:
if i not in FinalList:
FinalList.append(i)
self.dlg.ui.comboBox.addItems(FinalList)
self.dlg.ui.comboBox_2.addItems(MyList3)
def DisplayTSSnames(self):
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
if not self.dataDir : pass
else:
os.chdir(self.dataDir )
if not self.dlg.ui.comboBox.currentText(): pass
else:
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
self.dlg.ui.listWidget.clear()
for index, file in enumerate(file_list):
self.dlg.ui.listWidget.addItem(file)
def changelist(self):
self.dlg.ui.listWidget.clear()
def ActionAnim(self):
self.actionRemove()
Group = self.iface.legendInterface().addGroup("group_foo")
import numpy
numpy.seterr(divide='ignore', invalid='ignore', over='ignore')
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
legend = self.iface.legendInterface()
self.dlg2.ui.pushButton_6.setEnabled(False)
for index, file in enumerate(file_list):
canvas = qgis.utils.iface.mapCanvas()
import Styling
Styling.style1(file_list[index], 'value', self.dataDir, file_list )
uri = os.path.join(self.dataDir, 'MyFile.qml')
self.iface.addRasterLayer(file, os.path.basename(str(file))).loadNamedStyle(uri)
canvas.refresh()
canvas.zoomToFullExtent()
rlayer = qgis.utils.iface.activeLayer()
legend.moveLayer( rlayer, 0 )
time.sleep(float(self.dlg2.ui.txtBaseDir2_5.text()))
self.dlg2.ui.pushButton_6.setEnabled(True)
def actionStart(self):
import Styling
self.dlg.hide()
self.iface.messageBar().clearWidgets ()
layers = self.iface.legendInterface().layers()
for layer in layers :
if self.iface.legendInterface().isLayerVisible(layer) : self.iface.legendInterface().setLayerVisible(layer, False)
import numpy
numpy.seterr(divide='ignore', invalid='ignore', over='ignore')
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
if not self.dataDir :
QMessageBox.information( self.iface.mainWindow(),"Info", "Please select a directory first")
self.dlg.show()
else :
os.chdir(self.dataDir )
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
if not self.dlg.ui.comboBox.currentText():
                QMessageBox.information( self.iface.mainWindow(),"Info", "There are no PCRaster mapstacks in this directory")
self.dlg.show()
# return
else:
self.AnimationDlg()
Styling.style1(filename, 'value', self.dataDir, file_list )
s = QSettings()
oldValidation = s.value( "/Projections/defaultBehaviour", "useGlobal" )
s.setValue( "/Projections/defaultBehaviour", "useGlobal" )
self.AddLayer(str(file_list[0]))
s.setValue( "/Projections/defaultBehaviour", oldValidation )
layer = qgis.utils.iface.activeLayer()
# self.PrincipalLayer = layer.name()
# print self.PrincipalLayer
self.iface.legendInterface().setLayerExpanded(layer, True)
def actionLast(self):
self.actionRemove()
self.dlg.hide()
self.AnimationDlg()
self.iface.messageBar().clearWidgets ()
layers = self.iface.legendInterface().layers()
for layer in layers :
if self.iface.legendInterface().isLayerVisible(layer) : self.iface.legendInterface().setLayerVisible(layer, False)
import numpy
numpy.seterr(divide='ignore', invalid='ignore', over='ignore')
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
index = len(file_list) - 1
canvas = qgis.utils.iface.mapCanvas()
import Styling
Styling.style1(file_list[index], 'value', self.dataDir, file_list )
uri = os.path.join(self.dataDir, 'MyFile.qml')
self.iface.addRasterLayer(file_list[index], os.path.basename(str(file_list[index]))).loadNamedStyle(uri)
canvas.refresh()
canvas.zoomToFullExtent()
def actionNext(self):
self.actionRemove()
self.iface.messageBar().clearWidgets ()
import numpy
numpy.seterr(divide='ignore', invalid='ignore', over='ignore')
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
layer = qgis.utils.iface.activeLayer()
self.PrincipalLayer = layer.name()
if layer is None :
index = 0
elif layer.name() not in file_list:
index = 0
else :
counter = file_list.index(layer.name())
index = counter + 1
if counter == len(file_list) - 1 :
layers = self.iface.legendInterface().layers()
self.iface.legendInterface().addGroup("group_foo")
for layer in layers :
if layer.name() == self.PrincipalLayer : pass
elif self.iface.legendInterface().isLayerVisible(layer) : self.iface.legendInterface().moveLayer( layer, 0 )
index = 0
canvas = qgis.utils.iface.mapCanvas()
import Styling
Styling.style1(file_list[index], 'value', self.dataDir, file_list )
uri = os.path.join(self.dataDir, 'MyFile.qml')
self.iface.addRasterLayer(file_list[index], os.path.basename(str(file_list[index]))).loadNamedStyle(uri)
canvas.refresh()
canvas.zoomToFullExtent()
def actionPrevious(self):
self.actionRemove()
self.iface.messageBar().clearWidgets ()
import numpy
numpy.seterr(divide='ignore', invalid='ignore', over='ignore')
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
filename = '*'+str(self.dlg.ui.comboBox.currentText())+'*'
file_list = self.loadFiles(filename)
layer = qgis.utils.iface.activeLayer()
self.PrincipalLayer = layer.name() if layer is not None else None
if layer is None :
index = len(file_list) - 1
elif layer.name() not in file_list:
index = len(file_list) - 1
else :
counter = file_list.index(layer.name())
index = counter - 1
if counter == 0 :
layers = self.iface.legendInterface().layers()
self.iface.legendInterface().addGroup("group_foo")
for layer in layers :
if layer.name() == self.PrincipalLayer : pass
elif self.iface.legendInterface().isLayerVisible(layer) : self.iface.legendInterface().moveLayer( layer, 0 )
index = len(file_list) - 1
canvas = qgis.utils.iface.mapCanvas()
import Styling
Styling.style1(file_list[index], 'value', self.dataDir, file_list )
uri = os.path.join(self.dataDir, 'MyFile.qml')
self.iface.addRasterLayer(file_list[index], os.path.basename(str(file_list[index]))).loadNamedStyle(uri)
canvas.refresh()
canvas.zoomToFullExtent()
def TSSgraphs(self):  # with matplotlib
self.dlg.hide()
filename = str(self.dlg.ui.comboBox_2.currentText())
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
file = os.path.join (self.dataDir, filename)
if os.path.isfile(file):
self.TSSview()
self.dataDir = str(self.dlg.ui.txtBaseDir2_5.text())
os.chdir(self.dataDir )
stripped = []
stripper = open(filename, 'r')
st_lines = stripper.readlines()[4:]
stripper.close()
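# readlines()[4:] above skips what the plugin treats as the four header
# lines of a PCRaster .tss file; each remaining line is a
# "timestep value" pair that gets re-joined with single spaces below.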
for lines in st_lines:
stripped_line = " ".join(lines.split())
stripped.append(stripped_line)
data = "\n".join(stripped)
data = data.split('\n')
values = []
dates = []
years = 0
yl = []
for row in data:
x, y = row.split()
values.append(float(y))
year = (int(x.translate(string.maketrans("\n\t\r", " ")).strip()))
dates.append(year)
years = years +1
yl.append(years)
xlabels = yl
self.dlg3.ui.widget.canvas.ax.clear()
self.dlg3.ui.widget.canvas.ax.set_position([0.155,0.15,0.82,0.75])
self.dlg3.ui.widget.canvas.ax.set_title(filename)
self.dlg3.ui.widget.canvas.ax.set_xlabel ('Time step')
self.dlg3.ui.widget.canvas.ax.set_ylabel ('Values')
self.dlg3.ui.widget.canvas.ax.plot(dates, values)
self.dlg3.ui.widget.canvas.ax.set_xticks(dates)
self.dlg3.ui.widget.canvas.ax.set_xticklabels(xlabels, rotation=30, fontsize=10)
self.dlg3.ui.widget.canvas.draw()
else:
QMessageBox.information( self.iface.mainWindow(),"Info", "There are no PCRaster timeseries in this directory")
self.dlg.show()
| apache-2.0 |
adamtiger/tensorflow | tensorflow/contrib/receptive_field/python/util/graph_compute_order.py | 25 | 2454 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library to compute order of computations in a graph.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
class GraphDefHelper(object):
"""Helper class to collect node names and definitions.
Example:
b = GraphDefHelper(graph_def)
# Prints node that produces given output.
print b.output_of['conv/foo/bar']
"""
def __init__(self, gd):
self.output_of = {}
for each in gd.node:
self.output_of[each.name] = each
# pylint: disable=invalid-name
_NodeEntry = collections.namedtuple('NodeEntry', field_names=['order', 'node'])
def _get_computed_nodes(g, output, seen):
"""Traverses the graph in topological order.
Args:
g: GraphDefHelper object.
output: current node.
seen: map of nodes we've already traversed.
Returns:
order in topological sort for 'output'.
"""
if output in seen:
return seen[output].order
node_def = g.output_of.get(output, None)
if node_def is None:
seen[output] = _NodeEntry(0, None)
return 0
r = 0
for each in node_def.input:
# Parses name of input node.
if each.startswith('^'):
each = each[1:]
each = each.split(':')[0]
# Recursively computes ordering.
new_v = _get_computed_nodes(g, each, seen)
r = max(r, new_v + 1)
seen[output] = _NodeEntry(r, node_def)
return seen[output].order
def get_compute_order(graph_def):
"""Computes order of computation for a given graph.
Args:
graph_def: GraphDef object.
Returns:
map: name -> {order, node}
"""
helper = GraphDefHelper(graph_def)
seen = collections.defaultdict(_NodeEntry)
for each in graph_def.node:
_get_computed_nodes(helper, each.name, seen)
return seen
| apache-2.0 |
antmicro/linux-sunxi | tools/perf/util/setup.py | 766 | 1540 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPI')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
lumig242/Hue-Integration-with-CDAP | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Random/__init__.py | 105 | 1973 | # -*- coding: utf-8 -*-
#
# SelfTest/Random/__init__.py: Self-test for random number generation modules
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for random number generators"""
__revision__ = "$Id$"
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Random import Fortuna; tests += Fortuna.get_tests(config=config)
from Crypto.SelfTest.Random import OSRNG; tests += OSRNG.get_tests(config=config)
from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config)
from Crypto.SelfTest.Random import test_rpoolcompat; tests += test_rpoolcompat.get_tests(config=config)
from Crypto.SelfTest.Random import test__UserFriendlyRNG; tests += test__UserFriendlyRNG.get_tests(config=config)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| apache-2.0 |
frederickjoe/SEUTools-Alfred-Workflow | bs4/builder/_html5lib.py | 423 | 10647 | __all__ = [
'HTML5TreeBuilder',
]
import warnings
from bs4.builder import (
PERMISSIVE,
HTML,
HTML_5,
HTMLTreeBuilder,
)
from bs4.element import NamespacedAttribute
import html5lib
from html5lib.constants import namespaces
from bs4.element import (
Comment,
Doctype,
NavigableString,
Tag,
)
class HTML5TreeBuilder(HTMLTreeBuilder):
"""Use html5lib to build a tree."""
features = ['html5lib', PERMISSIVE, HTML_5, HTML]
def prepare_markup(self, markup, user_specified_encoding):
# Store the user-specified encoding for use later on.
self.user_specified_encoding = user_specified_encoding
yield (markup, None, None, False)
# These methods are defined by Beautiful Soup.
def feed(self, markup):
if self.soup.parse_only is not None:
warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
parser = html5lib.HTMLParser(tree=self.create_treebuilder)
doc = parser.parse(markup, encoding=self.user_specified_encoding)
# Set the character encoding detected by the tokenizer.
if isinstance(markup, unicode):
# We need to special-case this because html5lib sets
# charEncoding to UTF-8 if it gets Unicode input.
doc.original_encoding = None
else:
doc.original_encoding = parser.tokenizer.stream.charEncoding[0]
def create_treebuilder(self, namespaceHTMLElements):
self.underlying_builder = TreeBuilderForHtml5lib(
self.soup, namespaceHTMLElements)
return self.underlying_builder
def test_fragment_to_document(self, fragment):
"""See `TreeBuilder`."""
return u'<html><head></head><body>%s</body></html>' % fragment
class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
def __init__(self, soup, namespaceHTMLElements):
self.soup = soup
super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)
def documentClass(self):
self.soup.reset()
return Element(self.soup, self.soup, None)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
doctype = Doctype.for_name_and_ids(name, publicId, systemId)
self.soup.object_was_parsed(doctype)
def elementClass(self, name, namespace):
tag = self.soup.new_tag(name, namespace)
return Element(tag, self.soup, namespace)
def commentClass(self, data):
return TextNode(Comment(data), self.soup)
def fragmentClass(self):
self.soup = BeautifulSoup("")
self.soup.name = "[document_fragment]"
return Element(self.soup, self.soup, None)
def appendChild(self, node):
# XXX This code is not covered by the BS4 tests.
self.soup.append(node.element)
def getDocument(self):
return self.soup
def getFragment(self):
return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element
class AttrList(object):
def __init__(self, element):
self.element = element
self.attrs = dict(self.element.attrs)
def __iter__(self):
return list(self.attrs.items()).__iter__()
def __setitem__(self, name, value):
"set attr", name, value
self.element[name] = value
def items(self):
return list(self.attrs.items())
def keys(self):
return list(self.attrs.keys())
def __len__(self):
return len(self.attrs)
def __getitem__(self, name):
return self.attrs[name]
def __contains__(self, name):
return name in list(self.attrs.keys())
class Element(html5lib.treebuilders._base.Node):
def __init__(self, element, soup, namespace):
html5lib.treebuilders._base.Node.__init__(self, element.name)
self.element = element
self.soup = soup
self.namespace = namespace
def appendChild(self, node):
string_child = child = None
if isinstance(node, basestring):
# Some other piece of code decided to pass in a string
# instead of creating a TextElement object to contain the
# string.
string_child = child = node
elif isinstance(node, Tag):
# Some other piece of code decided to pass in a Tag
# instead of creating an Element object to contain the
# Tag.
child = node
elif node.element.__class__ == NavigableString:
string_child = child = node.element
else:
child = node.element
if not isinstance(child, basestring) and child.parent is not None:
node.element.extract()
if (string_child and self.element.contents
and self.element.contents[-1].__class__ == NavigableString):
# We are appending a string onto another string.
# TODO This has O(n^2) performance, for input like
# "a</a>a</a>a</a>..."
old_element = self.element.contents[-1]
new_element = self.soup.new_string(old_element + string_child)
old_element.replace_with(new_element)
self.soup._most_recent_element = new_element
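# The two adjacent strings are merged into a single NavigableString, so
# the tree never ends up holding consecutive string nodes.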
else:
if isinstance(node, basestring):
# Create a brand new NavigableString from this string.
child = self.soup.new_string(node)
# Tell Beautiful Soup to act as if it parsed this element
# immediately after the parent's last descendant. (Or
# immediately after the parent, if it has no children.)
if self.element.contents:
most_recent_element = self.element._last_descendant(False)
else:
most_recent_element = self.element
self.soup.object_was_parsed(
child, parent=self.element,
most_recent_element=most_recent_element)
def getAttributes(self):
return AttrList(self.element)
def setAttributes(self, attributes):
if attributes is not None and len(attributes) > 0:
converted_attributes = []
for name, value in list(attributes.items()):
if isinstance(name, tuple):
new_name = NamespacedAttribute(*name)
del attributes[name]
attributes[new_name] = value
self.soup.builder._replace_cdata_list_attribute_values(
self.name, attributes)
for name, value in attributes.items():
self.element[name] = value
# The attributes may contain variables that need substitution.
# Call set_up_substitutions manually.
#
# The Tag constructor called this method when the Tag was created,
# but we just set/changed the attributes, so call it again.
self.soup.builder.set_up_substitutions(self.element)
attributes = property(getAttributes, setAttributes)
def insertText(self, data, insertBefore=None):
if insertBefore:
text = TextNode(self.soup.new_string(data), self.soup)
self.insertBefore(text, insertBefore)
else:
self.appendChild(data)
def insertBefore(self, node, refNode):
index = self.element.index(refNode.element)
if (node.element.__class__ == NavigableString and self.element.contents
and self.element.contents[index-1].__class__ == NavigableString):
# (See comments in appendChild)
old_node = self.element.contents[index-1]
new_str = self.soup.new_string(old_node + node.element)
old_node.replace_with(new_str)
else:
self.element.insert(index, node.element)
node.parent = self
def removeChild(self, node):
node.element.extract()
def reparentChildren(self, new_parent):
"""Move all of this tag's children into another tag."""
element = self.element
new_parent_element = new_parent.element
# Determine what this tag's next_element will be once all the children
# are removed.
final_next_element = element.next_sibling
new_parents_last_descendant = new_parent_element._last_descendant(False, False)
if len(new_parent_element.contents) > 0:
# The new parent already contains children. We will be
# appending this tag's children to the end.
new_parents_last_child = new_parent_element.contents[-1]
new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
else:
# The new parent contains no children.
new_parents_last_child = None
new_parents_last_descendant_next_element = new_parent_element.next_element
to_append = element.contents
append_after = new_parent.element.contents
if len(to_append) > 0:
# Set the first child's previous_element and previous_sibling
# to elements within the new parent
first_child = to_append[0]
first_child.previous_element = new_parents_last_descendant
first_child.previous_sibling = new_parents_last_child
# Fix the last child's next_element and next_sibling
last_child = to_append[-1]
last_child.next_element = new_parents_last_descendant_next_element
last_child.next_sibling = None
for child in to_append:
child.parent = new_parent_element
new_parent_element.contents.append(child)
# Now that this element has no children, change its .next_element.
element.contents = []
element.next_element = final_next_element
def cloneNode(self):
tag = self.soup.new_tag(self.element.name, self.namespace)
node = Element(tag, self.soup, self.namespace)
for key,value in self.attributes:
node.attributes[key] = value
return node
def hasContent(self):
return self.element.contents
def getNameTuple(self):
if self.namespace == None:
return namespaces["html"], self.name
else:
return self.namespace, self.name
nameTuple = property(getNameTuple)
class TextNode(Element):
def __init__(self, element, soup):
html5lib.treebuilders._base.Node.__init__(self, None)
self.element = element
self.soup = soup
def cloneNode(self):
raise NotImplementedError
| mit |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/test/test_profile.py | 96 | 7796 | """Test suite for the profile module."""
import sys
import pstats
import unittest
import os
from difflib import unified_diff
from io import StringIO
from test.support import TESTFN, run_unittest, unlink
from contextlib import contextmanager
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
profilermodule = profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_max_output = ':0(max)'
def tearDown(self):
unlink(TESTFN)
def get_expected_output(self):
return _ProfileOutput
@classmethod
def do_profiling(cls):
results = []
prof = cls.profilerclass(timer, 0.001)
start_timer = timer()
prof.runctx("testfunc()", globals(), locals())
results.append(timer() - start_timer)
for methodname in cls.methodnames:
s = StringIO()
stats = pstats.Stats(prof, stream=s)
stats.strip_dirs().sort_stats("stdname")
getattr(stats, methodname)()
output = s.getvalue().splitlines()
mod_name = testfunc.__module__.rsplit('.', 1)[1]
# Only compare against stats originating from the test file.
# Prevents outside code (e.g., the io module) from causing
# unexpected output.
output = [line.rstrip() for line in output if mod_name in line]
results.append('\n'.join(output))
return results
def test_cprofile(self):
results = self.do_profiling()
expected = self.get_expected_output()
self.assertEqual(results[0], 1000)
for i, method in enumerate(self.methodnames):
if results[i+1] != expected[method]:
print("Stats.%s output for %s doesn't fit expectation!" %
(method, self.profilerclass.__name__))
print('\n'.join(unified_diff(
results[i+1].split('\n'),
expected[method].split('\n'))))
def test_calling_conventions(self):
# Issue #5330: profile and cProfile wouldn't report C functions called
# with keyword arguments. We test all calling conventions.
stmts = [
"max([0])",
"max([0], key=int)",
"max([0], **dict(key=int))",
"max(*([0],))",
"max(*([0],), key=int)",
"max(*([0],), **dict(key=int))",
]
for stmt in stmts:
s = StringIO()
prof = self.profilerclass(timer, 0.001)
prof.runctx(stmt, globals(), locals())
stats = pstats.Stats(prof, stream=s)
stats.print_stats()
res = s.getvalue()
self.assertIn(self.expected_max_output, res,
"Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
def test_run(self):
with silent():
self.profilermodule.run("int('1')")
self.profilermodule.run("int('1')", filename=TESTFN)
self.assertTrue(os.path.exists(TESTFN))
def test_runctx(self):
with silent():
self.profilermodule.runctx("testfunc()", globals(), locals())
self.profilermodule.runctx("testfunc()", globals(), locals(),
filename=TESTFN)
self.assertTrue(os.path.exists(TESTFN))
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
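# rstrip('co') above maps a compiled-module path such as 'test_profile.pyc'
# (or '.pyo') back to the '.py' source file that is rewritten below.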
print('Regenerating %s...' % filename)
results = cls.do_profiling()
newfile = []
with open(filename, 'r') as f:
for line in f:
newfile.append(line)
if line.startswith('#--cut'):
break
with open(filename, 'w') as f:
f.writelines(newfile)
f.write("_ProfileOutput = {}\n")
for i, method in enumerate(cls.methodnames):
f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
method, results[i+1]))
f.write('\nif __name__ == "__main__":\n main()\n')
@contextmanager
def silent():
stdout = sys.stdout
try:
sys.stdout = StringIO()
yield
finally:
sys.stdout = stdout
def test_main():
run_unittest(ProfileTest)
def main():
if '-r' not in sys.argv:
test_main()
else:
regenerate_expected_output(__file__, ProfileTest)
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.769 999.769 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.830 299.915 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.946 69.973 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.912 49.989 profilee.py:88(helper2)
8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.912
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.960
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.767
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.769
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.769
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.830
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.830
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.830
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.769
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.830
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:88(helper2)(6) 399.912
profilee.py:73(helper1) -> :0(append)(4) -0.004
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.912
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.960
profilee.py:98(subhelper) -> profilee.py:110(__getattr__)(16) 27.972"""
if __name__ == "__main__":
main()
| gpl-2.0 |
randynobx/ansible | lib/ansible/plugins/action/normal.py | 62 | 2115 | # (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
# individual modules might disagree but as the generic the action plugin, pass at this point.
self._supports_check_mode = True
self._supports_async = True
results = super(ActionModule, self).run(tmp, task_vars)
if not results.get('skipped'):
if results.get('invocation', {}).get('module_args'):
# avoid passing to modules in case of no_log
# should not be set anymore but here for backwards compatibility
del results['invocation']['module_args']
# FUTURE: better to let _execute_module calculate this internally?
wrap_async = self._task.async and not self._connection.has_native_async
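# wrap_async is True when the task requested async but the connection
# plugin has no native async support, so the module execution gets wrapped
# instead of relying on the connection to run it asynchronously.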
# do work!
results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))
# hack to keep --verbose from showing all the setup module results
# moved from setup module as now we filter out all _ansible_ from results
if self._task.action == 'setup':
results['_ansible_verbose_override'] = True
return results
| gpl-3.0 |
blueboxgroup/ansible | lib/ansible/utils/module_docs_fragments/openstack.py | 118 | 4021 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard openstack documentation fragment
DOCUMENTATION = '''
options:
cloud:
description:
- Named cloud to operate against. Provides default values for I(auth) and
I(auth_type). This parameter is not needed if I(auth) is provided or if
OpenStack OS_* environment variables are present.
required: false
auth:
description:
- Dictionary containing auth information as needed by the cloud's auth
plugin strategy. For the default I(password) plugin, this would contain
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
requires. This parameter is not needed if a named cloud is provided or
OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
- Name of the auth plugin to use. If the cloud uses something other than
password authentication, the name of the plugin should be indicated here
and the contents of the I(auth) parameter should be updated accordingly.
required: false
default: password
region_name:
description:
- Name of the region.
required: false
availability_zone:
description:
- Name of the availability zone.
required: false
wait:
description:
- Should ansible wait until the requested resource is complete.
required: false
default: "yes"
choices: ["yes", "no"]
timeout:
description:
- How long should ansible wait for the requested resource.
required: false
default: 180
api_timeout:
description:
- How long should the socket layer wait before timing out for API calls.
If this is omitted, nothing will be passed to the requests library.
required: false
default: None
validate_certs:
description:
- Whether or not SSL API requests should be verified.
required: false
default: True
aliases: ['verify']
cacert:
description:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction
required: false
default: None
key:
description:
- A path to a client key to use as part of the SSL transaction
required: false
default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
choices: [public, internal, admin]
required: false
default: public
requirements:
- python >= 2.7
- shade
notes:
- The standard OpenStack environment variables, such as C(OS_USERNAME)
may be used instead of providing explicit values.
- Auth information is driven by os-client-config, which means that values
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
plays. More information can be found at
U(http://docs.openstack.org/developer/os-client-config)
'''
| gpl-3.0 |
guewen/odoo | addons/payment_paypal/controllers/main.py | 66 | 3201 | # -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import urllib2
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class PaypalController(http.Controller):
_notify_url = '/payment/paypal/ipn/'
_return_url = '/payment/paypal/dpn/'
_cancel_url = '/payment/paypal/cancel/'
def _get_return_url(self, **post):
""" Extract the return URL from the data coming from paypal. """
return_url = post.pop('return_url', '')
if not return_url:
custom = json.loads(post.pop('custom', '{}'))
return_url = custom.get('return_url', '/')
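# Fallback example (values are illustrative only): with
# post = {'custom': '{"return_url": "/shop/payment/validate"}'} the URL is
# recovered from the JSON 'custom' field when 'return_url' itself is absent.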
return return_url
def paypal_validate_data(self, **post):
""" Paypal IPN: three steps validation to ensure data correctness
- step 1: return an empty HTTP 200 response -> will be done at the end
by returning ''
- step 2: POST the complete, unaltered message back to Paypal (preceded
by cmd=_notify-validate), with same encoding
- step 3: paypal send either VERIFIED or INVALID (single word)
Once data is validated, process it. """
res = False
new_post = dict(post, cmd='_notify-validate')
urequest = urllib2.Request("https://www.sandbox.paypal.com/cgi-bin/webscr", werkzeug.url_encode(new_post))
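# Note that the verification request above is posted to the PayPal sandbox
# endpoint; a production deployment would need the live
# https://www.paypal.com/cgi-bin/webscr URL instead.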
uopen = urllib2.urlopen(urequest)
resp = uopen.read()
if resp == 'VERIFIED':
_logger.info('Paypal: validated data')
cr, uid, context = request.cr, SUPERUSER_ID, request.context
res = request.registry['payment.transaction'].form_feedback(cr, uid, post, 'paypal', context=context)
elif resp == 'INVALID':
_logger.warning('Paypal: answered INVALID on data verification')
else:
_logger.warning('Paypal: unrecognized paypal answer, received %s instead of VERIFIED or INVALID' % resp)
return res
@http.route('/payment/paypal/ipn/', type='http', auth='none', methods=['POST'])
def paypal_ipn(self, **post):
""" Paypal IPN. """
_logger.info('Beginning Paypal IPN form_feedback with post data %s', pprint.pformat(post)) # debug
self.paypal_validate_data(**post)
return ''
@http.route('/payment/paypal/dpn', type='http', auth="none", methods=['POST'])
def paypal_dpn(self, **post):
""" Paypal DPN """
_logger.info('Beginning Paypal DPN form_feedback with post data %s', pprint.pformat(post)) # debug
return_url = self._get_return_url(**post)
self.paypal_validate_data(**post)
return werkzeug.utils.redirect(return_url)
@http.route('/payment/paypal/cancel', type='http', auth="none")
def paypal_cancel(self, **post):
""" When the user cancels its Paypal payment: GET on this route """
cr, uid, context = request.cr, SUPERUSER_ID, request.context
_logger.info('Beginning Paypal cancel with post data %s', pprint.pformat(post)) # debug
return_url = self._get_return_url(**post)
return werkzeug.utils.redirect(return_url)
| agpl-3.0 |
vityagi/azure-linux-extensions | RDMAUpdate/main/CommandExecuter.py | 8 | 1689 | #!/usr/bin/env python
#
# VMEncryption extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import os
import os.path
import shlex
import sys
from subprocess import *
class CommandExecuter(object):
"""description of class"""
def __init__(self, logger):
self.logger = logger
def Execute(self, command_to_execute):
self.logger.log("Executing:" + command_to_execute)
args = shlex.split(command_to_execute)
proc = Popen(args)
returnCode = proc.wait()
return returnCode
def RunGetOutput(self, command_to_execute):
try:
output=subprocess.check_output(command_to_execute,stderr=subprocess.STDOUT,shell=True)
return 0,output.decode('latin-1')
except subprocess.CalledProcessError as e :
self.logger.log('CalledProcessError. Error Code is ' + str(e.returncode) )
self.logger.log('CalledProcessError. Command string was ' + e.cmd )
self.logger.log('CalledProcessError. Command result was ' + (e.output[:-1]).decode('latin-1'))
return e.returncode,e.output.decode('latin-1')
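# Rough usage sketch (the command strings are illustrative and `logger` is
# any object exposing a log(message) method):
# executer = CommandExecuter(logger)
# rc = executer.Execute("modprobe rdma_ucm")
# rc, output = executer.RunGetOutput("uname -r")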
| apache-2.0 |
potatolondon/django-nonrel-1-4 | django/contrib/gis/gdal/geomtype.py | 404 | 2967 | from django.contrib.gis.gdal.error import OGRException
#### OGRGeomType ####
class OGRGeomType(object):
"Encapulates OGR Geometry Types."
wkb25bit = -2147483648
# Dictionary of acceptable OGRwkbGeometryType s and their string names.
_types = {0 : 'Unknown',
1 : 'Point',
2 : 'LineString',
3 : 'Polygon',
4 : 'MultiPoint',
5 : 'MultiLineString',
6 : 'MultiPolygon',
7 : 'GeometryCollection',
100 : 'None',
101 : 'LinearRing',
1 + wkb25bit: 'Point25D',
2 + wkb25bit: 'LineString25D',
3 + wkb25bit: 'Polygon25D',
4 + wkb25bit: 'MultiPoint25D',
5 + wkb25bit : 'MultiLineString25D',
6 + wkb25bit : 'MultiPolygon25D',
7 + wkb25bit : 'GeometryCollection25D',
}
# Reverse type dictionary, keyed by lower-case of the name.
_str_types = dict([(v.lower(), k) for k, v in _types.items()])
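# Examples that follow from the tables above: OGRGeomType('Polygon').num == 3,
# OGRGeomType(5).name == 'MultiLineString', and the string 'geometry' is
# normalised to 'Unknown' (0) in __init__ below.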
def __init__(self, type_input):
"Figures out the correct OGR Type based upon the input."
if isinstance(type_input, OGRGeomType):
num = type_input.num
elif isinstance(type_input, basestring):
type_input = type_input.lower()
if type_input == 'geometry': type_input='unknown'
num = self._str_types.get(type_input, None)
if num is None:
raise OGRException('Invalid OGR String Type "%s"' % type_input)
elif isinstance(type_input, int):
if not type_input in self._types:
raise OGRException('Invalid OGR Integer Type: %d' % type_input)
num = type_input
else:
raise TypeError('Invalid OGR input type given.')
# Setting the OGR geometry type number.
self.num = num
def __str__(self):
"Returns the value of the name property."
return self.name
def __eq__(self, other):
"""
Does an equivalence test on the OGR type with the given
other OGRGeomType, the short-hand string, or the integer.
"""
if isinstance(other, OGRGeomType):
return self.num == other.num
elif isinstance(other, basestring):
return self.name.lower() == other.lower()
elif isinstance(other, int):
return self.num == other
else:
return False
def __ne__(self, other):
return not (self == other)
@property
def name(self):
"Returns a short-hand string form of the OGR Geometry type."
return self._types[self.num]
@property
def django(self):
"Returns the Django GeometryField for this OGR Type."
s = self.name.replace('25D', '')
if s in ('LinearRing', 'None'):
return None
elif s == 'Unknown':
s = 'Geometry'
return s + 'Field'
| bsd-3-clause |
voussoir/praw | praw/const.py | 1 | 10141 | """praw constants."""
import sys
__version__ = '5.2.1.dev0'
API_PATH = {
'about_edited': 'r/{subreddit}/about/edited/',
'about_log': 'r/{subreddit}/about/log/',
'about_modqueue': 'r/{subreddit}/about/modqueue/',
'about_reports': 'r/{subreddit}/about/reports/',
'about_spam': 'r/{subreddit}/about/spam/',
'about_sticky': 'r/{subreddit}/about/sticky/',
'about_stylesheet': 'r/{subreddit}/about/stylesheet/',
'about_traffic': 'r/{subreddit}/about/traffic/',
'about_unmoderated': 'r/{subreddit}/about/unmoderated/',
'accept_mod_invite': 'r/{subreddit}/api/accept_moderator_invite',
'approve': 'api/approve/',
'block': 'api/block',
'block_user': '/api/block_user/',
'blocked': 'prefs/blocked/',
'collapse': 'api/collapse_message/',
'comment': 'api/comment/',
'comment_replies': 'message/comments/',
'compose': 'api/compose/',
'contest_mode': 'api/set_contest_mode/',
'del': 'api/del/',
'deleteflair': 'r/{subreddit}/api/deleteflair',
'delete_sr_banner': 'r/{subreddit}/api/delete_sr_banner',
'delete_sr_header': 'r/{subreddit}/api/delete_sr_header',
'delete_sr_icon': 'r/{subreddit}/api/delete_sr_icon',
'delete_sr_image': 'r/{subreddit}/api/delete_sr_img',
'distinguish': 'api/distinguish/',
'domain': 'domain/{domain}/',
'duplicates': 'duplicates/{submission_id}/',
'edit': 'api/editusertext/',
'flair': 'r/{subreddit}/api/flair/',
'flairconfig': 'r/{subreddit}/api/flairconfig/',
'flaircsv': 'r/{subreddit}/api/flaircsv/',
'flairlist': 'r/{subreddit}/api/flairlist/',
'flairselector': 'r/{subreddit}/api/flairselector/',
'flairtemplate': 'r/{subreddit}/api/flairtemplate/',
'flairtemplateclear': 'r/{subreddit}/api/clearflairtemplates/',
'flairtemplatedelete': 'r/{subreddit}/api/deleteflairtemplate/',
'friend': 'r/{subreddit}/api/friend/',
'friend_v1': 'api/v1/me/friends/{user}',
'friends': 'api/v1/me/friends/',
'gild_thing': 'api/v1/gold/gild/{fullname}/',
'gild_user': 'api/v1/gold/give/{username}/',
'hide': 'api/hide/',
'ignore_reports': 'api/ignore_reports/',
'inbox': 'message/inbox/',
'info': 'api/info/',
'karma': 'api/v1/me/karma',
'leavecontributor': 'api/leavecontributor',
'leavemoderator': 'api/leavemoderator',
'link_flair': 'r/{subreddit}/api/link_flair',
'list_banned': 'r/{subreddit}/about/banned/',
'list_contributor': 'r/{subreddit}/about/contributors/',
'list_moderator': 'r/{subreddit}/about/moderators/',
'list_muted': 'r/{subreddit}/about/muted/',
'list_wikibanned': 'r/{subreddit}/about/wikibanned/',
'list_wikicontributor': 'r/{subreddit}/about/wikicontributors/',
'live_accept_invite': 'api/live/{id}/accept_contributor_invite',
'live_add_update': 'api/live/{id}/update',
'live_close': 'api/live/{id}/close_thread',
'live_contributors': 'live/{id}/contributors',
'live_discussions': 'live/{id}/discussions',
'live_focus': 'live/{thread_id}/updates/{update_id}',
'live_info': 'api/live/by_id/{ids}',
'live_invite': 'api/live/{id}/invite_contributor',
'live_leave': 'api/live/{id}/leave_contributor',
'live_now': 'api/live/happening_now',
'live_remove_update': 'api/live/{id}/delete_update',
'live_remove_contrib': 'api/live/{id}/rm_contributor',
'live_remove_invite': 'api/live/{id}/rm_contributor_invite',
'live_report': 'api/live/{id}/report',
'live_strike': 'api/live/{id}/strike_update',
'live_update_perms': 'api/live/{id}/set_contributor_permissions',
'live_update_thread': 'api/live/{id}/edit',
'live_updates': 'live/{id}',
'liveabout': 'api/live/{id}/about/',
'livecreate': 'api/live/create',
'lock': 'api/lock/',
'me': 'api/v1/me',
'mentions': 'message/mentions',
'message': 'message/messages/{id}/',
'messages': 'message/messages/',
'moderator_messages': 'r/{subreddit}/message/moderator/',
'moderator_unread': 'r/{subreddit}/message/moderator/unread/',
'morechildren': 'api/morechildren/',
'my_contributor': 'subreddits/mine/contributor/',
'my_moderator': 'subreddits/mine/moderator/',
'my_multireddits': 'api/multi/mine/',
'my_subreddits': 'subreddits/mine/subscriber/',
'marknsfw': 'api/marknsfw/',
'modmail_archive': 'api/mod/conversations/{id}/archive',
'modmail_bulk_read': 'api/mod/conversations/bulk/read',
'modmail_conversation': 'api/mod/conversations/{id}',
'modmail_conversations': 'api/mod/conversations/',
'modmail_highlight': 'api/mod/conversations/{id}/highlight',
'modmail_mute': 'api/mod/conversations/{id}/mute',
'modmail_read': 'api/mod/conversations/read',
'modmail_subreddits': 'api/mod/conversations/subreddits',
'modmail_unarchive': 'api/mod/conversations/{id}/unarchive',
'modmail_unmute': 'api/mod/conversations/{id}/unmute',
'modmail_unread': 'api/mod/conversations/unread',
'modmail_unread_count': 'api/mod/conversations/unread/count',
'multireddit': 'user/{user}/m/{multi}/',
'multireddit_api': 'api/multi/user/{user}/m/{multi}/',
'multireddit_base': 'api/multi/',
'multireddit_copy': 'api/multi/copy/',
'multireddit_rename': 'api/multi/rename/',
'multireddit_update': 'api/multi/user/{user}/m/{multi}/r/{subreddit}',
'multireddit_user': 'api/multi/user/{user}/',
'mute_sender': 'api/mute_message_author/',
'quarantine_opt_in': 'api/quarantine_optin',
'quarantine_opt_out': 'api/quarantine_optout',
'read_message': 'api/read_message/',
'remove': 'api/remove/',
'report': 'api/report/',
'rules': 'r/{subreddit}/about/rules',
'save': 'api/save/',
'search': 'r/{subreddit}/search/',
'select_flair': 'r/{subreddit}/api/selectflair/',
'sendreplies': 'api/sendreplies',
'sent': 'message/sent/',
'setpermissions': 'r/{subreddit}/api/setpermissions/',
'spoiler': 'api/spoiler/',
'site_admin': 'api/site_admin/',
'sticky_submission': 'api/set_subreddit_sticky/',
'sub_recommended': 'api/recommend/sr/{subreddits}',
'submission': 'comments/{id}/',
'submission_replies': 'message/selfreply/',
'submit': 'api/submit/',
'subreddit': 'r/{subreddit}/',
'subreddit_about': 'r/{subreddit}/about/',
'subreddit_filter': ('api/filter/user/{user}/f/{special}/'
'r/{subreddit}'),
'subreddit_filter_list': 'api/filter/user/{user}/f/{special}',
'subreddit_random': 'r/{subreddit}/random/',
'subreddit_settings': 'r/{subreddit}/about/edit/',
'subreddit_stylesheet': 'r/{subreddit}/api/subreddit_stylesheet/',
'subreddits_by_topic': 'api/subreddits_by_topic',
'subreddits_default': 'subreddits/default/',
'subreddits_gold': 'subreddits/gold/',
'subreddits_new': 'subreddits/new/',
'subreddits_popular': 'subreddits/popular/',
'subreddits_name_search': 'api/search_reddit_names/',
'subreddits_search': 'subreddits/search/',
'subscribe': 'api/subscribe/',
'suggested_sort': 'api/set_suggested_sort/',
'uncollapse': 'api/uncollapse_message/',
'unfriend': 'r/{subreddit}/api/unfriend/',
'unhide': 'api/unhide/',
'unignore_reports': 'api/unignore_reports/',
'unlock': 'api/unlock/',
'unmarknsfw': 'api/unmarknsfw/',
'unmute_sender': 'api/unmute_message_author/',
'unread': 'message/unread/',
'unread_message': 'api/unread_message/',
'unsave': 'api/unsave/',
'unspoiler': 'api/unspoiler/',
'upload_image': 'r/{subreddit}/api/upload_sr_img',
'user': 'user/{user}/',
'user_about': 'user/{user}/about/',
'vote': 'api/vote/',
'wiki_edit': 'r/{subreddit}/api/wiki/edit/',
'wiki_page': 'r/{subreddit}/wiki/{page}',
'wiki_page_editor': 'r/{subreddit}/api/wiki/alloweditor/{method}',
'wiki_page_revisions': 'r/{subreddit}/wiki/revisions/{page}',
'wiki_page_settings': 'r/{subreddit}/wiki/settings/{page}',
'wiki_pages': 'r/{subreddit}/wiki/pages/',
'wiki_revisions': 'r/{subreddit}/wiki/revisions/'}
JPEG_HEADER = b'\xff\xd8\xff'
MAX_IMAGE_SIZE = 512000
MIN_PNG_SIZE = 67
MIN_JPEG_SIZE = 128
PNG_HEADER = b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a'
USER_AGENT_FORMAT = '{{}} PRAW/{}'.format(__version__)
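# For example, USER_AGENT_FORMAT.format('my-bot/0.1') produces
# 'my-bot/0.1 PRAW/<__version__>', i.e. the caller's agent string followed
# by the PRAW version defined above.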
# pylint: disable=import-error,no-name-in-module,unused-import
if sys.version_info.major == 2:
import ConfigParser as configparser # NOQA
from urlparse import urljoin, urlparse # NOQA
else:
import configparser # NOQA
from urllib.parse import urljoin, urlparse # NOQA
| gpl-3.0 |
AlphaCluster/NewsBlur | vendor/paypal/pro/helpers.py | 18 | 12548 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import logging
import pprint
import time
from django.conf import settings
from django.forms.models import fields_for_model
from django.http import QueryDict
from django.utils.functional import cached_property
from django.utils.http import urlencode
from six.moves.urllib.request import urlopen
from paypal.pro.signals import payment_was_successful, recurring_cancel, recurring_suspend, recurring_reactivate, payment_profile_created
from paypal.pro.models import PayPalNVP
from paypal.pro.exceptions import PayPalFailure
USER = settings.PAYPAL_WPP_USER
PASSWORD = settings.PAYPAL_WPP_PASSWORD
SIGNATURE = settings.PAYPAL_WPP_SIGNATURE
VERSION = 116.0
BASE_PARAMS = dict(USER=USER, PWD=PASSWORD, SIGNATURE=SIGNATURE, VERSION=VERSION)
ENDPOINT = "https://api-3t.paypal.com/nvp"
SANDBOX_ENDPOINT = "https://api-3t.sandbox.paypal.com/nvp"
log = logging.getLogger(__file__)
def paypal_time(time_obj=None):
"""Returns a time suitable for PayPal time fields."""
if time_obj is None:
time_obj = time.gmtime()
return time.strftime(PayPalNVP.TIMESTAMP_FORMAT, time_obj)
def paypaltime2datetime(s):
"""Convert a PayPal time string to a DateTime."""
return datetime.datetime(*(time.strptime(s, PayPalNVP.TIMESTAMP_FORMAT)[:6]))
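# Round-trip sketch (assumes PayPalNVP.TIMESTAMP_FORMAT is PayPal's usual
# '%Y-%m-%dT%H:%M:%SZ' layout):
# paypaltime2datetime('2015-06-01T12:30:45Z') -> datetime(2015, 6, 1, 12, 30, 45)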
class PayPalError(TypeError):
"""Error thrown when something be wrong."""
class PayPalWPP(object):
"""
Wrapper class for the PayPal Website Payments Pro.
Website Payments Pro Integration Guide:
https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_WPP_IntegrationGuide.pdf
Name-Value Pair API Developer Guide and Reference:
https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_NVPAPI_DeveloperGuide.pdf
"""
def __init__(self, request, params=BASE_PARAMS):
"""Required - USER / PWD / SIGNATURE / VERSION"""
self.request = request
if getattr(settings, 'PAYPAL_TEST', True):
self.endpoint = SANDBOX_ENDPOINT
else:
self.endpoint = ENDPOINT
self.signature_values = params
self.signature = urlencode(self.signature_values) + "&"
@cached_property
def NVP_FIELDS(self):
# Put this onto class and load lazily, because in some cases there is an
# import order problem if we put it at module level.
return list(fields_for_model(PayPalNVP).keys())
def doDirectPayment(self, params):
"""Call PayPal DoDirectPayment method."""
defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"}
required = ["creditcardtype",
"acct",
"expdate",
"cvv2",
"ipaddress",
"firstname",
"lastname",
"street",
"city",
"state",
"countrycode",
"zip",
"amt",
]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
payment_was_successful.send(sender=nvp_obj, **params)
# @@@ Could check cvv2match / avscode are both 'X' or '0'
# qd = django.http.QueryDict(nvp_obj.response)
# if qd.get('cvv2match') not in ['X', '0']:
# nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match')
# if qd.get('avscode') not in ['X', '0']:
# nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode')
return nvp_obj
def setExpressCheckout(self, params):
"""
Initiates an Express Checkout transaction.
Optionally, the SetExpressCheckout API operation can set up billing agreements for
reference transactions and recurring payments.
Returns a NVP instance - check for token and payerid to continue!
"""
if "amt" in params:
import warnings
warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' "
"should be used instead.", DeprecationWarning)
# Make a copy so we don't change things unexpectedly
params = params.copy()
params.update({'paymentrequest_0_amt': params['amt']})
del params['amt']
if self._is_recurring(params):
params = self._recurring_setExpressCheckout_adapter(params)
defaults = {"method": "SetExpressCheckout", "noshipping": 1}
required = ["returnurl", "cancelurl", "paymentrequest_0_amt"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def doExpressCheckoutPayment(self, params):
"""
Check the dude out:
"""
if "amt" in params:
import warnings
warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' "
"should be used instead.", DeprecationWarning)
# Make a copy so we don't change things unexpectedly
params = params.copy()
params.update({'paymentrequest_0_amt': params['amt']})
del params['amt']
defaults = {"method": "DoExpressCheckoutPayment", "paymentaction": "Sale"}
required = ["returnurl", "cancelurl", "paymentrequest_0_amt", "token", "payerid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
payment_was_successful.send(sender=nvp_obj, **params)
return nvp_obj
def createRecurringPaymentsProfile(self, params, direct=False):
"""
Set direct to True to indicate that this is being called as a directPayment.
Returns True PayPal successfully creates the profile otherwise False.
"""
defaults = {"method": "CreateRecurringPaymentsProfile"}
required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"]
# Direct payments require CC data
if direct:
required + ["creditcardtype", "acct", "expdate", "firstname", "lastname"]
else:
required + ["token", "payerid"]
nvp_obj = self._fetch(params, required, defaults)
# Flag if profile_type != ActiveProfile
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
payment_profile_created.send(sender=nvp_obj, **params)
return nvp_obj
def getExpressCheckoutDetails(self, params):
defaults = {"method": "GetExpressCheckoutDetails"}
required = ["token"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def setCustomerBillingAgreement(self, params):
raise DeprecationWarning
def createBillingAgreement(self, params):
"""
Create a billing agreement for future use, without any initial payment
"""
defaults = {"method": "CreateBillingAgreement"}
required = ["token"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def getTransactionDetails(self, params):
defaults = {"method": "GetTransactionDetails"}
required = ["transactionid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def massPay(self, params):
raise NotImplementedError
def getRecurringPaymentsProfileDetails(self, params):
raise NotImplementedError
def updateRecurringPaymentsProfile(self, params):
defaults = {"method": "UpdateRecurringPaymentsProfile"}
required = ["profileid"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def billOutstandingAmount(self, params):
raise NotImplementedError
def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False):
"""
Requires `profileid` and `action` params.
Action must be either "Cancel", "Suspend", or "Reactivate".
"""
defaults = {"method": "ManageRecurringPaymentsProfileStatus"}
required = ["profileid", "action"]
nvp_obj = self._fetch(params, required, defaults)
# TODO: This fail silently check should be using the error code, but it's not easy to access
if not nvp_obj.flag or (
fail_silently and nvp_obj.flag_info == 'Invalid profile status for cancel action; profile should be active or suspended'):
if params['action'] == 'Cancel':
recurring_cancel.send(sender=nvp_obj)
elif params['action'] == 'Suspend':
recurring_suspend.send(sender=nvp_obj)
elif params['action'] == 'Reactivate':
recurring_reactivate.send(sender=nvp_obj)
else:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def refundTransaction(self, params):
raise NotImplementedError
def doReferenceTransaction(self, params):
"""
Process a payment from a buyer's account, identified by a previous
transaction.
The `paymentaction` param defaults to "Sale", but may also contain the
values "Authorization" or "Order".
"""
defaults = {"method": "DoReferenceTransaction",
"paymentaction": "Sale"}
required = ["referenceid", "amt"]
nvp_obj = self._fetch(params, required, defaults)
if nvp_obj.flag:
raise PayPalFailure(nvp_obj.flag_info)
return nvp_obj
def _is_recurring(self, params):
"""Returns True if the item passed is a recurring transaction."""
return 'billingfrequency' in params
def _recurring_setExpressCheckout_adapter(self, params):
"""
The recurring payment interface to SEC is different than the recurring payment
interface to ECP. This adapts a normal call to look like a SEC call.
"""
params['l_billingtype0'] = "RecurringPayments"
params['l_billingagreementdescription0'] = params['desc']
REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"]
for k in params.keys():
if k in REMOVE:
del params[k]
return params
def _fetch(self, params, required, defaults):
"""Make the NVP request and store the response."""
defaults.update(params)
pp_params = self._check_and_update_params(required, defaults)
pp_string = self.signature + urlencode(pp_params)
response = self._request(pp_string)
response_params = self._parse_response(response)
if getattr(settings, 'PAYPAL_DEBUG', settings.DEBUG):
log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults))
log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params))
# Gather all NVP parameters to pass to a new instance.
nvp_params = {}
tmpd = defaults.copy()
tmpd.update(response_params)
for k, v in tmpd.items():
if k in self.NVP_FIELDS:
nvp_params[str(k)] = v
# PayPal timestamp has to be formatted.
if 'timestamp' in nvp_params:
nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp'])
nvp_obj = PayPalNVP(**nvp_params)
nvp_obj.init(self.request, params, response_params)
nvp_obj.save()
return nvp_obj
def _request(self, data):
"""Moved out to make testing easier."""
return urlopen(self.endpoint, data.encode("ascii")).read()
def _check_and_update_params(self, required, params):
"""
Ensure all required parameters were passed to the API call and format
them correctly.
"""
for r in required:
if r not in params:
raise PayPalError("Missing required param: %s" % r)
# Upper case all the parameters for PayPal.
return (dict((k.upper(), v) for k, v in params.items()))
def _parse_response(self, response):
"""Turn the PayPal response into a dict"""
q = QueryDict(response, encoding='UTF-8').dict()
return {k.lower(): v for k,v in q.items()}
| mit |
jhaux/tensorflow | tensorflow/compiler/tests/nullary_ops_test.py | 122 | 2094 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test cases for operators with no arguments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.platform import googletest
class NullaryOpsTest(XLATestCase):
def _testNullary(self, op, expected):
with self.test_session() as session:
with self.test_scope():
output = op()
result = session.run(output)
self.assertAllClose(result, expected, rtol=1e-3)
def testNoOp(self):
with self.test_session():
with self.test_scope():
output = control_flow_ops.no_op()
# This should not crash.
output.run()
def testConstants(self):
constants = [
np.float32(42),
np.array([], dtype=np.float32),
np.array([1, 2], dtype=np.float32),
np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32),
np.array([[[1, 2], [3, 4], [5, 6]], [[10, 20], [30, 40], [50, 60]]],
dtype=np.float32),
np.array([[[]], [[]]], dtype=np.float32),
np.array([[[[1]]]], dtype=np.float32),
]
for c in constants:
self._testNullary(lambda c=c: constant_op.constant(c), expected=c)
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
memsharded/conan | conans/test/unittests/client/tools/test_env.py | 1 | 2363 | # coding=utf-8
import os
import unittest
import mock
from conans.client.tools import env
class ToolsEnvTest(unittest.TestCase):
def test_environment_append_variables(self):
with mock.patch.dict('os.environ', {}),\
env.environment_append({'env_var1': 'value',
'env_var2': 'value2'}):
self.assertEqual(os.environ['env_var1'], 'value')
self.assertEqual(os.environ['env_var2'], 'value2')
def test_environment_append_variables_without_values(self):
with mock.patch.dict('os.environ',
{'env_var1': 'value',
'env_var2': 'value2'}),\
env.environment_append({}):
self.assertEqual(os.environ['env_var1'], 'value')
self.assertEqual(os.environ['env_var2'], 'value2')
def test_environment_append_overwriting(self):
with mock.patch.dict('os.environ', {'env_var1': 'value'}),\
env.environment_append({'env_var1': 'new_value'}):
self.assertEqual(os.environ['env_var1'], 'new_value')
def test_environment_append_list(self):
with mock.patch.dict('os.environ', {}),\
env.environment_append({'env_var1': ['value1', 'value2']}):
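# List values are joined with os.pathsep (':' on POSIX, ';' on Windows).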
self.assertEqual(os.environ['env_var1'], 'value1' +
os.pathsep + 'value2')
def test_environment_append_unsetting_some_variables(self):
with mock.patch.dict('os.environ', {'env_var1': 'value'}),\
env.environment_append({'env_var1': None, 'env_var2': 'value2'}):
self.assertNotIn('env_var1', os.environ)
self.assertEqual(os.environ['env_var2'], 'value2')
def test_environment_append_unsetting_all_variables(self):
with mock.patch.dict('os.environ',
{'env_var1': 'value',
'env_var2': 'value2'}),\
env.environment_append({'env_var1': None}):
self.assertNotIn('env_var1', os.environ)
def test_environment_append_unsetting_non_existing_variables(self):
with mock.patch.dict('os.environ',
{'env_var2': 'value2'}),\
env.environment_append({'env_var1': None}):
self.assertNotIn('env_var1', os.environ)
| mit |
fernandoacorreia/DjangoWAWSLogging | DjangoWAWSLogging/env/Lib/site-packages/pywin32-218-py2.7-win32.egg/scripts/killProcName.py | 38 | 1766 | # Kills a process by process name
#
# Uses the Performance Data Helper to locate the PID, then kills it.
# Will only kill the process if there is only one process of that name
# (eg, attempting to kill "Python.exe" will only work if there is only
# one Python.exe running). Note that the current process does not
# count - ie, if Python.exe is hosting this script, you can still kill
# another Python.exe (as long as there is only one other Python.exe).
# Really just a demo for the win32pdh(util) module, which allows you
# to get all sorts of information about a running process and many
# other aspects of your system.
import win32api, win32pdhutil, win32con, sys
def killProcName(procname):
# Change suggested by Dan Knierim, who found that this performed a
# "refresh", allowing us to kill processes created since this was run
# for the first time.
try:
win32pdhutil.GetPerformanceAttributes('Process','ID Process',procname)
except:
pass
pids = win32pdhutil.FindPerformanceAttributesByName(procname)
# If _my_ pid in there, remove it!
try:
pids.remove(win32api.GetCurrentProcessId())
except ValueError:
pass
if len(pids)==0:
result = "Can't find %s" % procname
elif len(pids)>1:
result = "Found too many %s's - pids=`%s`" % (procname,pids)
else:
handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0,pids[0])
win32api.TerminateProcess(handle,0)
win32api.CloseHandle(handle)
result = ""
return result
if __name__ == '__main__':
if len(sys.argv)>1:
for procname in sys.argv[1:]:
result = killProcName(procname)
if result:
print result
print "Dumping all processes..."
win32pdhutil.ShowAllProcesses()
else:
print "Killed %s" % procname
else:
print "Usage: killProcName.py procname ..."
| mit |
ruzette/project-euler-solutions | old/src/pythagoras.py | 1 | 1204 | '''
Project Euler Problem 9
A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
a**2 + b**2 = c**2
For example, 3**2 + 4**2 = 9 + 16 = 25 = 5**2.
There exists exactly one Pythagorean triplet for which a + b + c = 1000.
Find the product abc.
Link : https://projecteuler.net/problem=9
'''
from operator import mul
class Pythagoras(object):
def __init__(self, sides=[]):
self.sides = sides
def get_sides_from_sum(self, psum=0):
if psum <= 0:
print "Error: Pythagorean sum must be greater than 0"
return None
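# Derivation: with a + b + c = s and a**2 + b**2 = c**2, substituting
# c = s - a - b and solving for a gives a = (s**2/2 - s*b) / (s - b),
# so for each candidate b we can compute a directly instead of searching.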
for b in range(int(psum/5), int(psum/2)):
a = ((psum ** 2) / 2 - psum * b) / (psum - b)
c = psum - a - b
if c < 0:
continue
print a, b, c
if ((a**2) + (b**2)) == (c**2):
self.sides = [a, b, c]
print self.sides
return self.sides
return None
def get_product(self, sides=[]):
if self.sides == [] and sides:
self.sides = sides
product = reduce(mul, self.sides)
return product
def main():
pythagoras = Pythagoras()
print "Special Pythagorean Triplets"
if pythagoras.get_sides_from_sum(1000):
print "Product is ", pythagoras.get_product()
if __name__ == "__main__":
main() | gpl-2.0 |
zhuwenping/python-for-android | python-modules/twisted/twisted/conch/test/test_transport.py | 49 | 73648 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for ssh/transport.py and the classes therein.
"""
try:
import pyasn1
except ImportError:
pyasn1 = None
try:
import Crypto.Cipher.DES3
except ImportError:
Crypto = None
if pyasn1 is not None and Crypto is not None:
dependencySkip = None
from twisted.conch.ssh import transport, common, keys, factory
from twisted.conch.test import keydata
else:
if pyasn1 is None:
dependencySkip = "can't run w/o PyASN1"
elif Crypto is None:
dependencySkip = "can't run w/o PyCrypto"
class transport: # fictional modules to make classes work
class SSHTransportBase: pass
class SSHServerTransport: pass
class SSHClientTransport: pass
class factory:
class SSHFactory:
pass
from twisted.trial import unittest
from twisted.internet import defer
from twisted.protocols import loopback
from twisted.python import randbytes
from twisted.python.reflect import qual
from twisted.python.hashlib import md5, sha1
from twisted.conch.ssh import service
from twisted.test import proto_helpers
from twisted.conch.error import ConchError
class MockTransportBase(transport.SSHTransportBase):
"""
A base class for the client and server protocols. Stores the messages
it receives instead of ignoring them.
@ivar errors: a list of tuples: (reasonCode, description)
@ivar unimplementeds: a list of integers: sequence number
@ivar debugs: a list of tuples: (alwaysDisplay, message, lang)
@ivar ignoreds: a list of strings: ignored data
"""
def connectionMade(self):
"""
Set up instance variables.
"""
transport.SSHTransportBase.connectionMade(self)
self.errors = []
self.unimplementeds = []
self.debugs = []
self.ignoreds = []
self.gotUnsupportedVersion = None
def _unsupportedVersionReceived(self, remoteVersion):
"""
Intercept unsupported version call.
@type remoteVersion: C{str}
"""
self.gotUnsupportedVersion = remoteVersion
return transport.SSHTransportBase._unsupportedVersionReceived(self, remoteVersion)
def receiveError(self, reasonCode, description):
"""
Store any errors received.
@type reasonCode: C{int}
@type description: C{str}
"""
self.errors.append((reasonCode, description))
def receiveUnimplemented(self, seqnum):
"""
Store any unimplemented packet messages.
@type seqnum: C{int}
"""
self.unimplementeds.append(seqnum)
def receiveDebug(self, alwaysDisplay, message, lang):
"""
Store any debug messages.
@type alwaysDisplay: C{bool}
@type message: C{str}
@type lang: C{str}
"""
self.debugs.append((alwaysDisplay, message, lang))
def ssh_IGNORE(self, packet):
"""
Store any ignored data.
@type packet: C{str}
"""
self.ignoreds.append(packet)
class MockCipher(object):
"""
A mocked-up version of twisted.conch.ssh.transport.SSHCiphers.
"""
outCipType = 'test'
encBlockSize = 6
inCipType = 'test'
decBlockSize = 6
inMACType = 'test'
outMACType = 'test'
verifyDigestSize = 1
usedEncrypt = False
usedDecrypt = False
outMAC = (None, '', '', 1)
inMAC = (None, '', '', 1)
keys = ()
def encrypt(self, x):
"""
Called to encrypt the packet. Simply record that encryption was used
and return the data unchanged.
"""
self.usedEncrypt = True
if (len(x) % self.encBlockSize) != 0:
raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
(len(x), self.encBlockSize, len(x) % self.encBlockSize))
return x
def decrypt(self, x):
"""
Called to decrypt the packet. Simply record that decryption was used
and return the data unchanged.
"""
self.usedDecrypt = True
if (len(x) % self.decBlockSize) != 0:
raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
(len(x), self.decBlockSize, len(x) % self.decBlockSize))
return x
def makeMAC(self, outgoingPacketSequence, payload):
"""
Make a Message Authentication Code by sending the character value of
the outgoing packet.
"""
return chr(outgoingPacketSequence)
def verify(self, incomingPacketSequence, packet, macData):
"""
Verify the Message Authentication Code by checking that the packet
sequence number is the same.
"""
return chr(incomingPacketSequence) == macData
def setKeys(self, ivOut, keyOut, ivIn, keyIn, macIn, macOut):
"""
Record the keys.
"""
self.keys = (ivOut, keyOut, ivIn, keyIn, macIn, macOut)
class MockCompression:
"""
A mocked-up compression, based on the zlib interface. Instead of
compressing, it reverses the data and adds a 0x66 byte to the end.
"""
def compress(self, payload):
return payload[::-1] # reversed
def decompress(self, payload):
return payload[:-1][::-1]
def flush(self, kind):
return '\x66'
class MockService(service.SSHService):
"""
A mocked-up service, based on twisted.conch.ssh.service.SSHService.
@ivar started: True if this service has been started.
@ivar stopped: True if this service has been stopped.
"""
name = "MockService"
started = False
stopped = False
protocolMessages = {0xff: "MSG_TEST", 71: "MSG_fiction"}
def logPrefix(self):
return "MockService"
def serviceStarted(self):
"""
Record that the service was started.
"""
self.started = True
def serviceStopped(self):
"""
Record that the service was stopped.
"""
self.stopped = True
def ssh_TEST(self, packet):
"""
A message that this service responds to.
"""
self.transport.sendPacket(0xff, packet)
class MockFactory(factory.SSHFactory):
"""
A mocked-up factory based on twisted.conch.ssh.factory.SSHFactory.
"""
services = {
'ssh-userauth': MockService}
def getPublicKeys(self):
"""
Return the public keys that authenticate this server.
"""
return {
'ssh-rsa': keys.Key.fromString(keydata.publicRSA_openssh),
'ssh-dsa': keys.Key.fromString(keydata.publicDSA_openssh)}
def getPrivateKeys(self):
"""
Return the private keys that authenticate this server.
"""
return {
'ssh-rsa': keys.Key.fromString(keydata.privateRSA_openssh),
'ssh-dsa': keys.Key.fromString(keydata.privateDSA_openssh)}
def getPrimes(self):
"""
Return the Diffie-Hellman primes that can be used for the
diffie-hellman-group-exchange-sha1 key exchange.
"""
return {
1024: ((2, transport.DH_PRIME),),
2048: ((3, transport.DH_PRIME),),
4096: ((5, 7),)}
class MockOldFactoryPublicKeys(MockFactory):
"""
The old SSHFactory returned mappings from key names to strings from
getPublicKeys(). We return those here for testing.
"""
def getPublicKeys(self):
"""
We used to map key types to public key blobs as strings.
"""
keys = MockFactory.getPublicKeys(self)
for name, key in keys.items()[:]:
keys[name] = key.blob()
return keys
class MockOldFactoryPrivateKeys(MockFactory):
"""
The old SSHFactory returned mappings from key names to PyCrypto key
objects from getPrivateKeys(). We return those here for testing.
"""
def getPrivateKeys(self):
"""
We used to map key types to PyCrypto key objects.
"""
keys = MockFactory.getPrivateKeys(self)
for name, key in keys.items()[:]:
keys[name] = key.keyObject
return keys
class TransportTestCase(unittest.TestCase):
"""
Base class for transport test cases.
"""
klass = None
if Crypto is None:
skip = "cannot run w/o PyCrypto"
if pyasn1 is None:
skip = "cannot run w/o PyASN1"
def setUp(self):
self.transport = proto_helpers.StringTransport()
self.proto = self.klass()
self.packets = []
def secureRandom(len):
"""
Return a consistent entropy value
"""
return '\x99' * len
self.oldSecureRandom = randbytes.secureRandom
randbytes.secureRandom = secureRandom
def stubSendPacket(messageType, payload):
self.packets.append((messageType, payload))
self.proto.makeConnection(self.transport)
# we just let the kex packet go into the transport
self.proto.sendPacket = stubSendPacket
def tearDown(self):
randbytes.secureRandom = self.oldSecureRandom
self.oldSecureRandom = None
class BaseSSHTransportTestCase(TransportTestCase):
"""
Test TransportBase. It implements the non-server/client specific
parts of the SSH transport protocol.
"""
klass = MockTransportBase
def test_sendVersion(self):
"""
Test that the first thing sent over the connection is the version
string.
"""
# the other setup was done in the setup method
self.assertEquals(self.transport.value().split('\r\n', 1)[0],
"SSH-2.0-Twisted")
def test_sendPacketPlain(self):
"""
Test that plain (unencrypted, uncompressed) packets are sent
correctly. The format is::
uint32 length (including type and padding length)
byte padding length
byte type
bytes[length-padding length-2] data
bytes[padding length] padding
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
self.transport.clear()
message = ord('A')
payload = 'BCDEFG'
proto.sendPacket(message, payload)
value = self.transport.value()
self.assertEquals(value, '\x00\x00\x00\x0c\x04ABCDEFG\x99\x99\x99\x99')
def test_sendPacketEncrypted(self):
"""
Test that packets sent while encryption is enabled are sent
correctly. The whole packet should be encrypted.
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
proto.currentEncryptions = testCipher = MockCipher()
message = ord('A')
payload = 'BC'
self.transport.clear()
proto.sendPacket(message, payload)
self.assertTrue(testCipher.usedEncrypt)
value = self.transport.value()
self.assertEquals(value, '\x00\x00\x00\x08\x04ABC\x99\x99\x99\x99\x01')
def test_sendPacketCompressed(self):
"""
Test that packets sent while compression is enabled are sent
correctly. The packet type and data should be encrypted.
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
proto.outgoingCompression = MockCompression()
self.transport.clear()
proto.sendPacket(ord('A'), 'B')
value = self.transport.value()
self.assertEquals(
value,
'\x00\x00\x00\x0c\x08BA\x66\x99\x99\x99\x99\x99\x99\x99\x99')
def test_sendPacketBoth(self):
"""
Test that packets sent while compression and encryption are
enabled are sent correctly. The packet type and data should be
compressed and then the whole packet should be encrypted.
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
proto.currentEncryptions = testCipher = MockCipher()
proto.outgoingCompression = MockCompression()
message = ord('A')
payload = 'BC'
self.transport.clear()
proto.sendPacket(message, payload)
value = self.transport.value()
self.assertEquals(
value,
'\x00\x00\x00\x0e\x09CBA\x66\x99\x99\x99\x99\x99\x99\x99\x99\x99'
'\x01')
def test_getPacketPlain(self):
"""
Test that packets are retrieved correctly out of the buffer when
no encryption is enabled.
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
self.transport.clear()
proto.sendPacket(ord('A'), 'BC')
proto.buf = self.transport.value() + 'extra'
self.assertEquals(proto.getPacket(), 'ABC')
self.assertEquals(proto.buf, 'extra')
def test_getPacketEncrypted(self):
"""
Test that encrypted packets are retrieved correctly.
See test_sendPacketEncrypted.
"""
proto = MockTransportBase()
proto.sendKexInit = lambda: None # don't send packets
proto.makeConnection(self.transport)
self.transport.clear()
proto.currentEncryptions = testCipher = MockCipher()
proto.sendPacket(ord('A'), 'BCD')
value = self.transport.value()
proto.buf = value[:MockCipher.decBlockSize]
self.assertEquals(proto.getPacket(), None)
self.assertTrue(testCipher.usedDecrypt)
self.assertEquals(proto.first, '\x00\x00\x00\x0e\x09A')
proto.buf += value[MockCipher.decBlockSize:]
self.assertEquals(proto.getPacket(), 'ABCD')
self.assertEquals(proto.buf, '')
def test_getPacketCompressed(self):
"""
Test that compressed packets are retrieved correctly. See
test_sendPacketCompressed.
"""
proto = MockTransportBase()
proto.makeConnection(self.transport)
self.transport.clear()
proto.outgoingCompression = MockCompression()
proto.incomingCompression = proto.outgoingCompression
proto.sendPacket(ord('A'), 'BCD')
proto.buf = self.transport.value()
self.assertEquals(proto.getPacket(), 'ABCD')
def test_getPacketBoth(self):
"""
Test that compressed and encrypted packets are retrieved correctly.
See test_sendPacketBoth.
"""
proto = MockTransportBase()
proto.sendKexInit = lambda: None
proto.makeConnection(self.transport)
self.transport.clear()
proto.currentEncryptions = testCipher = MockCipher()
proto.outgoingCompression = MockCompression()
proto.incomingCompression = proto.outgoingCompression
proto.sendPacket(ord('A'), 'BCDEFG')
proto.buf = self.transport.value()
self.assertEquals(proto.getPacket(), 'ABCDEFG')
def test_ciphersAreValid(self):
"""
Test that all the supportedCiphers are valid.
"""
ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
iv = key = '\x00' * 16
for cipName in self.proto.supportedCiphers:
self.assertTrue(ciphers._getCipher(cipName, iv, key))
def test_sendKexInit(self):
"""
Test that the KEXINIT (key exchange initiation) message is sent
correctly. Payload::
bytes[16] cookie
string key exchange algorithms
string public key algorithms
string outgoing ciphers
string incoming ciphers
string outgoing MACs
string incoming MACs
string outgoing compressions
string incoming compressions
bool first packet follows
uint32 0
"""
value = self.transport.value().split('\r\n', 1)[1]
self.proto.buf = value
packet = self.proto.getPacket()
self.assertEquals(packet[0], chr(transport.MSG_KEXINIT))
self.assertEquals(packet[1:17], '\x99' * 16)
(kex, pubkeys, ciphers1, ciphers2, macs1, macs2, compressions1,
compressions2, languages1, languages2,
buf) = common.getNS(packet[17:], 10)
self.assertEquals(kex, ','.join(self.proto.supportedKeyExchanges))
self.assertEquals(pubkeys, ','.join(self.proto.supportedPublicKeys))
self.assertEquals(ciphers1, ','.join(self.proto.supportedCiphers))
self.assertEquals(ciphers2, ','.join(self.proto.supportedCiphers))
self.assertEquals(macs1, ','.join(self.proto.supportedMACs))
self.assertEquals(macs2, ','.join(self.proto.supportedMACs))
self.assertEquals(compressions1,
','.join(self.proto.supportedCompressions))
self.assertEquals(compressions2,
','.join(self.proto.supportedCompressions))
self.assertEquals(languages1, ','.join(self.proto.supportedLanguages))
self.assertEquals(languages2, ','.join(self.proto.supportedLanguages))
self.assertEquals(buf, '\x00' * 5)
def test_sendDebug(self):
"""
Test that debug messages are sent correctly. Payload::
bool always display
string debug message
string language
"""
self.proto.sendDebug("test", True, 'en')
self.assertEquals(
self.packets,
[(transport.MSG_DEBUG,
"\x01\x00\x00\x00\x04test\x00\x00\x00\x02en")])
def test_receiveDebug(self):
"""
Test that debug messages are received correctly. See test_sendDebug.
"""
self.proto.dispatchMessage(
transport.MSG_DEBUG,
'\x01\x00\x00\x00\x04test\x00\x00\x00\x02en')
self.assertEquals(self.proto.debugs, [(True, 'test', 'en')])
def test_sendIgnore(self):
"""
Test that ignored messages are sent correctly. Payload::
string ignored data
"""
self.proto.sendIgnore("test")
self.assertEquals(
self.packets, [(transport.MSG_IGNORE,
'\x00\x00\x00\x04test')])
def test_receiveIgnore(self):
"""
Test that ignored messages are received correctly. See
test_sendIgnore.
"""
self.proto.dispatchMessage(transport.MSG_IGNORE, 'test')
self.assertEquals(self.proto.ignoreds, ['test'])
def test_sendUnimplemented(self):
"""
Test that unimplemented messages are sent correctly. Payload::
uint32 sequence number
"""
self.proto.sendUnimplemented()
self.assertEquals(
self.packets, [(transport.MSG_UNIMPLEMENTED,
'\x00\x00\x00\x00')])
def test_receiveUnimplemented(self):
"""
Test that unimplemented messages are received correctly. See
test_sendUnimplemented.
"""
self.proto.dispatchMessage(transport.MSG_UNIMPLEMENTED,
'\x00\x00\x00\xff')
self.assertEquals(self.proto.unimplementeds, [255])
def test_sendDisconnect(self):
"""
Test that disconnection messages are sent correctly. Payload::
uint32 reason code
string reason description
string language
"""
disconnected = [False]
def stubLoseConnection():
disconnected[0] = True
self.transport.loseConnection = stubLoseConnection
self.proto.sendDisconnect(0xff, "test")
self.assertEquals(
self.packets,
[(transport.MSG_DISCONNECT,
"\x00\x00\x00\xff\x00\x00\x00\x04test\x00\x00\x00\x00")])
self.assertTrue(disconnected[0])
def test_receiveDisconnect(self):
"""
Test that disconnection messages are received correctly. See
test_sendDisconnect.
"""
disconnected = [False]
def stubLoseConnection():
disconnected[0] = True
self.transport.loseConnection = stubLoseConnection
self.proto.dispatchMessage(transport.MSG_DISCONNECT,
'\x00\x00\x00\xff\x00\x00\x00\x04test')
self.assertEquals(self.proto.errors, [(255, 'test')])
self.assertTrue(disconnected[0])
def test_dataReceived(self):
"""
Test that dataReceived parses packets and dispatches them to
ssh_* methods.
"""
kexInit = [False]
def stubKEXINIT(packet):
kexInit[0] = True
self.proto.ssh_KEXINIT = stubKEXINIT
self.proto.dataReceived(self.transport.value())
self.assertTrue(self.proto.gotVersion)
self.assertEquals(self.proto.ourVersionString,
self.proto.otherVersionString)
self.assertTrue(kexInit[0])
def test_service(self):
"""
Test that the transport can set the running service and dispatches
packets to the service's packetReceived method.
"""
service = MockService()
self.proto.setService(service)
self.assertEquals(self.proto.service, service)
self.assertTrue(service.started)
self.proto.dispatchMessage(0xff, "test")
self.assertEquals(self.packets, [(0xff, "test")])
service2 = MockService()
self.proto.setService(service2)
self.assertTrue(service2.started)
self.assertTrue(service.stopped)
self.proto.connectionLost(None)
self.assertTrue(service2.stopped)
def test_avatar(self):
"""
Test that the transport notifies the avatar of disconnections.
"""
disconnected = [False]
def logout():
disconnected[0] = True
self.proto.logoutFunction = logout
self.proto.avatar = True
self.proto.connectionLost(None)
self.assertTrue(disconnected[0])
def test_isEncrypted(self):
"""
Test that the transport accurately reflects its encrypted status.
"""
self.assertFalse(self.proto.isEncrypted('in'))
self.assertFalse(self.proto.isEncrypted('out'))
self.assertFalse(self.proto.isEncrypted('both'))
self.proto.currentEncryptions = MockCipher()
self.assertTrue(self.proto.isEncrypted('in'))
self.assertTrue(self.proto.isEncrypted('out'))
self.assertTrue(self.proto.isEncrypted('both'))
self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
'none', 'none')
self.assertFalse(self.proto.isEncrypted('in'))
self.assertFalse(self.proto.isEncrypted('out'))
self.assertFalse(self.proto.isEncrypted('both'))
self.assertRaises(TypeError, self.proto.isEncrypted, 'bad')
def test_isVerified(self):
"""
Test that the transport accurately reflects its verified status.
"""
self.assertFalse(self.proto.isVerified('in'))
self.assertFalse(self.proto.isVerified('out'))
self.assertFalse(self.proto.isVerified('both'))
self.proto.currentEncryptions = MockCipher()
self.assertTrue(self.proto.isVerified('in'))
self.assertTrue(self.proto.isVerified('out'))
self.assertTrue(self.proto.isVerified('both'))
self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
'none', 'none')
self.assertFalse(self.proto.isVerified('in'))
self.assertFalse(self.proto.isVerified('out'))
self.assertFalse(self.proto.isVerified('both'))
self.assertRaises(TypeError, self.proto.isVerified, 'bad')
def test_loseConnection(self):
"""
Test that loseConnection sends a disconnect message and closes the
connection.
"""
disconnected = [False]
def stubLoseConnection():
disconnected[0] = True
self.transport.loseConnection = stubLoseConnection
self.proto.loseConnection()
self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT)
self.assertEquals(self.packets[0][1][3],
chr(transport.DISCONNECT_CONNECTION_LOST))
def test_badVersion(self):
"""
Test that the transport disconnects when it receives a bad version.
"""
def testBad(version):
self.packets = []
self.proto.gotVersion = False
disconnected = [False]
def stubLoseConnection():
disconnected[0] = True
self.transport.loseConnection = stubLoseConnection
for c in version + '\r\n':
self.proto.dataReceived(c)
self.assertTrue(disconnected[0])
self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT)
self.assertEquals(
self.packets[0][1][3],
chr(transport.DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED))
testBad('SSH-1.5-OpenSSH')
testBad('SSH-3.0-Twisted')
testBad('GET / HTTP/1.1')
def test_dataBeforeVersion(self):
"""
Test that the transport ignores data sent before the version string.
"""
proto = MockTransportBase()
proto.makeConnection(proto_helpers.StringTransport())
data = ("""here's some stuff beforehand
here's some other stuff
""" + proto.ourVersionString + "\r\n")
[proto.dataReceived(c) for c in data]
self.assertTrue(proto.gotVersion)
self.assertEquals(proto.otherVersionString, proto.ourVersionString)
def test_compatabilityVersion(self):
"""
Test that the transport treats the compatibility version (1.99)
as equivalent to version 2.0.
"""
proto = MockTransportBase()
proto.makeConnection(proto_helpers.StringTransport())
proto.dataReceived("SSH-1.99-OpenSSH\n")
self.assertTrue(proto.gotVersion)
self.assertEquals(proto.otherVersionString, "SSH-1.99-OpenSSH")
def test_supportedVersionsAreAllowed(self):
"""
If an unusual SSH version is received and is included in
C{supportedVersions}, an unsupported version error is not emitted.
"""
proto = MockTransportBase()
proto.supportedVersions = ("9.99", )
proto.makeConnection(proto_helpers.StringTransport())
proto.dataReceived("SSH-9.99-OpenSSH\n")
self.assertFalse(proto.gotUnsupportedVersion)
def test_unsupportedVersionsCallUnsupportedVersionReceived(self):
"""
If an unusual SSH version is received and is not included in
C{supportedVersions}, an unsupported version error is emitted.
"""
proto = MockTransportBase()
proto.supportedVersions = ("2.0", )
proto.makeConnection(proto_helpers.StringTransport())
proto.dataReceived("SSH-9.99-OpenSSH\n")
self.assertEquals("9.99", proto.gotUnsupportedVersion)
def test_badPackets(self):
"""
Test that the transport disconnects with an error when it receives
bad packets.
"""
def testBad(packet, error=transport.DISCONNECT_PROTOCOL_ERROR):
self.packets = []
self.proto.buf = packet
self.assertEquals(self.proto.getPacket(), None)
self.assertEquals(len(self.packets), 1)
self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT)
self.assertEquals(self.packets[0][1][3], chr(error))
testBad('\xff' * 8) # big packet
testBad('\x00\x00\x00\x05\x00BCDE') # length not modulo blocksize
oldEncryptions = self.proto.currentEncryptions
self.proto.currentEncryptions = MockCipher()
testBad('\x00\x00\x00\x08\x06AB123456', # bad MAC
transport.DISCONNECT_MAC_ERROR)
self.proto.currentEncryptions.decrypt = lambda x: x[:-1]
testBad('\x00\x00\x00\x08\x06BCDEFGHIJK') # bad decryption
self.proto.currentEncryptions = oldEncryptions
self.proto.incomingCompression = MockCompression()
def stubDecompress(payload):
raise Exception('bad compression')
self.proto.incomingCompression.decompress = stubDecompress
testBad('\x00\x00\x00\x04\x00BCDE', # bad decompression
transport.DISCONNECT_COMPRESSION_ERROR)
self.flushLoggedErrors()
def test_unimplementedPackets(self):
"""
Test that unimplemented packet types cause MSG_UNIMPLEMENTED packets
to be sent.
"""
seqnum = self.proto.incomingPacketSequence
def checkUnimplemented(seqnum=seqnum):
self.assertEquals(self.packets[0][0],
transport.MSG_UNIMPLEMENTED)
self.assertEquals(self.packets[0][1][3], chr(seqnum))
self.proto.packets = []
seqnum += 1
self.proto.dispatchMessage(40, '')
checkUnimplemented()
transport.messages[41] = 'MSG_fiction'
self.proto.dispatchMessage(41, '')
checkUnimplemented()
self.proto.dispatchMessage(60, '')
checkUnimplemented()
self.proto.setService(MockService())
self.proto.dispatchMessage(70, '')
checkUnimplemented()
self.proto.dispatchMessage(71, '')
checkUnimplemented()
def test_getKey(self):
"""
Test that _getKey generates the correct keys.
"""
self.proto.sessionID = 'EF'
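# Mirrors the RFC 4253 key derivation scheme: K1 = HASH(K || H || tag || session_id)
# and K2 = HASH(K || H || K1), with 'AB' standing in for the shared secret and
# 'CD' for the exchange hash.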
k1 = sha1('AB' + 'CD' + 'K' + self.proto.sessionID).digest()
k2 = sha1('ABCD' + k1).digest()
self.assertEquals(self.proto._getKey('K', 'AB', 'CD'), k1 + k2)
def test_multipleClasses(self):
"""
Test that multiple instances have distinct states.
"""
proto = self.proto
proto.dataReceived(self.transport.value())
proto.currentEncryptions = MockCipher()
proto.outgoingCompression = MockCompression()
proto.incomingCompression = MockCompression()
proto.setService(MockService())
proto2 = MockTransportBase()
proto2.makeConnection(proto_helpers.StringTransport())
proto2.sendIgnore('')
self.failIfEquals(proto.gotVersion, proto2.gotVersion)
self.failIfEquals(proto.transport, proto2.transport)
self.failIfEquals(proto.outgoingPacketSequence,
proto2.outgoingPacketSequence)
self.failIfEquals(proto.incomingPacketSequence,
proto2.incomingPacketSequence)
self.failIfEquals(proto.currentEncryptions,
proto2.currentEncryptions)
self.failIfEquals(proto.service, proto2.service)
class ServerAndClientSSHTransportBaseCase:
"""
Tests that need to be run on both the server and the client.
"""
def checkDisconnected(self, kind=None):
"""
Helper function to check if the transport disconnected.
"""
if kind is None:
kind = transport.DISCONNECT_PROTOCOL_ERROR
self.assertEquals(self.packets[-1][0], transport.MSG_DISCONNECT)
self.assertEquals(self.packets[-1][1][3], chr(kind))
def connectModifiedProtocol(self, protoModification,
kind=None):
"""
Helper function to connect a modified protocol to the test protocol
and test for disconnection.
"""
if kind is None:
kind = transport.DISCONNECT_KEY_EXCHANGE_FAILED
proto2 = self.klass()
protoModification(proto2)
proto2.makeConnection(proto_helpers.StringTransport())
self.proto.dataReceived(proto2.transport.value())
if kind:
self.checkDisconnected(kind)
return proto2
def test_disconnectIfCantMatchKex(self):
"""
Test that the transport disconnects if it can't match the key
exchange
"""
def blankKeyExchanges(proto2):
proto2.supportedKeyExchanges = []
self.connectModifiedProtocol(blankKeyExchanges)
def test_disconnectIfCantMatchKeyAlg(self):
"""
Like test_disconnectIfCantMatchKex, but for the key algorithm.
"""
def blankPublicKeys(proto2):
proto2.supportedPublicKeys = []
self.connectModifiedProtocol(blankPublicKeys)
def test_disconnectIfCantMatchCompression(self):
"""
Like test_disconnectIfCantMatchKex, but for the compression.
"""
def blankCompressions(proto2):
proto2.supportedCompressions = []
self.connectModifiedProtocol(blankCompressions)
def test_disconnectIfCantMatchCipher(self):
"""
Like test_disconnectIfCantMatchKex, but for the encryption.
"""
def blankCiphers(proto2):
proto2.supportedCiphers = []
self.connectModifiedProtocol(blankCiphers)
def test_disconnectIfCantMatchMAC(self):
"""
Like test_disconnectIfCantMatchKex, but for the MAC.
"""
def blankMACs(proto2):
proto2.supportedMACs = []
self.connectModifiedProtocol(blankMACs)
class ServerSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
TransportTestCase):
"""
Tests for the SSHServerTransport.
"""
klass = transport.SSHServerTransport
def setUp(self):
TransportTestCase.setUp(self)
self.proto.factory = MockFactory()
self.proto.factory.startFactory()
def tearDown(self):
TransportTestCase.tearDown(self)
self.proto.factory.stopFactory()
del self.proto.factory
def test_KEXINIT(self):
"""
Test that receiving a KEXINIT packet sets up the correct values on the
server.
"""
self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
'\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
'\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
'\tnone,zlib\x00\x00\x00\tnone,zlib\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99')
self.assertEquals(self.proto.kexAlg,
'diffie-hellman-group1-sha1')
self.assertEquals(self.proto.keyAlg,
'ssh-dss')
self.assertEquals(self.proto.outgoingCompressionType,
'none')
self.assertEquals(self.proto.incomingCompressionType,
'none')
ne = self.proto.nextEncryptions
self.assertEquals(ne.outCipType, 'aes128-ctr')
self.assertEquals(ne.inCipType, 'aes128-ctr')
self.assertEquals(ne.outMACType, 'hmac-md5')
self.assertEquals(ne.inMACType, 'hmac-md5')
def test_ignoreGuessPacketKex(self):
"""
The client is allowed to send a guessed key exchange packet
after it sends the KEXINIT packet. However, if the key exchanges
do not match, that guess packet must be ignored. This tests that
the packet is ignored in the case of the key exchange method not
matching.
"""
kexInitPacket = '\x00' * 16 + (
''.join([common.NS(x) for x in
[','.join(y) for y in
[self.proto.supportedKeyExchanges[::-1],
self.proto.supportedPublicKeys,
self.proto.supportedCiphers,
self.proto.supportedCiphers,
self.proto.supportedMACs,
self.proto.supportedMACs,
self.proto.supportedCompressions,
self.proto.supportedCompressions,
self.proto.supportedLanguages,
self.proto.supportedLanguages]]])) + (
'\xff\x00\x00\x00\x00')
self.proto.ssh_KEXINIT(kexInitPacket)
self.assertTrue(self.proto.ignoreNextPacket)
self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
self.assertTrue(self.proto.ignoreNextPacket)
self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
self.assertFalse(self.proto.ignoreNextPacket)
self.assertEquals(self.packets, [])
self.proto.ignoreNextPacket = True
self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
self.assertFalse(self.proto.ignoreNextPacket)
self.assertEquals(self.packets, [])
def test_ignoreGuessPacketKey(self):
"""
Like test_ignoreGuessPacketKex, but for an incorrectly guessed
public key format.
"""
kexInitPacket = '\x00' * 16 + (
''.join([common.NS(x) for x in
[','.join(y) for y in
[self.proto.supportedKeyExchanges,
self.proto.supportedPublicKeys[::-1],
self.proto.supportedCiphers,
self.proto.supportedCiphers,
self.proto.supportedMACs,
self.proto.supportedMACs,
self.proto.supportedCompressions,
self.proto.supportedCompressions,
self.proto.supportedLanguages,
self.proto.supportedLanguages]]])) + (
'\xff\x00\x00\x00\x00')
self.proto.ssh_KEXINIT(kexInitPacket)
self.assertTrue(self.proto.ignoreNextPacket)
self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
self.assertTrue(self.proto.ignoreNextPacket)
self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
self.assertFalse(self.proto.ignoreNextPacket)
self.assertEquals(self.packets, [])
self.proto.ignoreNextPacket = True
self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
self.assertFalse(self.proto.ignoreNextPacket)
self.assertEquals(self.packets, [])
def test_KEXDH_INIT(self):
"""
Test that the KEXDH_INIT packet causes the server to send a
KEXDH_REPLY with the server's public key and a signature.
"""
self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
self.proto.supportedPublicKeys = ['ssh-rsa']
self.proto.dataReceived(self.transport.value())
e = pow(transport.DH_GENERATOR, 5000,
transport.DH_PRIME)
self.proto.ssh_KEX_DH_GEX_REQUEST_OLD(common.MP(e))
y = common.getMP('\x00\x00\x00\x40' + '\x99' * 64)[0]
f = common._MPpow(transport.DH_GENERATOR, y, transport.DH_PRIME)
sharedSecret = common._MPpow(e, y, transport.DH_PRIME)
h = sha1()
h.update(common.NS(self.proto.ourVersionString) * 2)
h.update(common.NS(self.proto.ourKexInitPayload) * 2)
h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
h.update(common.MP(e))
h.update(f)
h.update(sharedSecret)
exchangeHash = h.digest()
signature = self.proto.factory.privateKeys['ssh-rsa'].sign(
exchangeHash)
self.assertEquals(
self.packets,
[(transport.MSG_KEXDH_REPLY,
common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())
+ f + common.NS(signature)),
(transport.MSG_NEWKEYS, '')])
def test_KEX_DH_GEX_REQUEST_OLD(self):
"""
Test that the KEX_DH_GEX_REQUEST_OLD message causes the server
to reply with a KEX_DH_GEX_GROUP message with the correct
Diffie-Hellman group.
"""
self.proto.supportedKeyExchanges = [
'diffie-hellman-group-exchange-sha1']
self.proto.supportedPublicKeys = ['ssh-rsa']
self.proto.dataReceived(self.transport.value())
self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x04\x00')
self.assertEquals(
self.packets,
[(transport.MSG_KEX_DH_GEX_GROUP,
common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x02')])
self.assertEquals(self.proto.g, 2)
self.assertEquals(self.proto.p, transport.DH_PRIME)
def test_KEX_DH_GEX_REQUEST_OLD_badKexAlg(self):
"""
Test that if the server receives a KEX_DH_GEX_REQUEST_OLD message
and the key exchange algorithm is not 'diffie-hellman-group1-sha1' or
'diffie-hellman-group-exchange-sha1', we raise a ConchError.
"""
self.proto.kexAlg = None
self.assertRaises(ConchError, self.proto.ssh_KEX_DH_GEX_REQUEST_OLD,
None)
def test_KEX_DH_GEX_REQUEST(self):
"""
Test that the KEX_DH_GEX_REQUEST message causes the server to reply
with a KEX_DH_GEX_GROUP message with the correct Diffie-Hellman
group.
"""
self.proto.supportedKeyExchanges = [
'diffie-hellman-group-exchange-sha1']
self.proto.supportedPublicKeys = ['ssh-rsa']
self.proto.dataReceived(self.transport.value())
self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x04\x00\x00\x00\x08\x00' +
'\x00\x00\x0c\x00')
self.assertEquals(
self.packets,
[(transport.MSG_KEX_DH_GEX_GROUP,
common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x03')])
self.assertEquals(self.proto.g, 3)
self.assertEquals(self.proto.p, transport.DH_PRIME)
def test_KEX_DH_GEX_INIT_after_REQUEST(self):
"""
Test that the KEX_DH_GEX_INIT message after the client sends
KEX_DH_GEX_REQUEST causes the server to send a KEX_DH_GEX_REPLY message
with a public key and signature.
"""
self.test_KEX_DH_GEX_REQUEST()
e = pow(self.proto.g, 3, self.proto.p)
y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
f = common._MPpow(self.proto.g, y, self.proto.p)
sharedSecret = common._MPpow(e, y, self.proto.p)
h = sha1()
h.update(common.NS(self.proto.ourVersionString) * 2)
h.update(common.NS(self.proto.ourKexInitPayload) * 2)
h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
h.update('\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x0c\x00')
h.update(common.MP(self.proto.p))
h.update(common.MP(self.proto.g))
h.update(common.MP(e))
h.update(f)
h.update(sharedSecret)
exchangeHash = h.digest()
self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
self.assertEquals(
self.packets[1],
(transport.MSG_KEX_DH_GEX_REPLY,
common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
exchangeHash))))
def test_KEX_DH_GEX_INIT_after_REQUEST_OLD(self):
"""
Test that the KEX_DH_GEX_INIT message after the client sends
KEX_DH_GEX_REQUEST_OLD causes the server to send a KEX_DH_GEX_REPLY
message with a public key and signature.
"""
self.test_KEX_DH_GEX_REQUEST_OLD()
e = pow(self.proto.g, 3, self.proto.p)
y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
f = common._MPpow(self.proto.g, y, self.proto.p)
sharedSecret = common._MPpow(e, y, self.proto.p)
h = sha1()
h.update(common.NS(self.proto.ourVersionString) * 2)
h.update(common.NS(self.proto.ourKexInitPayload) * 2)
h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
h.update('\x00\x00\x04\x00')
h.update(common.MP(self.proto.p))
h.update(common.MP(self.proto.g))
h.update(common.MP(e))
h.update(f)
h.update(sharedSecret)
exchangeHash = h.digest()
self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
self.assertEquals(
self.packets[1:],
[(transport.MSG_KEX_DH_GEX_REPLY,
common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
exchangeHash))),
(transport.MSG_NEWKEYS, '')])
def test_keySetup(self):
"""
Test that _keySetup sets up the next encryption keys.
"""
self.proto.nextEncryptions = MockCipher()
self.proto._keySetup('AB', 'CD')
self.assertEquals(self.proto.sessionID, 'CD')
self.proto._keySetup('AB', 'EF')
self.assertEquals(self.proto.sessionID, 'CD')
self.assertEquals(self.packets[-1], (transport.MSG_NEWKEYS, ''))
newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
self.assertEquals(
self.proto.nextEncryptions.keys,
(newKeys[1], newKeys[3], newKeys[0], newKeys[2], newKeys[5],
newKeys[4]))
def test_NEWKEYS(self):
"""
Test that NEWKEYS transitions the keys in nextEncryptions to
currentEncryptions.
"""
self.test_KEXINIT()
self.proto.nextEncryptions = transport.SSHCiphers('none', 'none',
'none', 'none')
self.proto.ssh_NEWKEYS('')
self.assertIdentical(self.proto.currentEncryptions,
self.proto.nextEncryptions)
self.assertIdentical(self.proto.outgoingCompression, None)
self.assertIdentical(self.proto.incomingCompression, None)
self.proto.outgoingCompressionType = 'zlib'
self.proto.ssh_NEWKEYS('')
self.failIfIdentical(self.proto.outgoingCompression, None)
self.proto.incomingCompressionType = 'zlib'
self.proto.ssh_NEWKEYS('')
self.failIfIdentical(self.proto.incomingCompression, None)
def test_SERVICE_REQUEST(self):
"""
Test that the SERVICE_REQUEST message requests and starts a
service.
"""
self.proto.ssh_SERVICE_REQUEST(common.NS('ssh-userauth'))
self.assertEquals(self.packets, [(transport.MSG_SERVICE_ACCEPT,
common.NS('ssh-userauth'))])
self.assertEquals(self.proto.service.name, 'MockService')
def test_disconnectNEWKEYSData(self):
"""
Test that NEWKEYS disconnects if it receives data.
"""
self.proto.ssh_NEWKEYS("bad packet")
self.checkDisconnected()
def test_disconnectSERVICE_REQUESTBadService(self):
"""
Test that SERVICE_REQUEST disconnects if an unknown service is
requested.
"""
self.proto.ssh_SERVICE_REQUEST(common.NS('no service'))
self.checkDisconnected(transport.DISCONNECT_SERVICE_NOT_AVAILABLE)
class ClientSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
TransportTestCase):
"""
Tests for SSHClientTransport.
"""
klass = transport.SSHClientTransport
def test_KEXINIT(self):
"""
Test that receiving a KEXINIT packet sets up the correct values on the
client. The way algorithms are picked is that the first item in the
client's list that is also in the server's list is chosen.
"""
self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
'\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
'\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
'\tzlib,none\x00\x00\x00\tzlib,none\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99')
self.assertEquals(self.proto.kexAlg,
'diffie-hellman-group-exchange-sha1')
self.assertEquals(self.proto.keyAlg,
'ssh-rsa')
self.assertEquals(self.proto.outgoingCompressionType,
'none')
self.assertEquals(self.proto.incomingCompressionType,
'none')
ne = self.proto.nextEncryptions
self.assertEquals(ne.outCipType, 'aes256-ctr')
self.assertEquals(ne.inCipType, 'aes256-ctr')
self.assertEquals(ne.outMACType, 'hmac-sha1')
self.assertEquals(ne.inMACType, 'hmac-sha1')
def verifyHostKey(self, pubKey, fingerprint):
"""
Mock version of SSHClientTransport.verifyHostKey.
"""
self.calledVerifyHostKey = True
self.assertEquals(pubKey, self.blob)
self.assertEquals(fingerprint.replace(':', ''),
md5(pubKey).hexdigest())
return defer.succeed(True)
def setUp(self):
TransportTestCase.setUp(self)
self.blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
self.privObj = keys.Key.fromString(keydata.privateRSA_openssh)
self.calledVerifyHostKey = False
self.proto.verifyHostKey = self.verifyHostKey
def test_notImplementedClientMethods(self):
"""
verifyHostKey() should return a Deferred which fails with a
NotImplementedError exception. connectionSecure() should raise
NotImplementedError().
"""
self.assertRaises(NotImplementedError, self.klass().connectionSecure)
def _checkRaises(f):
f.trap(NotImplementedError)
d = self.klass().verifyHostKey(None, None)
return d.addCallback(self.fail).addErrback(_checkRaises)
def test_KEXINIT_groupexchange(self):
"""
Test that a KEXINIT packet with a group-exchange key exchange results
in a KEX_DH_GEX_REQUEST_OLD message.
"""
self.proto.supportedKeyExchanges = [
'diffie-hellman-group-exchange-sha1']
self.proto.dataReceived(self.transport.value())
self.assertEquals(self.packets, [(transport.MSG_KEX_DH_GEX_REQUEST_OLD,
'\x00\x00\x08\x00')])
def test_KEXINIT_group1(self):
"""
Like test_KEXINIT_groupexchange, but for the group-1 key exchange.
"""
self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
self.proto.dataReceived(self.transport.value())
self.assertEquals(common.MP(self.proto.x)[5:], '\x99' * 64)
self.assertEquals(self.packets,
[(transport.MSG_KEXDH_INIT, self.proto.e)])
def test_KEXINIT_badKexAlg(self):
"""
Test that the client raises a ConchError if it receives a
KEXINIT message but doesn't have a key exchange algorithm that we
understand.
"""
self.proto.supportedKeyExchanges = ['diffie-hellman-group2-sha1']
data = self.transport.value().replace('group1', 'group2')
self.assertRaises(ConchError, self.proto.dataReceived, data)
def test_KEXDH_REPLY(self):
"""
Test that the KEXDH_REPLY message verifies the server.
"""
self.test_KEXINIT_group1()
sharedSecret = common._MPpow(transport.DH_GENERATOR,
self.proto.x, transport.DH_PRIME)
h = sha1()
h.update(common.NS(self.proto.ourVersionString) * 2)
h.update(common.NS(self.proto.ourKexInitPayload) * 2)
h.update(common.NS(self.blob))
h.update(self.proto.e)
h.update('\x00\x00\x00\x01\x02') # f
h.update(sharedSecret)
exchangeHash = h.digest()
def _cbTestKEXDH_REPLY(value):
self.assertIdentical(value, None)
self.assertEquals(self.calledVerifyHostKey, True)
self.assertEquals(self.proto.sessionID, exchangeHash)
signature = self.privObj.sign(exchangeHash)
d = self.proto.ssh_KEX_DH_GEX_GROUP(
(common.NS(self.blob) + '\x00\x00\x00\x01\x02' +
common.NS(signature)))
d.addCallback(_cbTestKEXDH_REPLY)
return d
def test_KEX_DH_GEX_GROUP(self):
"""
Test that the KEX_DH_GEX_GROUP message results in a
KEX_DH_GEX_INIT message with the client's Diffie-Hellman public key.
"""
self.test_KEXINIT_groupexchange()
self.proto.ssh_KEX_DH_GEX_GROUP(
'\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
self.assertEquals(self.proto.p, 15)
self.assertEquals(self.proto.g, 2)
self.assertEquals(common.MP(self.proto.x)[5:], '\x99' * 40)
self.assertEquals(self.proto.e,
common.MP(pow(2, self.proto.x, 15)))
self.assertEquals(self.packets[1:], [(transport.MSG_KEX_DH_GEX_INIT,
self.proto.e)])
def test_KEX_DH_GEX_REPLY(self):
"""
Test that the KEX_DH_GEX_REPLY message results in a verified
server.
"""
self.test_KEX_DH_GEX_GROUP()
sharedSecret = common._MPpow(3, self.proto.x, self.proto.p)
h = sha1()
h.update(common.NS(self.proto.ourVersionString) * 2)
h.update(common.NS(self.proto.ourKexInitPayload) * 2)
h.update(common.NS(self.blob))
h.update('\x00\x00\x08\x00\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
h.update(self.proto.e)
h.update('\x00\x00\x00\x01\x03') # f
h.update(sharedSecret)
exchangeHash = h.digest()
def _cbTestKEX_DH_GEX_REPLY(value):
self.assertIdentical(value, None)
self.assertEquals(self.calledVerifyHostKey, True)
self.assertEquals(self.proto.sessionID, exchangeHash)
signature = self.privObj.sign(exchangeHash)
d = self.proto.ssh_KEX_DH_GEX_REPLY(
common.NS(self.blob) +
'\x00\x00\x00\x01\x03' +
common.NS(signature))
d.addCallback(_cbTestKEX_DH_GEX_REPLY)
return d
def test_keySetup(self):
"""
Test that _keySetup sets up the next encryption keys.
"""
self.proto.nextEncryptions = MockCipher()
self.proto._keySetup('AB', 'CD')
self.assertEquals(self.proto.sessionID, 'CD')
self.proto._keySetup('AB', 'EF')
self.assertEquals(self.proto.sessionID, 'CD')
self.assertEquals(self.packets[-1], (transport.MSG_NEWKEYS, ''))
newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
self.assertEquals(self.proto.nextEncryptions.keys,
(newKeys[0], newKeys[2], newKeys[1], newKeys[3],
newKeys[4], newKeys[5]))
def test_NEWKEYS(self):
"""
Test that NEWKEYS transitions the keys from nextEncryptions to
currentEncryptions.
"""
self.test_KEXINIT()
secure = [False]
def stubConnectionSecure():
secure[0] = True
self.proto.connectionSecure = stubConnectionSecure
self.proto.nextEncryptions = transport.SSHCiphers('none', 'none',
'none', 'none')
self.proto.ssh_NEWKEYS('')
self.failIfIdentical(self.proto.currentEncryptions,
self.proto.nextEncryptions)
self.proto.nextEncryptions = MockCipher()
self.proto._keySetup('AB', 'EF')
self.assertIdentical(self.proto.outgoingCompression, None)
self.assertIdentical(self.proto.incomingCompression, None)
self.assertIdentical(self.proto.currentEncryptions,
self.proto.nextEncryptions)
self.assertTrue(secure[0])
self.proto.outgoingCompressionType = 'zlib'
self.proto.ssh_NEWKEYS('')
self.failIfIdentical(self.proto.outgoingCompression, None)
self.proto.incomingCompressionType = 'zlib'
self.proto.ssh_NEWKEYS('')
self.failIfIdentical(self.proto.incomingCompression, None)
def test_SERVICE_ACCEPT(self):
"""
Test that the SERVICE_ACCEPT packet starts the requested service.
"""
self.proto.instance = MockService()
self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x0bMockService')
self.assertTrue(self.proto.instance.started)
def test_requestService(self):
"""
Test that requesting a service sends a SERVICE_REQUEST packet.
"""
self.proto.requestService(MockService())
self.assertEquals(self.packets, [(transport.MSG_SERVICE_REQUEST,
'\x00\x00\x00\x0bMockService')])
def test_disconnectKEXDH_REPLYBadSignature(self):
"""
Test that KEXDH_REPLY disconnects if the signature is bad.
"""
self.test_KEXDH_REPLY()
self.proto._continueKEXDH_REPLY(None, self.blob, 3, "bad signature")
self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
def test_disconnectGEX_REPLYBadSignature(self):
"""
Like test_disconnectKEXDH_REPLYBadSignature, but for DH_GEX_REPLY.
"""
self.test_KEX_DH_GEX_REPLY()
self.proto._continueGEX_REPLY(None, self.blob, 3, "bad signature")
self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
def test_disconnectNEWKEYSData(self):
"""
Test that NEWKEYS disconnects if it receives data.
"""
self.proto.ssh_NEWKEYS("bad packet")
self.checkDisconnected()
def test_disconnectSERVICE_ACCEPT(self):
"""
Test that SERVICE_ACCEPT disconnects if the accepted protocol is
different from the asked-for protocol.
"""
self.proto.instance = MockService()
self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x03bad')
self.checkDisconnected()
class SSHCiphersTestCase(unittest.TestCase):
"""
Tests for the SSHCiphers helper class.
"""
if Crypto is None:
skip = "cannot run w/o PyCrypto"
if pyasn1 is None:
skip = "cannot run w/o PyASN1"
def test_init(self):
"""
Test that the initializer sets up the SSHCiphers object.
"""
ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
self.assertEquals(ciphers.outCipType, 'A')
self.assertEquals(ciphers.inCipType, 'B')
self.assertEquals(ciphers.outMACType, 'C')
self.assertEquals(ciphers.inMACType, 'D')
def test_getCipher(self):
"""
Test that the _getCipher method returns the correct cipher.
"""
ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
iv = key = '\x00' * 16
for cipName, (modName, keySize, counter) in ciphers.cipherMap.items():
cip = ciphers._getCipher(cipName, iv, key)
if cipName == 'none':
self.assertIsInstance(cip, transport._DummyCipher)
else:
self.assertTrue(str(cip).startswith('<' + modName))
def test_getMAC(self):
"""
Test that the _getMAC method returns the correct MAC.
"""
ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
key = '\x00' * 64
for macName, mac in ciphers.macMap.items():
mod = ciphers._getMAC(macName, key)
if macName == 'none':
self.assertIdentical(mac, None)
else:
self.assertEquals(mod[0], mac)
self.assertEquals(mod[1],
Crypto.Cipher.XOR.new('\x36').encrypt(key))
self.assertEquals(mod[2],
Crypto.Cipher.XOR.new('\x5c').encrypt(key))
self.assertEquals(mod[3], len(mod[0]().digest()))
def test_setKeysCiphers(self):
"""
Test that setKeys sets up the ciphers.
"""
key = '\x00' * 64
cipherItems = transport.SSHCiphers.cipherMap.items()
for cipName, (modName, keySize, counter) in cipherItems:
encCipher = transport.SSHCiphers(cipName, 'none', 'none', 'none')
decCipher = transport.SSHCiphers('none', cipName, 'none', 'none')
cip = encCipher._getCipher(cipName, key, key)
bs = cip.block_size
encCipher.setKeys(key, key, '', '', '', '')
decCipher.setKeys('', '', key, key, '', '')
self.assertEquals(encCipher.encBlockSize, bs)
self.assertEquals(decCipher.decBlockSize, bs)
enc = cip.encrypt(key[:bs])
enc2 = cip.encrypt(key[:bs])
if counter:
self.failIfEquals(enc, enc2)
self.assertEquals(encCipher.encrypt(key[:bs]), enc)
self.assertEquals(encCipher.encrypt(key[:bs]), enc2)
self.assertEquals(decCipher.decrypt(enc), key[:bs])
self.assertEquals(decCipher.decrypt(enc2), key[:bs])
def test_setKeysMACs(self):
"""
Test that setKeys sets up the MACs.
"""
key = '\x00' * 64
for macName, mod in transport.SSHCiphers.macMap.items():
outMac = transport.SSHCiphers('none', 'none', macName, 'none')
inMac = transport.SSHCiphers('none', 'none', 'none', macName)
outMac.setKeys('', '', '', '', key, '')
inMac.setKeys('', '', '', '', '', key)
if mod:
ds = mod().digest_size
else:
ds = 0
self.assertEquals(inMac.verifyDigestSize, ds)
if mod:
mod, i, o, ds = outMac._getMAC(macName, key)
seqid = 0
data = key
packet = '\x00' * 4 + key
if mod:
mac = mod(o + mod(i + packet).digest()).digest()
else:
mac = ''
self.assertEquals(outMac.makeMAC(seqid, data), mac)
self.assertTrue(inMac.verify(seqid, data, mac))
class CounterTestCase(unittest.TestCase):
"""
Tests for the _Counter helper class.
"""
if Crypto is None:
skip = "cannot run w/o PyCrypto"
if pyasn1 is None:
skip = "cannot run w/o PyASN1"
def test_init(self):
"""
Test that the counter is initialized correctly.
"""
counter = transport._Counter('\x00' * 8 + '\xff' * 8, 8)
self.assertEquals(counter.blockSize, 8)
self.assertEquals(counter.count.tostring(), '\x00' * 8)
def test_count(self):
"""
Test that the counter counts incrementally and wraps at the top.
"""
counter = transport._Counter('\x00', 1)
self.assertEquals(counter(), '\x01')
self.assertEquals(counter(), '\x02')
[counter() for i in range(252)]
self.assertEquals(counter(), '\xff')
self.assertEquals(counter(), '\x00')
class TransportLoopbackTestCase(unittest.TestCase):
"""
Test the server transport and client transport against each other,
"""
if Crypto is None:
skip = "cannot run w/o PyCrypto"
if pyasn1 is None:
skip = "cannot run w/o PyASN1"
def _runClientServer(self, mod):
"""
Run an async client and server, modifying each using the mod function
provided. Returns a Deferred called back when both Protocols have
disconnected.
@type mod: C{func}
@rtype: C{defer.Deferred}
"""
factory = MockFactory()
server = transport.SSHServerTransport()
server.factory = factory
factory.startFactory()
server.errors = []
server.receiveError = lambda code, desc: server.errors.append((
code, desc))
client = transport.SSHClientTransport()
client.verifyHostKey = lambda x, y: defer.succeed(None)
client.errors = []
client.receiveError = lambda code, desc: client.errors.append((
code, desc))
client.connectionSecure = lambda: client.loseConnection()
server = mod(server)
client = mod(client)
def check(ignored, server, client):
name = repr([server.supportedCiphers[0],
server.supportedMACs[0],
server.supportedKeyExchanges[0],
server.supportedCompressions[0]])
self.assertEquals(client.errors, [])
self.assertEquals(server.errors, [(
transport.DISCONNECT_CONNECTION_LOST,
"user closed connection")])
if server.supportedCiphers[0] == 'none':
self.assertFalse(server.isEncrypted(), name)
self.assertFalse(client.isEncrypted(), name)
else:
self.assertTrue(server.isEncrypted(), name)
self.assertTrue(client.isEncrypted(), name)
if server.supportedMACs[0] == 'none':
self.assertFalse(server.isVerified(), name)
self.assertFalse(client.isVerified(), name)
else:
self.assertTrue(server.isVerified(), name)
self.assertTrue(client.isVerified(), name)
d = loopback.loopbackAsync(server, client)
d.addCallback(check, server, client)
return d
def test_ciphers(self):
"""
Test that the client and server play nicely together, in all
the various combinations of ciphers.
"""
deferreds = []
for cipher in transport.SSHTransportBase.supportedCiphers + ['none']:
def setCipher(proto):
proto.supportedCiphers = [cipher]
return proto
deferreds.append(self._runClientServer(setCipher))
return defer.DeferredList(deferreds, fireOnOneErrback=True)
def test_macs(self):
"""
Like test_ciphers, but for the various MACs.
"""
deferreds = []
for mac in transport.SSHTransportBase.supportedMACs + ['none']:
def setMAC(proto):
proto.supportedMACs = [mac]
return proto
deferreds.append(self._runClientServer(setMAC))
return defer.DeferredList(deferreds, fireOnOneErrback=True)
def test_keyexchanges(self):
"""
Like test_ciphers, but for the various key exchanges.
"""
deferreds = []
for kex in transport.SSHTransportBase.supportedKeyExchanges:
def setKeyExchange(proto):
proto.supportedKeyExchanges = [kex]
return proto
deferreds.append(self._runClientServer(setKeyExchange))
return defer.DeferredList(deferreds, fireOnOneErrback=True)
def test_compressions(self):
"""
Like test_ciphers, but for the various compressions.
"""
deferreds = []
for compression in transport.SSHTransportBase.supportedCompressions:
def setCompression(proto):
proto.supportedCompressions = [compression]
return proto
deferreds.append(self._runClientServer(setCompression))
return defer.DeferredList(deferreds, fireOnOneErrback=True)
class RandomNumberTestCase(unittest.TestCase):
"""
Tests for the random number generator L{_getRandomNumber} and private
key generator L{_generateX}.
"""
skip = dependencySkip
def test_usesSuppliedRandomFunction(self):
"""
L{_getRandomNumber} returns an integer constructed directly from the
bytes returned by the random byte generator passed to it.
"""
def random(bytes):
# The number of bytes requested will be the value of each byte
# we return.
return chr(bytes) * bytes
self.assertEquals(
transport._getRandomNumber(random, 32),
4 << 24 | 4 << 16 | 4 << 8 | 4)
def test_rejectsNonByteMultiples(self):
"""
L{_getRandomNumber} raises L{ValueError} if the number of bits
passed to L{_getRandomNumber} is not a multiple of 8.
"""
self.assertRaises(
ValueError,
transport._getRandomNumber, None, 9)
def test_excludesSmall(self):
"""
If the random byte generator passed to L{_generateX} produces bytes
which would result in 0 or 1 being returned, these bytes are
discarded and another attempt is made to produce a larger value.
"""
results = [chr(0), chr(1), chr(127)]
def random(bytes):
return results.pop(0) * bytes
self.assertEquals(
transport._generateX(random, 8),
127)
def test_excludesLarge(self):
"""
If the random byte generator passed to L{_generateX} produces bytes
which would result in C{(2 ** bits) - 1} being returned, these bytes
are discarded and another attempt is made to produce a smaller
value.
"""
results = [chr(255), chr(64)]
def random(bytes):
return results.pop(0) * bytes
self.assertEquals(
transport._generateX(random, 8),
64)
class OldFactoryTestCase(unittest.TestCase):
"""
    The old C{SSHFactory.getPublicKeys}() returned a mapping of key names to
    strings of key blobs, and C{SSHFactory.getPrivateKeys}() returned a mapping
    of key names to PyCrypto key objects (both could also be specified with the
    C{publicKeys} and C{privateKeys} attributes).  This is no longer supported
by the C{SSHServerTransport}, so we warn the user if they create an old
factory.
"""
if Crypto is None:
skip = "cannot run w/o PyCrypto"
if pyasn1 is None:
skip = "cannot run w/o PyASN1"
def test_getPublicKeysWarning(self):
"""
If the return value of C{getPublicKeys}() isn't a mapping from key
names to C{Key} objects, then warn the user and convert the mapping.
"""
sshFactory = MockOldFactoryPublicKeys()
self.assertWarns(DeprecationWarning,
"Returning a mapping from strings to strings from"
" getPublicKeys()/publicKeys (in %s) is deprecated. Return "
"a mapping from strings to Key objects instead." %
(qual(MockOldFactoryPublicKeys),),
factory.__file__, sshFactory.startFactory)
self.assertEquals(sshFactory.publicKeys, MockFactory().getPublicKeys())
def test_getPrivateKeysWarning(self):
"""
If the return value of C{getPrivateKeys}() isn't a mapping from key
names to C{Key} objects, then warn the user and convert the mapping.
"""
sshFactory = MockOldFactoryPrivateKeys()
self.assertWarns(DeprecationWarning,
"Returning a mapping from strings to PyCrypto key objects from"
" getPrivateKeys()/privateKeys (in %s) is deprecated. Return"
" a mapping from strings to Key objects instead." %
(qual(MockOldFactoryPrivateKeys),),
factory.__file__, sshFactory.startFactory)
self.assertEquals(sshFactory.privateKeys,
MockFactory().getPrivateKeys())
def test_publicKeysWarning(self):
"""
If the value of the C{publicKeys} attribute isn't a mapping from key
names to C{Key} objects, then warn the user and convert the mapping.
"""
sshFactory = MockOldFactoryPublicKeys()
sshFactory.publicKeys = sshFactory.getPublicKeys()
self.assertWarns(DeprecationWarning,
"Returning a mapping from strings to strings from"
" getPublicKeys()/publicKeys (in %s) is deprecated. Return "
"a mapping from strings to Key objects instead." %
(qual(MockOldFactoryPublicKeys),),
factory.__file__, sshFactory.startFactory)
self.assertEquals(sshFactory.publicKeys, MockFactory().getPublicKeys())
def test_privateKeysWarning(self):
"""
        If the value of the C{privateKeys} attribute isn't a mapping from
key names to C{Key} objects, then warn the user and convert the
mapping.
"""
sshFactory = MockOldFactoryPrivateKeys()
sshFactory.privateKeys = sshFactory.getPrivateKeys()
self.assertWarns(DeprecationWarning,
"Returning a mapping from strings to PyCrypto key objects from"
" getPrivateKeys()/privateKeys (in %s) is deprecated. Return"
" a mapping from strings to Key objects instead." %
(qual(MockOldFactoryPrivateKeys),),
factory.__file__, sshFactory.startFactory)
self.assertEquals(sshFactory.privateKeys,
MockFactory().getPrivateKeys())
| apache-2.0 |
chauhanhardik/populo | cms/djangoapps/contentstore/views/tabs.py | 125 | 7890 | """
Views related to course tabs
"""
from student.auth import has_course_author_access
from util.json_request import expect_json, JsonResponse
from django.http import HttpResponseNotFound
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods
from edxmako.shortcuts import render_to_response
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.tabs import CourseTabList, CourseTab, InvalidTabsException, StaticTab
from opaque_keys.edx.keys import CourseKey, UsageKey
from ..utils import get_lms_link_for_item
__all__ = ['tabs_handler']
@expect_json
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT"))
def tabs_handler(request, course_key_string):
"""
The restful handler for static tabs.
GET
html: return page for editing static tabs
json: not supported
PUT or POST
json: update the tab order. It is expected that the request body contains a JSON-encoded dict with entry "tabs".
The value for "tabs" is an array of tab locators, indicating the desired order of the tabs.
Creating a tab, deleting a tab, or changing its contents is not supported through this method.
Instead use the general xblock URL (see item.xblock_handler).
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_author_access(request.user, course_key):
raise PermissionDenied()
course_item = modulestore().get_course(course_key)
if 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if request.method == 'GET':
raise NotImplementedError('coming soon')
else:
if 'tabs' in request.json:
return reorder_tabs_handler(course_item, request)
elif 'tab_id_locator' in request.json:
return edit_tab_handler(course_item, request)
else:
raise NotImplementedError('Creating or changing tab content is not supported.')
elif request.method == 'GET': # assume html
# get all tabs from the tabs list: static tabs (a.k.a. user-created tabs) and built-in tabs
# present in the same order they are displayed in LMS
tabs_to_render = []
for tab in CourseTabList.iterate_displayable(course_item, inline_collections=False):
if isinstance(tab, StaticTab):
# static tab needs its locator information to render itself as an xmodule
static_tab_loc = course_key.make_usage_key('static_tab', tab.url_slug)
tab.locator = static_tab_loc
tabs_to_render.append(tab)
return render_to_response('edit-tabs.html', {
'context_course': course_item,
'tabs_to_render': tabs_to_render,
'lms_link': get_lms_link_for_item(course_item.location),
})
else:
return HttpResponseNotFound()
def reorder_tabs_handler(course_item, request):
"""
Helper function for handling reorder of tabs request
"""
# Tabs are identified by tab_id or locators.
# The locators are used to identify static tabs since they are xmodules.
# Although all tabs have tab_ids, newly created static tabs do not know
# their tab_ids since the xmodule editor uses only locators to identify new objects.
requested_tab_id_locators = request.json['tabs']
# original tab list in original order
old_tab_list = course_item.tabs
# create a new list in the new order
new_tab_list = []
for tab_id_locator in requested_tab_id_locators:
tab = get_tab_by_tab_id_locator(old_tab_list, tab_id_locator)
if tab is None:
return JsonResponse(
{"error": "Tab with id_locator '{0}' does not exist.".format(tab_id_locator)}, status=400
)
new_tab_list.append(tab)
# the old_tab_list may contain additional tabs that were not rendered in the UI because of
# global or course settings. so add those to the end of the list.
non_displayed_tabs = set(old_tab_list) - set(new_tab_list)
new_tab_list.extend(non_displayed_tabs)
# validate the tabs to make sure everything is Ok (e.g., did the client try to reorder unmovable tabs?)
try:
CourseTabList.validate_tabs(new_tab_list)
except InvalidTabsException, exception:
return JsonResponse(
{"error": "New list of tabs is not valid: {0}.".format(str(exception))}, status=400
)
# persist the new order of the tabs
course_item.tabs = new_tab_list
modulestore().update_item(course_item, request.user.id)
return JsonResponse()
def edit_tab_handler(course_item, request):
"""
Helper function for handling requests to edit settings of a single tab
"""
# Tabs are identified by tab_id or locator
tab_id_locator = request.json['tab_id_locator']
# Find the given tab in the course
tab = get_tab_by_tab_id_locator(course_item.tabs, tab_id_locator)
if tab is None:
return JsonResponse(
{"error": "Tab with id_locator '{0}' does not exist.".format(tab_id_locator)}, status=400
)
if 'is_hidden' in request.json:
# set the is_hidden attribute on the requested tab
tab.is_hidden = request.json['is_hidden']
modulestore().update_item(course_item, request.user.id)
else:
raise NotImplementedError('Unsupported request to edit tab: {0}'.format(request.json))
return JsonResponse()
def get_tab_by_tab_id_locator(tab_list, tab_id_locator):
"""
Look for a tab with the specified tab_id or locator. Returns the first matching tab.
"""
    if 'tab_id' in tab_id_locator:
        tab = CourseTabList.get_tab_by_id(tab_list, tab_id_locator['tab_id'])
    elif 'tab_locator' in tab_id_locator:
        tab = get_tab_by_locator(tab_list, tab_id_locator['tab_locator'])
    else:
        # Neither identifier was supplied; return None instead of raising
        # an UnboundLocalError.
        tab = None
    return tab
def get_tab_by_locator(tab_list, usage_key_string):
"""
Look for a tab with the specified locator. Returns the first matching tab.
"""
tab_location = UsageKey.from_string(usage_key_string)
item = modulestore().get_item(tab_location)
static_tab = StaticTab(
name=item.display_name,
url_slug=item.location.name,
)
return CourseTabList.get_tab_by_id(tab_list, static_tab.tab_id)
# "primitive" tab edit functions driven by the command line.
# These should be replaced/deleted by a more capable GUI someday.
# Note that the command line UI identifies the tabs with 1-based
# indexing, but this implementation code is standard 0-based.
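# For example, "delete tab 3" typed at the command line corresponds to
# primitive_delete(course, 2) in this module.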
def validate_args(num, tab_type):
"Throws for the disallowed cases."
if num <= 1:
raise ValueError('Tabs 1 and 2 cannot be edited')
if tab_type == 'static_tab':
raise ValueError('Tabs of type static_tab cannot be edited here (use Studio)')
def primitive_delete(course, num):
"Deletes the given tab number (0 based)."
tabs = course.tabs
validate_args(num, tabs[num].get('type', ''))
del tabs[num]
# Note for future implementations: if you delete a static_tab, then Chris Dodge
# points out that there's other stuff to delete beyond this element.
# This code happens to not delete static_tab so it doesn't come up.
modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command)
def primitive_insert(course, num, tab_type, name):
"Inserts a new tab at the given number (0 based)."
validate_args(num, tab_type)
new_tab = CourseTab.from_json({u'type': unicode(tab_type), u'name': unicode(name)})
tabs = course.tabs
tabs.insert(num, new_tab)
modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command)
| agpl-3.0 |
Jgarcia-IAS/ReporsitorioVacioOdoo | openerp/addons/sale_analytic_plans/__init__.py | 443 | 1208 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#----------------------------------------------------------
# Init Sales
#----------------------------------------------------------
import sale_analytic_plans
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Vagab0nd/SiCKRAGE | lib3/js2py/pyjs.py | 9 | 2904 | from .base import *
from .constructors.jsmath import Math
from .constructors.jsdate import Date
from .constructors.jsobject import Object
from .constructors.jsfunction import Function
from .constructors.jsstring import String
from .constructors.jsnumber import Number
from .constructors.jsboolean import Boolean
from .constructors.jsregexp import RegExp
from .constructors.jsarray import Array
from .constructors.jsarraybuffer import ArrayBuffer
from .constructors.jsint8array import Int8Array
from .constructors.jsuint8array import Uint8Array
from .constructors.jsuint8clampedarray import Uint8ClampedArray
from .constructors.jsint16array import Int16Array
from .constructors.jsuint16array import Uint16Array
from .constructors.jsint32array import Int32Array
from .constructors.jsuint32array import Uint32Array
from .constructors.jsfloat32array import Float32Array
from .constructors.jsfloat64array import Float64Array
from .prototypes.jsjson import JSON
from .host.console import console
from .host.jseval import Eval
from .host.jsfunctions import parseFloat, parseInt, isFinite, \
isNaN, escape, unescape, encodeURI, decodeURI, encodeURIComponent, decodeURIComponent
# Now we have all the necessary items to create global environment for script
__all__ = [
'Js', 'PyJsComma', 'PyJsStrictEq', 'PyJsStrictNeq', 'PyJsException',
'PyJsBshift', 'Scope', 'PyExceptionToJs', 'JsToPyException', 'JS_BUILTINS',
'appengine', 'set_global_object', 'JsRegExp', 'PyJsException',
'PyExceptionToJs', 'JsToPyException', 'PyJsSwitchException'
]
# these were defined in base.py
builtins = (
'true',
'false',
'null',
'undefined',
'Infinity',
'NaN',
'console',
'String',
'Number',
'Boolean',
'RegExp',
'Math',
'Date',
'Object',
'Function',
'Array',
'Int8Array',
'Uint8Array',
'Uint8ClampedArray',
'Int16Array',
'Uint16Array',
'Int32Array',
'Uint32Array',
'Float32Array',
'Float64Array',
'ArrayBuffer',
'parseFloat',
'parseInt',
'isFinite',
'isNaN',
'escape',
'unescape',
'encodeURI',
'decodeURI',
'encodeURIComponent',
'decodeURIComponent',
)
#Array, Function, JSON, Error is done later :)
# also some built in functions like eval...
def set_global_object(obj):
obj.IS_CHILD_SCOPE = False
this = This({})
this.own = obj.own
this.prototype = obj.prototype
PyJs.GlobalObject = this
# make this available
obj.register('this')
obj.put('this', this)
# also add window and set it to be a global object for compatibility
obj.register('window')
obj.put('window', this)
scope = dict(zip(builtins, [globals()[e] for e in builtins]))
# Now add errors:
for name, error in ERRORS.items():
scope[name] = error
#add eval
scope['eval'] = Eval
scope['JSON'] = JSON
JS_BUILTINS = dict((k, v) for k, v in scope.items())
| gpl-3.0 |
agileblaze/OpenStackTwoFactorAuthentication | openstack_dashboard/dashboards/admin/defaults/workflows.py | 51 | 4515 | # Copyright 2013 Kylin, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import nova
from openstack_dashboard.usage import quotas
ALL_NOVA_QUOTA_FIELDS = quotas.NOVA_QUOTA_FIELDS + quotas.MISSING_QUOTA_FIELDS
class UpdateDefaultQuotasAction(workflows.Action):
ifcb_label = _("Injected File Content Bytes")
ifpb_label = _("Length of Injected File Path")
injected_file_content_bytes = forms.IntegerField(min_value=-1,
label=ifcb_label)
metadata_items = forms.IntegerField(min_value=-1,
label=_("Metadata Items"))
ram = forms.IntegerField(min_value=-1, label=_("RAM (MB)"))
floating_ips = forms.IntegerField(min_value=-1, label=_("Floating IPs"))
key_pairs = forms.IntegerField(min_value=-1, label=_("Key Pairs"))
injected_file_path_bytes = forms.IntegerField(min_value=-1,
label=ifpb_label)
instances = forms.IntegerField(min_value=-1, label=_("Instances"))
security_group_rules = forms.IntegerField(min_value=-1,
label=_("Security Group Rules"))
injected_files = forms.IntegerField(min_value=-1,
label=_("Injected Files"))
cores = forms.IntegerField(min_value=-1, label=_("VCPUs"))
security_groups = forms.IntegerField(min_value=-1,
label=_("Security Groups"))
gigabytes = forms.IntegerField(
min_value=-1,
label=_("Total Size of Volumes and Snapshots (GB)"))
snapshots = forms.IntegerField(min_value=-1, label=_("Volume Snapshots"))
volumes = forms.IntegerField(min_value=-1, label=_("Volumes"))
def __init__(self, request, *args, **kwargs):
super(UpdateDefaultQuotasAction, self).__init__(request,
*args,
**kwargs)
disabled_quotas = quotas.get_disabled_quotas(request)
for field in disabled_quotas:
if field in self.fields:
self.fields[field].required = False
self.fields[field].widget = forms.HiddenInput()
class Meta(object):
name = _("Default Quotas")
slug = 'update_default_quotas'
help_text = _("From here you can update the default quotas "
"(max limits).")
class UpdateDefaultQuotasStep(workflows.Step):
action_class = UpdateDefaultQuotasAction
contributes = (quotas.QUOTA_FIELDS + quotas.MISSING_QUOTA_FIELDS)
class UpdateDefaultQuotas(workflows.Workflow):
slug = "update_default_quotas"
name = _("Update Default Quotas")
finalize_button_name = _("Update Defaults")
success_message = _('Default quotas updated.')
failure_message = _('Unable to update default quotas.')
success_url = "horizon:admin:defaults:index"
default_steps = (UpdateDefaultQuotasStep,)
def handle(self, request, data):
# Update the default quotas.
# `fixed_ips` update for quota class is not supported by novaclient
nova_data = dict([(key, data[key]) for key in ALL_NOVA_QUOTA_FIELDS
if key != 'fixed_ips'])
try:
nova.default_quota_update(request, **nova_data)
if base.is_service_enabled(request, 'volume'):
cinder_data = dict([(key, data[key]) for key in
quotas.CINDER_QUOTA_FIELDS])
cinder.default_quota_update(request, **cinder_data)
except Exception:
exceptions.handle(request, _('Unable to update default quotas.'))
return True
| apache-2.0 |
hkchenhongyi/django | tests/lookup/models.py | 235 | 2178 | """
The lookup API
This demonstrates features of the database API.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class Alarm(models.Model):
desc = models.CharField(max_length=100)
time = models.TimeField()
def __str__(self):
return '%s (%s)' % (self.time, self.desc)
class Author(models.Model):
name = models.CharField(max_length=100)
class Meta:
ordering = ('name', )
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True)
class Meta:
ordering = ('-pub_date', 'headline')
def __str__(self):
return self.headline
class Tag(models.Model):
articles = models.ManyToManyField(Article)
name = models.CharField(max_length=100)
class Meta:
ordering = ('name', )
@python_2_unicode_compatible
class Season(models.Model):
year = models.PositiveSmallIntegerField()
gt = models.IntegerField(null=True, blank=True)
def __str__(self):
return six.text_type(self.year)
@python_2_unicode_compatible
class Game(models.Model):
season = models.ForeignKey(Season, models.CASCADE, related_name='games')
home = models.CharField(max_length=100)
away = models.CharField(max_length=100)
def __str__(self):
return "%s at %s" % (self.away, self.home)
@python_2_unicode_compatible
class Player(models.Model):
name = models.CharField(max_length=100)
games = models.ManyToManyField(Game, related_name='players')
def __str__(self):
return self.name
# To test __search lookup a fulltext index is needed. This
# is only available when using MySQL 5.6, or when using MyISAM
# tables. As 5.6 isn't common yet, let's use a MyISAM table for
# testing. The table is manually created by the test method.
class MyISAMArticle(models.Model):
headline = models.CharField(max_length=100)
class Meta:
db_table = 'myisam_article'
managed = False
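# A few illustrative lookups against the models above (the field names come from
# the model definitions; the literal values are made up):
#
#   Article.objects.filter(headline__startswith='Django')
#   Article.objects.filter(pub_date__year=2015, author__name__contains='Smith')
#   Season.objects.filter(games__home='Bears').distinct()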
| bsd-3-clause |
dangoldin/bots | database.py | 1 | 1126 | #!/usr/bin/python
import sqlite3
class Database:
def __init__(self):
self.connect()
def connect(self):
self.conn = sqlite3.connect('lifebot.db')
def get_cursor(self):
        # If the existing connection can't hand out a cursor (e.g. it was
        # closed), reconnect and try again.
try:
return self.conn.cursor()
except Exception, e:
self.connect()
return self.conn.cursor()
def create_table(self, query):
c = self.get_cursor()
c.execute(query)
self.conn.commit()
self.conn.close()
def get(self, query, args = None):
if args is None:
args = tuple()
c = self.get_cursor()
c.execute(query, args)
return c.fetchone()
def get_all(self, query, args = None):
if args is None:
args = tuple()
c = self.get_cursor()
c.execute(query, args)
return c.fetchall()
def insert(self, query, args = None):
if args is None:
args = tuple()
c = self.get_cursor()
c.execute(query, args)
self.conn.commit()
return c.lastrowid
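# Minimal usage sketch (illustrative only -- the table and column names below
# are not taken from this repo's bots):
#
#   db = Database()
#   db.create_table('CREATE TABLE IF NOT EXISTS messages (id INTEGER PRIMARY KEY, text TEXT)')
#   row_id = db.insert('INSERT INTO messages (text) VALUES (?)', ('hello',))
#   print db.get('SELECT text FROM messages WHERE id = ?', (row_id,))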
| mit |
dmkent/cattrack | cattrack/settings.py | 1 | 4095 | """
Django settings for cattrack project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import datetime
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '76kw_-twd)u$+_ltr&r_%z&cw^7yepzg@$rm&d%lvox7lb7ra&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'django_filters',
'corsheaders',
'ctrack',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cattrack.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cattrack.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "client/dist/prod"),
os.path.join(BASE_DIR, "static"),
]
CTRACK_CATEGORISER = 'SklearnCategoriser'
CTRACK_CATEGORISER_FILE = 'categoriser.pkl'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
],
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated'
],
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
}
JWT_AUTH = {
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=600),
'JWT_ALLOW_REFRESH': True,
}
CORS_ORIGIN_ALLOW_ALL = True | mit |
cloudera/hue | desktop/core/ext-py/boto-2.46.1/tests/integration/dynamodb/test_table.py | 136 | 3553 | # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import time
from tests.unit import unittest
from boto.dynamodb.layer2 import Layer2
from boto.dynamodb.table import Table
from boto.dynamodb.schema import Schema
class TestDynamoDBTable(unittest.TestCase):
dynamodb = True
def setUp(self):
self.dynamodb = Layer2()
self.schema = Schema.create(('foo', 'N'), ('bar', 'S'))
self.table_name = 'testtable%s' % int(time.time())
def create_table(self, table_name, schema, read_units, write_units):
result = self.dynamodb.create_table(table_name, schema, read_units, write_units)
self.addCleanup(self.dynamodb.delete_table, result)
return result
def assertAllEqual(self, *items):
first = items[0]
for item in items[1:]:
self.assertEqual(first, item)
def test_table_retrieval_parity(self):
created_table = self.dynamodb.create_table(
self.table_name, self.schema, 1, 1)
created_table.refresh(wait_for_active=True)
retrieved_table = self.dynamodb.get_table(self.table_name)
constructed_table = self.dynamodb.table_from_schema(self.table_name,
self.schema)
# All three tables should have the same name
# and schema attributes.
self.assertAllEqual(created_table.name,
retrieved_table.name,
constructed_table.name)
self.assertAllEqual(created_table.schema,
retrieved_table.schema,
constructed_table.schema)
# However for create_time, status, read/write units,
# only the created/retrieved table will have equal
# values.
self.assertEqual(created_table.create_time,
retrieved_table.create_time)
self.assertEqual(created_table.status,
retrieved_table.status)
self.assertEqual(created_table.read_units,
retrieved_table.read_units)
self.assertEqual(created_table.write_units,
retrieved_table.write_units)
# The constructed table will have values of None.
self.assertIsNone(constructed_table.create_time)
self.assertIsNone(constructed_table.status)
self.assertIsNone(constructed_table.read_units)
self.assertIsNone(constructed_table.write_units)
| apache-2.0 |
liuyi1112/rst2pdf | rst2pdf/tenjin.py | 10 | 38176 | # -*- coding: utf-8 -*-
##
## $Rev: 137 $
## $Release: 0.6.2 $
## copyright(c) 2007-2008 kuwata-lab.com all rights reserved.
##
## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the
## "Software"), to deal in the Software without restriction, including
## without limitation the rights to use, copy, modify, merge, publish,
## distribute, sublicense, and/or sell copies of the Software, and to
## permit persons to whom the Software is furnished to do so, subject to
## the following conditions:
##
## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##
"""Very fast and light-weight template engine based embedded Python.
pyTenjin is similar to PHP or eRuby (embedded Ruby).
* '<?py ... ?>' represents python statement.
* '#{...}' represents python expression.
* '${...}' represents python expression with escaping.
And it provides the following features.
* Layout template and nested template
* Including other template files
* Template caching
* Capturing
See help of tenjin.Template and tenjin.Engine for details.
"""
__revision__ = "$Rev: 137 $"[6:-2]
__release__ = "0.6.2"
__license__ = "MIT License"
__all__ = ['Template', 'Engine', 'helpers', 'html', ]
import re, sys, os, time, marshal
##
## utilities
##
try:
import fcntl
def _lock_file(file, content):
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
except ImportError, ex:
try:
import msvcrt
def _lock_file(file, content):
msvcrt.locking(file.fileno(), msvcrt.LK_LOCK, len(content))
except ImportError, ex:
def _lock_file(file, content):
pass
def _write_file_with_lock(filename, content):
f = None
try:
f = open(filename, 'wb')
_lock_file(f, content)
f.write(content)
finally:
if f:
f.close()
def _create_module(module_name):
"""ex. mod = _create_module('tenjin.util')"""
import new
mod = new.module(module_name.split('.')[-1])
sys.modules[module_name] = mod
return mod
##
## helper method's module
##
def _create_helpers_module():
def to_str(val):
"""Convert value into string. Return '' if val is None.
ex.
>>> to_str(None)
''
>>> to_str("foo")
'foo'
>>> to_str(u"\u65e5\u672c\u8a9e")
u'\u65e5\u672c\u8a9e'
>>> to_str(123)
'123'
"""
if val is None: return ''
if isinstance(val, str): return val
if isinstance(val, unicode): return val
return str(val)
def generate_tostrfunc(encoding):
"""Generate 'to_str' function which encodes unicode to str.
ex.
import tenjin
from tenjin.helpers import escape
to_str = tenjin.generate_tostrfunc('utf-8')
engine = tenjin.Engine()
context = { 'items': [u'AAA', u'BBB', u'CCC'] }
output = engine.render('example.pyhtml')
print output
"""
def to_str(val):
if val is None: return ''
if isinstance(val, str): return val
if isinstance(val, unicode): return val.encode(encoding)
return str(val)
return to_str
def echo(string):
"""add string value into _buf. this is equivarent to '#{string}'."""
frame = sys._getframe(1)
context = frame.f_locals
context['_buf'].append(string)
def start_capture(varname=None):
"""
start capturing with name.
ex. list.rbhtml
<html><body>
<?py start_capture('itemlist') ?>
<ul>
<?py for item in list: ?>
<li>${item}</li>
<?py #end ?>
</ul>
<?py stop_capture() ?>
</body></html>
ex. layout.rbhtml
<html xml:lang="en" lang="en">
<head>
<title>Capture Example</title>
</head>
<body>
<!-- content -->
#{itemlist}
<!-- /content -->
</body>
</html>
"""
frame = sys._getframe(1)
context = frame.f_locals
context['_buf_tmp'] = context['_buf']
context['_capture_varname'] = varname
context['_buf'] = []
def stop_capture(store_to_context=True):
"""
stop capturing and return the result of capturing.
if store_to_context is True then the result is stored into _context[varname].
"""
frame = sys._getframe(1)
context = frame.f_locals
result = ''.join(context['_buf'])
context['_buf'] = context.pop('_buf_tmp')
varname = context.pop('_capture_varname')
if varname:
context[varname] = result
if store_to_context:
context['_context'][varname] = result
return result
def captured_as(name):
"""
helper method for layout template.
if captured string is found then append it to _buf and return True,
else return False.
"""
frame = sys._getframe(1)
context = frame.f_locals
if context.has_key(name):
_buf = context['_buf']
_buf.append(context[name])
return True
return False
def _p(arg):
"""ex. '/show/'+_p("item['id']") => "/show/#{item['id']}" """
return '<`#%s#`>' % arg # decoded into #{...} by preprocessor
def _P(arg):
"""ex. '<b>%s</b>' % _P("item['id']") => "<b>${item['id']}</b>" """
return '<`$%s$`>' % arg # decoded into ${...} by preprocessor
def _decode_params(s):
"""decode <`#...#`> and <`$...$`> into #{...} and ${...}"""
from urllib import unquote
dct = { 'lt':'<', 'gt':'>', 'amp':'&', 'quot':'"', '#039':"'", }
def unescape(s):
#return s.replace('<', '<').replace('>', '>').replace('"', '"').replace(''', "'").replace('&', '&')
return re.sub(r'&(lt|gt|quot|amp|#039);', lambda m: dct[m.group(1)], s)
s = re.sub(r'%3C%60%23(.*?)%23%60%3E', lambda m: '#{%s}' % unquote(m.group(1)), s)
s = re.sub(r'%3C%60%24(.*?)%24%60%3E', lambda m: '${%s}' % unquote(m.group(1)), s)
s = re.sub(r'<`#(.*?)#`>', lambda m: '#{%s}' % unescape(m.group(1)), s)
s = re.sub(r'<`\$(.*?)\$`>', lambda m: '${%s}' % unescape(m.group(1)), s)
s = re.sub(r'<`#(.*?)#`>', r'#{\1}', s)
s = re.sub(r'<`\$(.*?)\$`>', r'${\1}', s)
return s
mod = _create_module('tenjin.helpers')
mod.to_str = to_str
mod.generate_tostrfunc = generate_tostrfunc
mod.echo = echo
mod.start_capture = start_capture
mod.stop_capture = stop_capture
mod.captured_as = captured_as
mod._p = _p
mod._P = _P
mod._decode_params = _decode_params
mod.__all__ = ['escape', 'to_str', 'echo', 'generate_tostrfunc',
'start_capture', 'stop_capture', 'captured_as',
'_p', '_P', '_decode_params',
]
return mod
helpers = _create_helpers_module()
del _create_helpers_module
generate_tostrfunc = helpers.generate_tostrfunc
##
## module for html
##
def _create_html_module():
to_str = helpers.to_str
_escape_table = { '&': '&', '<': '<', '>': '>', '"': '"' }
_escape_pattern = re.compile(r'[&<>"]')
_escape_callable = lambda m: _escape_table[m.group(0)]
def escape_xml(s):
"""Escape '&', '<', '>', '"' into '&', '<', '>', '"'.
"""
return _escape_pattern.sub(_escape_callable, s)
#return s.replace('&','&').replace('<','<').replace('>','>').replace('"','"')
def tagattr(name, expr, value=None, escape=True):
"""return empty string when expr is false value, ' name="value"' when
value is specified, or ' name="expr"' when value is None.
ex.
>>> tagattr('size', 20)
' size="20"'
>>> tagattr('size', 0)
''
>>> tagattr('checked', True, 'checked')
' checked="checked"'
>>> tagattr('checked', False, 'checked')
''
"""
if not expr:
return ''
if value is None:
value = to_str(expr)
else:
value = to_str(value)
if escape:
value = escape_xml(value)
return ' %s="%s"' % (name, value)
def checked(expr):
"""return ' checked="checked"' if expr is true."""
return expr and ' checked="checked"' or ''
#return attr('checked', expr, 'checked')
def selected(expr):
"""return ' selected="selected"' if expr is true."""
return expr and ' selected="selected"' or ''
#return attr('selected', expr, 'selected')
def disabled(expr):
"""return ' disabled="disabled"' if expr is true."""
return expr and ' disabled="disabled"' or ''
#return attr('disabled, expr, 'disabled')
def nl2br(text):
"""replace "\n" to "<br />\n" and return it."""
if not text:
return ''
return text.replace('\n', '<br />\n')
def text2html(text):
"""(experimental) escape xml characters, replace "\n" to "<br />\n", and return it."""
if not text:
return ''
return nl2br(escape_xml(text).replace(' ', ' '))
mod = _create_module('tenjin.helpers.html')
mod._escape_table = _escape_table
mod.escape_xml = escape_xml
mod.escape = escape_xml
mod.tagattr = tagattr
mod.checked = checked
mod.selected = selected
mod.disabled = disabled
mod.nl2br = nl2br
mod.text2html = text2html
return mod
helpers.html = _create_html_module()
del _create_html_module
helpers.escape = helpers.html.escape_xml
##
## Template class
##
class Template(object):
"""Convert and evaluate embedded python string.
Notation:
* '<?py ... ?>' means python statement code.
* '#{...}' means python expression code.
* '${...}' means python escaped expression code.
ex. example.pyhtml
<table>
<?py is_odd = False ?>
<?py for item in items: ?>
       <?py is_odd = not is_odd ?>
<?py color = is_odd and '#FFF' or '#FCF' ?>
<tr bgcolor="#{color}">
<td>${item}</td>
</tr>
<?py #end ?>
</table>
ex.
>>> filename = 'example.pyhtml'
>>> import tenjin
>>> from tenjin.helpers import escape, to_str
>>> template = tenjin.Template(filename)
>>> script = template.script
>>> ## or
>>> #template = tenjin.Template()
>>> #script = template.convert_file(filename)
>>> ## or
>>> #template = tenjin.Template()
>>> #input = open(filename).read()
>>> #script = template.convert(input, filename) # filename is optional
>>> print script
>>> context = {'items': ['<foo>','bar&bar','"baz"']}
>>> output = template.render(context)
>>> print output
<table>
<tr bgcolor="#FFF">
<td><foo></td>
</tr>
<tr bgcolor="#FCF">
<td>bar&bar</td>
</tr>
<tr bgcolor="#FFF">
<td>"baz"</td>
</tr>
</table>
"""
## default value of attributes
filename = None
encoding = None
escapefunc = 'escape'
tostrfunc = 'to_str'
indent = 4
preamble = None
postamble = None # "_buf = []"
smarttrim = None # "print ''.join(_buf)"
args = None
def __init__(self, filename=None, encoding=None, escapefunc=None, tostrfunc=None, indent=None, preamble=None, postamble=None, smarttrim=None):
"""Initailizer of Template class.
filename:str (=None)
           Filename to convert (optional). If None, no conversion is performed.
encoding:str (=None)
Encoding name. If specified, template string is converted into
unicode object internally.
Template.render() returns str object if encoding is None,
else returns unicode object if encoding name is specified.
escapefunc:str (='escape')
Escape function name.
tostrfunc:str (='to_str')
'to_str' function name.
indent:int (=4)
Indent width.
preamble:str or bool (=None)
Preamble string which is inserted into python code.
           If true, '_buf = []' is used instead.
postamble:str or bool (=None)
Postamble string which is appended to python code.
If true, 'print "".join(_buf)' is used instead.
smarttrim:bool (=None)
If True then "<div>\\n#{_context}\\n</div>" is parsed as
"<div>\\n#{_context}</div>".
"""
if encoding is not None: self.encoding = encoding
if escapefunc is not None: self.escapefunc = escapefunc
if tostrfunc is not None: self.tostrfunc = tostrfunc
if indent is not None: self.indent = indent
if preamble is not None: self.preamble = preamble
if postamble is not None: self.postamble = postamble
if smarttrim is not None: self.smarttrim = smarttrim
#
if preamble is True: self.preamble = "_buf = []"
if postamble is True: self.postamble = "print ''.join(_buf)"
if filename:
self.convert_file(filename)
else:
self._reset()
def _reset(self, input=None, filename=None):
self._spaces = ''
self.script = None
self.bytecode = None
self.input = input
self.filename = filename
if input != None:
i = input.find("\n")
if i < 0:
self.newline = "\n" # or None
elif len(input) >= 2 and input[i-1] == "\r":
self.newline = "\r\n"
else:
self.newline = "\n"
def before_convert(self, buf):
#buf.append('_buf = []; ')
if self.preamble:
buf.append(self.preamble)
buf.append(self.input.startswith('<?py') and "\n" or "; ")
def after_convert(self, buf):
if self.postamble:
if not buf[-1].endswith("\n"):
buf.append("\n")
buf.append(self.postamble + "\n")
def convert_file(self, filename):
"""Convert file into python script and return it.
        This is equivalent to convert(open(filename).read(), filename).
"""
input = open(filename, 'rb').read()
return self.convert(input, filename)
def convert(self, input, filename=None):
"""Convert string in which python code is embedded into python script and return it.
input:str
Input string to convert into python code.
filename:str (=None)
Filename of input. this is optional but recommended to report errors.
ex.
>>> import tenjin
>>> from tenjin.helpers import escape, to_str
>>> template = tenjin.Template()
>>> filename = 'example.html'
>>> input = open(filename).read()
>>> script = template.convert(input, filename) # filename is optional
>>> print script
"""
if self.encoding and isinstance(input, str):
input = input.decode(self.encoding)
self._reset(input, filename)
buf = []
self.before_convert(buf)
self.parse_stmts(buf, input)
self.after_convert(buf)
script = ''.join(buf)
self.script = script
return script
def compile_stmt_pattern(pi):
return re.compile(r'<\?%s( |\t|\r?\n)(.*?) ?\?>([ \t]*\r?\n)?' % pi, re.S)
STMT_PATTERN = compile_stmt_pattern('py')
compile_stmt_pattern = staticmethod(compile_stmt_pattern)
def stmt_pattern(self):
return Template.STMT_PATTERN
def parse_stmts(self, buf, input):
if not input:
return
rexp = self.stmt_pattern()
is_bol = True
index = 0
for m in rexp.finditer(input):
mspace, code, rspace = m.groups()
#mspace, close, rspace = m.groups()
#code = input[m.start()+4+len(mspace):m.end()-len(close)-(rspace and len(rspace) or 0)]
text = input[index:m.start()]
index = m.end()
## detect spaces at beginning of line
lspace = None
if text == '':
if is_bol:
lspace = ''
elif text[-1] == '\n':
lspace = ''
else:
rindex = text.rfind('\n')
if rindex < 0:
if is_bol and text.isspace():
lspace = text
text = ''
else:
s = text[rindex+1:]
if s.isspace():
lspace = s
text = text[:rindex+1]
#is_bol = rspace is not None
## add text, spaces, and statement
self.parse_exprs(buf, text, is_bol)
is_bol = rspace is not None
if lspace:
buf.append(lspace)
if mspace != " ":
#buf.append(mspace)
buf.append(mspace == "\t" and "\t" or "\n") # don't append "\r\n"!
if code:
code = self.statement_hook(code)
self.add_stmt(buf, code)
self._set_spaces(code, lspace, mspace)
if rspace:
#buf.append(rspace)
buf.append("\n") # don't append "\r\n"!
rest = input[index:]
if rest:
self.parse_exprs(buf, rest)
def statement_hook(self, stmt):
"""expand macros and parse '#@ARGS' in a statement."""
## macro expantion
#macro_pattern = r'^(\s*)(\w+)\((.*?)\);?\s*$';
#m = re.match(macro_pattern, stmt)
#if m:
# lspace, name, arg = m.group(1), m.group(2), m.group(3)
# handler = self.get_macro_handler(name)
# return handler is None and stmt or lspace + handler(arg)
## arguments declaration
if self.args is None:
args_pattern = r'^ *#@ARGS(?:[ \t]+(.*?))?$'
m = re.match(args_pattern, stmt)
if m:
arr = (m.group(1) or '').split(',')
args = []; declares = []
for s in arr:
arg = s.strip()
if not s: continue
if not re.match('^[a-zA-Z_]\w*$', arg):
raise ValueError("%s: invalid template argument." % arg)
args.append(arg)
declares.append("%s = _context.get('%s'); " % (arg, arg))
self.args = args
return ''.join(declares)
##
return stmt
#MACRO_HANDLER_TABLE = {
# "echo":
# lambda arg: "_buf.append(%s); " % arg,
# "include":
# lambda arg: "_buf.append(_context['_engine'].render(%s, _context, layout=False)); " % arg,
# "start_capture":
# lambda arg: "_buf_bkup = _buf; _buf = []; _capture_varname = %s; " % arg,
# "stop_capture":
# lambda arg: "_context[_capture_varname] = ''.join(_buf); _buf = _buf_bkup; ",
# "start_placeholder":
# lambda arg: "if (_context[%s]) _buf.push(_context[%s]); else:" % (arg, arg),
# "stop_placeholder":
# lambda arg: "#endif",
#}
#
#def get_macro_handler(name):
# return MACRO_HANDLER_TABLE.get(name)
EXPR_PATTERN = re.compile(r'([#$])\{(.*?)\}', re.S)
def expr_pattern(self):
return Template.EXPR_PATTERN
def get_expr_and_escapeflag(self, match):
return match.group(2), match.group(1) == '$'
def parse_exprs(self, buf, input, is_bol=False):
if not input:
return
if self._spaces:
buf.append(self._spaces)
self.start_text_part(buf)
rexp = self.expr_pattern()
smarttrim = self.smarttrim
nl = self.newline
nl_len = len(nl)
pos = 0
for m in rexp.finditer(input):
start = m.start()
text = input[pos:start]
pos = m.end()
expr, flag_escape = self.get_expr_and_escapeflag(m)
#
if text:
self.add_text(buf, text)
#if text[-1] == "\n":
# buf.append("\n")
# if self._spaces:
# buf.append(self._spaces)
self.add_expr(buf, expr, flag_escape)
#
if smarttrim:
flag_bol = text.endswith(nl) or not text and (start > 0 or is_bol)
if flag_bol and not flag_escape and input[pos:pos+nl_len] == nl:
pos += nl_len
buf.append("\n")
if smarttrim:
if buf and buf[-1] == "\n":
buf.pop()
rest = input[pos:]
if rest:
self.add_text(buf, rest, True)
self.stop_text_part(buf)
if input[-1] == '\n':
buf.append("\n")
def start_text_part(self, buf):
buf.append("_buf.extend((")
def stop_text_part(self, buf):
buf.append("));")
_quote_rexp = re.compile(r"(['\\\\])")
def add_text(self, buf, text, encode_newline=False):
if not text:
return;
if self.encoding:
buf.append("u'''")
else:
buf.append("'''")
#text = re.sub(r"(['\\\\])", r"\\\1", text)
text = Template._quote_rexp.sub(r"\\\1", text)
if not encode_newline or text[-1] != "\n":
buf.append(text)
buf.append("''', ")
elif len(text) >= 2 and text[-2] == "\r":
buf.append(text[0:-2])
buf.append("\\r\\n''', ")
else:
buf.append(text[0:-1])
buf.append("\\n''', ")
_add_text = add_text
def add_expr(self, buf, code, flag_escape=None):
if not code or code.isspace():
return
if flag_escape is None:
buf.append(code); buf.append(", ");
elif flag_escape is False:
buf.extend((self.tostrfunc, "(", code, "), "))
else:
buf.extend((self.escapefunc, "(", self.tostrfunc, "(", code, ")), "))
def add_stmt(self, buf, code):
if self.newline == "\r\n":
code = code.replace("\r\n", "\n")
buf.append(code)
#if code[-1] != '\n':
# buf.append(self.newline)
def _set_spaces(self, code, lspace, mspace):
if lspace:
if mspace == " ":
code = lspace + code
elif mspace == "\t":
code = lspace + "\t" + code
#i = code.rstrip().rfind("\n")
#if i < 0: # i == -1
# i = 0
#else:
# i += 1
i = code.rstrip().rfind("\n") + 1
indent = 0
n = len(code)
ch = None
while i < n:
ch = code[i]
if ch == " ": indent += 1
elif ch == "\t": indent += 8
else: break
i += 1
if ch:
if code.rstrip()[-1] == ':':
indent += self.indent
self._spaces = ' ' * indent
def render(self, context=None, globals=None, _buf=None):
"""Evaluate python code with context dictionary.
If _buf is None then return the result of evaluation as str,
else return None.
context:dict (=None)
Context object to evaluate. If None then new dict is created.
globals:dict (=None)
Global object. If None then globals() is used.
_buf:list (=None)
If None then new list is created.
ex.
>>> import tenjin
>>> from tenjin.helpers import escape, to_str
>>> template = tenjin.Template('example.pyhtml')
>>> context = {'items': ['foo','bar','baz'], 'title': 'example'}
           >>> output = template.render(context)
>>> print output,
"""
if context is None:
locals = context = {}
elif self.args is None:
locals = context.copy()
else:
locals = {}
if context.has_key('_engine'):
context.get('_engine').hook_context(locals)
locals['_context'] = context
if globals is None:
globals = sys._getframe(1).f_globals
bufarg = _buf
if _buf is None:
_buf = []
locals['_buf'] = _buf
if not self.bytecode:
self.compile()
exec self.bytecode in globals, locals
if bufarg is None:
s = ''.join(_buf)
#if self.encoding:
# s = s.encode(self.encoding)
return s
else:
return None
def compile(self):
"""compile self.script into self.bytecode"""
self.bytecode = compile(self.script, self.filename or '(tenjin)', 'exec')
##
## preprocessor class
##
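## (A sketch of what it does, inferred from the code below: it reuses the
## Template machinery but matches '<?PY ... ?>' statements and '#{{...}}' /
## '${{...}}' expressions, and wraps expression values in _decode_params()
## so that _p()/_P() placeholders survive preprocessing.)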
class Preprocessor(Template):
STMT_PATTERN = Template.compile_stmt_pattern('PY')
def stmt_pattern(self):
return Preprocessor.STMT_PATTERN
EXPR_PATTERN = re.compile(r'([#$])\{\{(.*?)\}\}', re.S)
def expr_pattern(self):
return Preprocessor.EXPR_PATTERN
#def get_expr_and_escapeflag(self, match):
# return match.group(2), match.group(1) == '$'
def add_expr(self, buf, code, flag_escape=None):
if not code or code.isspace():
return
code = "_decode_params(%s)" % code
Template.add_expr(self, buf, code, flag_escape)
##
## template engine class
##
class Engine(object):
"""Engine class of templates.
ex.
>>> ## create engine
>>> import tenjin
>>> from tenjin.helpers import *
>>> prefix = 'user_'
>>> postfix = '.pyhtml'
>>> layout = 'layout.pyhtml'
>>> path = ['views']
>>> engine = tenjin.Engine(prefix=prefix, postfix=postfix,
... layout=layout, path=path, encoding='utf-8')
>>> ## evaluate template(='views/user_create.pyhtml') with context object.
>>> ## (layout template (='views/layout.pyhtml') are used.)
>>> context = {'title': 'Create User', 'user': user}
>>> print engine.render(':create', context)
>>> ## evaluate template without layout template.
>>> print engine.render(':create', context, layout=False)
    In template files, the following are available.
* include(template_name, append_to_buf=True) :
Include other template
* _content :
Result of evaluating template (available only in layout file).
ex. file 'layout.pyhtml':
<html>
<body>
<div class="sidemenu">
<?py include(':sidemenu') ?>
</div>
<div class="maincontent">
#{_content}
</div>
</body>
</html>
"""
## default value of attributes
prefix = ''
postfix = ''
layout = None
templateclass = Template
path = None
cache = False
preprocess = False
def __init__(self, prefix=None, postfix=None, layout=None, path=None, cache=None, preprocess=None, templateclass=None, **kwargs):
"""Initializer of Engine class.
prefix:str (='')
Prefix string used to convert template short name to template filename.
postfix:str (='')
Postfix string used to convert template short name to template filename.
layout:str (=None)
Default layout template name.
path:list of str(=None)
List of directory names which contain template files.
        cache:bool (=False)
Cache converted python code into file.
preprocess:bool(=False)
Activate preprocessing or not.
templateclass:class (=Template)
Template class which engine creates automatically.
kwargs:dict
Options for Template class constructor.
See document of Template.__init__() for details.
"""
if prefix: self.prefix = prefix
if postfix: self.postfix = postfix
if layout: self.layout = layout
if templateclass: self.templateclass = templateclass
if path is not None: self.path = path
if cache is not None: self.cache = cache
if preprocess is not None: self.preprocess = preprocess
self.kwargs = kwargs
self.templates = {} # template_name => Template object
def to_filename(self, template_name):
"""Convert template short name to filename.
ex.
>>> engine = tenjin.Engine(prefix='user_', postfix='.pyhtml')
>>> engine.to_filename('list')
'list'
>>> engine.to_filename(':list')
'user_list.pyhtml'
"""
if template_name[0] == ':' :
return self.prefix + template_name[1:] + self.postfix
return template_name
def find_template_file(self, template_name):
"""Find template file and return it's filename.
When template file is not found, IOError is raised.
"""
filename = self.to_filename(template_name)
if self.path:
for dirname in self.path:
filepath = dirname + os.path.sep + filename
if os.path.isfile(filepath):
return filepath
else:
if os.path.isfile(filename):
return filename
raise IOError('%s: filename not found (path=%s).' % (filename, repr(self.path)))
def register_template(self, template_name, template):
"""Register an template object."""
if not hasattr(template, 'timestamp'):
template.timestamp = None # or time.time()
self.templates[template_name] = template
def load_cachefile(self, cache_filename, template):
"""load marshaled cache file"""
#template.bytecode = marshal.load(open(cache_filename, 'rb'))
dct = marshal.load(open(cache_filename, 'rb'))
template.args = dct['args']
template.script = dct['script']
template.bytecode = dct['bytecode']
def _load_cachefile_for_script(self, cache_filename, template):
s = open(cache_filename).read()
if s.startswith('#@ARGS '):
pos = s.find("\n")
args_str = s[len('#@ARGS '):pos]
template.args = args_str and args_str.split(', ') or []
s = s[pos+1:]
else:
template.args = None
if template.encoding:
#s = s.decode(template.encoding)
s = s.decode('utf-8')
template.script = s
template.compile()
def store_cachefile(self, cache_filename, template):
"""store template into marshal file"""
dct = { 'args': template.args,
'script': template.script,
'bytecode': template.bytecode }
_write_file_with_lock(cache_filename, marshal.dumps(dct))
def _store_cachefile_for_script(self, cache_filename, template):
s = template.script
if template.encoding and isinstance(s, unicode):
s = s.encode(template.encoding)
#s = s.encode('utf-8')
if template.args is not None:
s = "#@ARGS %s\n%s" % (', '.join(template.args), s)
_write_file_with_lock(cache_filename, s)
def cachename(self, filename):
return os.path.join(os.path.expanduser('~'),'.rst2pdf', os.path.basename(filename) + '.cache')
def create_template(self, filename, _context, _globals):
"""Read template file and create template object."""
template = self.templateclass(None, **self.kwargs)
template.timestamp = time.time()
cache_filename = self.cachename(filename)
getmtime = os.path.getmtime
if not self.cache:
input = self.read_template_file(filename, _context, _globals)
template.convert(input, filename)
#template.compile()
elif os.path.exists(cache_filename) and getmtime(cache_filename) >= getmtime(filename):
#Tenjin.logger.info("** debug: %s: cache found." % filename)
template.filename = filename
self.load_cachefile(cache_filename, template)
if template.bytecode is None:
template.compile()
else:
#Tenjin.logger.info("** debug: %s: cache not found." % filename)
input = self.read_template_file(filename, _context, _globals)
template.convert(input, filename)
template.compile()
self.store_cachefile(cache_filename, template)
return template
def read_template_file(self, filename, _context, _globals):
if not self.preprocess:
return open(filename).read()
if _context is None:
_context = {}
if not _context.has_key('_engine'):
self.hook_context(_context)
if _globals is None:
_globals = sys._getframe(2).f_globals
preprocessor = Preprocessor(filename)
return preprocessor.render(_context, globals=_globals)
def get_template(self, template_name, _context=None, _globals=None):
"""Return template object.
If template object has not registered, template engine creates
and registers template object automatically.
"""
template = self.templates.get(template_name)
t = template
if t is None or t.timestamp and t.filename and t.timestamp < os.path.getmtime(t.filename):
filename = self.find_template_file(template_name)
# context and globals are passed only for preprocessing
if _globals is None:
_globals = sys._getframe(1).f_globals
template = self.create_template(filename, _context, _globals)
self.register_template(template_name, template)
return template
def include(self, template_name, append_to_buf=True):
"""Evaluate template using current local variables as context.
template_name:str
Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template.
append_to_buf:boolean (=True)
If True then append output into _buf and return None,
else return string output.
ex.
<?py include('file.pyhtml') ?>
#{include('file.pyhtml', False)}
<?py val = include('file.pyhtml', False) ?>
"""
frame = sys._getframe(1)
locals = frame.f_locals
globals = frame.f_globals
assert locals.has_key('_context')
context = locals['_context']
# context and globals are passed to get_template() only for preprocessing.
template = self.get_template(template_name, context, globals)
if append_to_buf:
_buf = locals['_buf']
else:
_buf = None
return template.render(context, globals, _buf=_buf)
def render(self, template_name, context=None, globals=None, layout=True):
"""Evaluate template with layout file and return result of evaluation.
template_name:str
Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template.
context:dict (=None)
Context object to evaluate. If None then new dict is used.
globals:dict (=None)
Global context to evaluate. If None then globals() is used.
layout:str or Bool(=True)
If True, the default layout name specified in constructor is used.
If False, no layout template is used.
If str, it is regarded as layout template name.
If the template object related to the 'template_name' argument does not exist,
the engine generates a template object and registers it automatically.
"""
if context is None:
context = {}
if globals is None:
globals = sys._getframe(1).f_globals
self.hook_context(context)
while True:
# context and globals are passed to get_template() only for preprocessing
template = self.get_template(template_name, context, globals)
content = template.render(context, globals)
layout = context.pop('_layout', layout)
if layout is True or layout is None:
layout = self.layout
if not layout:
break
template_name = layout
layout = False
context['_content'] = content
context.pop('_content', None)
return content
def hook_context(self, context):
context['_engine'] = self
#context['render'] = self.render
context['include'] = self.include
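# Usage sketch (illustrative): 'engine' is any instance of the engine class defined
# above; the template filename 'list.pyhtml' and the context values are placeholders.
def _example_render(engine):
    # render() evaluates the named template; layout=False skips wrapping the output
    # in the default layout template configured on the engine.
    context = {'items': ['a', 'b', 'c']}
    return engine.render('list.pyhtml', context, layout=False)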
| mit |
jellysheep/pyload | module/lib/beaker/cache.py | 45 | 15814 | """Cache object
The Cache object is used to manage a set of cache files and their
associated backend. The backends can be rotated on the fly by
specifying an alternate type when used.
Advanced users can add new backends in beaker.backends
"""
import warnings
import beaker.container as container
import beaker.util as util
from beaker.exceptions import BeakerException, InvalidCacheBackendError
import beaker.ext.memcached as memcached
import beaker.ext.database as database
import beaker.ext.sqla as sqla
import beaker.ext.google as google
# Initialize the basic available backends
clsmap = {
'memory':container.MemoryNamespaceManager,
'dbm':container.DBMNamespaceManager,
'file':container.FileNamespaceManager,
'ext:memcached':memcached.MemcachedNamespaceManager,
'ext:database':database.DatabaseNamespaceManager,
'ext:sqla': sqla.SqlaNamespaceManager,
'ext:google': google.GoogleNamespaceManager,
}
# Initialize the cache region dict
cache_regions = {}
cache_managers = {}
try:
import pkg_resources
# Load up the additional entry point defined backends
for entry_point in pkg_resources.iter_entry_points('beaker.backends'):
try:
NamespaceManager = entry_point.load()
name = entry_point.name
if name in clsmap:
raise BeakerException("NamespaceManager name conflict,'%s' "
"already loaded" % name)
clsmap[name] = NamespaceManager
except (InvalidCacheBackendError, SyntaxError):
# Ignore invalid backends
pass
except:
import sys
from pkg_resources import DistributionNotFound
# Warn when there's a problem loading a NamespaceManager
if not isinstance(sys.exc_info()[1], DistributionNotFound):
import traceback
from StringIO import StringIO
tb = StringIO()
traceback.print_exc(file=tb)
warnings.warn("Unable to load NamespaceManager entry point: '%s': "
"%s" % (entry_point, tb.getvalue()), RuntimeWarning,
2)
except ImportError:
pass
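# Note on third-party backends: the loop above iterates the 'beaker.backends' setuptools
# entry point group, so an external package can register a NamespaceManager in its
# setup.py, e.g. (package and class names below are placeholders):
#     entry_points={'beaker.backends': ['redis = mypkg.nsm:RedisNamespaceManager']}
# The entry point name becomes the clsmap key and must not clash with the built-in names
# ('memory', 'dbm', 'file', 'ext:memcached', ...), otherwise BeakerException is raised.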
def cache_region(region, *deco_args):
"""Decorate a function to cache itself using a cache region
The region decorator requires arguments if there is more than one
function with the same name in the same module. This is because the
namespace used for a function's cache is based on the function's
name and its module.
Example::
# Add cache region settings to beaker:
beaker.cache.cache_regions.update(dict_of_config_region_options)
@cache_region('short_term', 'some_data')
def populate_things(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
return populate_things('rabbits', 20, 0)
.. note::
The function being decorated must only be called with
positional arguments.
"""
cache = [None]
def decorate(func):
namespace = util.func_namespace(func)
def cached(*args):
reg = cache_regions[region]
if not reg.get('enabled', True):
return func(*args)
if not cache[0]:
if region not in cache_regions:
raise BeakerException('Cache region not configured: %s' % region)
cache[0] = Cache._get_cache(namespace, reg)
cache_key = " ".join(map(str, deco_args + args))
def go():
return func(*args)
return cache[0].get_value(cache_key, createfunc=go)
cached._arg_namespace = namespace
cached._arg_region = region
return cached
return decorate
def region_invalidate(namespace, region, *args):
"""Invalidate a cache region namespace or decorated function
This function only invalidates cache spaces created with the
cache_region decorator.
:param namespace: Either the namespace of the result to invalidate, or the
cached function reference
:param region: The region the function was cached to. If the function was
cached to a single region then this argument can be None
:param args: Arguments that were used to differentiate the cached
function as well as the arguments passed to the decorated
function
Example::
# Add cache region settings to beaker:
beaker.cache.cache_regions.update(dict_of_config_region_options)
def populate_things(invalidate=False):
@cache_region('short_term', 'some_data')
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
# If the results should be invalidated first
if invalidate:
region_invalidate(load, None, 'some_data',
'rabbits', 20, 0)
return load('rabbits', 20, 0)
"""
if callable(namespace):
if not region:
region = namespace._arg_region
namespace = namespace._arg_namespace
if not region:
raise BeakerException("Region or callable function "
"namespace is required")
else:
region = cache_regions[region]
cache = Cache._get_cache(namespace, region)
cache_key = " ".join(str(x) for x in args)
cache.remove_value(cache_key)
class Cache(object):
"""Front-end to the containment API implementing a data cache.
:param namespace: the namespace of this Cache
:param type: type of cache to use
:param expire: seconds to keep cached data
:param expiretime: seconds to keep cached data (legacy support)
:param starttime: time when the cache was created
"""
def __init__(self, namespace, type='memory', expiretime=None,
starttime=None, expire=None, **nsargs):
try:
cls = clsmap[type]
if isinstance(cls, InvalidCacheBackendError):
raise cls
except KeyError:
raise TypeError("Unknown cache implementation %r" % type)
self.namespace = cls(namespace, **nsargs)
self.expiretime = expiretime or expire
self.starttime = starttime
self.nsargs = nsargs
@classmethod
def _get_cache(cls, namespace, kw):
key = namespace + str(kw)
try:
return cache_managers[key]
except KeyError:
cache_managers[key] = cache = cls(namespace, **kw)
return cache
def put(self, key, value, **kw):
self._get_value(key, **kw).set_value(value)
set_value = put
def get(self, key, **kw):
"""Retrieve a cached value from the container"""
return self._get_value(key, **kw).get_value()
get_value = get
def remove_value(self, key, **kw):
mycontainer = self._get_value(key, **kw)
if mycontainer.has_current_value():
mycontainer.clear_value()
remove = remove_value
def _get_value(self, key, **kw):
if isinstance(key, unicode):
key = key.encode('ascii', 'backslashreplace')
if 'type' in kw:
return self._legacy_get_value(key, **kw)
kw.setdefault('expiretime', self.expiretime)
kw.setdefault('starttime', self.starttime)
return container.Value(key, self.namespace, **kw)
@util.deprecated("Specifying a "
"'type' and other namespace configuration with cache.get()/put()/etc. "
"is deprecated. Specify 'type' and other namespace configuration to "
"cache_manager.get_cache() and/or the Cache constructor instead.")
def _legacy_get_value(self, key, type, **kw):
expiretime = kw.pop('expiretime', self.expiretime)
starttime = kw.pop('starttime', None)
createfunc = kw.pop('createfunc', None)
kwargs = self.nsargs.copy()
kwargs.update(kw)
c = Cache(self.namespace.namespace, type=type, **kwargs)
return c._get_value(key, expiretime=expiretime, createfunc=createfunc,
starttime=starttime)
def clear(self):
"""Clear all the values from the namespace"""
self.namespace.remove()
# dict interface
def __getitem__(self, key):
return self.get(key)
def __contains__(self, key):
return self._get_value(key).has_current_value()
def has_key(self, key):
return key in self
def __delitem__(self, key):
self.remove_value(key)
def __setitem__(self, key, value):
self.put(key, value)
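# Usage sketch (illustrative): a memory-backed Cache used directly; the namespace and
# key names below are placeholders. createfunc is only invoked on a miss or an expired value.
def _example_cache_usage():
    cache = Cache('example_namespace', type='memory', expire=30)
    value = cache.get_value('answer', createfunc=lambda: 42)  # computes and stores 42
    again = cache.get_value('answer', createfunc=lambda: 0)   # served from cache, stays 42
    return value, again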
class CacheManager(object):
def __init__(self, **kwargs):
"""Initialize a CacheManager object with a set of options
Options should be parsed with the
:func:`~beaker.util.parse_cache_config_options` function to
ensure only valid options are used.
"""
self.kwargs = kwargs
self.regions = kwargs.pop('cache_regions', {})
# Add these regions to the module global
cache_regions.update(self.regions)
def get_cache(self, name, **kwargs):
kw = self.kwargs.copy()
kw.update(kwargs)
return Cache._get_cache(name, kw)
def get_cache_region(self, name, region):
if region not in self.regions:
raise BeakerException('Cache region not configured: %s' % region)
kw = self.regions[region]
return Cache._get_cache(name, kw)
def region(self, region, *args):
"""Decorate a function to cache itself using a cache region
The region decorator requires arguments if there is more than one
function with the same name in the same module. This is because the
namespace used for a function's cache is based on the function's
name and its module.
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things():
@cache.region('short_term', 'some_data')
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
return load('rabbits', 20, 0)
.. note::
The function being decorated must only be called with
positional arguments.
"""
return cache_region(region, *args)
def region_invalidate(self, namespace, region, *args):
"""Invalidate a cache region namespace or decorated function
This function only invalidates cache spaces created with the
cache_region decorator.
:param namespace: Either the namespace of the result to invalidate, or the
name of the cached function
:param region: The region the function was cached to. If the function was
cached to a single region then this argument can be None
:param args: Arguments that were used to differentiate the cached
function as well as the arguments passed to the decorated
function
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things(invalidate=False):
@cache.region('short_term', 'some_data')
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
# If the results should be invalidated first
if invalidate:
cache.region_invalidate(load, None, 'some_data',
'rabbits', 20, 0)
return load('rabbits', 20, 0)
"""
return region_invalidate(namespace, region, *args)
if callable(namespace):
if not region:
region = namespace._arg_region
namespace = namespace._arg_namespace
if not region:
raise BeakerException("Region or callable function "
"namespace is required")
else:
region = self.regions[region]
cache = self.get_cache(namespace, **region)
cache_key = " ".join(str(x) for x in args)
cache.remove_value(cache_key)
def cache(self, *args, **kwargs):
"""Decorate a function to cache itself with supplied parameters
:param args: Used to make the key unique for this function, as in region()
above.
:param kwargs: Parameters to be passed to get_cache(), will override defaults
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things():
@cache.cache('mycache', expire=15)
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
return load('rabbits', 20, 0)
.. note::
The function being decorated must only be called with
positional arguments.
"""
cache = [None]
key = " ".join(str(x) for x in args)
def decorate(func):
namespace = util.func_namespace(func)
def cached(*args):
if not cache[0]:
cache[0] = self.get_cache(namespace, **kwargs)
cache_key = key + " " + " ".join(str(x) for x in args)
def go():
return func(*args)
return cache[0].get_value(cache_key, createfunc=go)
cached._arg_namespace = namespace
return cached
return decorate
def invalidate(self, func, *args, **kwargs):
"""Invalidate a cache decorated function
This function only invalidates cache spaces created with the
cache decorator.
:param func: Decorated function to invalidate
:param args: Used to make the key unique for this function, as in region()
above.
:param kwargs: Parameters that were passed for use by get_cache(), note that
this is only required if a ``type`` was specified for the
function
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things(invalidate=False):
@cache.cache('mycache', type="file", expire=15)
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
# If the results should be invalidated first
if invalidate:
cache.invalidate(load, 'mycache', 'rabbits', 20, 0, type="file")
return load('rabbits', 20, 0)
"""
namespace = func._arg_namespace
cache = self.get_cache(namespace, **kwargs)
cache_key = " ".join(str(x) for x in args)
cache.remove_value(cache_key)
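# Usage sketch (illustrative): options are normally built via
# beaker.util.parse_cache_config_options; the literal kwargs below are placeholders.
def _example_cache_manager_usage():
    cm = CacheManager(type='memory',
                      cache_regions={'short_term': {'type': 'memory', 'expire': 60}})
    @cm.cache('demo_args', expire=15)
    def load(search_term, limit, offset):
        return [search_term] * limit
    return load('rabbits', 20, 0)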
| gpl-3.0 |
sashs/Ropper | ropper/ropchain/arch/ropchainx86.py | 1 | 45777 | # coding=utf-8
# Copyright 2018 Sascha Schirra
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from ropper.gadget import Category
from ropper.common.error import *
from ropper.common.utils import *
from ropper.rop import Ropper
from ropper.arch import x86
from ropper.ropchain.ropchain import *
from ropper.loaders.loader import Type
from ropper.loaders.pe import PE
from ropper.loaders.elf import ELF
from ropper.loaders.mach_o import MachO
from ropper.loaders.raw import Raw
from ropper.gadget import Gadget
from re import match
from filebytes.pe import ImageDirectoryEntry
import itertools
import math
import sys
if sys.version_info.major == 2:
range = xrange
class RopChainX86(RopChain):
MAX_QUALI = 7
def _printHeader(self):
toReturn = ''
toReturn += ('#!/usr/bin/env python\n')
toReturn += ('# Generated by ropper ropchain generator #\n')
toReturn += ('from struct import pack\n')
toReturn += ('\n')
toReturn += ('p = lambda x : pack(\'I\', x)\n')
toReturn += ('\n')
return toReturn
def _printRebase(self):
toReturn = ''
for binary,section in self._usedBinaries:
imageBase = Gadget.IMAGE_BASES[binary]
toReturn += ('IMAGE_BASE_%d = %s # %s\n' % (self._usedBinaries.index((binary, section)),toHex(imageBase , 4), binary))
toReturn += ('rebase_%d = lambda x : p(x + IMAGE_BASE_%d)\n\n'% (self._usedBinaries.index((binary, section)),self._usedBinaries.index((binary, section))))
return toReturn
@classmethod
def name(cls):
return ''
@classmethod
def availableGenerators(cls):
return [RopChainX86System, RopChainX86Mprotect, RopChainX86VirtualProtect]
@classmethod
def archs(self):
return [x86]
def _createDependenceChain(self, gadgets):
"""
gadgets - list with tuples
tuple contains:
- method to create chaingadget
- list with arguments
- dict with named arguments
- list with registers which are not allowed to override in the gadget
"""
failed = []
cur_len = 0
cur_chain = ''
counter = 0
failed_chains={}
max_perm = math.factorial(len(gadgets))
for x in itertools.permutations(gadgets):
counter += 1
self._printMessage('[*] Try permutation %d / %d' % (counter, max_perm))
found = False
for y in failed:
if x[:len(y)] == y:
found = True
break
if found:
continue
try:
fail = []
chain2 = ''
dontModify = []
badRegs = []
c = 0
for idx in range(len(x)):
g = x[idx]
if idx != 0:
badRegs.extend(x[idx-1][3])
dontModify.extend(g[3])
fail.append(g)
chain2 += g[0](*g[1], badRegs=badRegs, dontModify=dontModify,**g[2])[0]
cur_chain += chain2
break
except RopChainError as e:
failed_chains[chain2] = fail
failed.append(tuple(fail))
else:
self._printMessage('Cannot create chain which fills all registers')
fail_tmp = None
fail_max = []
chain_tmp = None
for chain,fail in failed_chains.items():
if len(fail) > len(fail_max):
fail_max = fail
chain_tmp = chain
cur_chain = '# Filled registers: '
for fa in fail_max[:-1]:
cur_chain += (fa[2]['reg']) + ', '
cur_chain += '\n'
cur_chain += chain_tmp
# print('Impossible to create complete chain')
return cur_chain
def _isModifiedOrDereferencedAccess(self, gadget, dontModify):
regs = []
for line in gadget.lines[1:]:
line = line[1]
if '[' in line:
return True
if dontModify:
m = match('[a-z]+ (e?[abcds][ixlh]),?.*', line)
if m and m.group(1) in dontModify:
return True
return False
def _paddingNeededFor(self, gadget):
regs = []
for idx in range(1,len(gadget.lines)):
line = gadget.lines[idx][1]
matched = match('^pop (...)$', line)
if matched:
regs.append(matched.group(1))
return regs
def _printRopInstruction(self, gadget, padding=True, number=None, value=None):
value_first = False
toReturn = ('rop += rebase_%d(%s) # %s\n' % (self._usedBinaries.index((gadget.fileName, gadget.section)),toHex(gadget.lines[0][0],4), gadget.simpleString()))
if number is not None:
toReturn +=self._printPaddingInstruction(number)
if padding:
regs = self._paddingNeededFor(gadget)
if len(regs) > 0:
dst = gadget.category[2]['dst']
search = '^pop (%s)$' % dst
first_line = gadget.lines[0][1]
if match(search, first_line):
value_first = True
padding_str = ''
for i in range(len(regs)):
padding_str +=self._printPaddingInstruction()
if value_first:
toReturn += value
toReturn += padding_str
else:
toReturn += padding_str
if value:
toReturn += value
return toReturn
def _printAddString(self, string):
return ('rop += \'%s\'\n' % string)
def _printRebasedAddress(self, addr, comment='', idx=0):
return ('rop += rebase_%d(%s)\n' % (idx,addr))
def _printPaddingInstruction(self, addr='0xdeadbeef'):
return ('rop += p(%s)\n' % addr)
def _containsZeroByte(self, addr):
return self.containsBadbytes(addr)
def _createZeroByteFillerForSub(self, number):
start = 0x01010101
for i in range(start, 0x0f0f0f0f):
if not self._containsZeroByte(i) and not self._containsZeroByte(i+number):
return i
raise RopChainError("Could not create Number for substract gadget")
def _createZeroByteFillerForAdd(self, number):
start = 0x01010101
for i in range(start, 0x0f0f0f0f):
if not self._containsZeroByte(i) and not self._containsZeroByte(number-i):
return i
raise RopChainError("Could not create Number for addition gadget")
def _find(self, category, reg=None, srcdst='dst', badDst=[], badSrc=None, dontModify=None, srcEqDst=False, switchRegs=False ):
quali = 1
while quali < RopChainX86System.MAX_QUALI:
for binary in self._binaries:
for gadget in self._gadgets[binary]:
if gadget.category[0] == category and gadget.category[1] == quali:
if badSrc and (gadget.category[2]['src'] in badSrc \
or gadget.affected_regs.intersection(badSrc)):
continue
if badDst and (gadget.category[2]['dst'] in badDst \
or gadget.affected_regs.intersection(badDst)):
continue
if not gadget.lines[len(gadget.lines)-1][1].strip().endswith('ret') or 'esp' in gadget.simpleString():
continue
if srcEqDst and (not (gadget.category[2]['dst'] == gadget.category[2]['src'])):
continue
elif not srcEqDst and 'src' in gadget.category[2] and (gadget.category[2]['dst'] == gadget.category[2]['src']):
continue
if self._isModifiedOrDereferencedAccess(gadget, dontModify):
continue
if reg:
if gadget.category[2][srcdst] == reg:
self._updateUsedBinaries(gadget)
return gadget
elif switchRegs:
other = 'src' if srcdst == 'dst' else 'dst'
if gadget.category[2][other] == reg:
self._updateUsedBinaries(gadget)
return gadget
else:
self._updateUsedBinaries(gadget)
return gadget
quali += 1
def _createWriteStringWhere(self, what, where, reg=None, dontModify=[], idx=0):
badRegs = []
badDst = []
while True:
popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs, dontModify=dontModify)
if not popReg:
raise RopChainError('Cannot build writewhatwhere gadget!')
write4 = self._find(Category.WRITE_MEM, reg=popReg.category[2]['dst'], badDst=
badDst, srcdst='src')
if not write4:
badRegs.append(popReg.category[2]['dst'])
continue
else:
popReg2 = self._find(Category.LOAD_REG, reg=write4.category[2]['dst'], dontModify=[popReg.category[2]['dst']]+dontModify)
if not popReg2:
badDst.append(write4.category[2]['dst'])
continue
else:
break;
if len(what) % 4 > 0:
what += ' ' * (4 - len(what) % 4)
toReturn = ''
for index in range(0,len(what),4):
part = what[index:index+4]
toReturn += self._printRopInstruction(popReg,False)
toReturn += self._printAddString(part)
regs = self._paddingNeededFor(popReg)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
toReturn += self._printRopInstruction(popReg2, False)
toReturn += self._printRebasedAddress(toHex(where+index,4), idx=idx)
regs = self._paddingNeededFor(popReg2)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
toReturn += self._printRopInstruction(write4)
return (toReturn,popReg.category[2]['dst'], popReg2.category[2]['dst'])
def _createWriteRegValueWhere(self, what, where, dontModify=[], idx=0):
badRegs = []
badDst = []
while True:
write4 = self._find(Category.WRITE_MEM, reg=what, badDst=badDst, dontModify=dontModify, srcdst='src')
if not write4:
raise RopChainError('Cannot build writewhatwhere gadget!')
else:
popReg2 = self._find(Category.LOAD_REG, reg=write4.category[2]['dst'], dontModify=[what]+dontModify)
if not popReg2:
badDst.append(write4.category[2]['dst'])
continue
else:
break;
toReturn = self._printRopInstruction(popReg2, False)
toReturn += self._printRebasedAddress(toHex(where,4), idx=idx)
regs = self._paddingNeededFor(popReg2)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
toReturn += self._printRopInstruction(write4)
return (toReturn,what, popReg2.category[2]['dst'])
def _createLoadRegValueFrom(self, what, from_reg, dontModify=[], idx=0):
try:
return self._createLoadRegValueFromMov(what, from_reg, dontModify, idx)
except RopChainError:
return self._createLoadRegValueFromXchg(what, from_reg, dontModify, idx)
def _createLoadRegValueFromMov(self, what, from_reg, dontModify=[], idx=0):
badRegs = []
badDst = []
while True:
load4 = self._find(Category.LOAD_MEM, reg=what, badDst=badDst, dontModify=dontModify, srcdst='dst')
if not load4:
raise RopChainError('Cannot build loadwhere gadget!')
else:
popReg2 = self._find(Category.LOAD_REG, reg=load4.category[2]['src'], dontModify=[what,load4.category[2]['src']]+dontModify)
if not popReg2:
badDst.append(load4.category[2]['src'])
continue
else:
break;
value = self._printPaddingInstruction(toHex(from_reg,4))
toReturn = self._printRopInstruction(popReg2, False, value=value)
regs = self._paddingNeededFor(popReg2)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
toReturn += self._printRopInstruction(load4)
return (toReturn,what, popReg2.category[2]['dst'])
def _createLoadRegValueFromXchg(self, what, from_reg, dontModify=[], idx=0):
badRegs = []
badDst = []
while True:
load4 = self._find(Category.XCHG_REG, reg=what, badDst=badDst, dontModify=dontModify, srcdst='src')
if not load4:
raise RopChainError('Cannot build loadwhere gadget!')
else:
mov = self._find(Category.LOAD_MEM, reg=load4.category[2]['dst'], badDst=badDst, dontModify=[load4.category[2]['dst']]+dontModify, srcdst='dst')
if not mov:
badDst.append(load4.category[2]['dst'])
continue
popReg2 = self._find(Category.LOAD_REG, reg=mov.category[2]['src'], dontModify=[what,load4.category[2]['src']]+dontModify)
if not popReg2:
badDst.append(load4.category[2]['src'])
continue
else:
break;
toReturn = self._printRopInstruction(popReg2, False)
toReturn += self._printPaddingInstruction(toHex(from_reg,4))
regs = self._paddingNeededFor(popReg2)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
toReturn += self._printRopInstruction(mov)
toReturn += self._printRopInstruction(load4)
return (toReturn,what, popReg2.category[2]['dst'])
def _createNumberSubtract(self, number, reg=None, badRegs=None, dontModify=None):
if not badRegs:
badRegs=[]
while True:
sub = self._find(Category.SUB_REG, reg=reg, badDst=badRegs, badSrc=badRegs, dontModify=dontModify)
if not sub:
raise RopChainError('Cannot build number with subtract gadget for reg %s!' % reg)
popSrc = self._find(Category.LOAD_REG, reg=sub.category[2]['src'], dontModify=dontModify)
if not popSrc:
badRegs.append(sub.category[2]['src'])
continue
popDst = self._find(Category.LOAD_REG, reg=sub.category[2]['dst'], dontModify=[sub.category[2]['src']]+dontModify)
if not popDst:
badRegs.append(sub.category[2]['dst'])
continue
else:
break;
filler = self._createZeroByteFillerForSub(number)
toReturn = self._printRopInstruction(popSrc, False)
toReturn += self._printPaddingInstruction(toHex(filler,4))
regs = self._paddingNeededFor(popSrc)
for i in range(len(regs)):
toReturn += self._printPaddingInstruction()
toReturn += self._printRopInstruction(popDst, False)
toReturn += self._printPaddingInstruction(toHex(filler+number,4))
regs = self._paddingNeededFor(popDst)
for i in range(len(regs)):
toReturn += self._printPaddingInstruction()
toReturn += self._printRopInstruction(sub)
return (toReturn, popDst.category[2]['dst'],popSrc.category[2]['dst'])
def _createNumberAddition(self, number, reg=None, badRegs=None, dontModify=None):
if not badRegs:
badRegs=[]
while True:
sub = self._find(Category.ADD_REG, reg=reg, badDst=badRegs, badSrc=badRegs, dontModify=dontModify)
if not sub:
raise RopChainError('Cannot build number with addition gadget for reg %s!' % reg)
popSrc = self._find(Category.LOAD_REG, reg=sub.category[2]['src'], dontModify=dontModify)
if not popSrc:
badRegs.append(sub.category[2]['src'])
continue
popDst = self._find(Category.LOAD_REG, reg=sub.category[2]['dst'], dontModify=[sub.category[2]['src']]+dontModify)
if not popDst:
badRegs.append(sub.category[2]['dst'])
continue
else:
break;
filler = self._createZeroByteFillerForAdd(number)
toReturn = self._printRopInstruction(popSrc, False)
toReturn += self._printPaddingInstruction(toHex(filler,4))
regs = self._paddingNeededFor(popSrc)
for i in range(len(regs)):
toReturn += self._printPaddingInstruction()
toReturn += self._printRopInstruction(popDst, False)
toReturn += self._printPaddingInstruction(toHex(number - filler,4))
regs = self._paddingNeededFor(popDst)
for i in range(len(regs)):
toReturn += self._printPaddingInstruction()
toReturn += self._printRopInstruction(sub)
return (toReturn, popDst.category[2]['dst'],popSrc.category[2]['dst'])
def _createNumberPop(self, number, reg=None, badRegs=None, dontModify=None):
if self._containsZeroByte(0xffffffff):
raise RopChainError("Cannot write value with pop -1 and inc gadgets, because there are badbytes in the negated number")
while True:
popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify)
if not popReg:
raise RopChainError('Cannot build number with xor gadget!')
incReg = self._find(Category.INC_REG, reg=popReg.category[2]['dst'], dontModify=dontModify)
if not incReg:
if not badRegs:
badRegs = []
badRegs.append(popReg.category[2]['dst'])
else:
break
value = self._printPaddingInstruction(toHex(0xffffffff,4))
toReturn = self._printRopInstruction(popReg, value=value)
for i in range(number+1):
toReturn += self._printRopInstruction(incReg)
return (toReturn ,popReg.category[2]['dst'],)
def _createNumberXOR(self, number, reg=None, badRegs=None, dontModify=None):
while True:
clearReg = self._find(Category.CLEAR_REG, reg=reg, badDst=badRegs, badSrc=badRegs,dontModify=dontModify, srcEqDst=True)
if not clearReg:
raise RopChainError('Cannot build number with xor gadget!')
if number > 0:
incReg = self._find(Category.INC_REG, reg=clearReg.category[2]['src'], dontModify=dontModify)
if not incReg:
if not badRegs:
badRegs = []
badRegs.append(clearReg.category[2]['src'])
else:
break
else:
break
toReturn = self._printRopInstruction(clearReg)
for i in range(number):
toReturn += self._printRopInstruction(incReg)
return (toReturn, clearReg.category[2]['dst'],)
def _createNumberXchg(self, number, reg=None, badRegs=None, dontModify=None):
xchg = self._find(Category.XCHG_REG, reg=reg, badDst=badRegs, dontModify=dontModify)
if not xchg:
raise RopChainError('Cannot build number gadget with xchg!')
other = xchg.category[2]['src'] if xchg.category[2]['dst'] else xchg.category[2]['dst']
toReturn = self._createNumber(number, other, badRegs, dontModify)[0]
toReturn += self._printRopInstruction(xchg)
return (toReturn, reg, other)
def _createNumberNeg(self, number, reg=None, badRegs=None, dontModify=None):
if number == 0:
raise RopChainError('Cannot build number gadget with neg if number is 0!')
if self._containsZeroByte((~number)+1):
raise RopChainError("Cannot use neg gadget, because there are badbytes in the negated number")
neg = self._find(Category.NEG_REG, reg=reg, badDst=badRegs, dontModify=dontModify)
if not neg:
raise RopChainError('Cannot build number gadget with neg!')
pop = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs, dontModify=dontModify)
if not pop:
raise RopChainError('Cannot build number gadget with neg!')
value = self._printPaddingInstruction(toHex((~number)+1)) # two's complement
toReturn = self._printRopInstruction(pop, value=value)
toReturn += self._printRopInstruction(neg)
return (toReturn, reg,)
def _createNumber(self, number, reg=None, badRegs=None, dontModify=None, xchg=True):
try:
if self._containsZeroByte(number):
try:
return self._createNumberNeg(number, reg, badRegs,dontModify)
except RopChainError as e:
if number < 0x50:
try:
return self._createNumberXOR(number, reg, badRegs,dontModify)
except RopChainError:
try:
return self._createNumberPop(number, reg, badRegs,dontModify)
except RopChainError:
try:
return self._createNumberSubtract(number, reg, badRegs,dontModify)
except RopChainError:
return self._createNumberAddition(number, reg, badRegs,dontModify)
else :
try:
return self._createNumberSubtract(number, reg, badRegs,dontModify)
except RopChainError:
return self._createNumberAddition(number, reg, badRegs,dontModify)
else:
popReg =self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify)
if not popReg:
raise RopChainError('Cannot build number gadget!')
toReturn = self._printRopInstruction(popReg, padding=True, number=toHex(number,4))
return (toReturn , popReg.category[2]['dst'])
except RopChainError as e:
return self._createNumberXchg(number, reg, badRegs, dontModify)
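# Note: when the literal contains a bad byte, _createNumber() above tries, in order, a
# neg gadget, xor+inc (for values < 0x50), pop 0xffffffff plus inc, subtraction, then
# addition, and finally falls back to building the value in another register and
# xchg-ing it into the target register.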
def _createAddress(self, address, reg=None, badRegs=None, dontModify=None):
popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify)
if not popReg:
raise RopChainError('Cannot build address gadget!')
toReturn = ''
toReturn += self._printRopInstruction(popReg,False)
toReturn += self._printRebasedAddress(toHex(address, 4), idx=self._usedBinaries.index((popReg.fileName, popReg.section)))
regs = self._paddingNeededFor(popReg)
for i in range(len(regs)):
toReturn +=self._printPaddingInstruction()
return (toReturn,popReg.category[2]['dst'])
def _createSyscall(self, reg=None, badRegs=None, dontModify=None):
syscall = self._find(Category.SYSCALL, reg=None, badDst=None, dontModify=dontModify)
if not syscall:
raise RopChainError('Cannot build syscall gadget!')
toReturn = ''
toReturn += self._printRopInstruction(syscall)
return (toReturn,)
def _createOpcode(self, opcode):
return self._printRopInstruction(self._searchOpcode(opcode))
def _searchOpcode(self, opcode):
r = Ropper()
gadgets = []
for binary in self._binaries:
for section in binary.executableSections:
vaddr = section.virtualAddress
gadgets.extend(r.searchOpcode(binary,opcode=opcode,disass=True))
if len(gadgets) > 0:
for gadget in gadgets:
if not gadget:
continue
if not self.containsBadbytes(gadget.IMAGE_BASES.get(gadget.fileName,0) + gadget.lines[0][0]):
self._updateUsedBinaries(gadget)
return gadget
else:
raise RopChainError('Cannot create gadget for opcode: %s' % opcode)
def create(self):
pass
class RopChainX86System(RopChainX86):
@classmethod
def usableTypes(self):
return (ELF, Raw)
@classmethod
def name(cls):
return 'execve'
def _createCommand(self, what, where, reg=None, dontModify=[], idx=0):
if len(what) % 4 > 0:
what = '/' * (4 - len(what) % 4) + what
return self._createWriteStringWhere(what,where, idx=idx)
def create(self, options={}):
cmd = options.get('cmd')
address = options.get('address')
if not cmd:
cmd = '/bin/sh'
if len(cmd.split(' ')) > 1:
raise RopChainError('No argument support for execve commands')
self._printMessage('ROPchain Generator for syscall execve:\n')
self._printMessage('\nwrite command into data section\neax 0xb\nebx address to cmd\necx address to null\nedx address to null\n')
chain = self._printHeader()
gadgets = []
can_create_command = False
chain_tmp = '\n'
if address is None:
section = self._binaries[0].getSection('.data')
length = math.ceil(float(len(cmd))/4) * 4
nulladdress = section.offset+length
try:
cmdaddress = section.offset
chain_tmp += self._createCommand(cmd,cmdaddress)[0]
can_create_command = True
except RopChainError as e:
self._printMessage('Cannot create gadget: writewhatwhere')
self._printMessage('Use 0x41414141 as command address. Please replace that value.')
cmdaddress = 0x41414141
if can_create_command:
badregs = []
while True:
c = ''
ret = self._createNumber(0x0, badRegs=badregs)
c += ret[0]
try:
c += self._createWriteRegValueWhere(ret[1], nulladdress)[0]
chain_tmp += c
break
except BaseException as e:
#raise e
badregs.append(ret[1])
gadgets.append((self._createAddress, [cmdaddress],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']))
gadgets.append((self._createAddress, [nulladdress],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']))
gadgets.append((self._createAddress, [nulladdress],{'reg':'edx'},['edx', 'dx', 'dl', 'dh']))
gadgets.append((self._createNumber, [0xb],{'reg':'eax'},['eax', 'ax', 'al', 'ah']))
if address is not None and not can_create_command:
if type(address) is str:
cmdaddress = int(address, 16)
nulladdress = options.get('nulladdress')
if nulladdress is None:
self._printMessage('No address pointing to null bytes was given, 0x42424242 is used instead.')
self._printMessage('Please replace that value.')
nulladdress = 0x42424242
elif type(nulladdress) is str:
nulladdress = int(nulladdress,16)
gadgets.append((self._createNumber, [cmdaddress],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']))
gadgets.append((self._createNumber, [nulladdress],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']))
gadgets.append((self._createNumber, [nulladdress],{'reg':'edx'},['edx', 'dx', 'dl', 'dh']))
gadgets.append((self._createNumber, [0xb],{'reg':'eax'},['eax', 'ax', 'al', 'ah']))
self._printMessage('Try to create chain which fills registers without deleting content of previously filled registers')
chain_tmp += self._createDependenceChain(gadgets)
try:
self._printMessage('Look for syscall gadget')
chain_tmp += self._createSyscall()[0]
self._printMessage('syscall gadget found')
except RopChainError:
try:
self._printMessage('No syscall gadget found!')
self._printMessage('Look for int 0x80 opcode')
chain_tmp += self._createOpcode('cd80')
self._printMessage('int 0x80 opcode found')
except RopChainError:
try:
self._printMessage('No int 0x80 opcode found')
self._printMessage('Look for call gs:[0x10] opcode')
chain_tmp += self._createOpcode('65ff1510000000')
self._printMessage('call gs:[0x10] found')
except RopChainError:
chain_tmp += '# INSERT SYSCALL GADGET HERE\n'
self._printMessage('No call gs:[0x10] opcode found')
chain += self._printRebase()
chain += 'rop = \'\'\n'
chain += chain_tmp
chain += 'print rop'
return chain
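# Usage note (illustrative): create() takes an options dict; this generator reads the
# keys 'cmd', 'address' and 'nulladdress' (all optional, '/bin/sh' is the default cmd).
# The emitted Python script loads ebx with the command address, ecx/edx with a NULL
# dword, eax with 0xb (execve) and ends with a syscall gadget (int 0x80 or call gs:[0x10]).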
class RopChainX86Mprotect(RopChainX86):
"""
Builds a ropchain for mprotect syscall
eax 0x7d
ebx address
ecx size
edx 0x7 -> RWE
"""
@classmethod
def usableTypes(self):
return (ELF, Raw)
@classmethod
def name(cls):
return 'mprotect'
def _createJmp(self, reg=['esp']):
r = Ropper()
gadgets = []
for binary in self._binaries:
for section in binary.executableSections:
vaddr = section.virtualAddress
gadgets.extend(
r.searchJmpReg(binary,reg))
if len(gadgets) > 0:
self._updateUsedBinaries(gadgets[0])
return self._printRopInstruction(gadgets[0])
else:
return None
def create(self, options={}):
address = options.get('address')
size = options.get('size')
if not address:
raise RopChainError('Missing parameter: address')
if not size:
raise RopChainError('Missing parameter: size')
if not match('0x[0-9a-fA-F]{1,8}', address):
raise RopChainError('Parameter address must have the following format: <hexnumber>')
if not match('0x[0-9a-fA-F]+', size):
raise RopChainError('Parameter size must have the following format: <hexnumber>')
address = int(address, 16)
size = int(size, 16)
self._printMessage('ROPchain Generator for syscall mprotect:\n')
self._printMessage('eax 0x7d\nebx address\necx size\nedx 0x7 -> RWE\n')
chain = self._printHeader()
chain += 'shellcode = \'\\xcc\'*100\n\n'
gadgets = []
gadgets.append((self._createNumber, [address],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']))
gadgets.append((self._createNumber, [size],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']))
gadgets.append((self._createNumber, [0x7],{'reg':'edx'},['edx', 'dx', 'dl', 'dh']))
gadgets.append((self._createNumber, [0x7d],{'reg':'eax'},['eax', 'ax', 'al', 'ah']))
self._printMessage('Try to create chain which fills registers without deleting content of previously filled registers')
chain_tmp = ''
chain_tmp += self._createDependenceChain(gadgets)
try:
self._printMessage('Look for syscall gadget')
chain_tmp += self._createSyscall()[0]
self._printMessage('syscall gadget found')
except RopChainError:
chain_tmp += '\n# ADD HERE SYSCALL GADGET\n\n'
self._printMessage('No syscall gadget found!')
self._printMessage('Look for jmp esp')
jmp_esp = self._createJmp()
if jmp_esp:
self._printMessage('jmp esp found')
chain_tmp += jmp_esp
else:
self._printMessage('no jmp esp found')
chain_tmp += '\n# ADD HERE JMP ESP\n\n'
chain += self._printRebase()
chain += '\nrop = \'\'\n'
chain += chain_tmp
chain += 'rop += shellcode\n\n'
chain += 'print(rop)\n'
return chain
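# Usage note (illustrative): this generator expects hex strings, e.g.
# create({'address': '0x08048000', 'size': '0x1000'}) with placeholder values; the chain
# sets eax=0x7d (mprotect), marks the region RWE (0x7) and, when no syscall gadget is
# found, falls back to a 'jmp esp' gadget so execution continues in the appended shellcode.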
class RopChainX86VirtualProtect(RopChainX86):
"""
Builds a ropchain for a VirtualProtect call using pushad
eax 0x90909090
ecx old protection (writable addr)
edx 0x40 (RWE)
ebx size
esp address
ebp return address (jmp esp)
esi pointer to VirtualProtect
edi ret (rop nop)
"""
@classmethod
def usableTypes(self):
return (PE, Raw)
@classmethod
def name(cls):
return 'virtualprotect'
def _createPushad(self):
pushad = self._find(Category.PUSHAD)
if pushad:
return self._printRopInstruction(pushad)
else:
self._printMessage('No pushad found!')
return '# Add here PUSHAD gadget!'
def _createJmp(self, reg=['esp']):
r = Ropper()
gadgets = []
for binary in self._binaries:
for section in binary.executableSections:
vaddr = section.offset
gadgets.extend(
r.searchJmpReg(binary,reg))
if len(gadgets) > 0:
self._updateUsedBinaries(gadgets[0])
return gadgets[0]
else:
return None
def __extract(self, param):
if (not match('0x[0-9a-fA-F]{1,8},0x[0-9a-fA-F]+', param)) and (not match('0x[0-9a-fA-F]+', param)):
raise RopChainError('Parameter must have the following format: <hexnumber>')
return (None, int(param, 16))
def __getVirtualProtectEntry(self):
for binary in self._binaries:
if binary.type == Type.PE:
imports = binary._binary.dataDirectory[ImageDirectoryEntry.IMPORT]
if not imports:
return None
for descriptorData in imports:
for thunk in descriptorData.importAddressTable:
if thunk.importByName and thunk.importByName.name == 'VirtualProtect':
return thunk.rva, binary.imageBase
else:
self._printMessage('File is not a PE file.')
return None
def create(self, options={}):
self._printMessage('Ropchain Generator for VirtualProtect:\n')
self._printMessage('eax 0x90909090\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to VirtualProtect\nedi ret (rop nop)\n')
image_base = 0
address = options.get('address')
given = False
if not address:
virtual_protect = self.__getVirtualProtectEntry()
if virtual_protect:
address, image_base = virtual_protect
if not address:
self._printMessage('No IAT-Entry for VirtualProtect found!')
raise RopChainError('No IAT-Entry for VirtualProtect found and no address is given')
else:
if address:
if not match('0x[0-9a-fA-F]{1,8}', address):
raise RopChainError('Parameter address must have the following format: <hexnumber>')
address = int(address, 16)
given = True
writeable_ptr = self._binaries[0].getWriteableSection().offset
for i in range(0,0x10000,4):
if not self.containsBadbytes((writeable_ptr + i) & 0xffff,2):
writeable_ptr += i
break
jmp_esp = self._createJmp()
ret_addr = self._searchOpcode('c3')
chain = self._printHeader()
chain += '\n\nshellcode = \'\\xcc\'*100\n\n'
gadgets = []
to_extend = []
chain_tmp = ''
got_jmp_esp = False
try:
self._printMessage('Try to create gadget to fill esi with content of IAT address: 0x%x' % (address + image_base))
chain_tmp += self._createLoadRegValueFrom('esi', address+image_base)[0]
gadgets.append((self._createNumber, [0x90909090],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si']))
to_extend = ['esi','si']
if jmp_esp:
gadgets.append((self._createAddress, [jmp_esp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend))
got_jmp_esp = True
except RopChainError:
self._printMessage('Cannot create fill esi gadget!')
self._printMessage('Try to create this chain:\n')
self._printMessage('eax Pointer to VirtualProtect\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (pop ebp;ret)\nesi pointer to jmp [eax]\nedi ret (rop nop)\n')
jmp_eax = self._searchOpcode('ff20') # jmp [eax]
gadgets.append((self._createAddress, [jmp_eax.lines[0][0]],{'reg':'esi'},['esi','si']))
gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al']))
pop_ebp = self._searchOpcode('5dc3')
if pop_ebp:
gadgets.append((self._createAddress, [pop_ebp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend))
gadgets.append((self._createNumber, [0x1],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']+to_extend))
gadgets.append((self._createAddress, [writeable_ptr],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']+to_extend))
gadgets.append((self._createNumber, [0x40],{'reg':'edx'},['edx', 'dx', 'dh', 'dl']+to_extend))
gadgets.append((self._createAddress, [ret_addr.lines[0][0]],{'reg':'edi'},['edi', 'di']+to_extend))
self._printMessage('Try to create chain which fills registers without deleting content of previously filled registers')
chain_tmp += self._createDependenceChain(gadgets)
self._printMessage('Look for pushad gadget')
chain_tmp += self._createPushad()
if not got_jmp_esp and jmp_esp:
chain_tmp += self._printRopInstruction(jmp_esp)
chain += self._printRebase()
chain += 'rop = \'\'\n'
chain += chain_tmp
chain += 'rop += shellcode\n\n'
chain += 'print(rop)\n'
return chain
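# Usage note (illustrative): without an 'address' option the generator resolves the
# VirtualProtect IAT entry from the PE import table itself; otherwise pass the IAT
# address as a hex string, e.g. create({'address': '0x61b153d4'}) (placeholder value).
# The chain prepares the registers listed in the class docstring, triggers the call via
# a pushad gadget, and the shellcode is appended directly after the chain.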
# class RopChainX86VirtualAlloc(RopChainX86):
# """
# Builds a ropchain for a VirtualProtect call using pushad
# eax 0x90909090
# ecx old protection (writable addr)
# edx 0x40 (RWE)
# ebx size
# esp address
# ebp return address (jmp esp)
# esi pointer to VirtualProtect
# edi ret (rop nop)
# """
# @classmethod
# def name(cls):
# return 'virtualalloc'
# def _createPushad(self):
# pushad = self._find(Category.PUSHAD)
# if pushad:
# return self._printRopInstruction(pushad)
# else:
# self._printer.printInfo('No pushad found!')
# return '# Add here PUSHAD gadget!'
# def _createJmp(self, reg=['esp']):
# r = Ropper()
# gadgets = []
# for binary in self._binaries:
# for section in binary.executableSections:
# vaddr = section.offset
# gadgets.extend(
# r.searchJmpReg(self._binaries[0],reg))
# if len(gadgets) > 0:
# if (gadgets[0]._binary, gadgets[0]._section) not in self._usedBinaries:
# self._usedBinaries.append((gadgets[0]._binary, gadgets[0]._section))
# return gadgets[0]
# else:
# return None
# def __extract(self, param):
# if (not match('0x[0-9a-fA-F]{1,8},0x[0-9a-fA-F]+', param)) and (not match('0x[0-9a-fA-F]+', param)):
# raise RopChainError('Parameter have to have the following format: <hexnumber>,<hexnumber> or <hexnumber>')
# split = param.split(',')
# if len(split) == 2:
# if isHex(split[1]):
# return (int(split[0], 16), int(split[1], 16))
# else:
# return (None, int(split[0], 16))
# def __getVirtualProtectEntry(self):
# for binary in self._binaries:
# if binary.type == Type.PE:
# s = binary._binary.dataDirectory[ImageDirectoryEntry.IMPORT]
# for thunk in s.importNameTable:
# if thunk.importByName.name == 'VirtualAlloc':
# return thunk.rva + binary.imageBase
# else:
# self._printer.printError('File is not a PE file.')
# return None
# def create(self, param=None):
# if not param:
# raise RopChainError('Missing parameter: address,size or size')
# self._printer.printInfo('Ropchain Generator for VirtualProtect:\n')
# self._printer.println('eax 0x90909090\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to VirtualProtect\nedi ret (rop nop)\n')
# address, size = self.__extract(param)
# given = False
# if not address:
# address = self.__getVirtualProtectEntry()
# if not address:
# self._printer.printError('No IAT-Entry for VirtualProtect found!')
# raise RopChainError('No IAT-Entry for VirtualProtect found and no address is given')
# else:
# given = True
# jmp_esp = self._createJmp()
# ret_addr = self._searchOpcode('c3')
# chain = self._printHeader()
# chain += '\n\nshellcode = \'\\xcc\'*100\n\n'
# gadgets = []
# to_extend = []
# chain_tmp = ''
# try:
# self._printer.printInfo('Try to create gadget to fill esi with content of IAT address: %s' % address)
# chain_tmp += self._createLoadRegValueFrom('esi', address)[0]
# if given:
# gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si']))
# else:
# gadgets.append((self._createAddress, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si']))
# to_extend = ['esi','si']
# except:
# self._printer.printInfo('Cannot create fill esi gadget!')
# self._printer.printInfo('Try to create this chain:\n')
# self._printer.println('eax Pointer to VirtualProtect\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to jmp [eax]\nedi ret (rop nop)\n')
# jmp_eax = self._searchOpcode('ff20') # jmp [eax]
# gadgets.append((self._createAddress, [jmp_eax.lines[0][0]],{'reg':'esi'},['esi','si']))
# if given:
# gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al']))
# else:
# gadgets.append((self._createAddress, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al']))
# gadgets.append((self._createNumber, [size],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']+to_extend))
# gadgets.append((self._createNumber, [0x40],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']+to_extend))
# if jmp_esp:
# gadgets.append((self._createAddress, [jmp_esp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend))
# gadgets.append((self._createNumber, [0x1000],{'reg':'edx'},['edx', 'dx', 'dh', 'dl']+to_extend))
# gadgets.append((self._createAddress, [ret_addr.lines[0][0]],{'reg':'edi'},['edi', 'di']+to_extend))
# self._printer.printInfo('Try to create chain which fills registers without delete content of previous filled registers')
# chain_tmp += self._createDependenceChain(gadgets)
# self._printer.printInfo('Look for pushad gadget')
# chain_tmp += self._createPushad()
# chain += self._printRebase()
# chain += 'rop = \'\'\n'
# chain += chain_tmp
# chain += 'rop += shellcode\n\n'
# chain += 'print(rop)\n'
# return chain
| bsd-3-clause |
chromium/chromium | tools/perf/page_sets/webgl_supported_shared_state.py | 10 | 1431 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.page import shared_page_state
class WebGLSupportedSharedState(shared_page_state.SharedPageState):
def CanRunOnBrowser(self, browser_info, page):
assert hasattr(page, 'skipped_gpus')
if not browser_info.HasWebGLSupport():
logging.warning('Browser does not support webgl, skipping test')
return False
# Check the skipped GPUs list.
# Requires the page provide a "skipped_gpus" property.
browser = browser_info.browser
system_info = browser.GetSystemInfo()
if system_info:
gpu_info = system_info.gpu
gpu_vendor = self._GetGpuVendorString(gpu_info)
if gpu_vendor in page.skipped_gpus:
return False
return True
def _GetGpuVendorString(self, gpu_info):
if gpu_info:
primary_gpu = gpu_info.devices[0]
if primary_gpu:
vendor_string = primary_gpu.vendor_string.lower()
vendor_id = primary_gpu.vendor_id
if vendor_string:
return vendor_string.split(' ')[0]
elif vendor_id == 0x10DE:
return 'nvidia'
elif vendor_id == 0x1002:
return 'amd'
elif vendor_id == 0x8086:
return 'intel'
elif vendor_id == 0x15AD:
return 'vmware'
return 'unknown_gpu'
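# Usage sketch (illustrative): pages run with this shared state must expose a
# 'skipped_gpus' attribute, checked by the assert in CanRunOnBrowser, e.g.:
#
#   class ExampleWebGLPage(page_module.Page):  # page_module/telemetry imports assumed
#     skipped_gpus = ['arm', 'qualcomm']  # example GPU vendor strings to skip
#
# CanRunOnBrowser() then skips the page when the primary GPU's vendor string matches
# one of the listed entries.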
| bsd-3-clause |
anbangleo/NlsdeWeb | Python-3.6.0/Lib/test/test_deque.py | 2 | 34169 | from collections import deque
import unittest
from test import support, seq_tests
import gc
import weakref
import copy
import pickle
from io import StringIO
import random
import struct
BIG = 100000
def fail():
raise SyntaxError
yield 1
class BadCmp:
def __eq__(self, other):
raise RuntimeError
class MutateCmp:
def __init__(self, deque, result):
self.deque = deque
self.result = result
def __eq__(self, other):
self.deque.clear()
return self.result
class TestBasic(unittest.TestCase):
def test_basics(self):
d = deque(range(-5125, -5000))
d.__init__(range(200))
for i in range(200, 400):
d.append(i)
for i in reversed(range(-200, 0)):
d.appendleft(i)
self.assertEqual(list(d), list(range(-200, 400)))
self.assertEqual(len(d), 600)
left = [d.popleft() for i in range(250)]
self.assertEqual(left, list(range(-200, 50)))
self.assertEqual(list(d), list(range(50, 400)))
right = [d.pop() for i in range(250)]
right.reverse()
self.assertEqual(right, list(range(150, 400)))
self.assertEqual(list(d), list(range(50, 150)))
def test_maxlen(self):
self.assertRaises(ValueError, deque, 'abc', -1)
self.assertRaises(ValueError, deque, 'abc', -2)
it = iter(range(10))
d = deque(it, maxlen=3)
self.assertEqual(list(it), [])
self.assertEqual(repr(d), 'deque([7, 8, 9], maxlen=3)')
self.assertEqual(list(d), [7, 8, 9])
self.assertEqual(d, deque(range(10), 3))
d.append(10)
self.assertEqual(list(d), [8, 9, 10])
d.appendleft(7)
self.assertEqual(list(d), [7, 8, 9])
d.extend([10, 11])
self.assertEqual(list(d), [9, 10, 11])
d.extendleft([8, 7])
self.assertEqual(list(d), [7, 8, 9])
d = deque(range(200), maxlen=10)
d.append(d)
support.unlink(support.TESTFN)
fo = open(support.TESTFN, "w")
try:
fo.write(str(d))
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
d = deque(range(10), maxlen=None)
self.assertEqual(repr(d), 'deque([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])')
fo = open(support.TESTFN, "w")
try:
fo.write(str(d))
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
def test_maxlen_zero(self):
it = iter(range(100))
deque(it, maxlen=0)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extend(it)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extendleft(it)
self.assertEqual(list(it), [])
def test_maxlen_attribute(self):
self.assertEqual(deque().maxlen, None)
self.assertEqual(deque('abc').maxlen, None)
self.assertEqual(deque('abc', maxlen=4).maxlen, 4)
self.assertEqual(deque('abc', maxlen=2).maxlen, 2)
self.assertEqual(deque('abc', maxlen=0).maxlen, 0)
with self.assertRaises(AttributeError):
d = deque('abc')
d.maxlen = 10
def test_count(self):
for s in ('', 'abracadabra', 'simsalabim'*500+'abc'):
s = list(s)
d = deque(s)
for letter in 'abcdefghijklmnopqrstuvwxyz':
self.assertEqual(s.count(letter), d.count(letter), (s, d, letter))
self.assertRaises(TypeError, d.count) # too few args
self.assertRaises(TypeError, d.count, 1, 2) # too many args
class BadCompare:
def __eq__(self, other):
raise ArithmeticError
d = deque([1, 2, BadCompare(), 3])
self.assertRaises(ArithmeticError, d.count, 2)
d = deque([1, 2, 3])
self.assertRaises(ArithmeticError, d.count, BadCompare())
class MutatingCompare:
def __eq__(self, other):
self.d.pop()
return True
m = MutatingCompare()
d = deque([1, 2, 3, m, 4, 5])
m.d = d
self.assertRaises(RuntimeError, d.count, 3)
# test issue11004
# block advance failed after rotation aligned elements on right side of block
d = deque([None]*16)
for i in range(len(d)):
d.rotate(-1)
d.rotate(1)
self.assertEqual(d.count(1), 0)
self.assertEqual(d.count(None), 16)
def test_comparisons(self):
d = deque('xabc'); d.popleft()
for e in [d, deque('abc'), deque('ab'), deque(), list(d)]:
self.assertEqual(d==e, type(d)==type(e) and list(d)==list(e))
self.assertEqual(d!=e, not(type(d)==type(e) and list(d)==list(e)))
args = map(deque, ('', 'a', 'b', 'ab', 'ba', 'abc', 'xba', 'xabc', 'cba'))
for x in args:
for y in args:
self.assertEqual(x == y, list(x) == list(y), (x,y))
self.assertEqual(x != y, list(x) != list(y), (x,y))
self.assertEqual(x < y, list(x) < list(y), (x,y))
self.assertEqual(x <= y, list(x) <= list(y), (x,y))
self.assertEqual(x > y, list(x) > list(y), (x,y))
self.assertEqual(x >= y, list(x) >= list(y), (x,y))
def test_contains(self):
n = 200
d = deque(range(n))
for i in range(n):
self.assertTrue(i in d)
self.assertTrue((n+1) not in d)
# Test detection of mutation during iteration
d = deque(range(n))
d[n//2] = MutateCmp(d, False)
with self.assertRaises(RuntimeError):
n in d
# Test detection of comparison exceptions
d = deque(range(n))
d[n//2] = BadCmp()
with self.assertRaises(RuntimeError):
n in d
def test_extend(self):
d = deque('a')
self.assertRaises(TypeError, d.extend, 1)
d.extend('bcd')
self.assertEqual(list(d), list('abcd'))
d.extend(d)
self.assertEqual(list(d), list('abcdabcd'))
def test_add(self):
d = deque()
e = deque('abc')
f = deque('def')
self.assertEqual(d + d, deque())
self.assertEqual(e + f, deque('abcdef'))
self.assertEqual(e + e, deque('abcabc'))
self.assertEqual(e + d, deque('abc'))
self.assertEqual(d + e, deque('abc'))
self.assertIsNot(d + d, deque())
self.assertIsNot(e + d, deque('abc'))
self.assertIsNot(d + e, deque('abc'))
g = deque('abcdef', maxlen=4)
h = deque('gh')
self.assertEqual(g + h, deque('efgh'))
with self.assertRaises(TypeError):
deque('abc') + 'def'
def test_iadd(self):
d = deque('a')
d += 'bcd'
self.assertEqual(list(d), list('abcd'))
d += d
self.assertEqual(list(d), list('abcdabcd'))
def test_extendleft(self):
d = deque('a')
self.assertRaises(TypeError, d.extendleft, 1)
d.extendleft('bcd')
self.assertEqual(list(d), list(reversed('abcd')))
d.extendleft(d)
self.assertEqual(list(d), list('abcddcba'))
d = deque()
d.extendleft(range(1000))
self.assertEqual(list(d), list(reversed(range(1000))))
self.assertRaises(SyntaxError, d.extendleft, fail())
def test_getitem(self):
n = 200
d = deque(range(n))
l = list(range(n))
for i in range(n):
d.popleft()
l.pop(0)
if random.random() < 0.5:
d.append(i)
l.append(i)
for j in range(1-len(l), len(l)):
assert d[j] == l[j]
d = deque('superman')
self.assertEqual(d[0], 's')
self.assertEqual(d[-1], 'n')
d = deque()
self.assertRaises(IndexError, d.__getitem__, 0)
self.assertRaises(IndexError, d.__getitem__, -1)
def test_index(self):
for n in 1, 2, 30, 40, 200:
d = deque(range(n))
for i in range(n):
self.assertEqual(d.index(i), i)
with self.assertRaises(ValueError):
d.index(n+1)
# Test detection of mutation during iteration
d = deque(range(n))
d[n//2] = MutateCmp(d, False)
with self.assertRaises(RuntimeError):
d.index(n)
# Test detection of comparison exceptions
d = deque(range(n))
d[n//2] = BadCmp()
with self.assertRaises(RuntimeError):
d.index(n)
# Test start and stop arguments behavior matches list.index()
elements = 'ABCDEFGHI'
nonelement = 'Z'
d = deque(elements * 2)
s = list(elements * 2)
for start in range(-5 - len(s)*2, 5 + len(s) * 2):
for stop in range(-5 - len(s)*2, 5 + len(s) * 2):
for element in elements + 'Z':
try:
target = s.index(element, start, stop)
except ValueError:
with self.assertRaises(ValueError):
d.index(element, start, stop)
else:
self.assertEqual(d.index(element, start, stop), target)
def test_index_bug_24913(self):
d = deque('A' * 3)
with self.assertRaises(ValueError):
i = d.index("Hello world", 0, 4)
def test_insert(self):
# Test to make sure insert behaves like lists
elements = 'ABCDEFGHI'
for i in range(-5 - len(elements)*2, 5 + len(elements) * 2):
d = deque('ABCDEFGHI')
s = list('ABCDEFGHI')
d.insert(i, 'Z')
s.insert(i, 'Z')
self.assertEqual(list(d), s)
def test_insert_bug_26194(self):
data = 'ABC'
d = deque(data, maxlen=len(data))
with self.assertRaises(IndexError):
d.insert(2, None)
elements = 'ABCDEFGHI'
for i in range(-len(elements), len(elements)):
d = deque(elements, maxlen=len(elements)+1)
d.insert(i, 'Z')
if i >= 0:
self.assertEqual(d[i], 'Z')
else:
self.assertEqual(d[i-1], 'Z')
def test_imul(self):
for n in (-10, -1, 0, 1, 2, 10, 1000):
d = deque()
d *= n
self.assertEqual(d, deque())
self.assertIsNone(d.maxlen)
for n in (-10, -1, 0, 1, 2, 10, 1000):
d = deque('a')
d *= n
self.assertEqual(d, deque('a' * n))
self.assertIsNone(d.maxlen)
for n in (-10, -1, 0, 1, 2, 10, 499, 500, 501, 1000):
d = deque('a', 500)
d *= n
self.assertEqual(d, deque('a' * min(n, 500)))
self.assertEqual(d.maxlen, 500)
for n in (-10, -1, 0, 1, 2, 10, 1000):
d = deque('abcdef')
d *= n
self.assertEqual(d, deque('abcdef' * n))
self.assertIsNone(d.maxlen)
for n in (-10, -1, 0, 1, 2, 10, 499, 500, 501, 1000):
d = deque('abcdef', 500)
d *= n
self.assertEqual(d, deque(('abcdef' * n)[-500:]))
self.assertEqual(d.maxlen, 500)
def test_mul(self):
d = deque('abc')
self.assertEqual(d * -5, deque())
self.assertEqual(d * 0, deque())
self.assertEqual(d * 1, deque('abc'))
self.assertEqual(d * 2, deque('abcabc'))
self.assertEqual(d * 3, deque('abcabcabc'))
self.assertIsNot(d * 1, d)
self.assertEqual(deque() * 0, deque())
self.assertEqual(deque() * 1, deque())
self.assertEqual(deque() * 5, deque())
self.assertEqual(-5 * d, deque())
self.assertEqual(0 * d, deque())
self.assertEqual(1 * d, deque('abc'))
self.assertEqual(2 * d, deque('abcabc'))
self.assertEqual(3 * d, deque('abcabcabc'))
d = deque('abc', maxlen=5)
self.assertEqual(d * -5, deque())
self.assertEqual(d * 0, deque())
self.assertEqual(d * 1, deque('abc'))
self.assertEqual(d * 2, deque('bcabc'))
self.assertEqual(d * 30, deque('bcabc'))
def test_setitem(self):
n = 200
d = deque(range(n))
for i in range(n):
d[i] = 10 * i
self.assertEqual(list(d), [10*i for i in range(n)])
l = list(d)
for i in range(1-n, 0, -1):
d[i] = 7*i
l[i] = 7*i
self.assertEqual(list(d), l)
def test_delitem(self):
n = 500 # O(n**2) test, don't make this too big
d = deque(range(n))
self.assertRaises(IndexError, d.__delitem__, -n-1)
self.assertRaises(IndexError, d.__delitem__, n)
for i in range(n):
self.assertEqual(len(d), n-i)
j = random.randrange(-len(d), len(d))
val = d[j]
self.assertIn(val, d)
del d[j]
self.assertNotIn(val, d)
self.assertEqual(len(d), 0)
def test_reverse(self):
n = 500 # O(n**2) test, don't make this too big
data = [random.random() for i in range(n)]
for i in range(n):
d = deque(data[:i])
r = d.reverse()
self.assertEqual(list(d), list(reversed(data[:i])))
self.assertIs(r, None)
d.reverse()
self.assertEqual(list(d), data[:i])
self.assertRaises(TypeError, d.reverse, 1) # Arity is zero
def test_rotate(self):
s = tuple('abcde')
n = len(s)
d = deque(s)
d.rotate(1) # verify rot(1)
self.assertEqual(''.join(d), 'eabcd')
d = deque(s)
d.rotate(-1) # verify rot(-1)
self.assertEqual(''.join(d), 'bcdea')
d.rotate() # check default to 1
self.assertEqual(tuple(d), s)
for i in range(n*3):
d = deque(s)
e = deque(d)
d.rotate(i) # check vs. rot(1) n times
for j in range(i):
e.rotate(1)
self.assertEqual(tuple(d), tuple(e))
d.rotate(-i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(n-i) # check that it wraps forward
self.assertEqual(tuple(e), s)
for i in range(n*3):
d = deque(s)
e = deque(d)
d.rotate(-i)
for j in range(i):
e.rotate(-1) # check vs. rot(-1) n times
self.assertEqual(tuple(d), tuple(e))
d.rotate(i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(i-n) # check that it wraps backaround
self.assertEqual(tuple(e), s)
d = deque(s)
e = deque(s)
e.rotate(BIG+17) # verify on long series of rotates
dr = d.rotate
for i in range(BIG+17):
dr()
self.assertEqual(tuple(d), tuple(e))
self.assertRaises(TypeError, d.rotate, 'x') # Wrong arg type
self.assertRaises(TypeError, d.rotate, 1, 10) # Too many args
d = deque()
d.rotate() # rotate an empty deque
self.assertEqual(d, deque())
def test_len(self):
d = deque('ab')
self.assertEqual(len(d), 2)
d.popleft()
self.assertEqual(len(d), 1)
d.pop()
self.assertEqual(len(d), 0)
self.assertRaises(IndexError, d.pop)
self.assertEqual(len(d), 0)
d.append('c')
self.assertEqual(len(d), 1)
d.appendleft('d')
self.assertEqual(len(d), 2)
d.clear()
self.assertEqual(len(d), 0)
def test_underflow(self):
d = deque()
self.assertRaises(IndexError, d.pop)
self.assertRaises(IndexError, d.popleft)
def test_clear(self):
d = deque(range(100))
self.assertEqual(len(d), 100)
d.clear()
self.assertEqual(len(d), 0)
self.assertEqual(list(d), [])
d.clear() # clear an empty deque
self.assertEqual(list(d), [])
def test_remove(self):
d = deque('abcdefghcij')
d.remove('c')
self.assertEqual(d, deque('abdefghcij'))
d.remove('c')
self.assertEqual(d, deque('abdefghij'))
self.assertRaises(ValueError, d.remove, 'c')
self.assertEqual(d, deque('abdefghij'))
# Handle comparison errors
d = deque(['a', 'b', BadCmp(), 'c'])
e = deque(d)
self.assertRaises(RuntimeError, d.remove, 'c')
for x, y in zip(d, e):
# verify that original order and values are retained.
self.assertTrue(x is y)
# Handle evil mutator
for match in (True, False):
d = deque(['ab'])
d.extend([MutateCmp(d, match), 'c'])
self.assertRaises(IndexError, d.remove, 'c')
self.assertEqual(d, deque())
def test_repr(self):
d = deque(range(200))
e = eval(repr(d))
self.assertEqual(list(d), list(e))
d.append(d)
self.assertIn('...', repr(d))
def test_print(self):
d = deque(range(200))
d.append(d)
try:
support.unlink(support.TESTFN)
fo = open(support.TESTFN, "w")
print(d, file=fo, end='')
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
def test_init(self):
        self.assertRaises(TypeError, deque, 'abc', 2, 3)
        self.assertRaises(TypeError, deque, 1)
def test_hash(self):
self.assertRaises(TypeError, hash, deque('abc'))
def test_long_steadystate_queue_popleft(self):
for size in (0, 1, 2, 100, 1000):
d = deque(range(size))
append, pop = d.append, d.popleft
for i in range(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(d), list(range(BIG-size, BIG)))
def test_long_steadystate_queue_popright(self):
for size in (0, 1, 2, 100, 1000):
d = deque(reversed(range(size)))
append, pop = d.appendleft, d.pop
for i in range(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(reversed(list(d))),
list(range(BIG-size, BIG)))
def test_big_queue_popleft(self):
        d = deque()
append, pop = d.append, d.popleft
for i in range(BIG):
append(i)
for i in range(BIG):
x = pop()
if x != i:
self.assertEqual(x, i)
def test_big_queue_popright(self):
d = deque()
append, pop = d.appendleft, d.pop
for i in range(BIG):
append(i)
for i in range(BIG):
x = pop()
if x != i:
self.assertEqual(x, i)
def test_big_stack_right(self):
d = deque()
append, pop = d.append, d.pop
for i in range(BIG):
append(i)
for i in reversed(range(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_big_stack_left(self):
d = deque()
append, pop = d.appendleft, d.popleft
for i in range(BIG):
append(i)
for i in reversed(range(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_roundtrip_iter_init(self):
d = deque(range(200))
e = deque(d)
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_pickle(self):
for d in deque(range(200)), deque(range(200), 100):
for i in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, i)
e = pickle.loads(s)
self.assertNotEqual(id(e), id(d))
self.assertEqual(list(e), list(d))
self.assertEqual(e.maxlen, d.maxlen)
def test_pickle_recursive(self):
for d in deque('abc'), deque('abc', 3):
d.append(d)
for i in range(pickle.HIGHEST_PROTOCOL + 1):
e = pickle.loads(pickle.dumps(d, i))
self.assertNotEqual(id(e), id(d))
self.assertEqual(id(e[-1]), id(e))
self.assertEqual(e.maxlen, d.maxlen)
def test_iterator_pickle(self):
orig = deque(range(200))
data = [i*1.01 for i in orig]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
# initial iterator
itorg = iter(orig)
dump = pickle.dumps((itorg, orig), proto)
it, d = pickle.loads(dump)
for i, x in enumerate(data):
d[i] = x
self.assertEqual(type(it), type(itorg))
self.assertEqual(list(it), data)
# running iterator
next(itorg)
dump = pickle.dumps((itorg, orig), proto)
it, d = pickle.loads(dump)
for i, x in enumerate(data):
d[i] = x
self.assertEqual(type(it), type(itorg))
self.assertEqual(list(it), data[1:])
# empty iterator
for i in range(1, len(data)):
next(itorg)
dump = pickle.dumps((itorg, orig), proto)
it, d = pickle.loads(dump)
for i, x in enumerate(data):
d[i] = x
self.assertEqual(type(it), type(itorg))
self.assertEqual(list(it), [])
# exhausted iterator
self.assertRaises(StopIteration, next, itorg)
dump = pickle.dumps((itorg, orig), proto)
it, d = pickle.loads(dump)
for i, x in enumerate(data):
d[i] = x
self.assertEqual(type(it), type(itorg))
self.assertEqual(list(it), [])
def test_deepcopy(self):
mut = [10]
d = deque([mut])
e = copy.deepcopy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertNotEqual(list(d), list(e))
def test_copy(self):
mut = [10]
d = deque([mut])
e = copy.copy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
for i in range(5):
for maxlen in range(-1, 6):
s = [random.random() for j in range(i)]
d = deque(s) if maxlen == -1 else deque(s, maxlen)
e = d.copy()
self.assertEqual(d, e)
self.assertEqual(d.maxlen, e.maxlen)
self.assertTrue(all(x is y for x, y in zip(d, e)))
def test_copy_method(self):
mut = [10]
d = deque([mut])
e = d.copy()
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_reversed(self):
for s in ('abcd', range(2000)):
self.assertEqual(list(reversed(deque(s))), list(reversed(s)))
def test_reversed_new(self):
klass = type(reversed(deque()))
for s in ('abcd', range(2000)):
self.assertEqual(list(klass(deque(s))), list(reversed(s)))
def test_gc_doesnt_blowup(self):
import gc
# This used to assert-fail in deque_traverse() under a debug
# build, or run wild with a NULL pointer in a release build.
d = deque()
for i in range(100):
d.append(1)
gc.collect()
def test_container_iterator(self):
# Bug #3680: tp_traverse was not implemented for deque iterator objects
class C(object):
pass
for i in range(2):
obj = C()
ref = weakref.ref(obj)
if i == 0:
container = deque([obj, 1])
else:
container = reversed(deque([obj, 1]))
obj.x = iter(container)
del obj, container
gc.collect()
self.assertTrue(ref() is None, "Cycle was not collected")
check_sizeof = support.check_sizeof
@support.cpython_only
def test_sizeof(self):
BLOCKLEN = 64
basesize = support.calcvobjsize('2P4nP')
blocksize = struct.calcsize('P%dPP' % BLOCKLEN)
self.assertEqual(object.__sizeof__(deque()), basesize)
check = self.check_sizeof
check(deque(), basesize + blocksize)
check(deque('a'), basesize + blocksize)
check(deque('a' * (BLOCKLEN - 1)), basesize + blocksize)
check(deque('a' * BLOCKLEN), basesize + 2 * blocksize)
check(deque('a' * (42 * BLOCKLEN)), basesize + 43 * blocksize)
class TestVariousIteratorArgs(unittest.TestCase):
def test_constructor(self):
for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
for g in (seq_tests.Sequence, seq_tests.IterFunc,
seq_tests.IterGen, seq_tests.IterFuncStop,
seq_tests.itermulti, seq_tests.iterfunc):
self.assertEqual(list(deque(g(s))), list(g(s)))
self.assertRaises(TypeError, deque, seq_tests.IterNextOnly(s))
self.assertRaises(TypeError, deque, seq_tests.IterNoNext(s))
self.assertRaises(ZeroDivisionError, deque, seq_tests.IterGenExc(s))
def test_iter_with_altered_data(self):
d = deque('abcdefg')
it = iter(d)
d.pop()
self.assertRaises(RuntimeError, next, it)
def test_runtime_error_on_empty_deque(self):
d = deque()
it = iter(d)
d.append(10)
self.assertRaises(RuntimeError, next, it)
class Deque(deque):
pass
class DequeWithBadIter(deque):
def __iter__(self):
raise TypeError
class TestSubclass(unittest.TestCase):
def test_basics(self):
d = Deque(range(25))
d.__init__(range(200))
for i in range(200, 400):
d.append(i)
for i in reversed(range(-200, 0)):
d.appendleft(i)
self.assertEqual(list(d), list(range(-200, 400)))
self.assertEqual(len(d), 600)
left = [d.popleft() for i in range(250)]
self.assertEqual(left, list(range(-200, 50)))
self.assertEqual(list(d), list(range(50, 400)))
right = [d.pop() for i in range(250)]
right.reverse()
self.assertEqual(right, list(range(150, 400)))
self.assertEqual(list(d), list(range(50, 150)))
d.clear()
self.assertEqual(len(d), 0)
def test_copy_pickle(self):
d = Deque('abc')
e = d.__copy__()
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
e = Deque(d)
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, proto)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
d = Deque('abcde', maxlen=4)
e = d.__copy__()
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
e = Deque(d)
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, proto)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
def test_pickle_recursive(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for d in Deque('abc'), Deque('abc', 3):
d.append(d)
e = pickle.loads(pickle.dumps(d, proto))
self.assertNotEqual(id(e), id(d))
self.assertEqual(type(e), type(d))
self.assertEqual(e.maxlen, d.maxlen)
dd = d.pop()
ee = e.pop()
self.assertEqual(id(ee), id(e))
self.assertEqual(e, d)
d.x = d
e = pickle.loads(pickle.dumps(d, proto))
self.assertEqual(id(e.x), id(e))
for d in DequeWithBadIter('abc'), DequeWithBadIter('abc', 2):
self.assertRaises(TypeError, pickle.dumps, d, proto)
def test_weakref(self):
d = deque('gallahad')
p = weakref.proxy(d)
self.assertEqual(str(p), str(d))
d = None
self.assertRaises(ReferenceError, str, p)
def test_strange_subclass(self):
class X(deque):
def __iter__(self):
return iter([])
d1 = X([1,2,3])
d2 = X([4,5,6])
d1 == d2 # not clear if this is supposed to be True or False,
# but it used to give a SystemError
class SubclassWithKwargs(deque):
def __init__(self, newarg=1):
deque.__init__(self)
class TestSubclassWithKwargs(unittest.TestCase):
def test_subclass_with_kwargs(self):
# SF bug #1486663 -- this used to erroneously raise a TypeError
SubclassWithKwargs(newarg=1)
class TestSequence(seq_tests.CommonTest):
type2test = deque
def test_getitem(self):
# For now, bypass tests that require slicing
pass
def test_getslice(self):
# For now, bypass tests that require slicing
pass
def test_subscript(self):
# For now, bypass tests that require slicing
pass
def test_free_after_iterating(self):
# For now, bypass tests that require slicing
self.skipTest("Exhausted deque iterator doesn't free a deque")
#==============================================================================
libreftest = """
Example from the Library Reference: Doc/lib/libcollections.tex
>>> from collections import deque
>>> d = deque('ghi') # make a new deque with three items
>>> for elem in d: # iterate over the deque's elements
... print(elem.upper())
G
H
I
>>> d.append('j') # add a new entry to the right side
>>> d.appendleft('f') # add a new entry to the left side
>>> d # show the representation of the deque
deque(['f', 'g', 'h', 'i', 'j'])
>>> d.pop() # return and remove the rightmost item
'j'
>>> d.popleft() # return and remove the leftmost item
'f'
>>> list(d) # list the contents of the deque
['g', 'h', 'i']
>>> d[0] # peek at leftmost item
'g'
>>> d[-1] # peek at rightmost item
'i'
>>> list(reversed(d)) # list the contents of a deque in reverse
['i', 'h', 'g']
>>> 'h' in d # search the deque
True
>>> d.extend('jkl') # add multiple elements at once
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> d.rotate(1) # right rotation
>>> d
deque(['l', 'g', 'h', 'i', 'j', 'k'])
>>> d.rotate(-1) # left rotation
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> deque(reversed(d)) # make a new deque in reverse order
deque(['l', 'k', 'j', 'i', 'h', 'g'])
>>> d.clear() # empty the deque
>>> d.pop() # cannot pop from an empty deque
Traceback (most recent call last):
File "<pyshell#6>", line 1, in -toplevel-
d.pop()
IndexError: pop from an empty deque
>>> d.extendleft('abc') # extendleft() reverses the input order
>>> d
deque(['c', 'b', 'a'])
>>> def delete_nth(d, n):
... d.rotate(-n)
... d.popleft()
... d.rotate(n)
...
>>> d = deque('abcdef')
>>> delete_nth(d, 2) # remove the entry at d[2]
>>> d
deque(['a', 'b', 'd', 'e', 'f'])
>>> def roundrobin(*iterables):
... pending = deque(iter(i) for i in iterables)
... while pending:
... task = pending.popleft()
... try:
... yield next(task)
... except StopIteration:
... continue
... pending.append(task)
...
>>> for value in roundrobin('abc', 'd', 'efgh'):
... print(value)
...
a
d
e
b
f
c
g
h
>>> def maketree(iterable):
... d = deque(iterable)
... while len(d) > 1:
... pair = [d.popleft(), d.popleft()]
... d.append(pair)
... return list(d)
...
>>> print(maketree('abcdefgh'))
[[[['a', 'b'], ['c', 'd']], [['e', 'f'], ['g', 'h']]]]
"""
#==============================================================================
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
import sys
test_classes = (
TestBasic,
TestVariousIteratorArgs,
TestSubclass,
TestSubclassWithKwargs,
TestSequence,
)
support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
# doctests
from test import test_deque
support.run_doctest(test_deque, verbose)
if __name__ == "__main__":
test_main(verbose=True)
| mit |
mewtaylor/django | tests/template_tests/filter_tests/test_pluralize.py | 430 | 1200 | from decimal import Decimal
from django.template.defaultfilters import pluralize
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_integers(self):
self.assertEqual(pluralize(1), '')
self.assertEqual(pluralize(0), 's')
self.assertEqual(pluralize(2), 's')
def test_floats(self):
self.assertEqual(pluralize(0.5), 's')
self.assertEqual(pluralize(1.5), 's')
def test_decimals(self):
self.assertEqual(pluralize(Decimal(1)), '')
self.assertEqual(pluralize(Decimal(0)), 's')
self.assertEqual(pluralize(Decimal(2)), 's')
def test_lists(self):
self.assertEqual(pluralize([1]), '')
self.assertEqual(pluralize([]), 's')
self.assertEqual(pluralize([1, 2, 3]), 's')
def test_suffixes(self):
self.assertEqual(pluralize(1, 'es'), '')
self.assertEqual(pluralize(0, 'es'), 'es')
self.assertEqual(pluralize(2, 'es'), 'es')
self.assertEqual(pluralize(1, 'y,ies'), 'y')
self.assertEqual(pluralize(0, 'y,ies'), 'ies')
self.assertEqual(pluralize(2, 'y,ies'), 'ies')
self.assertEqual(pluralize(0, 'y,ies,error'), '')
| bsd-3-clause |
pinghe/ensime-sublime | rpc.py | 2 | 19402 | from __future__ import unicode_literals
import inspect
import functools
from functools import partial as bind
from . import sexp
from .sexp import key, sym
import collections
# ############################# DATA STRUCTURES ##############################
class ActiveRecord(object):
@classmethod
def parse_list(cls, raw):
if not raw:
return []
if type(raw[0]) == type(key(":key")):
m = sexp.sexp_to_key_map(raw)
field = ":" + cls.__name__.lower() + "s"
return [cls.parse(raw) for raw in (m[field] if field in m else [])]
else:
return [cls.parse(raw) for raw in raw]
@classmethod
def parse(cls, raw):
"""Parse a data type from a raw data structure"""
if not raw:
return None
value_map = sexp.sexp_to_key_map(raw)
self = cls()
populate = getattr(self, "populate")
populate(value_map)
return self
def unparse(self):
raise Exception("abstract method: ActiveRecord.unparse - on " + str(this))
def __str__(self):
return str(self.__dict__)
class Note(ActiveRecord):
def populate(self, m):
self.message = m[":msg"]
self.file_name = m[":file"]
self.severity = m[":severity"]
self.start = m[":beg"]
self.end = m[":end"]
self.line = m[":line"]
self.col = m[":col"]
class CompletionInfoList(ActiveRecord):
@classmethod
def create(cls, prefix, completions):
self = CompletionInfoList()
self.prefix = prefix
self.completions = completions
return self
def populate(self, m):
self.prefix = m[":prefix"]
self.completions = CompletionInfo.parse_list(m[":completions"])
class CompletionSignature(ActiveRecord):
"""A completion signature consists of the parameter 'sections' which is a list of name to type) and a 'result' type.
n.b. these are user readable rather than programmtic for presentation to the user.
# sections: List[List[(String, String)]],
# result: String
"""
def __init__(self, sections, result):
self.sections = sections
self.result = result
@classmethod
def from_raw(cls, data):
        # this hack is needed because () is both false and an empty list;
        # the parser cannot tell them apart, so work around it until we move to jerk
sections_raw = data[0] if(data[0] is not False) else []
sections = []
for s in sections_raw:
if not s:
sections.append([])
else:
sections.append(s)
result = data[1]
return CompletionSignature(sections, result)
def __repr__(self):
        return 'CompletionSignature("{self.sections}", "{self.result}")'.format(self=self)
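    # Illustrative example (added for clarity; the literal values are assumptions,
    # not taken from a real server response): for a completion whose Scala type is
    # `(x: Int, y: Int): String`, from_raw would receive roughly
    #     ([[("x", "Int"), ("y", "Int")]], "String")
    # and build CompletionSignature(sections=[[("x", "Int"), ("y", "Int")]], result="String");
    # an empty parameter list arrives as () and is normalised to [].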
class CompletionInfo(ActiveRecord):
def populate(self, m):
self.name = m[":name"]
self.signature = CompletionSignature.from_raw(m[":type-sig"])
self.is_callable = bool(m[":is-callable"]) if ":is-callable" in m else False
self.type_id = m[":type-id"]
self.to_insert = m[":to-insert"] if ":to-insert" in m else None
def __repr__(self):
return 'CompletionInfo("{self.name}", "{self.signature}", {self.is_callable}, {self.type_id}, ...)'.format(
self=self)
class SourcePosition(ActiveRecord):
def populate(self, m):
# [:type, line, :file,
# '/workspace/ensime-test-project/.ensime_cache/dep-src/source-jars/java/io/PrintStream.java', :line, 697]
# [:type, offset, :file, '/workspace/ensime-test-project/src/main/scala/Foo.scala', :offset, 150]
self.type_str = str(m[":type"])
self.file_name = m[":file"] if ":file" in m else None
self.line = m[":line"] if ":line" in m else None
self.offset = m[":offset"] if ":offset" in m else None
self.is_line = self.type_str == "line"
self.is_offset = self.type_str == "offset"
self.is_empty = self.type_str == "empty"
class SymbolInfo(ActiveRecord):
def populate(self, m):
self.name = m[":name"]
self.type = TypeInfo.parse(m[":type"])
self.decl_pos = SourcePosition.parse(m[":decl-pos"]) if ":decl-pos" in m else None
self.is_callable = bool(m[":is-callable"]) if ":is-callable" in m else False
self.owner_type_id = m[":owner-type-id"] if ":owner-type-id" in m else None
class TypeInfo(ActiveRecord):
def populate(self, m):
self.name = m[":name"]
self.type_id = m[":type-id"]
isArrowType = bool(m[":arrow-type"]) if ":arrow-type" in m else False
if isArrowType:
self.arrow_type = True
self.result_type = TypeInfo.parse(m[":result-type"])
self.param_sections = ParamSectionInfo.parse_list(m[":param-sections"]) if ":param-sections" in m else []
else:
# Basic type
self.arrow_type = False
self.full_name = m[":full-name"] if ":full-name" in m else None
self.decl_as = m[":decl-as"] if ":decl-as" in m else None
self.decl_pos = SourcePosition.parse(m[":pos"]) if ":pos" in m else None
self.type_args = TypeInfo.parse_list(m[":type-args"]) if ":type-args" in m else []
self.outer_type_id = m[":outer-type-id"] if ":outer-type-id" in m else None
self.members = Member.parse_list(m[":members"]) if ":members" in m else []
class SymbolSearchResults(ActiveRecord):
# we override parse here because raw contains a List of SymbolSearchResult
    # the ActiveRecord parse method expects raw to contain an object at this point
# and calls sexp_to_key_map
@classmethod
def parse(cls, raw):
if not raw:
return None
self = cls()
self.populate(raw)
return self
def populate(self, m):
self.results = SymbolSearchResult.parse_list(m)
class SymbolSearchResult(ActiveRecord):
def populate(self, m):
self.name = m[":name"]
self.local_name = m[":local-name"]
self.decl_as = m[":decl-as"] if ":decl-as" in m else None
self.pos = SourcePosition.parse(m[":pos"]) if ":pos" in m else None
class RefactorResult(ActiveRecord):
def populate(self, m):
self.status = str(m[":status"])
self.procedure_id = m[":procedure-id"]
if self.status == "success":
self.done = True
pass
elif self.status == "failure":
self.done = False
self.reason = m[":reason"]
class Member(ActiveRecord):
def populate(self, m):
pass
class ParamSectionInfo(ActiveRecord):
def populate(self, m):
self.is_implicit = bool(m[":is-implicit"]) if ":is-implicit" in m else False
if ":params" in m and m[":params"]:
keyed_params = [{':param-name': p[0], ':param-type': p[1]} for p in m[":params"]]
self.params = [Param(kp) for kp in keyed_params]
else:
self.params = []
class Param:
def __init__(self, m):
self.param_name = m[":param-name"]
self.param_type = TypeInfo.parse(m[":param-type"])
class DebugEvent(ActiveRecord):
def populate(self, m):
self.type = str(m[":type"])
if self.type == "output":
self.body = m[":body"]
elif self.type == "step":
self.thread_id = m[":thread-id"]
self.thread_name = m[":thread-name"]
self.file_name = m[":file"]
self.line = m[":line"]
elif self.type == "breakpoint":
self.thread_id = m[":thread-id"]
self.thread_name = m[":thread-name"]
self.file_name = m[":file"]
self.line = m[":line"]
elif self.type == "death":
pass
elif self.type == "start":
pass
elif self.type == "disconnect":
pass
elif self.type == "exception":
self.exception_id = m[":exception"]
self.thread_id = m[":thread-id"]
self.thread_name = m[":thread-name"]
self.file_name = m[":file"]
self.line = m[":line"]
elif self.type == "threadStart":
self.thread_id = m[":thread-id"]
elif self.type == "threadDeath":
self.thread_id = m[":thread-id"]
else:
raise Exception("unexpected debug event of type " + str(self.type) + ": " + str(m))
class DebugKickoffResult(ActiveRecord):
def __bool__(self):
return not self.error
def populate(self, m):
status = m[":status"]
if status == "success":
self.error = False
elif status == "error":
self.error = True
self.code = m[":error-code"]
self.details = m[":details"]
else:
raise Exception("unexpected status: " + str(status))
class DebugBacktrace(ActiveRecord):
def populate(self, m):
self.frames = DebugStackFrame.parse_list(m[":frames"]) if ":frames" in m else []
self.thread_id = m[":thread-id"]
self.thread_name = m[":thread-name"]
class SourceFileInfo(ActiveRecord):
def populate(self, m):
self.file = m[":file"]
self.contents = m[":contents"] if ":contents" in m else None
self.contents_in = m[":contents-in"] if ":contents-in" in m else None
def __init__(self, file_name, contents=None, contents_in=None):
self.file = file_name
self.contents = contents
self.contents_in = contents_in
def unparse(self):
base = [key(":file"), self.file]
if self.contents is not None:
base.extend([key(":contents"), self.contents])
if self.contents_in is not None:
base.extend([key(":contents-in"), self.contents_in])
return [base]
class DebugStackFrame(ActiveRecord):
def populate(self, m):
self.index = m[":index"]
self.locals = DebugStackLocal.parse_list(m[":locals"]) if ":locals" in m else []
self.num_args = m[":num-args"]
self.class_name = m[":class-name"]
self.method_name = m[":method-name"]
self.pc_location = DebugSourcePosition.parse(m[":pc-location"])
self.this_object_id = m[":this-object-id"]
class DebugSourcePosition(ActiveRecord):
def populate(self, m):
self.file_name = m[":file"]
self.line = m[":line"]
class DebugStackLocal(ActiveRecord):
def populate(self, m):
self.index = m[":index"]
self.name = m[":name"]
self.summary = m[":summary"]
self.type_name = m[":type-name"]
class DebugValue(ActiveRecord):
def populate(self, m):
self.type = m[":val-type"]
self.type_name = m[":type-name"]
self.length = m[":length"] if ":length" in m else None
self.element_type_name = m[":element-type-name"] if ":element-type-name" in m else None
self.summary = m[":summary"] if ":summary" in m else None
self.object_id = m[":object-id"] if ":object-id" in m else None
self.fields = DebugObjectField.parse_list(m[":fields"]) if ":fields" in m else []
if str(self.type) == "null" or str(self.type) == "prim" or str(self.type) == "obj" or str(
self.type) == "str" or str(self.type) == "arr":
pass
else:
raise Exception("unexpected debug value of type " + str(self.type) + ": " + str(m))
class DebugObjectField(ActiveRecord):
def populate(self, m):
self.index = m[":index"]
self.name = m[":name"]
self.summary = m[":summary"]
self.type_name = m[":type-name"]
class DebugLocation(ActiveRecord):
def populate(self, m):
self.type = str(m[":type"])
if self.type == "reference":
self.object_id = m[":object-id"]
elif self.type == "element":
self.object_id = m[":object-id"]
self.index = m[":index"]
elif self.type == "field":
self.object_id = m[":object-id"]
self.field = m[":field"]
elif self.type == "slot":
self.thread_id = m[":thread-id"]
self.frame = m[":frame"]
self.offset = m[":offset"]
else:
raise Exception("unexpected debug location of type " + str(self.type) + ": " + str(m))
class DebugLocationReference(DebugLocation):
def __init__(self, object_id):
self.object_id = object_id
def unparse(self):
return [[key(":type"), sym("reference"), key(":object-id"), self.object_id]]
class DebugLocationElement(DebugLocation):
def __init__(self, object_id, index):
self.object_id = object_id
self.index = index
def unparse(self):
return [[key(":type"), sym("element"), key(":object-id"), self.object_id, key(":index"), self.index]]
class DebugLocationField(DebugLocation):
def __init__(self, object_id, field):
self.object_id = object_id
self.field = field
def unparse(self):
return [[key(":type"), sym("field"), key(":object-id"), self.object_id, key(":field"), self.field]]
class DebugLocationSlot(DebugLocation):
def __init__(self, thread_id, frame, offset):
self.thread_id = thread_id
self.frame = frame
self.offset = offset
def unparse(self):
return [
[key(":type"), sym("slot"), key(":thread-id"), self.thread_id, key(":frame"), self.frame, key(":offset"),
self.offset]]
# ############################# REMOTE PROCEDURES ##############################
def _mk_req(func, *args, **kwargs):
if kwargs:
raise Exception("kwargs are not supported by the RPC proxy")
req = []
def translate_name(name):
if name.startswith("_"):
name = name[1:]
name = name.replace("_", "-")
return name
req.append(sym("swank:" + translate_name(func.__name__)))
(spec_args, spec_varargs, spec_keywords, spec_defaults) = inspect.getargspec(func)
if spec_varargs:
raise Exception("varargs in signature of " + str(func))
if spec_keywords:
raise Exception("keywords in signature of " + str(func))
if len(spec_args) != len(args):
if len(args) < len(spec_args) and len(args) + len(spec_defaults) >= len(spec_args):
# everything is fine. we can use default values for parameters to provide arguments to the call
args += spec_defaults[len(spec_defaults) - len(spec_args) + len(args):]
else:
preamble = "argc mismatch in signature of " + str(func) + ": "
expected = "expected " + str(len(spec_args)) + " args " + str(spec_args) + ", "
actual = "actual " + str(len(args)) + " args " + str(args) + " with types " + str([type(a) for a in args])
raise Exception(preamble + expected + actual)
for arg in args[1:]: # strip off self
if hasattr(arg, "unparse"):
argreq = arg.unparse()
else:
argreq = [arg]
req.extend(argreq)
return req
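# Illustrative example (added for clarity; the exact payload is an assumption based
# on the code above): a proxy call such as rpc.type_at_point('/tmp/Foo.scala', 42)
# is turned by _mk_req into roughly
#     [sym('swank:type-at-point'), '/tmp/Foo.scala', 42]
# i.e. the swank name comes from the Python method name with a leading '_' stripped
# and '_' mapped to '-', and each plain argument is appended as-is, while
# ActiveRecord arguments contribute their .unparse() form instead.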
def async_rpc(*args):
parser = args[0] if args else lambda raw: raw
def wrapper(func):
def wrapped(*args, **kwargs):
self = args[0]
if isinstance(args[-1], collections.Callable):
on_complete = args[-1]
args = args[:-1]
else:
on_complete = None
req = _mk_req(func, *args, **kwargs)
def callback(payload):
data = parser(payload)
if on_complete:
on_complete(data)
self.env.controller.client.async_req(req, callback, call_back_into_ui_thread=True)
return wrapped
return wrapper
def sync_rpc(*args):
parser = args[0] if args else lambda raw: raw
def wrapper(func):
def wrapped(*args, **kwargs):
self = args[0]
req = _mk_req(func, *args, **kwargs)
timeout = self.env.settings.get("timeout_" + func.__name__)
raw = self.env.controller.client.sync_req(req, timeout=timeout)
return parser(raw)
return wrapped
return wrapper
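# Illustrative usage (added for clarity; the callback shape is an assumption based
# on the wrappers above):
#     rpc = Rpc(env)
#     infos = rpc.completions(path, pos, 30, False, False)   # sync_rpc: blocks and returns the parsed result
#     rpc.type_at_point(path, pos, lambda info: render(info))  # async_rpc: a trailing callable is used as the completion callback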
class Rpc(object):
def __init__(self, env):
self.env = env
@sync_rpc()
def shutdown_server(self):
pass
@async_rpc()
def typecheck_file(self, file):
pass
@async_rpc()
def typecheck_all(self):
pass
@async_rpc()
def patch_source(self, file_name, edits):
pass
@sync_rpc(CompletionInfoList.parse)
def completions(self, file_name, position, max_results, case_sensitive, reload_from_disk):
pass
@async_rpc(TypeInfo.parse)
def type_at_point(self, file_name, position):
pass
@async_rpc(SymbolInfo.parse)
def symbol_at_point(self, file_name, position):
pass
@async_rpc(SymbolInfo.parse)
def symbol_by_name(self, symbol, token, t):
pass
@async_rpc(SymbolSearchResults.parse_list)
def import_suggestions(self, file_name, position, type_names, max_results):
pass
@async_rpc(RefactorResult.parse)
def prepare_refactor(self, procedure_id, refactor_type, parameters, require_confirmation):
pass
@async_rpc()
def exec_refactor(self, procedure_id, refactor_type):
pass
@async_rpc()
def debug_set_break(self, file_name, line):
pass
@async_rpc()
def debug_clear_break(self, file_name, line):
pass
@async_rpc()
def debug_clear_all_breaks(self):
pass
@async_rpc(DebugKickoffResult.parse)
def _debug_start(self, command_line):
pass
@async_rpc(DebugKickoffResult.parse)
def _debug_attach(self, host, port):
pass
def debug_start(self, launch, breakpoints, on_complete=None):
def set_breakpoints(breakpoints, status):
if status:
if breakpoints:
self.debug_set_break(breakpoints[0].file_name, breakpoints[0].line,
bind(set_breakpoints, breakpoints[1:]))
else:
if launch.main_class:
self._debug_start(launch.command_line, on_complete)
elif launch.remote_address:
self._debug_attach(launch.remote_host, launch.remote_port, on_complete)
else:
raise Exception("unsupported launch: " + str(launch))
elif on_complete:
on_complete(status)
def clear_breakpoints():
def callback(status):
if status:
set_breakpoints(breakpoints, status)
elif on_complete:
on_complete(status)
self.debug_clear_all_breaks(callback)
clear_breakpoints()
@async_rpc()
def debug_stop(self):
pass
@async_rpc()
def debug_step(self, thread_id):
pass
@async_rpc()
def debug_next(self, thread_id):
pass
@async_rpc()
def debug_continue(self, thread_id):
pass
@sync_rpc(DebugBacktrace.parse)
def debug_backtrace(self, thread_id, first_frame=0, num_frames=-1):
pass
@sync_rpc(DebugValue.parse)
def debug_value(self, debug_location):
pass
@sync_rpc()
def debug_to_string(self, thread_id, debug_location):
pass
| mit |
jazzTheJackRabbit/blog | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/__init__.py | 194 | 7698 | # -*- coding: utf-8 -*-
"""
pygments.lexers
~~~~~~~~~~~~~~~
Pygments lexers.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
import types
import fnmatch
from os.path import basename
from pygments.lexers._mapping import LEXERS
from pygments.modeline import get_filetype_from_buffer
from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, bytes
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
'guess_lexer'] + LEXERS.keys()
_lexer_cache = {}
def _load_lexers(module_name):
"""
Load a lexer (and all others in the module too).
"""
mod = __import__(module_name, None, None, ['__all__'])
for lexer_name in mod.__all__:
cls = getattr(mod, lexer_name)
_lexer_cache[cls.name] = cls
def get_all_lexers():
"""
Return a generator of tuples in the form ``(name, aliases,
    filenames, mimetypes)`` of all known lexers.
"""
for item in LEXERS.itervalues():
yield item[1:]
for lexer in find_plugin_lexers():
yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
def find_lexer_class(name):
"""
Lookup a lexer class by name. Return None if not found.
"""
if name in _lexer_cache:
return _lexer_cache[name]
# lookup builtin lexers
for module_name, lname, aliases, _, _ in LEXERS.itervalues():
if name == lname:
_load_lexers(module_name)
return _lexer_cache[name]
# continue with lexers from setuptools entrypoints
for cls in find_plugin_lexers():
if cls.name == name:
return cls
def get_lexer_by_name(_alias, **options):
"""
Get a lexer by an alias.
"""
# lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.itervalues():
if _alias in aliases:
if name not in _lexer_cache:
_load_lexers(module_name)
return _lexer_cache[name](**options)
# continue with lexers from setuptools entrypoints
for cls in find_plugin_lexers():
if _alias in cls.aliases:
return cls(**options)
raise ClassNotFound('no lexer for alias %r found' % _alias)
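# Illustrative usage (added for clarity, not part of the original module):
#     get_lexer_by_name('python', stripnl=False)
# returns a configured PythonLexer instance, while an unknown alias raises
# ClassNotFound.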
def get_lexer_for_filename(_fn, code=None, **options):
"""
Get a lexer for a filename. If multiple lexers match the filename
    pattern, use ``analyse_text()`` to figure out which one is more
appropriate.
"""
matches = []
fn = basename(_fn)
for modname, name, _, filenames, _ in LEXERS.itervalues():
for filename in filenames:
if fnmatch.fnmatch(fn, filename):
if name not in _lexer_cache:
_load_lexers(modname)
matches.append((_lexer_cache[name], filename))
for cls in find_plugin_lexers():
for filename in cls.filenames:
if fnmatch.fnmatch(fn, filename):
matches.append((cls, filename))
if sys.version_info > (3,) and isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
code = code.decode('latin1')
def get_rating(info):
cls, filename = info
# explicit patterns get a bonus
bonus = '*' not in filename and 0.5 or 0
# The class _always_ defines analyse_text because it's included in
# the Lexer class. The default implementation returns None which
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
if code:
return cls.analyse_text(code) + bonus
return cls.priority + bonus
if matches:
matches.sort(key=get_rating)
#print "Possible lexers, after sort:", matches
return matches[-1][0](**options)
raise ClassNotFound('no lexer for filename %r found' % _fn)
def get_lexer_for_mimetype(_mime, **options):
"""
Get a lexer for a mimetype.
"""
for modname, name, _, _, mimetypes in LEXERS.itervalues():
if _mime in mimetypes:
if name not in _lexer_cache:
_load_lexers(modname)
return _lexer_cache[name](**options)
for cls in find_plugin_lexers():
if _mime in cls.mimetypes:
return cls(**options)
raise ClassNotFound('no lexer for mimetype %r found' % _mime)
def _iter_lexerclasses():
"""
Return an iterator over all lexer classes.
"""
for key in sorted(LEXERS):
module_name, name = LEXERS[key][:2]
if name not in _lexer_cache:
_load_lexers(module_name)
yield _lexer_cache[name]
for lexer in find_plugin_lexers():
yield lexer
def guess_lexer_for_filename(_fn, _text, **options):
"""
    Look up all lexers that handle the given filename either as a primary
    (``filenames``) or secondary (``alias_filenames``) match. Then run a text
    analysis on those lexers and choose the best result.
usage::
>>> from pygments.lexers import guess_lexer_for_filename
>>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
<pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
>>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
<pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
>>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
<pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
"""
fn = basename(_fn)
primary = None
matching_lexers = set()
for lexer in _iter_lexerclasses():
for filename in lexer.filenames:
if fnmatch.fnmatch(fn, filename):
matching_lexers.add(lexer)
primary = lexer
for filename in lexer.alias_filenames:
if fnmatch.fnmatch(fn, filename):
matching_lexers.add(lexer)
if not matching_lexers:
raise ClassNotFound('no lexer for filename %r found' % fn)
if len(matching_lexers) == 1:
return matching_lexers.pop()(**options)
result = []
for lexer in matching_lexers:
rv = lexer.analyse_text(_text)
if rv == 1.0:
return lexer(**options)
result.append((rv, lexer))
result.sort()
if not result[-1][0] and primary is not None:
return primary(**options)
return result[-1][1](**options)
def guess_lexer(_text, **options):
"""
Guess a lexer by strong distinctions in the text (eg, shebang).
"""
# try to get a vim modeline first
ft = get_filetype_from_buffer(_text)
if ft is not None:
try:
return get_lexer_by_name(ft, **options)
except ClassNotFound:
pass
best_lexer = [0.0, None]
for lexer in _iter_lexerclasses():
rv = lexer.analyse_text(_text)
if rv == 1.0:
return lexer(**options)
if rv > best_lexer[0]:
best_lexer[:] = (rv, lexer)
if not best_lexer[0] or best_lexer[1] is None:
raise ClassNotFound('no lexer matching the text found')
return best_lexer[1](**options)
class _automodule(types.ModuleType):
"""Automatically import lexers."""
def __getattr__(self, name):
info = LEXERS.get(name)
if info:
_load_lexers(info[0])
cls = _lexer_cache[info[1]]
setattr(self, name, cls)
return cls
raise AttributeError(name)
oldmod = sys.modules['pygments.lexers']
newmod = _automodule('pygments.lexers')
newmod.__dict__.update(oldmod.__dict__)
sys.modules['pygments.lexers'] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
| mit |
christian-oudard/htmltreediff | setup.py | 1 | 1156 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
long_description = codecs.open("README.rst", "r", "utf-8").read()
setup(
name="html-tree-diff",
version="0.1.2",
description="Structure-aware diff for html and xml documents",
author="Christian Oudard",
author_email="[email protected]",
url="http://github.com/christian-oudard/htmltreediff/",
platforms=["any"],
license="BSD",
packages=find_packages(),
scripts=[],
zip_safe=False,
install_requires=['lxml', 'html5lib'],
cmdclass={},
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Text Processing :: Markup :: HTML",
"Topic :: Text Processing :: Markup :: XML",
],
long_description=long_description,
)
| bsd-3-clause |
wonder-sk/QGIS | tests/src/python/test_offline_editing_wfs.py | 3 | 4833 | # -*- coding: utf-8 -*-
"""
Offline editing Tests.
WFS-T tests need to run QGIS Server through
qgis_wrapped_server.py.
This is an integration test for QGIS Desktop WFS-T provider and QGIS Server
WFS-T that check if QGIS offline editing works with a WFS-T endpoint.
The test uses testdata/wfs_transactional/wfs_transactional.qgs and three
initially empty shapefiles layers with points, lines and polygons.
The point layer is used in the test
From build dir, run: ctest -R PyQgsOfflineEditingWFS -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from builtins import str
__author__ = 'Alessandro Pasotti'
__date__ = '05/15/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import sys
import re
import subprocess
from shutil import copytree, rmtree
import tempfile
from time import sleep
from utilities import unitTestDataPath, waitServer
from qgis.core import QgsVectorLayer
from qgis.testing import (
start_app,
unittest,
)
from offlineditingtestbase import OfflineTestBase
try:
QGIS_SERVER_OFFLINE_PORT = os.environ['QGIS_SERVER_OFFLINE_PORT']
except KeyError:
QGIS_SERVER_OFFLINE_PORT = '0' # Auto
qgis_app = start_app()
class TestWFST(unittest.TestCase, OfflineTestBase):
# To fake the WFS cache!
counter = 0
@classmethod
def setUpClass(cls):
"""Run before all tests"""
cls.port = QGIS_SERVER_OFFLINE_PORT
# Create tmp folder
cls.temp_path = tempfile.mkdtemp()
cls.testdata_path = cls.temp_path + '/' + 'wfs_transactional' + '/'
copytree(unitTestDataPath('wfs_transactional') + '/',
cls.temp_path + '/' + 'wfs_transactional')
cls.project_path = cls.temp_path + '/' + 'wfs_transactional' + '/' + \
'wfs_transactional.qgs'
assert os.path.exists(cls.project_path), "Project not found: %s" % \
cls.project_path
# Clean env just to be sure
env_vars = ['QUERY_STRING', 'QGIS_PROJECT_FILE']
for ev in env_vars:
try:
del os.environ[ev]
except KeyError:
pass
# Clear all test layers
cls._clearLayer(cls._getLayer('test_point'))
os.environ['QGIS_SERVER_PORT'] = str(cls.port)
cls.server_path = os.path.dirname(os.path.realpath(__file__)) + \
'/qgis_wrapped_server.py'
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
rmtree(cls.temp_path)
def setUp(self):
"""Run before each test."""
self.server = subprocess.Popen([sys.executable, self.server_path],
env=os.environ, stdout=subprocess.PIPE)
line = self.server.stdout.readline()
self.port = int(re.findall(b':(\d+)', line)[0])
assert self.port != 0
# Wait for the server process to start
assert waitServer('http://127.0.0.1:%s' % self.port), "Server is not responding!"
self._setUp()
def tearDown(self):
"""Run after each test."""
# Clear test layer
self._clearLayer(self._getOnlineLayer('test_point'))
# Kill the server
self.server.terminate()
self.server.wait()
del self.server
# Delete the sqlite db
os.unlink(os.path.join(self.temp_path, 'offlineDbFile.sqlite'))
self._tearDown()
def _getOnlineLayer(self, type_name, layer_name=None):
"""
Return a new WFS layer, overriding the WFS cache
"""
if layer_name is None:
layer_name = 'wfs_' + type_name
parms = {
'srsname': 'EPSG:4326',
'typename': type_name,
'url': 'http://127.0.0.1:%s/%s/?map=%s' % (self.port,
self.counter,
self.project_path),
'version': 'auto',
'table': '',
#'sql': '',
}
self.counter += 1
uri = ' '.join([("%s='%s'" % (k, v)) for k, v in list(parms.items())])
wfs_layer = QgsVectorLayer(uri, layer_name, 'WFS')
assert wfs_layer.isValid()
return wfs_layer
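    # Illustrative note (added for clarity; the literal values are assumptions): the
    # uri assembled above expands to something like
    #     srsname='EPSG:4326' typename='test_point' url='http://127.0.0.1:8946/3/?map=/tmp/.../wfs_transactional.qgs' version='auto' table=''
    # where the per-call counter in the URL path defeats the WFS provider cache
    # between tests.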
@classmethod
def _getLayer(cls, layer_name):
"""
Layer factory (return the backend layer), provider specific
"""
path = cls.testdata_path + layer_name + '.shp'
layer = QgsVectorLayer(path, layer_name, "ogr")
assert layer.isValid()
return layer
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
wanderine/nipype | nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py | 9 | 2534 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.freesurfer.model import Label2Vol
def test_Label2Vol_inputs():
input_map = dict(annot_file=dict(argstr='--annot %s',
copyfile=False,
mandatory=True,
requires=('subject_id', 'hemi'),
xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
),
aparc_aseg=dict(argstr='--aparc+aseg',
mandatory=True,
xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
),
args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
fill_thresh=dict(argstr='--fillthresh %.f',
),
hemi=dict(argstr='--hemi %s',
),
identity=dict(argstr='--identity',
xor=('reg_file', 'reg_header', 'identity'),
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
invert_mtx=dict(argstr='--invertmtx',
),
label_file=dict(argstr='--label %s...',
copyfile=False,
mandatory=True,
xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
),
label_hit_file=dict(argstr='--hits %s',
),
label_voxel_volume=dict(argstr='--labvoxvol %f',
),
map_label_stat=dict(argstr='--label-stat %s',
),
native_vox2ras=dict(argstr='--native-vox2ras',
),
proj=dict(argstr='--proj %s %f %f %f',
requires=('subject_id', 'hemi'),
),
reg_file=dict(argstr='--reg %s',
xor=('reg_file', 'reg_header', 'identity'),
),
reg_header=dict(argstr='--regheader %s',
xor=('reg_file', 'reg_header', 'identity'),
),
seg_file=dict(argstr='--seg %s',
copyfile=False,
mandatory=True,
xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
),
subject_id=dict(argstr='--subject %s',
),
subjects_dir=dict(),
surface=dict(argstr='--surf %s',
),
template_file=dict(argstr='--temp %s',
mandatory=True,
),
terminal_output=dict(nohash=True,
),
vol_label_file=dict(argstr='--o %s',
genfile=True,
),
)
inputs = Label2Vol.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Label2Vol_outputs():
output_map = dict(vol_label_file=dict(),
)
outputs = Label2Vol.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
gem/oq-hazardlib | openquake/hazardlib/source/multi.py | 1 | 4887 | # The Hazard Library
# Copyright (C) 2012-2017 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.source.multi` defines :class:`MultiPointSource`.
"""
from openquake.hazardlib.source.base import ParametricSeismicSource
from openquake.hazardlib.source.point import (
PointSource, angular_distance, KM_TO_DEGREES)
from openquake.hazardlib.geo.utils import cross_idl
class MultiPointSource(ParametricSeismicSource):
"""
MultiPointSource class, used to describe point sources with different
MFDs and the same rupture_mesh_spacing, magnitude_scaling_relationship,
rupture_aspect_ratio, temporal_occurrence_model, upper_seismogenic_depth,
lower_seismogenic_depth, nodal_plane_distribution, hypocenter_distribution
"""
MODIFICATIONS = set(())
RUPTURE_WEIGHT = 1 / 10.
def __init__(self, source_id, name, tectonic_region_type,
mfd, rupture_mesh_spacing,
magnitude_scaling_relationship,
rupture_aspect_ratio, temporal_occurrence_model,
# point-specific parameters (excluding location)
upper_seismogenic_depth, lower_seismogenic_depth,
nodal_plane_distribution, hypocenter_distribution, mesh):
assert len(mfd) == len(mesh), (len(mfd), len(mesh))
super(MultiPointSource, self).__init__(
source_id, name, tectonic_region_type, mfd,
rupture_mesh_spacing, magnitude_scaling_relationship,
rupture_aspect_ratio, temporal_occurrence_model)
self.upper_seismogenic_depth = upper_seismogenic_depth
self.lower_seismogenic_depth = lower_seismogenic_depth
self.nodal_plane_distribution = nodal_plane_distribution
self.hypocenter_distribution = hypocenter_distribution
self.mesh = mesh
self.max_radius = 0
def __iter__(self):
for i, (mfd, point) in enumerate(zip(self.mfd, self.mesh)):
name = '%s:%s' % (self.source_id, i)
ps = PointSource(
name, name, self.tectonic_region_type,
mfd, self.rupture_mesh_spacing,
self.magnitude_scaling_relationship,
self.rupture_aspect_ratio,
self.temporal_occurrence_model,
self.upper_seismogenic_depth,
self.lower_seismogenic_depth,
point,
self.nodal_plane_distribution,
self.hypocenter_distribution)
yield ps
def iter_ruptures(self):
"""
Yield the ruptures of the underlying point sources
"""
for ps in self:
for rupture in ps.iter_ruptures():
yield rupture
def count_ruptures(self):
"""
See
:meth:`openquake.hazardlib.source.base.BaseSeismicSource.count_ruptures`
for description of parameters and return value.
"""
return (len(self.get_annual_occurrence_rates()) *
len(self.nodal_plane_distribution.data) *
len(self.hypocenter_distribution.data))
def filter_sites_by_distance_to_source(self, integration_distance, sites):
"""Do not filter"""
return sites
def get_rupture_enclosing_polygon(self, dilation=0):
"""No polygon"""
def get_bounding_box(self, maxdist):
"""
Bounding box containing all points, enlarged by the maximum distance
and the maximum rupture projection radius (upper limit).
"""
maxradius = self._get_max_rupture_projection_radius()
min_lon = self.mesh.lons.min()
max_lon = self.mesh.lons.max()
if cross_idl(min_lon, max_lon):
min_lon, max_lon = max_lon, min_lon + 360
min_lat = self.mesh.lats.min()
max_lat = self.mesh.lats.max()
a1 = (maxdist + maxradius) * KM_TO_DEGREES
a2 = max(angular_distance(maxdist + maxradius, min_lat),
angular_distance(maxdist + maxradius, max_lat))
return min_lon - a2, min_lat - a1, max_lon + a2, max_lat + a1
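    # Illustrative note (added for clarity; the numbers are assumptions): for a mesh
    # spanning lon 10..11 and lat 45..46 with maxdist=200 km and a 10 km maximum
    # rupture radius, a1 = 210 * KM_TO_DEGREES widens the latitude range directly,
    # while a2 converts the same 210 km through angular_distance() at the extreme
    # latitudes, so the longitude widening stays valid away from the equator.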
_get_rupture_dimensions = PointSource.__dict__['_get_rupture_dimensions']
_get_max_rupture_projection_radius = PointSource.__dict__[
'_get_max_rupture_projection_radius']
| agpl-3.0 |
haiyangd/python-show-me-the-code- | Jimmy66/0017/0017.py | 34 | 1660 | #!/bin/env python
# -*- coding: utf-8 -*-
#Need to fill the json gap: in the code written earlier, the json part was found online and I haven't fully understood it yet, especially the string encoding side, which I haven't practised
#Find some time to look into libraries for reading data from xls files
#xls -> json -> xml is my approach; of course you could also try xls -> xml directly
#What I mainly care about is the use of json. Sometimes it feels like if you only read someone else's code without re-implementing it another way (even if that gets more complicated and verbose), it is still someone else's code
#Import modules
import xlrd
#This one ships with the standard library; you can use it if you run into problems installing lxml
import xml.etree.ElementTree as ET
from xml.dom import minidom
def read_xls(filename):
data = xlrd.open_workbook(filename)
    table = data.sheet_by_index(0) #get the 0th sheet of the xls file by index
nrows = table.nrows
d = {}
for i in range(nrows):
        d[str(i+1)] = table.row_values(i)[1:] #take the data after the id column and store it as a list under the corresponding dict key
return d
def write_xml(d):
doc = minidom.Document()
root = doc.createElement("root")
doc.appendChild(root)
students = doc.createElement("students")
root.appendChild(students)
students.appendChild(doc.createComment(' 学生信息表\n "id" : [名字, 数学, 语文, 英文]'))
for i in d:
d[i][0] = d[i][0].encode('utf-8')
        #there is a kind of helplessness called "I can't be bothered to keep going"; python2, you're a good guy
content = doc.createTextNode(str(d))
students.appendChild(content)
f = file("student.xml","w")
doc.writexml(f)
f.close()
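#Illustrative note (added for clarity; the exact layout is an assumption): for a sheet
#whose first row is 1, Zhang San, 90, 80, 70, write_xml() produces a student.xml of
#roughly this shape:
#    <root><students><!-- ... -->{'1': ['Zhang San', 90.0, 80.0, 70.0], ...}</students></root>
#i.e. the whole dict is written as a single text node rather than one element per student.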
def main():
d = read_xls('student.xls')
print(d)
write_xml(d)
if __name__ == '__main__':
main()
| mit |
Sidney84/pa-chromium | third_party/mesa/MesaLib/src/mapi/glapi/gen/gl_table.py | 33 | 7098 | #!/usr/bin/python2
# (C) Copyright IBM Corporation 2004
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# on the rights to use, copy, modify, merge, publish, distribute, sub
# license, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Ian Romanick <[email protected]>
import gl_XML
import license
import sys, getopt
class PrintGlTable(gl_XML.gl_print_base):
def __init__(self, es=False):
gl_XML.gl_print_base.__init__(self)
self.es = es
self.header_tag = '_GLAPI_TABLE_H_'
self.name = "gl_table.py (from Mesa)"
self.license = license.bsd_license_template % ( \
"""Copyright (C) 1999-2003 Brian Paul All Rights Reserved.
(C) Copyright IBM Corporation 2004""", "BRIAN PAUL, IBM")
return
def printBody(self, api):
for f in api.functionIterateByOffset():
arg_string = f.get_parameter_string()
print ' %s (GLAPIENTRYP %s)(%s); /* %d */' % (f.return_type, f.name, arg_string, f.offset)
def printRealHeader(self):
print '#ifndef GLAPIENTRYP'
print '# ifndef GLAPIENTRY'
print '# define GLAPIENTRY'
print '# endif'
print ''
print '# define GLAPIENTRYP GLAPIENTRY *'
print '#endif'
print ''
print ''
print 'struct _glapi_table'
print '{'
return
def printRealFooter(self):
print '};'
return
class PrintRemapTable(gl_XML.gl_print_base):
def __init__(self, es=False):
gl_XML.gl_print_base.__init__(self)
self.es = es
self.header_tag = '_GLAPI_DISPATCH_H_'
self.name = "gl_table.py (from Mesa)"
self.license = license.bsd_license_template % ("(C) Copyright IBM Corporation 2005", "IBM")
return
def printRealHeader(self):
print """
/* this file should not be included directly in mesa */
/**
* \\file glapidispatch.h
* Macros for handling GL dispatch tables.
*
* For each known GL function, there are 3 macros in this file. The first
* macro is named CALL_FuncName and is used to call that GL function using
* the specified dispatch table. The other 2 macros, called GET_FuncName
 * and SET_FuncName, are used to get and set the dispatch pointer for the
* named function in the specified dispatch table.
*/
"""
return
def printBody(self, api):
print '#define CALL_by_offset(disp, cast, offset, parameters) \\'
print ' (*(cast (GET_by_offset(disp, offset)))) parameters'
print '#define GET_by_offset(disp, offset) \\'
print ' (offset >= 0) ? (((_glapi_proc *)(disp))[offset]) : NULL'
print '#define SET_by_offset(disp, offset, fn) \\'
print ' do { \\'
print ' if ( (offset) < 0 ) { \\'
print ' /* fprintf( stderr, "[%s:%u] SET_by_offset(%p, %d, %s)!\\n", */ \\'
print ' /* __func__, __LINE__, disp, offset, # fn); */ \\'
print ' /* abort(); */ \\'
print ' } \\'
print ' else { \\'
print ' ( (_glapi_proc *) (disp) )[offset] = (_glapi_proc) fn; \\'
print ' } \\'
print ' } while(0)'
print ''
functions = []
abi_functions = []
alias_functions = []
count = 0
for f in api.functionIterateByOffset():
if not f.is_abi():
functions.append( [f, count] )
count += 1
else:
abi_functions.append( f )
if self.es:
# remember functions with aliases
if len(f.entry_points) > 1:
alias_functions.append(f)
for f in abi_functions:
print '#define CALL_%s(disp, parameters) (*((disp)->%s)) parameters' % (f.name, f.name)
print '#define GET_%s(disp) ((disp)->%s)' % (f.name, f.name)
print '#define SET_%s(disp, fn) ((disp)->%s = fn)' % (f.name, f.name)
print ''
print '#if !defined(_GLAPI_USE_REMAP_TABLE)'
print ''
for [f, index] in functions:
print '#define CALL_%s(disp, parameters) (*((disp)->%s)) parameters' % (f.name, f.name)
print '#define GET_%s(disp) ((disp)->%s)' % (f.name, f.name)
print '#define SET_%s(disp, fn) ((disp)->%s = fn)' % (f.name, f.name)
print ''
print '#else'
print ''
print '#define driDispatchRemapTable_size %u' % (count)
print 'extern int driDispatchRemapTable[ driDispatchRemapTable_size ];'
print ''
for [f, index] in functions:
print '#define %s_remap_index %u' % (f.name, index)
print ''
for [f, index] in functions:
arg_string = gl_XML.create_parameter_string( f.parameters, 0 )
cast = '%s (GLAPIENTRYP)(%s)' % (f.return_type, arg_string)
print '#define CALL_%s(disp, parameters) CALL_by_offset(disp, (%s), driDispatchRemapTable[%s_remap_index], parameters)' % (f.name, cast, f.name)
print '#define GET_%s(disp) GET_by_offset(disp, driDispatchRemapTable[%s_remap_index])' % (f.name, f.name)
print '#define SET_%s(disp, fn) SET_by_offset(disp, driDispatchRemapTable[%s_remap_index], fn)' % (f.name, f.name)
print ''
print '#endif /* !defined(_GLAPI_USE_REMAP_TABLE) */'
if alias_functions:
print ''
print '/* define aliases for compatibility */'
for f in alias_functions:
for name in f.entry_points:
if name != f.name:
print '#define CALL_%s(disp, parameters) CALL_%s(disp, parameters)' % (name, f.name)
print '#define GET_%s(disp) GET_%s(disp)' % (name, f.name)
print '#define SET_%s(disp, fn) SET_%s(disp, fn)' % (name, f.name)
print ''
print '#if defined(_GLAPI_USE_REMAP_TABLE)'
for f in alias_functions:
for name in f.entry_points:
if name != f.name:
print '#define %s_remap_index %s_remap_index' % (name, f.name)
print '#endif /* defined(_GLAPI_USE_REMAP_TABLE) */'
print ''
return
def show_usage():
print "Usage: %s [-f input_file_name] [-m mode] [-c]" % sys.argv[0]
print " -m mode Mode can be 'table' or 'remap_table'."
print " -c Enable compatibility with OpenGL ES."
sys.exit(1)
if __name__ == '__main__':
file_name = "gl_API.xml"
try:
(args, trail) = getopt.getopt(sys.argv[1:], "f:m:c")
except Exception,e:
show_usage()
mode = "table"
es = False
for (arg,val) in args:
if arg == "-f":
file_name = val
elif arg == "-m":
mode = val
elif arg == "-c":
es = True
if mode == "table":
printer = PrintGlTable(es)
elif mode == "remap_table":
printer = PrintRemapTable(es)
else:
show_usage()
api = gl_XML.parse_GL_API( file_name )
printer.Print( api )
| bsd-3-clause |
jreback/pandas | pandas/io/formats/latex.py | 2 | 25201 | """
Module for formatting output data in Latex.
"""
from abc import ABC, abstractmethod
from typing import Iterator, List, Optional, Sequence, Tuple, Type, Union
import numpy as np
from pandas.core.dtypes.generic import ABCMultiIndex
from pandas.io.formats.format import DataFrameFormatter
def _split_into_full_short_caption(
caption: Optional[Union[str, Tuple[str, str]]]
) -> Tuple[str, str]:
"""Extract full and short captions from caption string/tuple.
Parameters
----------
caption : str or tuple, optional
Either table caption string or tuple (full_caption, short_caption).
If string is provided, then it is treated as table full caption,
while short_caption is considered an empty string.
Returns
-------
full_caption, short_caption : tuple
Tuple of full_caption, short_caption strings.
"""
if caption:
if isinstance(caption, str):
full_caption = caption
short_caption = ""
else:
try:
full_caption, short_caption = caption
except ValueError as err:
msg = "caption must be either a string or a tuple of two strings"
raise ValueError(msg) from err
else:
full_caption = ""
short_caption = ""
return full_caption, short_caption
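# For example, following the branches above:
#   _split_into_full_short_caption(("A full caption", "short")) -> ("A full caption", "short")
#   _split_into_full_short_caption("A full caption")            -> ("A full caption", "")
#   _split_into_full_short_caption(None)                        -> ("", "")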
class RowStringConverter(ABC):
r"""Converter for dataframe rows into LaTeX strings.
Parameters
----------
formatter : `DataFrameFormatter`
Instance of `DataFrameFormatter`.
multicolumn: bool, optional
Whether to use \multicolumn macro.
multicolumn_format: str, optional
Multicolumn format.
multirow: bool, optional
Whether to use \multirow macro.
"""
def __init__(
self,
formatter: DataFrameFormatter,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
):
self.fmt = formatter
self.frame = self.fmt.frame
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.clinebuf: List[List[int]] = []
self.strcols = self._get_strcols()
self.strrows = list(zip(*self.strcols))
def get_strrow(self, row_num: int) -> str:
"""Get string representation of the row."""
row = self.strrows[row_num]
is_multicol = (
row_num < self.column_levels and self.fmt.header and self.multicolumn
)
is_multirow = (
row_num >= self.header_levels
and self.fmt.index
and self.multirow
and self.index_levels > 1
)
is_cline_maybe_required = is_multirow and row_num < len(self.strrows) - 1
crow = self._preprocess_row(row)
if is_multicol:
crow = self._format_multicolumn(crow)
if is_multirow:
crow = self._format_multirow(crow, row_num)
lst = []
lst.append(" & ".join(crow))
lst.append(" \\\\")
if is_cline_maybe_required:
cline = self._compose_cline(row_num, len(self.strcols))
lst.append(cline)
return "".join(lst)
@property
def _header_row_num(self) -> int:
"""Number of rows in header."""
return self.header_levels if self.fmt.header else 0
@property
def index_levels(self) -> int:
"""Integer number of levels in index."""
return self.frame.index.nlevels
@property
def column_levels(self) -> int:
return self.frame.columns.nlevels
@property
def header_levels(self) -> int:
nlevels = self.column_levels
if self.fmt.has_index_names and self.fmt.show_index_names:
nlevels += 1
return nlevels
def _get_strcols(self) -> List[List[str]]:
"""String representation of the columns."""
if self.fmt.frame.empty:
strcols = [[self._empty_info_line]]
else:
strcols = self.fmt.get_strcols()
# reestablish the MultiIndex that has been joined by get_strcols()
if self.fmt.index and isinstance(self.frame.index, ABCMultiIndex):
out = self.frame.index.format(
adjoin=False,
sparsify=self.fmt.sparsify,
names=self.fmt.has_index_names,
na_rep=self.fmt.na_rep,
)
# index.format will sparsify repeated entries with empty strings
# so pad these with some empty space
def pad_empties(x):
for pad in reversed(x):
if pad:
break
return [x[0]] + [i if i else " " * len(pad) for i in x[1:]]
gen = (pad_empties(i) for i in out)
# Add empty spaces for each column level
clevels = self.frame.columns.nlevels
out = [[" " * len(i[-1])] * clevels + i for i in gen]
# Add the column names to the last index column
cnames = self.frame.columns.names
if any(cnames):
new_names = [i if i else "{}" for i in cnames]
out[self.frame.index.nlevels - 1][:clevels] = new_names
# Get rid of old multiindex column and add new ones
strcols = out + strcols[1:]
return strcols
@property
def _empty_info_line(self):
return (
f"Empty {type(self.frame).__name__}\n"
f"Columns: {self.frame.columns}\n"
f"Index: {self.frame.index}"
)
def _preprocess_row(self, row: Sequence[str]) -> List[str]:
"""Preprocess elements of the row."""
if self.fmt.escape:
crow = _escape_symbols(row)
else:
crow = [x if x else "{}" for x in row]
if self.fmt.bold_rows and self.fmt.index:
crow = _convert_to_bold(crow, self.index_levels)
return crow
def _format_multicolumn(self, row: List[str]) -> List[str]:
r"""
Combine columns belonging to a group to a single multicolumn entry
according to self.multicolumn_format
e.g.:
a & & & b & c &
will become
\multicolumn{3}{l}{a} & b & \multicolumn{2}{l}{c}
"""
row2 = row[: self.index_levels]
ncol = 1
coltext = ""
def append_col():
# write multicolumn if needed
if ncol > 1:
row2.append(
f"\\multicolumn{{{ncol:d}}}{{{self.multicolumn_format}}}"
f"{{{coltext.strip()}}}"
)
# don't modify where not needed
else:
row2.append(coltext)
for c in row[self.index_levels :]:
# if next col has text, write the previous
if c.strip():
if coltext:
append_col()
coltext = c
ncol = 1
# if not, add it to the previous multicolumn
else:
ncol += 1
# write last column name
if coltext:
append_col()
return row2
def _format_multirow(self, row: List[str], i: int) -> List[str]:
r"""
Check following rows, whether row should be a multirow
        e.g.:       becomes:
        a & 0 &     \multirow{2}{*}{a} & 0 &
          & 1 &     & 1 &
        b & 0 &     \cline{1-2}
                    b & 0 &
"""
for j in range(self.index_levels):
if row[j].strip():
nrow = 1
for r in self.strrows[i + 1 :]:
if not r[j].strip():
nrow += 1
else:
break
if nrow > 1:
# overwrite non-multirow entry
row[j] = f"\\multirow{{{nrow:d}}}{{*}}{{{row[j].strip()}}}"
# save when to end the current block with \cline
self.clinebuf.append([i + nrow - 1, j + 1])
return row
def _compose_cline(self, i: int, icol: int) -> str:
"""
Create clines after multirow-blocks are finished.
"""
lst = []
for cl in self.clinebuf:
if cl[0] == i:
lst.append(f"\n\\cline{{{cl[1]:d}-{icol:d}}}")
# remove entries that have been written to buffer
self.clinebuf = [x for x in self.clinebuf if x[0] != i]
return "".join(lst)
class RowStringIterator(RowStringConverter):
"""Iterator over rows of the header or the body of the table."""
@abstractmethod
def __iter__(self) -> Iterator[str]:
"""Iterate over LaTeX string representations of rows."""
class RowHeaderIterator(RowStringIterator):
"""Iterator for the table header rows."""
def __iter__(self) -> Iterator[str]:
for row_num in range(len(self.strrows)):
if row_num < self._header_row_num:
yield self.get_strrow(row_num)
class RowBodyIterator(RowStringIterator):
"""Iterator for the table body rows."""
def __iter__(self) -> Iterator[str]:
for row_num in range(len(self.strrows)):
if row_num >= self._header_row_num:
yield self.get_strrow(row_num)
class TableBuilderAbstract(ABC):
"""
Abstract table builder producing string representation of LaTeX table.
Parameters
----------
formatter : `DataFrameFormatter`
Instance of `DataFrameFormatter`.
column_format: str, optional
Column format, for example, 'rcl' for three columns.
multicolumn: bool, optional
Use multicolumn to enhance MultiIndex columns.
multicolumn_format: str, optional
The alignment for multicolumns, similar to column_format.
multirow: bool, optional
Use multirow to enhance MultiIndex rows.
caption: str, optional
Table caption.
short_caption: str, optional
Table short caption.
label: str, optional
LaTeX label.
position: str, optional
Float placement specifier, for example, 'htb'.
"""
def __init__(
self,
formatter: DataFrameFormatter,
column_format: Optional[str] = None,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
caption: Optional[str] = None,
short_caption: Optional[str] = None,
label: Optional[str] = None,
position: Optional[str] = None,
):
self.fmt = formatter
self.column_format = column_format
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.caption = caption
self.short_caption = short_caption
self.label = label
self.position = position
def get_result(self) -> str:
"""String representation of LaTeX table."""
elements = [
self.env_begin,
self.top_separator,
self.header,
self.middle_separator,
self.env_body,
self.bottom_separator,
self.env_end,
]
result = "\n".join([item for item in elements if item])
trailing_newline = "\n"
result += trailing_newline
return result
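        # The elements above are always joined in this fixed order, so for the
        # plain tabular builder the assembled string is roughly:
        #
        #   \begin{tabular}{lrc}   <- env_begin
        #   \toprule               <- top_separator
        #   <header rows>          <- header
        #   \midrule               <- middle_separator
        #   <body rows>            <- env_body
        #   \bottomrule            <- bottom_separator
        #   \end{tabular}          <- env_end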
@property
@abstractmethod
def env_begin(self) -> str:
"""Beginning of the environment."""
@property
@abstractmethod
def top_separator(self) -> str:
"""Top level separator."""
@property
@abstractmethod
def header(self) -> str:
"""Header lines."""
@property
@abstractmethod
def middle_separator(self) -> str:
"""Middle level separator."""
@property
@abstractmethod
def env_body(self) -> str:
"""Environment body."""
@property
@abstractmethod
def bottom_separator(self) -> str:
"""Bottom level separator."""
@property
@abstractmethod
def env_end(self) -> str:
"""End of the environment."""
class GenericTableBuilder(TableBuilderAbstract):
"""Table builder producing string representation of LaTeX table."""
@property
def header(self) -> str:
iterator = self._create_row_iterator(over="header")
return "\n".join(list(iterator))
@property
def top_separator(self) -> str:
return "\\toprule"
@property
def middle_separator(self) -> str:
return "\\midrule" if self._is_separator_required() else ""
@property
def env_body(self) -> str:
iterator = self._create_row_iterator(over="body")
return "\n".join(list(iterator))
def _is_separator_required(self) -> bool:
return bool(self.header and self.env_body)
@property
def _position_macro(self) -> str:
r"""Position macro, extracted from self.position, like [h]."""
return f"[{self.position}]" if self.position else ""
@property
def _caption_macro(self) -> str:
r"""Caption macro, extracted from self.caption.
With short caption:
\caption[short_caption]{caption_string}.
Without short caption:
\caption{caption_string}.
"""
if self.caption:
return "".join(
[
r"\caption",
f"[{self.short_caption}]" if self.short_caption else "",
f"{{{self.caption}}}",
]
)
return ""
@property
def _label_macro(self) -> str:
r"""Label macro, extracted from self.label, like \label{ref}."""
return f"\\label{{{self.label}}}" if self.label else ""
def _create_row_iterator(self, over: str) -> RowStringIterator:
"""Create iterator over header or body of the table.
Parameters
----------
over : {'body', 'header'}
Over what to iterate.
Returns
-------
RowStringIterator
Iterator over body or header.
"""
iterator_kind = self._select_iterator(over)
return iterator_kind(
formatter=self.fmt,
multicolumn=self.multicolumn,
multicolumn_format=self.multicolumn_format,
multirow=self.multirow,
)
def _select_iterator(self, over: str) -> Type[RowStringIterator]:
"""Select proper iterator over table rows."""
if over == "header":
return RowHeaderIterator
elif over == "body":
return RowBodyIterator
else:
msg = f"'over' must be either 'header' or 'body', but {over} was provided"
raise ValueError(msg)
class LongTableBuilder(GenericTableBuilder):
"""Concrete table builder for longtable.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = LongTableBuilder(formatter, caption='a long table',
... label='tab:long', column_format='lrl')
>>> table = builder.get_result()
>>> print(table)
\\begin{longtable}{lrl}
\\caption{a long table}
\\label{tab:long}\\\\
\\toprule
{} & a & b \\\\
\\midrule
\\endfirsthead
\\caption[]{a long table} \\\\
\\toprule
{} & a & b \\\\
\\midrule
\\endhead
\\midrule
\\multicolumn{3}{r}{{Continued on next page}} \\\\
\\midrule
\\endfoot
<BLANKLINE>
\\bottomrule
\\endlastfoot
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\end{longtable}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
first_row = (
f"\\begin{{longtable}}{self._position_macro}{{{self.column_format}}}"
)
elements = [first_row, f"{self._caption_and_label()}"]
return "\n".join([item for item in elements if item])
def _caption_and_label(self) -> str:
if self.caption or self.label:
double_backslash = "\\\\"
elements = [f"{self._caption_macro}", f"{self._label_macro}"]
caption_and_label = "\n".join([item for item in elements if item])
caption_and_label += double_backslash
return caption_and_label
else:
return ""
@property
def middle_separator(self) -> str:
iterator = self._create_row_iterator(over="header")
# the content between \endfirsthead and \endhead commands
# mitigates repeated List of Tables entries in the final LaTeX
# document when dealing with longtable environments; GH #34360
elements = [
"\\midrule",
"\\endfirsthead",
f"\\caption[]{{{self.caption}}} \\\\" if self.caption else "",
self.top_separator,
self.header,
"\\midrule",
"\\endhead",
"\\midrule",
f"\\multicolumn{{{len(iterator.strcols)}}}{{r}}"
"{{Continued on next page}} \\\\",
"\\midrule",
"\\endfoot\n",
"\\bottomrule",
"\\endlastfoot",
]
if self._is_separator_required():
return "\n".join(elements)
return ""
@property
def bottom_separator(self) -> str:
return ""
@property
def env_end(self) -> str:
return "\\end{longtable}"
class RegularTableBuilder(GenericTableBuilder):
"""Concrete table builder for regular table.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = RegularTableBuilder(formatter, caption='caption', label='lab',
... column_format='lrc')
>>> table = builder.get_result()
>>> print(table)
\\begin{table}
\\centering
\\caption{caption}
\\label{lab}
\\begin{tabular}{lrc}
\\toprule
{} & a & b \\\\
\\midrule
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\bottomrule
\\end{tabular}
\\end{table}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
elements = [
f"\\begin{{table}}{self._position_macro}",
"\\centering",
f"{self._caption_macro}",
f"{self._label_macro}",
f"\\begin{{tabular}}{{{self.column_format}}}",
]
return "\n".join([item for item in elements if item])
@property
def bottom_separator(self) -> str:
return "\\bottomrule"
@property
def env_end(self) -> str:
return "\n".join(["\\end{tabular}", "\\end{table}"])
class TabularBuilder(GenericTableBuilder):
"""Concrete table builder for tabular environment.
>>> from pandas import DataFrame
>>> from pandas.io.formats import format as fmt
>>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]})
>>> formatter = fmt.DataFrameFormatter(df)
>>> builder = TabularBuilder(formatter, column_format='lrc')
>>> table = builder.get_result()
>>> print(table)
\\begin{tabular}{lrc}
\\toprule
{} & a & b \\\\
\\midrule
0 & 1 & b1 \\\\
1 & 2 & b2 \\\\
\\bottomrule
\\end{tabular}
<BLANKLINE>
"""
@property
def env_begin(self) -> str:
return f"\\begin{{tabular}}{{{self.column_format}}}"
@property
def bottom_separator(self) -> str:
return "\\bottomrule"
@property
def env_end(self) -> str:
return "\\end{tabular}"
class LatexFormatter:
r"""
Used to render a DataFrame to a LaTeX tabular/longtable environment output.
Parameters
----------
formatter : `DataFrameFormatter`
longtable : bool, default False
Use longtable environment.
column_format : str, default None
The columns format as specified in `LaTeX table format
        <https://en.wikibooks.org/wiki/LaTeX/Tables>`__ e.g. 'rcl' for 3 columns
multicolumn : bool, default False
Use \multicolumn to enhance MultiIndex columns.
multicolumn_format : str, default 'l'
The alignment for multicolumns, similar to `column_format`
multirow : bool, default False
Use \multirow to enhance MultiIndex rows.
caption : str or tuple, optional
Tuple (full_caption, short_caption),
which results in \caption[short_caption]{full_caption};
if a single string is passed, no short caption will be set.
label : str, optional
The LaTeX label to be placed inside ``\label{}`` in the output.
position : str, optional
The LaTeX positional argument for tables, to be placed after
``\begin{}`` in the output.
See Also
--------
HTMLFormatter
"""
def __init__(
self,
formatter: DataFrameFormatter,
longtable: bool = False,
column_format: Optional[str] = None,
multicolumn: bool = False,
multicolumn_format: Optional[str] = None,
multirow: bool = False,
caption: Optional[Union[str, Tuple[str, str]]] = None,
label: Optional[str] = None,
position: Optional[str] = None,
):
self.fmt = formatter
self.frame = self.fmt.frame
self.longtable = longtable
self.column_format = column_format
self.multicolumn = multicolumn
self.multicolumn_format = multicolumn_format
self.multirow = multirow
self.caption, self.short_caption = _split_into_full_short_caption(caption)
self.label = label
self.position = position
def to_string(self) -> str:
"""
Render a DataFrame to a LaTeX tabular, longtable, or table/tabular
environment output.
"""
return self.builder.get_result()
@property
def builder(self) -> TableBuilderAbstract:
"""Concrete table builder.
Returns
-------
TableBuilder
"""
builder = self._select_builder()
return builder(
formatter=self.fmt,
column_format=self.column_format,
multicolumn=self.multicolumn,
multicolumn_format=self.multicolumn_format,
multirow=self.multirow,
caption=self.caption,
short_caption=self.short_caption,
label=self.label,
position=self.position,
)
def _select_builder(self) -> Type[TableBuilderAbstract]:
"""Select proper table builder."""
if self.longtable:
return LongTableBuilder
if any([self.caption, self.label, self.position]):
return RegularTableBuilder
return TabularBuilder
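        # Selection logic in the order checked above (illustrative calls):
        #   LatexFormatter(fmt, longtable=True)   -> LongTableBuilder
        #   LatexFormatter(fmt, caption="...")    -> RegularTableBuilder (same for label/position)
        #   LatexFormatter(fmt)                   -> TabularBuilder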
@property
def column_format(self) -> Optional[str]:
"""Column format."""
return self._column_format
@column_format.setter
def column_format(self, input_column_format: Optional[str]) -> None:
"""Setter for column format."""
if input_column_format is None:
self._column_format = (
self._get_index_format() + self._get_column_format_based_on_dtypes()
)
elif not isinstance(input_column_format, str):
raise ValueError(
f"column_format must be str or unicode, "
f"not {type(input_column_format)}"
)
else:
self._column_format = input_column_format
def _get_column_format_based_on_dtypes(self) -> str:
"""Get column format based on data type.
        Right alignment for numbers and left alignment for strings.
"""
def get_col_type(dtype):
if issubclass(dtype.type, np.number):
return "r"
return "l"
dtypes = self.frame.dtypes._values
return "".join(map(get_col_type, dtypes))
def _get_index_format(self) -> str:
"""Get index column format."""
return "l" * self.frame.index.nlevels if self.fmt.index else ""
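        # Combining the two helpers for the default column_format: a frame with a
        # single-level index, one float column and one object column would get
        # "l" (index) + "rl" (dtypes) == "lrl", i.e. right-aligned numbers and
        # left-aligned strings.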
def _escape_symbols(row: Sequence[str]) -> List[str]:
"""Carry out string replacements for special symbols.
Parameters
----------
row : list
List of string, that may contain special symbols.
Returns
-------
list
list of strings with the special symbols replaced.
"""
return [
(
x.replace("\\", "\\textbackslash ")
.replace("_", "\\_")
.replace("%", "\\%")
.replace("$", "\\$")
.replace("#", "\\#")
.replace("{", "\\{")
.replace("}", "\\}")
.replace("~", "\\textasciitilde ")
.replace("^", "\\textasciicircum ")
.replace("&", "\\&")
if (x and x != "{}")
else "{}"
)
for x in row
]
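# For example, given the replacement chain above:
#   _escape_symbols(["100% of $5 & more", ""]) -> ["100\% of \$5 \& more", "{}"]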
def _convert_to_bold(crow: Sequence[str], ilevels: int) -> List[str]:
"""Convert elements in ``crow`` to bold."""
return [
f"\\textbf{{{x}}}" if j < ilevels and x.strip() not in ["", "{}"] else x
for j, x in enumerate(crow)
]
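# For example, with one index level only the first cell is wrapped:
#   _convert_to_bold(["idx", "42"], ilevels=1) -> ["\textbf{idx}", "42"]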
if __name__ == "__main__":
import doctest
doctest.testmod()
| bsd-3-clause |
sander76/home-assistant | tests/components/profiler/test_init.py | 6 | 4819 | """Test the Profiler config flow."""
from datetime import timedelta
import os
from unittest.mock import patch
from homeassistant import setup
from homeassistant.components.profiler import (
CONF_SCAN_INTERVAL,
CONF_SECONDS,
CONF_TYPE,
SERVICE_DUMP_LOG_OBJECTS,
SERVICE_MEMORY,
SERVICE_START,
SERVICE_START_LOG_OBJECTS,
SERVICE_STOP_LOG_OBJECTS,
)
from homeassistant.components.profiler.const import DOMAIN
import homeassistant.util.dt as dt_util
from tests.common import MockConfigEntry, async_fire_time_changed
async def test_basic_usage(hass, tmpdir):
"""Test we can setup and the service is registered."""
test_dir = tmpdir.mkdir("profiles")
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service(DOMAIN, SERVICE_START)
last_filename = None
def _mock_path(filename):
nonlocal last_filename
last_filename = f"{test_dir}/{filename}"
return last_filename
with patch("homeassistant.components.profiler.cProfile.Profile"), patch.object(
hass.config, "path", _mock_path
):
await hass.services.async_call(DOMAIN, SERVICE_START, {CONF_SECONDS: 0.000001})
await hass.async_block_till_done()
assert os.path.exists(last_filename)
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
async def test_memory_usage(hass, tmpdir):
"""Test we can setup and the service is registered."""
test_dir = tmpdir.mkdir("profiles")
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service(DOMAIN, SERVICE_MEMORY)
last_filename = None
def _mock_path(filename):
nonlocal last_filename
last_filename = f"{test_dir}/{filename}"
return last_filename
with patch("homeassistant.components.profiler.hpy") as mock_hpy, patch.object(
hass.config, "path", _mock_path
):
await hass.services.async_call(DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001})
await hass.async_block_till_done()
mock_hpy.assert_called_once()
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
async def test_object_growth_logging(hass, caplog):
"""Test we can setup and the service and we can dump objects to the log."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service(DOMAIN, SERVICE_START_LOG_OBJECTS)
assert hass.services.has_service(DOMAIN, SERVICE_STOP_LOG_OBJECTS)
await hass.services.async_call(
DOMAIN, SERVICE_START_LOG_OBJECTS, {CONF_SCAN_INTERVAL: 10}
)
await hass.async_block_till_done()
assert "Growth" in caplog.text
caplog.clear()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=11))
await hass.async_block_till_done()
assert "Growth" in caplog.text
await hass.services.async_call(DOMAIN, SERVICE_STOP_LOG_OBJECTS, {})
await hass.async_block_till_done()
caplog.clear()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=21))
await hass.async_block_till_done()
assert "Growth" not in caplog.text
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=31))
await hass.async_block_till_done()
assert "Growth" not in caplog.text
async def test_dump_log_object(hass, caplog):
"""Test we can setup and the service is registered and logging works."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service(DOMAIN, SERVICE_DUMP_LOG_OBJECTS)
await hass.services.async_call(
DOMAIN, SERVICE_DUMP_LOG_OBJECTS, {CONF_TYPE: "MockConfigEntry"}
)
await hass.async_block_till_done()
assert "MockConfigEntry" in caplog.text
caplog.clear()
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
| apache-2.0 |
catalan42/jna | native/libffi/generate-osx-source-and-headers.py | 100 | 5200 | #!/usr/bin/env python
import subprocess
import re
import os
import errno
import collections
import sys
class Platform(object):
pass
sdk_re = re.compile(r'.*-sdk ([a-zA-Z0-9.]*)')
def sdkinfo(sdkname):
ret = {}
for line in subprocess.Popen(['xcodebuild', '-sdk', sdkname, '-version'], stdout=subprocess.PIPE).stdout:
kv = line.strip().split(': ', 1)
if len(kv) == 2:
k,v = kv
ret[k] = v
return ret
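# sdkinfo() collects the "key: value" lines printed by `xcodebuild -sdk <name> -version`,
# which look roughly like this (illustrative; exact paths depend on the Xcode install):
#   Path: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.9.sdk
#   SDKVersion: 10.9
# so desktop_sdk_info['Path'] below ends up holding the SDK root used as sdkroot.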
desktop_sdk_info = sdkinfo('macosx')
def latest_sdks():
latest_desktop = None
for line in subprocess.Popen(['xcodebuild', '-showsdks'], stdout=subprocess.PIPE).stdout:
match = sdk_re.match(line)
if match:
if 'OS X' in line:
latest_desktop = match.group(1)
return latest_desktop
desktop_sdk = latest_sdks()
class desktop_platform_32(Platform):
sdk='macosx'
arch = 'i386'
name = 'mac32'
triple = 'i386-apple-darwin10'
sdkroot = desktop_sdk_info['Path']
prefix = "#if defined(__i386__) && !defined(__x86_64__)\n\n"
suffix = "\n\n#endif"
class desktop_platform_64(Platform):
sdk='macosx'
arch = 'x86_64'
name = 'mac'
triple = 'x86_64-apple-darwin10'
sdkroot = desktop_sdk_info['Path']
prefix = "#if !defined(__i386__) && defined(__x86_64__)\n\n"
suffix = "\n\n#endif"
def move_file(src_dir, dst_dir, filename, file_suffix=None, prefix='', suffix=''):
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
out_filename = filename
if file_suffix:
split_name = os.path.splitext(filename)
out_filename = "%s_%s%s" % (split_name[0], file_suffix, split_name[1])
with open(os.path.join(src_dir, filename)) as in_file:
with open(os.path.join(dst_dir, out_filename), 'w') as out_file:
if prefix:
out_file.write(prefix)
out_file.write(in_file.read())
if suffix:
out_file.write(suffix)
headers_seen = collections.defaultdict(set)
def move_source_tree(src_dir, dest_dir, dest_include_dir, arch=None, prefix=None, suffix=None):
for root, dirs, files in os.walk(src_dir, followlinks=True):
relroot = os.path.relpath(root,src_dir)
def move_dir(arch, prefix='', suffix='', files=[]):
for file in files:
file_suffix = None
if file.endswith('.h'):
if dest_include_dir:
file_suffix = arch
if arch:
headers_seen[file].add(arch)
move_file(root, dest_include_dir, file, arch, prefix=prefix, suffix=suffix)
elif dest_dir:
outroot = os.path.join(dest_dir, relroot)
move_file(root, outroot, file, prefix=prefix, suffix=suffix)
if relroot == '.':
move_dir(arch=arch,
files=files,
prefix=prefix,
suffix=suffix)
elif relroot == 'x86':
move_dir(arch='i386',
prefix="#if defined(__i386__) && !defined(__x86_64__)\n\n",
suffix="\n\n#endif",
files=files)
move_dir(arch='x86_64',
prefix="#if !defined(__i386__) && defined(__x86_64__)\n\n",
suffix="\n\n#endif",
files=files)
def build_target(platform):
def xcrun_cmd(cmd):
return subprocess.check_output(['xcrun', '-sdk', platform.sdkroot, '-find', cmd]).strip()
build_dir = 'build_' + platform.name
if not os.path.exists(build_dir):
os.makedirs(build_dir)
env = dict(CC=xcrun_cmd('clang'),
LD=xcrun_cmd('ld'),
CFLAGS='-arch %s -isysroot %s -mmacosx-version-min=10.6' % (platform.arch, platform.sdkroot))
working_dir=os.getcwd()
try:
os.chdir(build_dir)
subprocess.check_call(['../configure', '-host', platform.triple], env=env)
move_source_tree('.', None, '../osx/include',
arch=platform.arch,
prefix=platform.prefix,
suffix=platform.suffix)
move_source_tree('./include', None, '../osx/include',
arch=platform.arch,
prefix=platform.prefix,
suffix=platform.suffix)
finally:
os.chdir(working_dir)
for header_name, archs in headers_seen.iteritems():
basename, suffix = os.path.splitext(header_name)
def main():
move_source_tree('src', 'osx/src', 'osx/include')
move_source_tree('include', None, 'osx/include')
build_target(desktop_platform_32)
build_target(desktop_platform_64)
for header_name, archs in headers_seen.iteritems():
basename, suffix = os.path.splitext(header_name)
with open(os.path.join('osx/include', header_name), 'w') as header:
for arch in archs:
header.write('#include <%s_%s%s>\n' % (basename, arch, suffix))
if __name__ == '__main__':
main()
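# The final loop above writes one umbrella header per header name seen during the
# two builds; e.g. if ffi.h was generated for both architectures, osx/include/ffi.h
# would contain (illustrative):
#   #include <ffi_i386.h>
#   #include <ffi_x86_64.h>
# while the per-arch copies stay wrapped in the __i386__/__x86_64__ guards added
# by the platform prefix/suffix strings.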
| lgpl-2.1 |
Aralicia/Tybalt-DiscordBot | cogs/streams.py | 5 | 26330 | from discord.ext import commands
from .utils.dataIO import dataIO
from .utils.chat_formatting import escape_mass_mentions
from .utils import checks
from collections import defaultdict
from string import ascii_letters
from random import choice
import discord
import os
import re
import aiohttp
import asyncio
import logging
import json
class StreamsError(Exception):
pass
class StreamNotFound(StreamsError):
pass
class APIError(StreamsError):
pass
class InvalidCredentials(StreamsError):
pass
class OfflineStream(StreamsError):
pass
class Streams:
"""Streams
Alerts for a variety of streaming services"""
def __init__(self, bot):
self.bot = bot
self.twitch_streams = dataIO.load_json("data/streams/twitch.json")
self.hitbox_streams = dataIO.load_json("data/streams/hitbox.json")
self.mixer_streams = dataIO.load_json("data/streams/beam.json")
self.picarto_streams = dataIO.load_json("data/streams/picarto.json")
settings = dataIO.load_json("data/streams/settings.json")
self.settings = defaultdict(dict, settings)
self.messages_cache = defaultdict(list)
@commands.command()
async def hitbox(self, stream: str):
"""Checks if hitbox stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.hitbox_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command(pass_context=True)
async def twitch(self, ctx, stream: str):
"""Checks if twitch stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
embed = await self.twitch_online(data[0]["_id"])
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
else:
await self.bot.say(embed=embed)
@commands.command()
async def mixer(self, stream: str):
"""Checks if mixer stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.mixer_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command()
async def picarto(self, stream: str):
"""Checks if picarto stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.picarto_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.group(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def streamalert(self, ctx):
"""Adds/removes stream alerts from the current channel"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamalert.command(name="twitch", pass_context=True)
async def twitch_alert(self, ctx, stream: str):
"""Adds/removes twitch alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
return
enabled = self.enable_or_disable_if_active(self.twitch_streams,
stream,
channel,
_id=data[0]["_id"])
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
@streamalert.command(name="hitbox", pass_context=True)
async def hitbox_alert(self, ctx, stream: str):
"""Adds/removes hitbox alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.hitbox_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.hitbox_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
@streamalert.command(name="mixer", pass_context=True)
async def mixer_alert(self, ctx, stream: str):
"""Adds/removes mixer alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.mixer_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.mixer_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
@streamalert.command(name="picarto", pass_context=True)
async def picarto_alert(self, ctx, stream: str):
"""Adds/removes picarto alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.picarto_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.picarto_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
@streamalert.command(name="stop", pass_context=True)
async def stop_alert(self, ctx):
"""Stops all streams alerts in the current channel"""
channel = ctx.message.channel
streams = (
self.hitbox_streams,
self.twitch_streams,
self.mixer_streams,
self.picarto_streams
)
for stream_type in streams:
to_delete = []
for s in stream_type:
if channel.id in s["CHANNELS"]:
s["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
to_delete.append(s)
for s in to_delete:
stream_type.remove(s)
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await self.bot.say("There will be no more stream alerts in this "
"channel.")
@commands.group(pass_context=True)
async def streamset(self, ctx):
"""Stream settings"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamset.command()
@checks.is_owner()
async def twitchtoken(self, token : str):
"""Sets the Client ID for twitch
To do this, follow these steps:
1. Go to this page: https://dev.twitch.tv/dashboard/apps.
2. Click 'Register Your Application'
3. Enter a name, set the OAuth Redirect URI to 'http://localhost', and
select an Application Category of your choosing.
4. Click 'Register', and on the following page, copy the Client ID.
5. Paste the Client ID into this command. Done!
"""
self.settings["TWITCH_TOKEN"] = token
dataIO.save_json("data/streams/settings.json", self.settings)
await self.bot.say('Twitch Client-ID set.')
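    # Typical chat usage (hypothetical prefix and Client-ID, for illustration only):
    #   [p]streamset twitchtoken abcd1234efgh5678ijkl
    # The value is stored under settings["TWITCH_TOKEN"] and later sent as the
    # Client-ID header by twitch_online() and fetch_twitch_ids().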
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def mention(self, ctx, *, mention_type : str):
"""Sets mentions for stream alerts
Types: everyone, here, none"""
server = ctx.message.server
mention_type = mention_type.lower()
if mention_type in ("everyone", "here"):
self.settings[server.id]["MENTION"] = "@" + mention_type
await self.bot.say("When a stream is online @\u200b{} will be "
"mentioned.".format(mention_type))
elif mention_type == "none":
self.settings[server.id]["MENTION"] = ""
await self.bot.say("Mentions disabled.")
else:
await self.bot.send_cmd_help(ctx)
dataIO.save_json("data/streams/settings.json", self.settings)
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def autodelete(self, ctx):
"""Toggles automatic notification deletion for streams that go offline"""
server = ctx.message.server
settings = self.settings[server.id]
current = settings.get("AUTODELETE", True)
settings["AUTODELETE"] = not current
if settings["AUTODELETE"]:
await self.bot.say("Notifications will be automatically deleted "
"once the stream goes offline.")
else:
await self.bot.say("Notifications won't be deleted anymore.")
dataIO.save_json("data/streams/settings.json", self.settings)
async def hitbox_online(self, stream):
url = "https://api.hitbox.tv/media/live/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if "livestream" not in data:
raise StreamNotFound()
elif data["livestream"][0]["media_is_live"] == "0":
raise OfflineStream()
elif data["livestream"][0]["media_is_live"] == "1":
return self.hitbox_embed(data)
raise APIError()
async def twitch_online(self, stream):
session = aiohttp.ClientSession()
url = "https://api.twitch.tv/kraken/streams/" + stream
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
await session.close()
if r.status == 200:
if data["stream"] is None:
raise OfflineStream()
return self.twitch_embed(data)
elif r.status == 400:
raise InvalidCredentials()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def mixer_online(self, stream):
url = "https://mixer.com/api/v1/channels/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
if data["online"] is True:
return self.mixer_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def picarto_online(self, stream):
url = "https://api.picarto.tv/v1/channel/name/" + stream
async with aiohttp.get(url) as r:
data = await r.text(encoding='utf-8')
if r.status == 200:
data = json.loads(data)
if data["online"] is True:
return self.picarto_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def fetch_twitch_ids(self, *streams, raise_if_none=False):
def chunks(l):
for i in range(0, len(l), 100):
yield l[i:i + 100]
base_url = "https://api.twitch.tv/kraken/users?login="
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
results = []
for streams_list in chunks(streams):
session = aiohttp.ClientSession()
url = base_url + ",".join(streams_list)
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
results.extend(data["users"])
elif r.status == 400:
raise InvalidCredentials()
else:
raise APIError()
await session.close()
if not results and raise_if_none:
raise StreamNotFound()
return results
def twitch_embed(self, data):
channel = data["stream"]["channel"]
url = channel["url"]
logo = channel["logo"]
if logo is None:
logo = "https://static-cdn.jtvnw.net/jtv_user_pictures/xarth/404_user_70x70.png"
status = channel["status"]
if not status:
status = "Untitled broadcast"
embed = discord.Embed(title=status, url=url)
embed.set_author(name=channel["display_name"])
embed.add_field(name="Followers", value=channel["followers"])
embed.add_field(name="Total views", value=channel["views"])
embed.set_thumbnail(url=logo)
if data["stream"]["preview"]["medium"]:
embed.set_image(url=data["stream"]["preview"]["medium"] + self.rnd_attr())
if channel["game"]:
embed.set_footer(text="Playing: " + channel["game"])
embed.color = 0x6441A4
return embed
def hitbox_embed(self, data):
base_url = "https://edge.sf.hitbox.tv"
livestream = data["livestream"][0]
channel = livestream["channel"]
url = channel["channel_link"]
embed = discord.Embed(title=livestream["media_status"], url=url)
embed.set_author(name=livestream["media_name"])
embed.add_field(name="Followers", value=channel["followers"])
#embed.add_field(name="Views", value=channel["views"])
embed.set_thumbnail(url=base_url + channel["user_logo"])
if livestream["media_thumbnail"]:
embed.set_image(url=base_url + livestream["media_thumbnail"] + self.rnd_attr())
embed.set_footer(text="Playing: " + livestream["category_name"])
embed.color = 0x98CB00
return embed
def mixer_embed(self, data):
default_avatar = ("https://mixer.com/_latest/assets/images/main/"
"avatars/default.jpg")
user = data["user"]
url = "https://mixer.com/" + data["token"]
embed = discord.Embed(title=data["name"], url=url)
embed.set_author(name=user["username"])
embed.add_field(name="Followers", value=data["numFollowers"])
embed.add_field(name="Total views", value=data["viewersTotal"])
if user["avatarUrl"]:
embed.set_thumbnail(url=user["avatarUrl"])
else:
embed.set_thumbnail(url=default_avatar)
if data["thumbnail"]:
embed.set_image(url=data["thumbnail"]["url"] + self.rnd_attr())
embed.color = 0x4C90F3
if data["type"] is not None:
embed.set_footer(text="Playing: " + data["type"]["name"])
return embed
def picarto_embed(self, data):
avatar = ("https://picarto.tv/user_data/usrimg/{}/dsdefault.jpg{}"
"".format(data["name"].lower(), self.rnd_attr()))
url = "https://picarto.tv/" + data["name"]
thumbnail = data["thumbnails"]["web"]
embed = discord.Embed(title=data["title"], url=url)
embed.set_author(name=data["name"])
embed.set_image(url=thumbnail + self.rnd_attr())
embed.add_field(name="Followers", value=data["followers"])
embed.add_field(name="Total views", value=data["viewers_total"])
embed.set_thumbnail(url=avatar)
embed.color = 0x132332
data["tags"] = ", ".join(data["tags"])
if not data["tags"]:
data["tags"] = "None"
if data["adult"]:
data["adult"] = "NSFW | "
else:
data["adult"] = ""
embed.color = 0x4C90F3
embed.set_footer(text="{adult}Category: {category} | Tags: {tags}"
"".format(**data))
return embed
def enable_or_disable_if_active(self, streams, stream, channel, _id=None):
"""Returns True if enabled or False if disabled"""
for i, s in enumerate(streams):
stream_id = s.get("ID")
if stream_id and _id: # ID is available, matching by ID is
if stream_id != _id: # preferable
continue
else: # ID unavailable, matching by name
if s["NAME"] != stream:
continue
if channel.id in s["CHANNELS"]:
streams[i]["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
streams.remove(s)
return False
else:
streams[i]["CHANNELS"].append(channel.id)
return True
data = {"CHANNELS": [channel.id],
"NAME": stream,
"ALREADY_ONLINE": False}
if _id:
data["ID"] = _id
streams.append(data)
return True
async def stream_checker(self):
CHECK_DELAY = 60
try:
await self._migration_twitch_v5()
except InvalidCredentials:
print("Error during convertion of twitch usernames to IDs: "
"invalid token")
except Exception as e:
print("Error during convertion of twitch usernames to IDs: "
"{}".format(e))
while self == self.bot.get_cog("Streams"):
save = False
streams = ((self.twitch_streams, self.twitch_online),
(self.hitbox_streams, self.hitbox_online),
(self.mixer_streams, self.mixer_online),
(self.picarto_streams, self.picarto_online))
for streams_list, parser in streams:
if parser == self.twitch_online:
_type = "ID"
else:
_type = "NAME"
for stream in streams_list:
if _type not in stream:
continue
key = (parser, stream[_type])
try:
embed = await parser(stream[_type])
except OfflineStream:
if stream["ALREADY_ONLINE"]:
stream["ALREADY_ONLINE"] = False
save = True
await self.delete_old_notifications(key)
except: # We don't want our task to die
continue
else:
if stream["ALREADY_ONLINE"]:
continue
save = True
stream["ALREADY_ONLINE"] = True
messages_sent = []
for channel_id in stream["CHANNELS"]:
channel = self.bot.get_channel(channel_id)
if channel is None:
continue
mention = self.settings.get(channel.server.id, {}).get("MENTION", "")
can_speak = channel.permissions_for(channel.server.me).send_messages
message = mention + " {} is live!".format(stream["NAME"])
if channel and can_speak:
m = await self.bot.send_message(channel, message, embed=embed)
messages_sent.append(m)
self.messages_cache[key] = messages_sent
await asyncio.sleep(0.5)
if save:
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await asyncio.sleep(CHECK_DELAY)
async def delete_old_notifications(self, key):
for message in self.messages_cache[key]:
server = message.server
settings = self.settings.get(server.id, {})
is_enabled = settings.get("AUTODELETE", True)
try:
if is_enabled:
await self.bot.delete_message(message)
except:
pass
del self.messages_cache[key]
def rnd_attr(self):
"""Avoids Discord's caching"""
return "?rnd=" + "".join([choice(ascii_letters) for i in range(6)])
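        # e.g. this might return "?rnd=QwErTy" (six random ASCII letters); appending
        # it to image URLs forces Discord to fetch a fresh preview instead of a
        # cached one.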
async def _migration_twitch_v5(self):
# Migration of old twitch streams to API v5
to_convert = []
for stream in self.twitch_streams:
if "ID" not in stream:
to_convert.append(stream["NAME"])
if not to_convert:
return
results = await self.fetch_twitch_ids(*to_convert)
for stream in self.twitch_streams:
for result in results:
if stream["NAME"].lower() == result["name"].lower():
stream["ID"] = result["_id"]
# We might as well delete the invalid / renamed ones
self.twitch_streams = [s for s in self.twitch_streams if "ID" in s]
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
def check_folders():
if not os.path.exists("data/streams"):
print("Creating data/streams folder...")
os.makedirs("data/streams")
def check_files():
stream_files = (
"twitch.json",
"hitbox.json",
"beam.json",
"picarto.json"
)
for filename in stream_files:
if not dataIO.is_valid_json("data/streams/" + filename):
print("Creating empty {}...".format(filename))
dataIO.save_json("data/streams/" + filename, [])
f = "data/streams/settings.json"
if not dataIO.is_valid_json(f):
print("Creating empty settings.json...")
dataIO.save_json(f, {})
def setup(bot):
logger = logging.getLogger('aiohttp.client')
logger.setLevel(50) # Stops warning spam
check_folders()
check_files()
n = Streams(bot)
loop = asyncio.get_event_loop()
loop.create_task(n.stream_checker())
bot.add_cog(n)
| gpl-3.0 |
saurabh6790/trufil_app | selling/report/customers_not_buying_since_long_time/customers_not_buying_since_long_time.py | 9 | 1985 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute(filters=None):
if not filters: filters ={}
days_since_last_order = filters.get("days_since_last_order")
if cint(days_since_last_order) <= 0:
		webnotes.msgprint("Please enter a positive value in the 'Days Since Last Order' field", raise_exception=1)
columns = get_columns()
customers = get_so_details()
data = []
for cust in customers:
if cint(cust[8]) >= cint(days_since_last_order):
cust.insert(7,get_last_so_amt(cust[0]))
data.append(cust)
return columns, data
def get_so_details():
return webnotes.conn.sql("""select
cust.name,
cust.customer_name,
cust.territory,
cust.customer_group,
count(distinct(so.name)) as 'num_of_order',
sum(net_total) as 'total_order_value',
sum(if(so.status = "Stopped",
so.net_total * so.per_delivered/100,
so.net_total)) as 'total_order_considered',
max(so.transaction_date) as 'last_sales_order_date',
DATEDIFF(CURDATE(), max(so.transaction_date)) as 'days_since_last_order'
from `tabCustomer` cust, `tabSales Order` so
where cust.name = so.customer and so.docstatus = 1
group by cust.name
order by 'days_since_last_order' desc """,as_list=1)
def get_last_so_amt(customer):
res = webnotes.conn.sql("""select net_total from `tabSales Order`
where customer ='%(customer)s' and docstatus = 1 order by transaction_date desc
limit 1""" % {'customer':customer})
return res and res[0][0] or 0
def get_columns():
return [
"Customer:Link/Customer:120",
"Customer Name:Data:120",
"Territory::120",
"Customer Group::120",
"Number of Order::120",
"Total Order Value:Currency:120",
"Total Order Considered:Currency:160",
"Last Order Amount:Currency:160",
"Last Sales Order Date:Date:160",
"Days Since Last Order::160"
] | agpl-3.0 |
ericawright/bedrock | tests/pages/home.py | 5 | 1514 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import BasePage
from pages.regions.download_button import DownloadButton
class HomePage(BasePage):
_intro_download_button_locator = (By.ID, 'download-intro') # legacy home page
_primary_download_button_locator = (By.ID, 'download-primary')
_secondary_download_button_locator = (By.ID, 'download-secondary')
_primary_accounts_button_locator = (By.ID, 'fxa-learn-primary')
_secondary_accounts_button_locator = (By.ID, 'fxa-learn-secondary')
@property
def intro_download_button(self):
el = self.find_element(*self._intro_download_button_locator)
return DownloadButton(self, root=el)
@property
def primary_download_button(self):
el = self.find_element(*self._primary_download_button_locator)
return DownloadButton(self, root=el)
@property
def secondary_download_button(self):
el = self.find_element(*self._secondary_download_button_locator)
return DownloadButton(self, root=el)
@property
def is_primary_accounts_button(self):
return self.is_element_displayed(*self._primary_accounts_button_locator)
@property
def is_secondary_accounts_button(self):
return self.is_element_displayed(*self._secondary_accounts_button_locator)
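# Illustrative sketch only (not part of this page object): a test built on the
# pytest/pypom conventions used by this suite might drive the page roughly as
# below; the fixture and method names here are assumptions, not verified APIs.
#
#   def test_primary_download_button_displayed(base_url, selenium):
#       page = HomePage(selenium, base_url).open()
#       assert page.primary_download_button.is_displayed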
| mpl-2.0 |
DelvarWorld/three.js | utils/exporters/blender/addons/io_three/exporter/material.py | 70 | 3806 | from .. import constants, logger
from . import base_classes, utilities, api
class Material(base_classes.BaseNode):
"""Class that wraps material nodes"""
def __init__(self, node, parent):
logger.debug("Material().__init__(%s)", node)
base_classes.BaseNode.__init__(self, node, parent,
constants.MATERIAL)
self._common_attributes()
if self[constants.TYPE] == constants.THREE_PHONG:
self._phong_attributes()
textures = self.parent.options.get(constants.MAPS)
if textures:
self._update_maps()
def _common_attributes(self):
"""Parse the common material attributes"""
logger.debug('Material()._common_attributes()')
dispatch = {
constants.PHONG: constants.THREE_PHONG,
constants.LAMBERT: constants.THREE_LAMBERT,
constants.BASIC: constants.THREE_BASIC
}
shader_type = api.material.type(self.node)
self[constants.TYPE] = dispatch[shader_type]
diffuse = api.material.diffuse_color(self.node)
self[constants.COLOR] = utilities.rgb2int(diffuse)
if self[constants.TYPE] != constants.THREE_BASIC:
emissive = api.material.emissive_color(self.node)
self[constants.EMISSIVE] = utilities.rgb2int(emissive)
vertex_color = api.material.use_vertex_colors(self.node)
if vertex_color:
self[constants.VERTEX_COLORS] = constants.VERTEX_COLORS_ON
else:
self[constants.VERTEX_COLORS] = constants.VERTEX_COLORS_OFF
self[constants.BLENDING] = api.material.blending(self.node)
if api.material.transparent(self.node):
self[constants.TRANSPARENT] = True
if api.material.double_sided(self.node):
self[constants.SIDE] = constants.SIDE_DOUBLE
self[constants.DEPTH_TEST] = api.material.depth_test(self.node)
self[constants.DEPTH_WRITE] = api.material.depth_write(self.node)
def _phong_attributes(self):
"""Parse phong specific attributes"""
logger.debug("Material()._phong_attributes()")
specular = api.material.specular_color(self.node)
self[constants.SPECULAR] = utilities.rgb2int(specular)
self[constants.SHININESS] = api.material.specular_coef(self.node)
def _update_maps(self):
"""Parses maps/textures and updates the textures array
with any new nodes found.
"""
logger.debug("Material()._update_maps()")
mapping = (
(api.material.diffuse_map, constants.MAP),
(api.material.specular_map, constants.SPECULAR_MAP),
(api.material.light_map, constants.LIGHT_MAP)
)
for func, key in mapping:
map_node = func(self.node)
if map_node:
logger.info('Found map node %s for %s', map_node, key)
tex_inst = self.scene.texture(map_node.name)
self[key] = tex_inst[constants.UUID]
if self[constants.TYPE] == constants.THREE_PHONG:
mapping = (
(api.material.bump_map, constants.BUMP_MAP,
constants.BUMP_SCALE, api.material.bump_scale),
(api.material.normal_map, constants.NORMAL_MAP,
constants.NORMAL_SCALE, api.material.normal_scale)
)
for func, map_key, scale_key, scale_func in mapping:
map_node = func(self.node)
if not map_node:
continue
logger.info("Found map node %s for %s", map_node, map_key)
tex_inst = self.scene.texture(map_node.name)
self[map_key] = tex_inst[constants.UUID]
self[scale_key] = scale_func(self.node)
| mit |
kaktus018/ca_wizard_mechanical | cawm_classes.py | 1 | 55264 |
# ca_wizard_mechanical, version 0.2
# Allows the generation of comm-files for simple 3D structural analyses in code_aster with an interactive GUI
#
# This work is licensed under the terms and conditions of the GNU General Public License version 3
# Copyright (C) 2017 Dominik Lechleitner
# Contact: kaktus018(at)gmail.com
# GitHub repository: https://github.com/kaktus018/ca_wizard_mechanical
#
# This file is part of ca_wizard_mechanical.
#
# ca_wizard_mechanical is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ca_wizard_mechanical is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ca_wizard_mechanical. If not, see <http://www.gnu.org/licenses/>.
import xml.etree.ElementTree as ET
from keyword import iskeyword
from copy import deepcopy
ls = "\n"
def setMatLibPath(p):
global matLibPath
matLibPath = p
def setVersion(v):
global cawmVersion
cawmVersion = v
def isNumeric(*args):
for s in args:
try:
float(s)
except ValueError:
return False
return True
def isInteger(*args):
for s in args:
try:
int(s)
except ValueError:
return False
return True
def hasFunction(functionList,*args):
for f in args:
if f in [functionList[i].funcName for i in range(len(functionList))]:
return True
return False
def hasConstant(functionList,*args):
for c in args:
if c not in [functionList[i].funcName for i in range(len(functionList))] and c:
return True
return False
# check if string is either empty, a function or numeric - in which case: return True
def checkValidEntry(functionList,*args):
for el in args:
if el and not hasFunction(functionList,el) and not isNumeric(el):
return False
return True
class cawmInst:
def __init__(self,solverSet,names,workingDir,studyName):
self.solverSet = solverSet
self.names = names
self.workingDir = workingDir
self.studyName = studyName
self.cawmVersion = cawmVersion
class PyFunction:
def __init__(self,funcName,funcText):
self.funcName = funcName
self.funcText = funcText
# verify function name and see if the interpreter raises an exception
def verify(self,functionList,checkFuncDefi):
msgs = []
if not self.funcName.isidentifier() or iskeyword(self.funcName):
msgs.append(self.funcName + " is not a valid function name. The function will not be checked for further errors.")
elif checkFuncDefi:
try:
exec("def " + self.funcName + "(x,y,z,t):\n" + self.funcText)
except Exception as e:
msgs.append("While trying to evaluate the Python function " + self.funcName + " the Python 3 interpreter raised the following exception:\n" + str(e))
return msgs
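# Illustrative usage of the validation helpers above (never called by ca_wizard
# itself). The PyFunction instance is a hypothetical user-defined time ramp.
def _example_helper_usage():
    funcs = [PyFunction("ramp", "    return 2.0*t")]
    assert isNumeric("1.5", "2") and not isNumeric("abc")
    assert hasFunction(funcs, "ramp")                  # "ramp" is a known function name
    assert hasConstant(funcs, "1.5")                   # non-empty and not a function name
    assert checkValidEntry(funcs, "", "ramp", "3.1")   # empty, function or numeric entries are all valid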
class Material:
def __init__(self,matName):
root = ET.parse(matLibPath).getroot()
for child in root:
if child.attrib["name"] == matName:
self.matName = matName
self.matNum = child.find("materialNumber").text
self.matCat = child.find("category").text
self.youngsModulus = child.find("YoungsModulus").text
self.poissonRatio = child.find("PoissonRatio").text
self.alpha = child.find("alpha").text
self.density = child.find("density").text
return
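# For reference: Material() above expects the material library XML at matLibPath
# to contain one element per material with the child tags read in __init__.
# Minimal sketch (root/element tag names and the values are assumptions; only the
# "name" attribute and the child tag names are taken from the parsing code):
#
#   <materials>
#     <material name="Steel S235">
#       <materialNumber>1.0037</materialNumber>
#       <category>Steel</category>
#       <YoungsModulus>210000</YoungsModulus>
#       <PoissonRatio>0.3</PoissonRatio>
#       <alpha>1.2e-5</alpha>
#       <density>7.85e-9</density>
#     </material>
#   </materials>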
class MaterialSet:
def __init__(self,assiName,nodalGroupName,materialName):
self.assiName = assiName
self.nodalGroupName = nodalGroupName
self.material = Material(materialName)
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if not isNumeric(self.material.youngsModulus,self.material.poissonRatio):
msgs.append(self.assiName + ": Young's modulus or Poisson's ratio is not numeric.")
if not [self.nodalGroupName, "Volume"] in [names[i] for i in range(len(names))] and not self.nodalGroupName == "whole mesh":
msgs.append(self.assiName + ": Material is not assigned to a valid node group.")
return msgs
class NodeJointSet:
def __init__(self,assiName,jointGroupName,nodeName,cX,cY,cZ,cPhiX,cPhiY,cPhiZ):
self.assiName = assiName
self.jointGroupName = jointGroupName
self.nodeName = nodeName
self.cX = cX
self.cY = cY
self.cZ = cZ
self.cPhiX = cPhiX
self.cPhiY = cPhiY
self.cPhiZ = cPhiZ
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if not isNumeric(self.cX, self.cY, self.cZ, self.cPhiX, self.cPhiY, self.cPhiZ):
msgs.append(self.assiName + ": At least one stiffness value is not numeric.")
if not [self.jointGroupName, "Node joint group"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Node group name for the node joint group is not valid.")
if not [self.nodeName, "Vertex/Node"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Node group name for the node is not valid.")
return msgs
class RestraintSet:
def __init__(self,assiName,nodalGroupName,rotMatViaPython,deltaX,deltaY,deltaZ,deltaPhiX,deltaPhiY,deltaPhiZ,xTrans,yTrans,zTrans,rotX,rotY,rotZ,reacMX,reacMY,reacMZ):
self.assiName = assiName
self.nodalGroupName = nodalGroupName
self.rotMatViaPython = rotMatViaPython
self.deltaX = deltaX
self.deltaY = deltaY
self.deltaZ = deltaZ
self.deltaPhiX = deltaPhiX
self.deltaPhiY = deltaPhiY
self.deltaPhiZ = deltaPhiZ
self.xTrans = xTrans
self.yTrans = yTrans
self.zTrans = zTrans
self.rotX = rotX
self.rotY = rotY
self.rotZ = rotZ
self.reacMX = reacMX
self.reacMY = reacMY
self.reacMZ = reacMZ
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if self.rotMatViaPython:
if hasFunction(functionList,self.deltaX,self.deltaY,self.deltaZ,self.deltaPhiX,self.deltaPhiY,self.deltaPhiZ):
raise ValueError(self.assiName + ": When using the provided function for the rotation matrix the entries for the restraints can not be a function.")
if (self.rotX and not (self.deltaX and self.deltaPhiY and self.deltaPhiZ)) or (self.rotY and not(self.deltaY and self.deltaPhiX and self.deltaPhiZ)) or \
(self.rotZ and not (self.deltaZ and self.deltaPhiX and self.deltaPhiY)):
raise ValueError(self.assiName + ": When using the provided function for the rotation matrix the translational DoFs for all axes to which the rotation is applied to have to be restrained.")
if not isNumeric(self.deltaPhiX, self.deltaPhiY, self.deltaPhiZ, self.xTrans, self.yTrans, self.zTrans):
msgs.append(self.assiName + ": Inputs for the rotational DoFs and the coordinates of the rotation center have to be numeric. (All rotational DoFs have to be restrained).")
if not checkValidEntry(functionList,self.deltaX,self.deltaY,self.deltaZ,self.deltaPhiX,self.deltaPhiY,self.deltaPhiZ):
msgs.append(self.assiName + ": At least one input for translation or rotation is neither a function nor numeric. If this is related to the rotational DoFs and the restraint is not assigned to " + \
"a node of a node joint group you can ignore this warning.")
if not isNumeric(self.reacMX, self.reacMY, self.reacMZ):
msgs.append(self.assiName + ": At least one input for the coordinates for the computation of the torsional reactions is not numeric.")
if not [self.nodalGroupName, "Surface"] in [names[i] for i in range(len(names))] and not [self.nodalGroupName, "Edge"] in [names[i] for i in range(len(names))] and \
not [self.nodalGroupName, "Vertex/Node"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Restraint is not assigned to a valid node group.")
return msgs
class LoadSet:
def __init__(self,assiName,nodalGroupName,loadType,FX,FY,FZ,MX,MY,MZ,p,gX,gY,gZ,omega,centerX,centerY,centerZ,axisX,axisY,axisZ):
self.assiName = assiName
self.nodalGroupName = nodalGroupName
self.loadType = loadType
self.FX = FX
self.FY = FY
self.FZ = FZ
self.MX = MX
self.MY = MY
self.MZ = MZ
self.p = p
self.gX = gX
self.gY = gY
self.gZ = gZ
self.omega = omega
self.centerX = centerX
self.centerY = centerY
self.centerZ = centerZ
self.axisX = axisX
self.axisY = axisY
self.axisZ = axisZ
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if self.loadType == "Gravity":
if not isNumeric(self.gX, self.gY, self.gZ):
msgs.append(self.assiName + ": At least one input for the gravity vector is not numeric.")
if not [self.nodalGroupName, "Volume"] in [names[i] for i in range(len(names))] and not self.nodalGroupName == "whole mesh":
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Centrifugal force":
if not isNumeric(self.omega, self.centerX, self.centerY, self.centerZ, self.axisX, self.axisY, self.axisZ):
msgs.append(self.assiName + ": At least one input for the rotation is not numeric.")
if not [self.nodalGroupName, "Volume"] in [names[i] for i in range(len(names))] and not self.nodalGroupName == "whole mesh":
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Force on volume":
if not checkValidEntry(functionList, self.FX, self.FY, self.FZ):
msgs.append(self.assiName + ": At least one input for the force vector is neither a function nor numeric.")
if not [self.nodalGroupName, "Volume"] in [names[i] for i in range(len(names))] and not self.nodalGroupName == "whole mesh":
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Force on face":
if not checkValidEntry(functionList, self.FX, self.FY, self.FZ):
msgs.append(self.assiName + ": At least one input for the force vector is neither a function nor numeric.")
if not [self.nodalGroupName, "Surface"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Force on edge":
if not checkValidEntry(functionList, self.FX, self.FY, self.FZ):
msgs.append(self.assiName + ": At least one input for the force vector is neither a function nor numeric.")
if not [self.nodalGroupName, "Edge"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Force on node":
if not checkValidEntry(functionList, self.FX, self.FY, self.FZ, self.MX, self.MY, self.MZ):
msgs.append(self.assiName + ": At least one input for the force or torque vector is neither a function nor numeric (if this message relates to the torque and the node" + \
"is not assigned to a node joint group, you can disregard this message).")
if not self.nodalGroupName in [names[i][0] for i in range(len(names))] and not [self.nodalGroupName, "Node joint group"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
if self.loadType == "Pressure":
if not checkValidEntry(functionList, self.p) or not self.p:
msgs.append(self.assiName + ": Input for the pressure is neither a function nor numeric.")
if not [self.nodalGroupName, "Surface"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Load is not assigned to a valid node group.")
return msgs
class ContactGlobalSetting:
def __init__(self,formulationType,frictionModel,contactAlgo,frictionAlgo):
self.formulationType = formulationType
self.frictionModel = frictionModel
self.contactAlgo = contactAlgo
self.frictionAlgo = frictionAlgo
class ContactSet:
def __init__(self,assiName,masterName,slaveName,fricCoeff,contactAlgo,E_N,E_T,globalSettings):
self.assiName = assiName
self.masterName = masterName
self.slaveName = slaveName
self.fricCoeff = fricCoeff
self.contactAlgo = contactAlgo
self.E_N = E_N
self.E_T = E_T
self.globalSettings = globalSettings
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if self.globalSettings.formulationType == "discrete":
if self.contactAlgo == "PENALISATION":
if not isNumeric(self.E_N):
msgs.append(self.assiName + ": E_N is not numeric.")
if self.globalSettings.frictionModel == "Coulomb":
if not isNumeric(self.E_T):
msgs.append(self.assiName + ": E_T is not numeric.")
if not isNumeric(self.fricCoeff):
msgs.append(self.assiName + ": Friction coefficient is not numeric.")
else:
if self.globalSettings.frictionModel == "Coulomb":
if not isNumeric(self.fricCoeff):
msgs.append(self.assiName + ": Friction coefficient is not numeric.")
if not [self.masterName, "Surface"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Master is not assigned to a valid node group.")
if not [self.slaveName, "Surface"] in [names[i] for i in range(len(names))]:
msgs.append(self.assiName + ": Slave is not assigned to a valid node group.")
return msgs
class ThermalSet:
def __init__(self,assiName,nodalGroupName,assiType,deltaT,unite,T0,funStr):
self.assiName = assiName
self.nodalGroupName = nodalGroupName
self.assiType = assiType
self.deltaT = deltaT
self.unite = unite
self.T0 = T0
self.funStr = funStr
# verify datatype of properties and node group name
def verify(self,names,functionList):
msgs = []
if self.assiType == "const":
if not checkValidEntry(functionList, self.deltaT):
msgs.append(self.assiName + ": \u0394T is neither a function nor numeric.")
else:
if not isNumeric(self.unite, self.T0):
msgs.append(self.assiName + ": UNITE or T0 is not numeric.")
if not [self.nodalGroupName, "Volume"] in [names[i] for i in range(len(names))] and not self.nodalGroupName == "whole mesh":
msgs.append(self.assiName + ": Temp. field is not assigned to a valid node group.")
return msgs
class OutputSet:
def __init__(self,nodalGroupName,SIGM,SIEQ,EPS,REAC,ERME,TEMP):
self.nodalGroupName = nodalGroupName
self.SIGM = SIGM
self.SIEQ = SIEQ
self.EPS = EPS
self.REAC = REAC
self.ERME = ERME
self.TEMP = TEMP
class SolverSet:
def __init__(self,analysisType,timeSteps,endTime,timeRampUp,timeRampDown,timeRampFunc,strainModel,method,resi,maxIter,functions,checkFuncDefis,materialSets,nodeJointSets,restraintSets,loadSets,contactSets,
thermalSets,outputSet):
self.analysisType = analysisType
self.timeSteps = timeSteps
self.endTime = endTime
self.timeRampUp = timeRampUp
self.timeRampDown = timeRampDown
self.timeRampFunc = timeRampFunc
self.strainModel = strainModel
self.method = method
self.resi = resi
self.maxIter = maxIter
self.functions = functions
self.checkFuncDefis = checkFuncDefis
self.materialSets = materialSets
self.nodeJointSets = nodeJointSets
self.restraintSets = restraintSets
self.loadSets = loadSets
self.contactSets = contactSets
self.thermalSets = thermalSets
self.outputSet = outputSet
# this method will check if relevant inputs are numeric and all assignments to node groups are valid. It will NOT check in any way whether the resulting comm-file will run in code_aster!
def verify(self,names,functionList):
msgs = []
if len(self.materialSets) == 0 or len(self.restraintSets) == 0:
msgs.extend(["The current setup has no material assignments and/or no restraint assignments."])
for el in self.functions:
msgs.extend(el.verify(functionList,self.checkFuncDefis))
for el in self.materialSets + self.nodeJointSets + self.restraintSets + self.loadSets + self.contactSets + self.thermalSets:
msgs.extend(el.verify(names,functionList))
if not isInteger(self.timeSteps):
raise ValueError("The number of time steps is not of type integer.")
if not isNumeric(self.endTime):
msgs.extend(["The simulation end time is not numeric."])
if self.analysisType == "non-linear static":
if not isInteger(self.maxIter):
msgs.extend(["The number of max. iterations has to be of type integer."])
if not isNumeric(self.resi):
msgs.extend(["Max. relative global residual is not numeric."])
if int(self.timeSteps) < 1:
msgs.extend(["A non-linear analysis requires at least one time step."])
if self.timeRampUp and self.timeRampDown and not int(self.timeSteps) % 2 == 0:
msgs.extend(["Ramping loads and restraints up AND down requires an even amount of time steps. Otherwise a computation with their max. values will not happen."])
if self.outputSet.ERME and len(self.nodeJointSets) > 0:
msgs.extend(["Calculation of the error a posteriori (ERME) with code_aster version <= 13.2 can only be performed on the whole mesh. This will not work with the discrete element" + \
" of a node joint (MODELISATION='DIS_TR')."])
return msgs
# generate string for comm-file
def assembleCOMM(self):
def getFormuleName(funcName):
for el in formules:
if el[1] == funcName:
return el[0]
return None
pythonFuns = ""
# If any restraints require the application of the rotational matrix, add generic translation functions
if sum([self.restraintSets[i].rotMatViaPython for i in range(len(self.restraintSets))]) > 0:
pythonFuns = "# Generic translation functions:" + ls + "def translate_X(deltaX,phiY,phiZ,XTrans,YTrans,ZTrans,X,Y,Z):" + ls + \
" return deltaX+(X-XTrans)*cos(phiY)+(Z-ZTrans)*sin(phiY)+(X-XTrans)*cos(phiZ)-(Y-YTrans)*sin(phiZ)-2*(X-XTrans)" + ls + ls + \
"def translate_Y(deltaY,phiX,phiZ,XTrans,YTrans,ZTrans,X,Y,Z):" + ls + \
" return deltaY+(Y-YTrans)*cos(phiX)-(Z-ZTrans)*sin(phiX)+(Y-YTrans)*cos(phiZ)+(X-XTrans)*sin(phiZ)-2*(Y-YTrans)" + ls + ls + \
"def translate_Z(deltaZ,phiX,phiY,XTrans,YTrans,ZTrans,X,Y,Z):" + ls + \
" return deltaZ+(Z-ZTrans)*cos(phiX)+(Y-YTrans)*sin(phiX)+(Z-ZTrans)*cos(phiY)-(X-XTrans)*sin(phiY)-2*(Z-ZTrans)" + ls + ls
# For restraints that use the generic translation functions defined above, add wrapper functions to the functions list
restraintSetsLocal = deepcopy(self.restraintSets) # allow local modification of the restraint sets without compromising the original data
functionsLocal = deepcopy(self.functions) # allow local modification of the functions list without compromising the original data
for el in restraintSetsLocal:
if el.rotMatViaPython:
if not el.deltaX == "" or el.rotX:
if el.rotX:
phiY = str(float(el.deltaPhiY))
phiZ = str(float(el.deltaPhiZ))
else:
phiY = "0.0"
phiZ = "0.0"
functionsLocal.append(PyFunction("DX_" + el.assiName, " return translate_X("+str(float(el.deltaX))+","+phiY+","+phiZ+","+ \
str(float(el.xTrans))+","+str(float(el.yTrans))+","+str(float(el.zTrans))+",x,y,z)"))
el.deltaX = "DX_" + el.assiName
if not el.deltaY == "" or el.rotY:
if el.rotY:
phiX = str(float(el.deltaPhiX))
phiZ = str(float(el.deltaPhiZ))
else:
phiX = "0.0"
phiZ = "0.0"
functionsLocal.append(PyFunction("DY_" + el.assiName, " return translate_Y("+str(float(el.deltaY))+","+phiX+","+phiZ+","+ \
str(float(el.xTrans))+","+str(float(el.yTrans))+","+str(float(el.zTrans))+",x,y,z)"))
el.deltaY = "DY_" + el.assiName
if not el.deltaZ == "" or el.rotZ:
if el.rotZ:
phiX = str(float(el.deltaPhiX))
phiY = str(float(el.deltaPhiY))
else:
phiX = "0.0"
phiY = "0.0"
functionsLocal.append(PyFunction("DZ_" + el.assiName, " return translate_Z("+str(float(el.deltaZ))+","+phiX+","+phiY+","+ \
str(float(el.xTrans))+","+str(float(el.yTrans))+","+str(float(el.zTrans))+",x,y,z)"))
el.deltaZ = "DZ_" + el.assiName
# Add all Python functions in the functions list to the comm-file
if len(functionsLocal) > 0:
pythonFuns = pythonFuns + "# User defined Python functions and wrappers for the generic translation functions" + ls + ls
for el in functionsLocal:
pythonFuns = pythonFuns + "def " + el.funcName + "(x,y,z,t):" + ls + el.funcText + ls + ls
# DEBUT statement
debutStr = ls + "# Start of code_aster commands" + ls + "DEBUT();" + ls + ls
# list of time steps
if int(self.timeSteps) > 0:
tListStr = "# list of time steps" + ls + "TLIST=DEFI_LIST_REEL(DEBUT=0.0,INTERVALLE=_F(JUSQU_A=" + self.endTime + ",NOMBRE=" + self.timeSteps + ",),);" + ls + ls
"SF=DEFI_FONCTION(NOM_PARA='INST',VALE=("
if self.timeRampUp == 1 and self.timeRampDown == 1:
tListStr = tListStr + "SF=DEFI_FONCTION(NOM_PARA='INST',VALE=(0.0, 0.0, 0.5, 1.0 ,1.0 ,0.0,),);" + ls + ls
elif self.timeRampUp:
tListStr = tListStr + "SF=DEFI_FONCTION(NOM_PARA='INST',VALE=(0.0, 0.0, 1.0, 1.0,),);" + ls + ls
elif self.timeRampDown:
tListStr = tListStr + "SF=DEFI_FONCTION(NOM_PARA='INST',VALE=(0.0, 1.0, 1.0, 0.0,),);" + ls + ls
else:
tListStr = ""
# Bind all Python functions to corresponding code_aster formules
formuleStr = ""
formules = []
for el in functionsLocal:
if formuleStr == "":
formuleStr = "# Assign formules" + ls
# store all identifiers for the formules in a list (max. 8 characters allowed for identifier -> can not use the actual function name)
# formule name at index 0, function name at index 1
formules.append(["F" + str(len(formules)),el.funcName])
formuleStr = formuleStr + formules[-1][0] + "=FORMULE(VALE='" + el.funcName + "(X,Y,Z,INST)',NOM_PARA=('X','Y','Z','INST',),);" + ls
if not formuleStr == "":
formuleStr = formuleStr + ls
# material definitions
matDefiStr = "# Material definitions" + ls
matDefiNames = [] # same here as with the formules - use short identifiers for the material definitions
for el in self.materialSets:
matDefiNames.append("MA"+str(len(matDefiNames)))
matDefiStr = matDefiStr + matDefiNames[-1] + "=" + "DEFI_MATERIAU(ELAS=_F(E=" + el.material.youngsModulus + ",NU=" + el.material.poissonRatio + \
",RHO=" + el.material.density + ",ALPHA=" + el.material.alpha + ",),);" + ls
matDefiStr = matDefiStr + ls
# reading/modifying the mesh
meshName = "MAIL0"
# reading
meshStr = "# reading/modifying the mesh" + ls + meshName + "=LIRE_MAILLAGE(FORMAT='MED',);" + ls
# create points for node joints
if len(self.nodeJointSets) > 0:
meshName = "MAIL1"
meshStr = meshStr + meshName + "=CREA_MAILLAGE(MAILLAGE=MAIL0,CREA_POI1=("
for el in self.nodeJointSets:
meshStr = meshStr + "_F(NOM_GROUP_MA='" + el.nodeName + "',GROUP_NO='" + el.nodeName + "',)," + ls
meshStr = meshStr + "),);" + ls
# mesh adaption for pressure loads and contacts
groupMAStr = ""
groupMAList = []
for el in self.loadSets:
if el.loadType == "Pressure" and not el.nodalGroupName in groupMAList:
groupMAStr = groupMAStr + "'" + el.nodalGroupName + "',"
groupMAList.append(el.nodalGroupName)
if self.analysisType == "non-linear static":
for el in self.contactSets:
if not el.masterName in groupMAList:
groupMAStr = groupMAStr + "'" + el.masterName + "',"
groupMAList.append(el.masterName)
if not el.slaveName in groupMAList:
groupMAStr = groupMAStr + "'" + el.slaveName + "',"
groupMAList.append(el.slaveName)
if not groupMAStr == "":
meshStr = meshStr + meshName + "=MODI_MAILLAGE(reuse=" + meshName + ",MAILLAGE=" + meshName + ",ORIE_PEAU_3D=_F(GROUP_MA=(" + ls + \
groupMAStr + "),),);" + ls
meshStr = meshStr + ls
# create model
modelStr = "# create model" + ls + "MODE=AFFE_MODELE(MAILLAGE=" + meshName + ",AFFE=(_F(TOUT='OUI',PHENOMENE='MECANIQUE',MODELISATION='3D',)," + ls
groupMAStr = ""
for el in self.nodeJointSets:
groupMAStr = groupMAStr + ls + "'" + el.nodeName + "',"
if not groupMAStr == "":
modelStr = modelStr + "_F(GROUP_MA=(" + groupMAStr + ")," + ls + "PHENOMENE='MECANIQUE',MODELISATION='DIS_TR',),"
modelStr = modelStr + "),);" + ls + ls
# create temperature fields from constant or function
tempFieldStr = ""
tempFieldNames = []
if sum([self.thermalSets[i].assiType == "const" for i in range(len(self.thermalSets))]) > 0:
tempFieldStr = "# Create temperature fields" + ls
for el in self.thermalSets:
if el.assiType == "const":
tempFieldNames.append("TFld" + str(len(tempFieldNames)))
tempFieldStr = tempFieldStr + tempFieldNames[-1] + "=CREA_CHAMP(TYPE_CHAM='NOEU_TEMP_"
if hasFunction(functionsLocal,el.deltaT):
tempFieldStr = tempFieldStr + "F"
else:
tempFieldStr = tempFieldStr + "R"
tempFieldStr = tempFieldStr + "',OPERATION='AFFE',MODELE=MODE,AFFE=(_F("
if el.nodalGroupName == "whole mesh":
tempFieldStr = tempFieldStr + "TOUT='OUI'"
else:
tempFieldStr = tempFieldStr + "GROUP_MA='" + el.nodalGroupName + "'"
tempFieldStr = tempFieldStr + ",NOM_CMP='TEMP'"
if hasFunction(functionsLocal,el.deltaT):
tempFieldStr = tempFieldStr + ",VALE_F=" + getFormuleName(el.deltaT)
else:
tempFieldStr = tempFieldStr + ",VALE=" + el.deltaT
tempFieldStr = tempFieldStr + ",),),);" + ls
if not tempFieldStr == "":
tempFieldStr = tempFieldStr + ls
# create a code_aster-result for all temp. fields
tempResStr = ""
tempResNames = []
if len(tempFieldNames) > 0:
tempResStr = "# Create results for all temperature fields" + ls
for el in tempFieldNames:
tempResNames.append("TRes" + str(len(tempResNames)))
tempResStr = tempResStr + tempResNames[-1] + "=CREA_RESU(OPERATION='AFFE',TYPE_RESU='EVOL_THER',NOM_CHAM='TEMP',AFFE=_F(CHAM_GD=" + el + ","
if int(self.timeSteps) > 0:
tempResStr = tempResStr + "LIST_INST=TLIST"
else:
tempResStr = tempResStr + "INST=0.0"
tempResStr = tempResStr + ",),);" + ls
if not tempResStr == "":
tempResStr = tempResStr + ls
# create a code_aster-result for the temp. field from a med-file
if sum([self.thermalSets[i].assiType == "file" for i in range(len(self.thermalSets))]) > 0:
tempResStr = tempResStr + "# create result for the temperature field from a med-files"
for el in self.thermalSets:
if el.assiType == "file":
tempResNames.append("TRes" + str(len(tempResNames)))
tempResStr = tempResStr + tempResNames[-1] + "=LIRE_RESU(TYPE_RESU='EVOL_THER',FORMAT='MED'," + \
"MAILLAGE=" + meshName + "," + ls + "UNITE=" + el.unite + "," + \
"FORMAT_MED=_F(NOM_CHAM='TEMP',NOM_CHAM_MED='TEMP____TEMP',),TOUT_ORDRE='OUI',);" + ls
if sum([self.thermalSets[i].assiType == "file" for i in range(len(self.thermalSets))]) > 0:
tempResStr = tempResStr + ls
# assign materials and temperature results
matTempAssiStr = "# Assign materials and temp. results" + ls + "MATE=AFFE_MATERIAU(MAILLAGE=" + meshName + ",AFFE=(" + ls
i=0
for el in self.materialSets:
matTempAssiStr = matTempAssiStr + "_F("
if el.nodalGroupName == "whole mesh":
matTempAssiStr = matTempAssiStr + "TOUT='OUI',"
else:
matTempAssiStr = matTempAssiStr + "GROUP_MA='" + el.nodalGroupName + "',"
matTempAssiStr = matTempAssiStr + "MATER=" + matDefiNames[i] + ",)," + ls
i = i+1
matTempAssiStr = matTempAssiStr + "),"
i = 0
if len(self.thermalSets) > 0:
matTempAssiStr = matTempAssiStr + "AFFE_VARC=(" + ls
for el in self.thermalSets:
matTempAssiStr = matTempAssiStr + "_F("
if el.nodalGroupName == "whole mesh":
matTempAssiStr = matTempAssiStr + "TOUT='OUI'"
else:
matTempAssiStr = matTempAssiStr + "GROUP_MA='" + el.nodalGroupName + "'"
matTempAssiStr = matTempAssiStr + ",NOM_VARC='TEMP',EVOL=" + tempResNames[i] + ","
if el.assiType == "file":
matTempAssiStr = matTempAssiStr + "VALE_REF=" + str(float(el.T0))
else:
matTempAssiStr = matTempAssiStr + "VALE_REF=0.0"
matTempAssiStr = matTempAssiStr + ",)," + ls
i = i+1
if len(self.thermalSets) > 0:
matTempAssiStr = matTempAssiStr + "),"
matTempAssiStr = matTempAssiStr + ");" + ls + ls
# assign properties for node joints
caraStr = ""
for el in self.nodeJointSets:
if caraStr == "":
caraStr = "# assign properties for node joints" + ls + "CARA=AFFE_CARA_ELEM(MODELE=MODE,DISCRET=("
caraStr = caraStr + "_F(CARA='K_TR_D_N',GROUP_MA='" + el.nodeName + "',VALE=(" + el.cX + "," + el.cY + "," + el.cZ + \
"," + el.cPhiX + "," + el.cPhiY + "," + el.cPhiZ + ",),)," + ls
if not caraStr == "":
caraStr = caraStr + "),);" + ls + ls
# assign restraints/loads via formules
affeCharMecaFStr = ""
# restraints
for el in restraintSetsLocal:
hasFormulesTrans = hasFunction(functionsLocal,el.deltaX, el.deltaY, el.deltaZ) # at least one delta is not numeric (is a function)
hasFormulesRot = 0
if el.nodalGroupName in [self.nodeJointSets[i].nodeName for i in range(len(self.nodeJointSets))]: # restraint applied to a node of a node joint -> rotational DOFs
hasFormulesRot = hasFunction(functionsLocal,el.deltaPhiX, el.deltaPhiY, el.deltaPhiZ)
if hasFormulesTrans or hasFormulesRot: # restraint uses at least one function
affeCharMecaFStr = affeCharMecaFStr + "_F(GROUP_NO='" + el.nodalGroupName + "',"
if hasFunction(functionsLocal,el.deltaX):
affeCharMecaFStr = affeCharMecaFStr + "DX=" + getFormuleName(el.deltaX) + ","
if hasFunction(functionsLocal,el.deltaY):
affeCharMecaFStr = affeCharMecaFStr + "DY=" + getFormuleName(el.deltaY) + ","
if hasFunction(functionsLocal,el.deltaZ):
affeCharMecaFStr = affeCharMecaFStr + "DZ=" + getFormuleName(el.deltaZ) + ","
if hasFormulesRot:
if hasFunction(functionsLocal,el.deltaPhiX):
affeCharMecaFStr = affeCharMecaFStr + "DRX=" + getFormuleName(el.deltaPhiX) + ","
if hasFunction(functionsLocal,el.deltaPhiY):
affeCharMecaFStr = affeCharMecaFStr + "DRY=" + getFormuleName(el.deltaPhiY) + ","
if hasFunction(functionsLocal,el.deltaPhiZ):
affeCharMecaFStr = affeCharMecaFStr + "DRZ=" + getFormuleName(el.deltaPhiZ) + ","
affeCharMecaFStr = affeCharMecaFStr + ")," + ls
if not affeCharMecaFStr == "":
affeCharMecaFStr = "DDL_IMPO=(" + ls + affeCharMecaFStr + ")," + ls
# loads
forceOnVolumeStr = ""
forceOnFaceStr = ""
forceOnEdgeStr = ""
forceOnNodeStr = ""
pressureStr = ""
for el in self.loadSets:
# forces/torques
if el.loadType in ["Force on volume","Force on face","Force on edge","Force on node"]:
hasFormulesForce = hasFunction(functionsLocal,el.FX,el.FY,el.FZ)
hasFormulesTorque = 0
if el.loadType == "Force on node":
if el.nodalGroupName in [self.nodeJointSets[i].nodeName for i in range(len(self.nodeJointSets))]: # load applied to a node of a node joint -> torque assignment possible
hasFormulesTorque = hasFunction(functionsLocal,el.MX,el.MY,el.MZ)
if hasFormulesForce or hasFormulesTorque:
if el.nodalGroupName == "whole mesh":
assiStr = "TOUT='OUI',"
elif el.loadType == "Force on node":
assiStr = "GROUP_NO='" + el.nodalGroupName + "',"
else:
assiStr = "GROUP_MA='" + el.nodalGroupName + "',"
tempStr = "_F(" + assiStr
if hasFunction(functionsLocal,el.FX):
tempStr = tempStr + "FX=" + getFormuleName(el.FX) + ","
if hasFunction(functionsLocal,el.FY):
tempStr = tempStr + "FY=" + getFormuleName(el.FY) + ","
if hasFunction(functionsLocal,el.FZ):
tempStr = tempStr + "FZ=" + getFormuleName(el.FZ) + ","
if hasFormulesTorque:
if hasFunction(functionsLocal,el.MX):
tempStr = tempStr + "MX=" + getFormuleName(el.MX) + ","
if hasFunction(functionsLocal,el.MY):
tempStr = tempStr + "MY=" + getFormuleName(el.MY) + ","
if hasFunction(functionsLocal,el.MZ):
tempStr = tempStr + "MZ=" + getFormuleName(el.MZ) + ","
tempStr = tempStr + ")," + ls
if el.loadType == "Force on volume":
forceOnVolumeStr = forceOnVolumeStr + tempStr
elif el.loadType == "Force on face":
forceOnFaceStr = forceOnFaceStr + tempStr
elif el.loadType == "Force on edge":
forceOnEdgeStr = forceOnEdgeStr + tempStr
elif el.loadType == "Force on node":
forceOnNodeStr = forceOnNodeStr + tempStr
# pressures
if el.loadType == "Pressure":
if hasFunction(functionsLocal,el.p):
pressureStr = pressureStr + "_F(GROUP_MA='" + el.nodalGroupName + "',PRES=" + getFormuleName(el.p) + ",)," + ls
if not forceOnVolumeStr == "":
affeCharMecaFStr = affeCharMecaFStr + "FORCE_INTERNE=(" + ls + forceOnVolumeStr + ")," + ls
if not forceOnFaceStr == "":
affeCharMecaFStr = affeCharMecaFStr + "FORCE_FACE=(" + ls + forceOnFaceStr + ")," + ls
if not forceOnEdgeStr == "":
affeCharMecaFStr = affeCharMecaFStr + "FORCE_ARETE=(" + ls + forceOnEdgeStr + ")," + ls
if not forceOnNodeStr == "":
affeCharMecaFStr = affeCharMecaFStr + "FORCE_NODALE=(" + ls + forceOnNodeStr + ")," + ls
if not pressureStr == "":
affeCharMecaFStr = affeCharMecaFStr + "PRES_REP=(" + ls + pressureStr + ")," + ls
if not affeCharMecaFStr == "":
affeCharMecaFStr = "# assign restraints/loads via formules" + ls + "CHARF=AFFE_CHAR_MECA_F(MODELE=MODE," + ls + affeCharMecaFStr + ");" + ls + ls
# assign remaining restraints, node joints and loads
affeCharMecaStr = ""
# restraints
for el in restraintSetsLocal:
hasConstantsTrans = hasConstant(functionsLocal,el.deltaX, el.deltaY, el.deltaZ) # at least one delta is not a function
hasConstantsRot = 0
if el.nodalGroupName in [self.nodeJointSets[i].nodeName for i in range(len(self.nodeJointSets))]: # restraint applied to a node of a node joint -> rotational DOFs
hasConstantsRot = hasConstant(functionsLocal,el.deltaPhiX, el.deltaPhiY, el.deltaPhiZ)
if hasConstantsTrans or hasConstantsRot: # restraint uses at least one constant
if not el.rotMatViaPython:
affeCharMecaStr = affeCharMecaStr + "_F(GROUP_NO='" + el.nodalGroupName + "',"
if hasConstant(functionsLocal,el.deltaX):
affeCharMecaStr = affeCharMecaStr + "DX=" + el.deltaX + ","
if hasConstant(functionsLocal,el.deltaY):
affeCharMecaStr = affeCharMecaStr + "DY=" + el.deltaY + ","
if hasConstant(functionsLocal,el.deltaZ):
affeCharMecaStr = affeCharMecaStr + "DZ=" + el.deltaZ + ","
if hasConstantsRot:
if hasConstant(functionsLocal,el.deltaPhiX):
affeCharMecaStr = affeCharMecaStr + "DRX=" + el.deltaPhiX + ","
if hasConstant(functionsLocal,el.deltaPhiY):
affeCharMecaStr = affeCharMecaStr + "DRY=" + el.deltaPhiY + ","
if hasConstant(functionsLocal,el.deltaPhiZ):
affeCharMecaStr = affeCharMecaStr + "DRZ=" + el.deltaPhiZ + ","
affeCharMecaStr = affeCharMecaStr + ")," + ls
if not affeCharMecaStr == "":
affeCharMecaStr = "DDL_IMPO=(" + ls + affeCharMecaStr + ")," + ls
# node joints
nodeJointsStr = ""
for el in self.nodeJointSets:
nodeJointsStr = nodeJointsStr + "_F(GROUP_NO='" + el.jointGroupName + "',)," + ls
# loads
forceOnVolumeStr = ""
forceOnFaceStr = ""
forceOnEdgeStr = ""
forceOnNodeStr = ""
pressureStr = ""
gravityStr = ""
centrifugalForceStr = ""
for el in self.loadSets:
# forces/torques
if el.loadType in ["Force on volume","Force on face","Force on edge","Force on node"]:
hasConstantsForce = hasConstant(functionsLocal,el.FX,el.FY,el.FZ)
hasConstantsTorque = 0
if el.loadType == "Force on node":
if el.nodalGroupName in [self.nodeJointSets[i].nodeName for i in range(len(self.nodeJointSets))]: # load applied to a node of a node joint -> torque assignment possible
hasConstantsTorque = hasConstant(functionsLocal,el.MX,el.MY,el.MZ)
if hasConstantsForce or hasConstantsTorque:
if el.nodalGroupName == "whole mesh":
assiStr = "TOUT='OUI',"
elif el.loadType == "Force on node":
assiStr = "GROUP_NO='" + el.nodalGroupName + "',"
else:
assiStr = "GROUP_MA='" + el.nodalGroupName + "',"
tempStr = "_F(" + assiStr
if hasConstant(functionsLocal,el.FX):
tempStr = tempStr + "FX=" + el.FX + ","
if hasConstant(functionsLocal,el.FY):
tempStr = tempStr + "FY=" + el.FY + ","
if hasConstant(functionsLocal,el.FZ):
tempStr = tempStr + "FZ=" + el.FZ + ","
if hasConstantsTorque:
if hasConstant(functionsLocal,el.MX):
tempStr = tempStr + "MX=" + el.MX + ","
if hasConstant(functionsLocal,el.MY):
tempStr = tempStr + "MY=" + gel.MY + ","
if hasConstant(functionsLocal,el.MZ):
tempStr = tempStr + "MZ=" + el.MZ + ","
tempStr = tempStr + ")," + ls
if el.loadType == "Force on volume":
forceOnVolumeStr = forceOnVolumeStr + tempStr
elif el.loadType == "Force on face":
forceOnFaceStr = forceOnFaceStr + tempStr
elif el.loadType == "Force on edge":
forceOnEdgeStr = forceOnEdgeStr + tempStr
elif el.loadType == "Force on node":
forceOnNodeStr = forceOnNodeStr + tempStr
# pressures
if el.loadType == "Pressure":
if hasConstant(functionsLocal,el.p):
pressureStr = pressureStr + "_F(GROUP_MA='" + el.nodalGroupName + "',PRES=" + el.p + ",)," + ls
# gravity
if el.loadType == "Gravity":
g = (float(el.gX)**2+float(el.gY)**2+float(el.gZ)**2)**0.5
dirStr = "(" + str(float(el.gX)/g) + "," + str(float(el.gY)/g) + "," + str(float(el.gZ)/g) + ",)"
if el.nodalGroupName == "whole mesh":
assiStr = ""
else:
assiStr = "GROUP_MA='" + el.nodalGroupName + "',"
gravityStr = gravityStr + "_F(" + assiStr + "GRAVITE=" + str(g) + ",DIRECTION=" + dirStr + ",)," + ls
# centrifugal forces
if el.loadType == "Centrifugal force":
if el.nodalGroupName == "whole mesh":
assiStr = "TOUT='OUI',"
else:
assiStr = "GROUP_MA='" + el.nodalGroupName + "',"
centrifugalForceStr = centrifugalForceStr + "_F(" + assiStr + "VITESSE=" + el.omega + ",AXE=(" + \
el.axisX + "," + el.axisY + "," + el.axisZ + ",),CENTRE=(" + el.centerX + "," + el.centerY + "," + el.centerZ + ",),)," + ls
if not nodeJointsStr == "":
affeCharMecaStr = affeCharMecaStr + "LIAISON_SOLIDE=(" + ls + nodeJointsStr + ")," + ls
if not forceOnVolumeStr == "":
affeCharMecaStr = affeCharMecaStr + "FORCE_INTERNE=(" + ls + forceOnVolumeStr + ")," + ls
if not forceOnFaceStr == "":
affeCharMecaStr = affeCharMecaStr + "FORCE_FACE=(" + ls + forceOnFaceStr + ")," + ls
if not forceOnEdgeStr == "":
affeCharMecaStr = affeCharMecaStr + "FORCE_ARETE=(" + ls + forceOnEdgeStr + ")," + ls
if not forceOnNodeStr == "":
affeCharMecaStr = affeCharMecaStr + "FORCE_NODALE=(" + ls + forceOnNodeStr + ")," + ls
if not pressureStr == "":
affeCharMecaStr = affeCharMecaStr + "PRES_REP=(" + ls + pressureStr + ")," + ls
if not gravityStr == "":
affeCharMecaStr = affeCharMecaStr + "PESANTEUR=(" + ls + gravityStr + ")," + ls
if not centrifugalForceStr == "":
affeCharMecaStr = affeCharMecaStr + "ROTATION=(" + ls + centrifugalForceStr + ")," + ls
if not affeCharMecaStr == "":
affeCharMecaStr = "# assign constant restraints/loads and node joints" + ls + "CHAR=AFFE_CHAR_MECA(MODELE=MODE," + ls + affeCharMecaStr + ");" + ls + ls
# contact definition
contactStr = ""
if self.analysisType == "non-linear static" and len(self.contactSets) > 0:
contactStr = "# contact definition" + ls +"CONT=DEFI_CONTACT(MODELE=MODE,"
if self.contactSets[0].globalSettings.formulationType == "discrete":
contactStr = contactStr + "FORMULATION='DISCRETE',"
else:
contactStr = contactStr + "FORMULATION='CONTINUE',ALGO_RESO_CONT='" + self.contactSets[0].globalSettings.contactAlgo + "',"
if self.contactSets[0].globalSettings.frictionModel == "Coulomb":
contactStr = contactStr + "FROTTEMENT='COULOMB',"
if self.contactSets[0].globalSettings.formulationType == "continuous":
contactStr = contactStr + "ALGO_RESO_FROT='" + self.contactSets[0].globalSettings.frictionAlgo + "',"
else:
contactStr = contactStr + "FROTTEMENT='SANS',"
contactStr = contactStr + "ZONE="
for el in self.contactSets:
contactStr = contactStr + ls + "_F(GROUP_MA_MAIT='" + el.masterName + "',GROUP_MA_ESCL='" + el.slaveName + "',"
if el.globalSettings.formulationType == "discrete":
contactStr = contactStr + "ALGO_CONT='" + el.contactAlgo + "',"
if el.contactAlgo == "PENALISATION":
contactStr = contactStr + "E_N=" + el.E_N + ","
if el.globalSettings.frictionModel == "Coulomb":
contactStr = contactStr + "COULOMB=" + el.fricCoeff + "," + "ALGO_FROT='PENALISATION',E_T=" + el.E_T + ","
else:
if el.globalSettings.frictionModel == "Coulomb":
contactStr = contactStr + "COULOMB=" + el.fricCoeff + ","
contactStr = contactStr + "),"
contactStr = contactStr + ");" + ls + ls
# setting up and calling the solver
if self.analysisType == "linear static":
# MECA_STATIQUE
solverStr = "# calling MECA_STATIQUE" + ls + "RESU=MECA_STATIQUE(MODELE=MODE,CHAM_MATER=MATE,"
if not caraStr == "":
solverStr = solverStr + "CARA_ELEM=CARA,"
solverStr = solverStr + "EXCIT=("
if not affeCharMecaStr == "":
solverStr = solverStr + "_F(CHARGE=CHAR,"
if not tListStr == "" and (self.timeRampUp or self.timeRampDown):
solverStr = solverStr + "FONC_MULT=SF,),"
else:
solverStr = solverStr + "),"
if not affeCharMecaFStr == "":
solverStr = solverStr + "_F(CHARGE=CHARF,"
if not tListStr == "" and self.timeRampFunc and (self.timeRampUp or self.timeRampDown):
solverStr = solverStr + "FONC_MULT=SF,),"
else:
solverStr = solverStr + "),"
solverStr = solverStr + "),"
if not tListStr == "":
solverStr = solverStr + "LIST_INST=TLIST,"
solverStr = solverStr + "SOLVEUR=_F(METHODE='" + self.method + "',),);" + ls + ls
else:
# STAT_NON_LINE
solverStr = "# calling STAT_NON_LINE" + ls + "RESU=STAT_NON_LINE(MODELE=MODE,CHAM_MATER=MATE,"
if not caraStr == "":
solverStr = solverStr + "CARA_ELEM=CARA,"
solverStr = solverStr + "EXCIT=("
if not affeCharMecaStr == "":
solverStr = solverStr + "_F(CHARGE=CHAR,"
if (self.timeRampUp or self.timeRampDown):
solverStr = solverStr + "FONC_MULT=SF,),"
else:
solverStr = solverStr + "),"
if not affeCharMecaFStr == "":
solverStr = solverStr + "_F(CHARGE=CHARF,"
if self.timeRampFunc and (self.timeRampUp or self.timeRampDown):
solverStr = solverStr + "FONC_MULT=SF,),"
else:
solverStr = solverStr + "),"
solverStr = solverStr + "),"
if not contactStr == "":
solverStr = solverStr + "CONTACT=CONT," + ls
if self.strainModel == "Green-Lagrange":
solverStr = solverStr + "COMPORTEMENT=_F(RELATION='ELAS',DEFORMATION='GROT_GDEP',)," + ls
solverStr = solverStr + "NEWTON=_F(REAC_ITER=1,),INCREMENT=_F(LIST_INST=TLIST,),CONVERGENCE=_F(ITER_GLOB_MAXI=" + self.maxIter + ",RESI_GLOB_RELA=" + self.resi + \
"),SOLVEUR=_F(METHODE='" + self.method + "',),);" + ls + ls
# compute quantities from result
calcChampStr = ""
if self.outputSet.SIGM + self.outputSet.EPS + self.outputSet.SIEQ + self.outputSet.REAC > 0:
calcChampStr = "# compute output quantities" + ls + "RESU=CALC_CHAMP(reuse =RESU,RESULTAT=RESU," + ls
if self.outputSet.SIGM:
calcChampStr = calcChampStr + "CONTRAINTE=('SIGM_NOEU',)," + ls
if self.outputSet.EPS:
if self.strainModel == "Green-Lagrange" and self.analysisType == "non-linear static":
calcChampStr = calcChampStr + "DEFORMATION=('EPSG_NOEU',)," + ls
else:
calcChampStr = calcChampStr + "DEFORMATION=('EPSI_NOEU',)," + ls
if self.outputSet.SIEQ:
calcChampStr = calcChampStr + "CRITERES=('SIEQ_NOEU',)," + ls
if self.outputSet.REAC:
calcChampStr = calcChampStr + "FORCE=('REAC_NODA',)," + ls
calcChampStr = calcChampStr + ");" + ls + ls
# estimate error
erreurStr = ""
if self.outputSet.ERME:
erreurStr = "# error estimation a posteriori " + ls + "RESU=CALC_ERREUR(reuse=RESU,RESULTAT=RESU,OPTION=('ERME_ELEM',),);" + ls + ls
# compute reactions at restraints
reacStr = ""
if self.outputSet.REAC and len(restraintSetsLocal) > 0:
reacStr = "# integrate reactions at restraints" + ls + "Reac_Sum=POST_RELEVE_T(ACTION=("
for el in restraintSetsLocal:
reacStr = reacStr + "_F(OPERATION='EXTRACTION',INTITULE='" + el.nodalGroupName + \
"',RESULTAT=RESU,NOM_CHAM='REAC_NODA',GROUP_NO=('" + el.nodalGroupName + "',),RESULTANTE=('DX','DY','DZ',),MOMENT=('DRX','DRY','DRY',)," + \
"POINT=(" + el.reacMX + "," + el.reacMY + "," + el.reacMZ + ",),),"
reacStr = reacStr + "),);" + ls + ls + "IMPR_TABLE(TABLE=Reac_Sum,);" + ls + ls
# write the results to file
writeStr = "# write result to file (mechanical quantities)" + ls + "IMPR_RESU(FORMAT='MED',RESU=_F(RESULTAT=RESU,NOM_CHAM=('DEPL',"
if self.outputSet.SIGM:
writeStr = writeStr + "'SIGM_NOEU',"
if self.outputSet.SIEQ:
writeStr = writeStr + "'SIEQ_NOEU',"
if self.outputSet.EPS:
if self.strainModel == "Green-Lagrange" and self.analysisType == "non-linear static":
writeStr = writeStr + "'EPSG_NOEU',"
else:
writeStr = writeStr + "'EPSI_NOEU',"
if self.outputSet.REAC:
writeStr = writeStr + "'REAC_NODA',"
if self.outputSet.ERME:
writeStr = writeStr + "'ERME_ELEM',"
writeStr = writeStr + "),"
if self.outputSet.nodalGroupName == "whole mesh":
writeStr = writeStr + "TOUT='OUI',),);" + ls + ls
else:
writeStr = writeStr + "GROUP_MA='" + self.outputSet.nodalGroupName + "',),);" + ls + ls
if self.outputSet.TEMP and len(self.thermalSets) > 0:
writeStr = writeStr + "# write result to file (temperature)" + ls
for el in tempResNames:
writeStr = writeStr + "IMPR_RESU(FORMAT='MED',RESU=_F(RESULTAT=" + el + ",NOM_CHAM='TEMP',TOUT='OUI',),);" + ls
writeStr = writeStr + ls
# FIN statement
finStr = "FIN();"
# assemble everything
commStr = pythonFuns + debutStr + tListStr + formuleStr + matDefiStr + meshStr + modelStr + tempFieldStr + tempResStr + matTempAssiStr + caraStr + affeCharMecaFStr + \
affeCharMecaStr + contactStr + solverStr + calcChampStr + erreurStr + reacStr + writeStr + finStr
return commStr
| gpl-3.0 |
patricmutwiri/pombola | pombola/south_africa/fallback_urls.py | 4 | 2260 | from django.conf.urls import patterns, include, url
from pombola.south_africa.views import (
OldSectionRedirect, OldSpeechRedirect,
SASpeechView, SASectionView,
SAHansardIndex, SACommitteeIndex, SAQuestionIndex,
SASpeakerRedirectView)
# We add redirects for the old-style SayIt patterns, so that old
# bookmarks aren't lost. For examples, links to speeches should still
# work, e.g.
#
# /question/speech/367721
# /hansard/speech/7606
# /committee/speech/318055
# -> http://www.pmg.org.za/report/20131008-auditor-general-key-challenges-in-agriculture-departments-audit-report-2013-minister-in-attendance
#
# (The last one should be a redirect.) Old-style links to SayIt
# sections should still work too, e.g.:
#
# /question/59146
# /hansard/928
urlpatterns = [
url(r'^committee/(?P<pk>\d+)$', OldSectionRedirect.as_view()),
url(r'^question/(?P<pk>\d+)$', OldSectionRedirect.as_view()),
url(r'^hansard/(?P<pk>\d+)$', OldSectionRedirect.as_view()),
url(r'^committee/speech/(?P<pk>\d+)$', OldSpeechRedirect.as_view()),
url(r'^question/speech/(?P<pk>\d+)$', OldSpeechRedirect.as_view()),
url(r'^hansard/speech/(?P<pk>\d+)$', OldSpeechRedirect.as_view()),
]
# Make sure the top level custom indexes work:
urlpatterns += patterns('',
url(r'^hansard/?$', SAHansardIndex.as_view(), name='section-list-hansard'),
url(r'^committee-minutes/?$', SACommitteeIndex.as_view(), name='section-list-committee-minutes'),
url(r'^question/?$', SAQuestionIndex.as_view(), name='section-list-question'),
)
# Anything else unmatched we assume is dealt with by SayIt (which will
# return a 404 if the path is unknown anyway):
fallback_sayit_patterns = patterns('',
# Exposed endpoint for a speech referred to by a numeric ID:
url(r'^speech/(?P<pk>\d+)$', SASpeechView.as_view(), name='speech-view'),
# Fake endpoint to redirect to the right speaker:
url(r'^speaker/(?P<slug>[-\w]+)$', SASpeakerRedirectView.as_view(), name='speaker-view'),
# Anything else might be a slug referring to a section:
url(r'^(?P<full_slug>.+)$', SASectionView.as_view(), name='section-view'),
)
urlpatterns += patterns('',
url(r'', include(fallback_sayit_patterns, namespace='sayit', app_name='speeches')),
)
| agpl-3.0 |
RedHatSatellite/sat6_disconnected_tools | check_sync.py | 1 | 5114 | #!/usr/bin/python
#title :check_sync.py
#description :Checks Satellite 6 repository sync status
#URL :https://github.com/RedHatSatellite/sat6_disconnected_tools
#author :Geoff Gatward <[email protected]>
#notes :This script is NOT SUPPORTED by Red Hat Global Support Services.
#license :GPLv3
#==============================================================================
"""Check the status of Sync tasks.
Draws the users attention to problems in the sync tasks that could lead to
inconsistent repository states.
Call with -l switch to loop until all sync tasks are complete, otherwise runs
as a one-shot check.
"""
import sys, os, argparse, time
import simplejson as json
import helpers
def check_running_tasks(clear):
"""Check for any currently running Sync tasks.
Checks for any Synchronize tasks in running/paused or Incomplete state.
"""
#pylint: disable-msg=R0912,R0914,R0915
# Clear the screen
if clear:
os.system('clear')
print helpers.HEADER + "Checking for running/paused yum sync tasks..." + helpers.ENDC
tasks = helpers.get_p_json(
helpers.FOREMAN_API + "tasks/",
json.dumps(
{
"per_page": "100",
}
)
)
# From the list of tasks, look for any running or paused sync jobs.
# If we find any, flag them - exporting content in this state could lead to inconsistencies.
running_sync = 0
for task_result in tasks['results']:
if task_result['state'] == 'running' and task_result['label'] != 'Actions::BulkAction':
if task_result['humanized']['action'] == 'Synchronize':
running_sync = 1
print helpers.BOLD + "Running: " + helpers.ENDC \
+ task_result['input']['repository']['name']
if task_result['state'] == 'paused' and task_result['label'] != 'Actions::BulkAction':
if task_result['humanized']['action'] == 'Synchronize':
running_sync = 1
print helpers.ERROR + "Paused: " + helpers.ENDC \
+ task_result['input']['repository']['name']
if not running_sync:
print helpers.GREEN + "None detected" + helpers.ENDC
# Check any repos marked as Sync Incomplete
print helpers.HEADER + "\nChecking for incomplete (stopped) yum sync tasks..." + helpers.ENDC
repo_list = helpers.get_json(
helpers.KATELLO_API + "/content_view_versions")
# Extract the list of repo ids, then check the state of each one.
incomplete_sync = 0
for repo in repo_list['results']:
for repo_id in repo['repositories']:
repo_status = helpers.get_json(
helpers.KATELLO_API + "/repositories/" + str(repo_id['id']))
if repo_status['content_type'] == 'yum':
if repo_status['last_sync'] is None:
if repo_status['library_instance_id'] is None:
# incomplete_sync = 1
# print helpers.ERROR + "Broken Repo: " + helpers.ENDC + repo_status['name']
print helpers.WARNING + "Never Synchronized: " + helpers.ENDC + repo_status['name']
elif repo_status['last_sync']['state'] == 'stopped':
if repo_status['last_sync']['result'] == 'warning':
incomplete_sync = 1
print helpers.WARNING + "Incomplete: " + helpers.ENDC + repo_status['name']
else:
msg = repo_status['name'] + " - last_sync: " + repo_status['last_sync']['ended_at']
helpers.log_msg(msg, 'DEBUG')
# If we have detected incomplete sync tasks, warn the user.
# This isn't fatal, but *MAY* lead to inconsistent repositories on the disconnected sat.
if not incomplete_sync:
print helpers.GREEN + "No incomplete syncs detected\n" + helpers.ENDC
else:
print "\n"
# Exit the loop if both tests are clear
if not running_sync and not incomplete_sync:
sys.exit(0)
def main(args):
"""Check the status of Sync tasks."""
#pylint: disable-msg=R0914,R0915
parser = argparse.ArgumentParser(description='Checks status of yum repository sync tasks.')
# pylint: disable=bad-continuation
parser.add_argument('-l', '--loop', help='Loop check until all tasks complete', required=False,
action="store_true")
args = parser.parse_args()
# Check if there are any currently running tasks that will conflict with an export
# Loop until all tasks are complete.
if args.loop:
try:
while True:
clear = True
check_running_tasks(clear)
time.sleep(5)
except KeyboardInterrupt:
print "End"
else:
clear = False
check_running_tasks(clear)
sys.exit(0)
if __name__ == "__main__":
try:
main(sys.argv[1:])
except KeyboardInterrupt, e:
print >> sys.stderr, ("\n\nExiting on user cancel.")
sys.exit(1)
| gpl-3.0 |
elim/django-rest-framework | rest_framework/negotiation.py | 71 | 4244 | """
Content negotiation deals with selecting an appropriate renderer given the
incoming request. Typically this will be based on the request's Accept header.
"""
from __future__ import unicode_literals
from django.http import Http404
from rest_framework import HTTP_HEADER_ENCODING, exceptions
from rest_framework.settings import api_settings
from rest_framework.utils.mediatypes import (
_MediaType, media_type_matches, order_by_precedence
)
class BaseContentNegotiation(object):
def select_parser(self, request, parsers):
raise NotImplementedError('.select_parser() must be implemented')
def select_renderer(self, request, renderers, format_suffix=None):
raise NotImplementedError('.select_renderer() must be implemented')
class DefaultContentNegotiation(BaseContentNegotiation):
settings = api_settings
def select_parser(self, request, parsers):
"""
Given a list of parsers and a media type, return the appropriate
parser to handle the incoming request.
"""
for parser in parsers:
if media_type_matches(parser.media_type, request.content_type):
return parser
return None
def select_renderer(self, request, renderers, format_suffix=None):
"""
Given a request and a list of renderers, return a two-tuple of:
(renderer, media type).
"""
# Allow URL style format override. eg. "?format=json"
format_query_param = self.settings.URL_FORMAT_OVERRIDE
format = format_suffix or request.query_params.get(format_query_param)
if format:
renderers = self.filter_renderers(renderers, format)
accepts = self.get_accept_list(request)
# Check the acceptable media types against each renderer,
# attempting more specific media types first
# NB. The inner loop here isn't as bad as it first looks :)
# Worst case is we're looping over len(accept_list) * len(self.renderers)
for media_type_set in order_by_precedence(accepts):
for renderer in renderers:
for media_type in media_type_set:
if media_type_matches(renderer.media_type, media_type):
# Return the most specific media type as accepted.
media_type_wrapper = _MediaType(media_type)
if (
_MediaType(renderer.media_type).precedence >
media_type_wrapper.precedence
):
# Eg client requests '*/*'
# Accepted media type is 'application/json'
full_media_type = ';'.join(
(renderer.media_type,) +
tuple('{0}={1}'.format(
key, value.decode(HTTP_HEADER_ENCODING))
for key, value in media_type_wrapper.params.items()))
return renderer, full_media_type
else:
# Eg client requests 'application/json; indent=8'
# Accepted media type is 'application/json; indent=8'
return renderer, media_type
raise exceptions.NotAcceptable(available_renderers=renderers)
def filter_renderers(self, renderers, format):
"""
If there is a '.json' style format suffix, filter the renderers
so that we only negotiate against those that accept that format.
"""
renderers = [renderer for renderer in renderers
if renderer.format == format]
if not renderers:
raise Http404
return renderers
def get_accept_list(self, request):
"""
Given the incoming request, return a tokenised list of media
type strings.
Allows URL style accept override. eg. "?accept=application/json"
"""
header = request.META.get('HTTP_ACCEPT', '*/*')
header = request.query_params.get(self.settings.URL_ACCEPT_OVERRIDE, header)
return [token.strip() for token in header.split(',')]
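# Example (illustrative, not part of REST framework): a project can supply its own
# negotiation class and point DEFAULT_CONTENT_NEGOTIATION_CLASS at it in the
# REST_FRAMEWORK settings. The sketch below ignores the Accept header and always
# uses the first configured parser/renderer.
#
#   class IgnoreClientContentNegotiation(BaseContentNegotiation):
#       def select_parser(self, request, parsers):
#           # Always use the first parser in the `.parser_classes` list.
#           return parsers[0]
#
#       def select_renderer(self, request, renderers, format_suffix=None):
#           # Always use the first renderer, paired with its own media type.
#           return (renderers[0], renderers[0].media_type)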
| bsd-2-clause |
crazyskateface/LC | chat/migrations/0002_auto__add_field_comments_datetime.py | 1 | 4344 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comments.datetime'
db.add_column(u'chat_comments', 'datetime',
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default=datetime.datetime(2014, 4, 30, 0, 0), blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Comments.datetime'
db.delete_column(u'chat_comments', 'datetime')
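    # Illustrative South commands for applying or reverting this migration
    # (the name of the preceding 0001 migration is assumed):
    #
    #     ./manage.py migrate chat 0002_auto__add_field_comments_datetime
    #     ./manage.py migrate chat 0001    # roll back; runs backwards()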
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'chat.comments': {
'Meta': {'object_name': 'Comments'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['chat'] | mit |
timothsp/where2ate | venv/lib/python3.3/site-packages/pkg_resources/__init__.py | 211 | 106670 | """
Package resource API
--------------------
A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
from __future__ import absolute_import
import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import token
import symbol
import operator
import platform
import collections
import plistlib
import email.parser
import tempfile
import textwrap
from pkgutil import get_importer
try:
import _imp
except ImportError:
# Python 3.2 compatibility
import imp as _imp
PY3 = sys.version_info > (3,)
PY2 = not PY3
if PY3:
from urllib.parse import urlparse, urlunparse
if PY2:
from urlparse import urlparse, urlunparse
if PY3:
string_types = str,
else:
string_types = str, eval('unicode')
iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems()
# capture these to bypass sandboxing
from os import utime
try:
from os import mkdir, rename, unlink
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
from os import open as os_open
from os.path import isdir, split
# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
import importlib.machinery as importlib_machinery
else:
importlib_machinery = None
try:
import parser
except ImportError:
pass
try:
import pkg_resources._vendor.packaging.version
import pkg_resources._vendor.packaging.specifiers
packaging = pkg_resources._vendor.packaging
except ImportError:
# fallback to naturally-installed version; allows system packagers to
# omit vendored packages.
import packaging.version
import packaging.specifiers
# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
class PEP440Warning(RuntimeWarning):
"""
Used when there is an issue with a version or specifier not complying with
PEP 440.
"""
class _SetuptoolsVersionMixin(object):
def __hash__(self):
return super(_SetuptoolsVersionMixin, self).__hash__()
def __lt__(self, other):
if isinstance(other, tuple):
return tuple(self) < other
else:
return super(_SetuptoolsVersionMixin, self).__lt__(other)
def __le__(self, other):
if isinstance(other, tuple):
return tuple(self) <= other
else:
return super(_SetuptoolsVersionMixin, self).__le__(other)
def __eq__(self, other):
if isinstance(other, tuple):
return tuple(self) == other
else:
return super(_SetuptoolsVersionMixin, self).__eq__(other)
def __ge__(self, other):
if isinstance(other, tuple):
return tuple(self) >= other
else:
return super(_SetuptoolsVersionMixin, self).__ge__(other)
def __gt__(self, other):
if isinstance(other, tuple):
return tuple(self) > other
else:
return super(_SetuptoolsVersionMixin, self).__gt__(other)
def __ne__(self, other):
if isinstance(other, tuple):
return tuple(self) != other
else:
return super(_SetuptoolsVersionMixin, self).__ne__(other)
def __getitem__(self, key):
return tuple(self)[key]
def __iter__(self):
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {
'pre': 'c',
'preview': 'c',
'-': 'final-',
'rc': 'c',
'dev': '@',
}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part, part)
if not part or part == '.':
continue
if part[:1] in '0123456789':
# pad for numeric comparison
yield part.zfill(8)
else:
yield '*'+part
# ensure that alpha/beta/candidate are before final
yield '*final'
def old_parse_version(s):
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
# remove '-' before a prerelease tag
if part < '*final':
while parts and parts[-1] == '*final-':
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == '00000000':
parts.pop()
parts.append(part)
return tuple(parts)
# Warn for use of this function
warnings.warn(
"You have iterated over the result of "
"pkg_resources.parse_version. This is a legacy behavior which is "
"inconsistent with the new version class introduced in setuptools "
"8.0. In most cases, conversion to a tuple is unnecessary. For "
"comparison of versions, sort the Version instances directly. If "
"you have another use case requiring the tuple, please file a "
"bug with the setuptools project describing that need.",
RuntimeWarning,
stacklevel=1,
)
for part in old_parse_version(str(self)):
yield part
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
pass
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
packaging.version.LegacyVersion):
pass
def parse_version(v):
try:
return SetuptoolsVersion(v)
except packaging.version.InvalidVersion:
return SetuptoolsLegacyVersion(v)
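# Usage sketch (illustrative): parse_version gives PEP 440-aware ordering and
# falls back to legacy ordering for non-conforming version strings.
#
#     assert parse_version('1.10') > parse_version('1.9')
#     assert parse_version('1.0.dev1') < parse_version('1.0')
#     parse_version('not.a.release')   # -> SetuptoolsLegacyVersion instance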
_state_vars = {}
def _declare_state(vartype, **kw):
globals().update(kw)
_state_vars.update(dict.fromkeys(kw, vartype))
def __getstate__():
state = {}
g = globals()
for k, v in _state_vars.items():
state[k] = g['_sget_'+v](g[k])
return state
def __setstate__(state):
g = globals()
for k, v in state.items():
g['_sset_'+_state_vars[k]](k, g[k], v)
return state
def _sget_dict(val):
return val.copy()
def _sset_dict(key, ob, state):
ob.clear()
ob.update(state)
def _sget_object(val):
return val.__getstate__()
def _sset_object(key, ob, state):
ob.__setstate__(state)
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
of Mac OS X that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
version of Mac OS X that we are *running*. To allow usage of packages that
explicitly require a newer version of Mac OS X, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
platform strings, this function should be extended accordingly.
"""
plat = get_build_platform()
m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
except ValueError:
# not Mac OS X
pass
return plat
__all__ = [
# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info',
'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',
# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',
'get_default_cache',
# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',
# Exceptions
'ResolutionError', 'VersionConflict', 'DistributionNotFound',
'UnknownExtra', 'ExtractionError',
# Warnings
'PEP440Warning',
# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
# filesystem utilities
'ensure_directory', 'normalize_path',
# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',
# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
"""Abstract base for dependency resolution errors"""
def __repr__(self):
return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
"""
An already-installed version conflicts with the requested version.
Should be initialized with the installed Distribution and the requested
Requirement.
"""
_template = "{self.dist} is installed but {self.req} is required"
@property
def dist(self):
return self.args[0]
@property
def req(self):
return self.args[1]
def report(self):
return self._template.format(**locals())
def with_context(self, required_by):
"""
If required_by is non-empty, return a version of self that is a
ContextualVersionConflict.
"""
if not required_by:
return self
args = self.args + (required_by,)
return ContextualVersionConflict(*args)
class ContextualVersionConflict(VersionConflict):
"""
A VersionConflict that accepts a third parameter, the set of the
requirements that required the installed Distribution.
"""
_template = VersionConflict._template + ' by {self.required_by}'
@property
def required_by(self):
return self.args[2]
class DistributionNotFound(ResolutionError):
"""A requested distribution was not found"""
_template = ("The '{self.req}' distribution was not found "
"and is required by {self.requirers_str}")
@property
def req(self):
return self.args[0]
@property
def requirers(self):
return self.args[1]
@property
def requirers_str(self):
if not self.requirers:
return 'the application'
return ', '.join(self.requirers)
def report(self):
return self._template.format(**locals())
def __str__(self):
return self.report()
class UnknownExtra(ResolutionError):
"""Distribution doesn't have an "extra feature" of the given name"""
_provider_factories = {}
PY_MAJOR = sys.version[:3]
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
def register_loader_type(loader_type, provider_factory):
"""Register `provider_factory` to make providers for `loader_type`
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
and `provider_factory` is a function that, passed a *module* object,
returns an ``IResourceProvider`` for that module.
"""
_provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
"""Return an IResourceProvider for the named module or requirement"""
if isinstance(moduleOrReq, Requirement):
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
try:
module = sys.modules[moduleOrReq]
except KeyError:
__import__(moduleOrReq)
module = sys.modules[moduleOrReq]
loader = getattr(module, '__loader__', None)
return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
if not _cache:
version = platform.mac_ver()[0]
# fallback for MacPorts
if version == '':
plist = '/System/Library/CoreServices/SystemVersion.plist'
if os.path.exists(plist):
if hasattr(plistlib, 'readPlist'):
plist_content = plistlib.readPlist(plist)
if 'ProductVersion' in plist_content:
version = plist_content['ProductVersion']
_cache.append(version.split('.'))
return _cache[0]
def _macosx_arch(machine):
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
def get_build_platform():
"""Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X.
"""
try:
# Python 2.7 or >=3.2
from sysconfig import get_platform
except ImportError:
from distutils.util import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
version = _macosx_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
_macosx_arch(machine))
except ValueError:
# if someone is running a non-Mac darwin system, this will fall
# through to the default implementation
pass
return plat
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
def compatible_platforms(provided, required):
"""Can code for the `provided` platform run on the `required` platform?
Returns true if either platform is ``None``, or the platforms are equal.
XXX Needs compatibility checks for Linux and other unixy OSes.
"""
if provided is None or required is None or provided==required:
# easy case
return True
# Mac OS X special cases
reqMac = macosVersionString.match(required)
if reqMac:
provMac = macosVersionString.match(provided)
# is this a Mac package?
if not provMac:
# this is backwards compatibility for packages built before
# setuptools 0.6. All packages built after this point will
# use the new macosx designation.
provDarwin = darwinVersionString.match(provided)
if provDarwin:
dversion = int(provDarwin.group(1))
macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
if dversion == 7 and macosversion >= "10.3" or \
dversion == 8 and macosversion >= "10.4":
return True
# egg isn't macosx or legacy darwin
return False
# are they the same major version and machine type?
if provMac.group(1) != reqMac.group(1) or \
provMac.group(3) != reqMac.group(3):
return False
# is the required OS major update >= the provided one?
if int(provMac.group(2)) > int(reqMac.group(2)):
return False
return True
# XXX Linux and other platforms' special cases should go here
return False
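# Illustrative checks (the platform strings are hypothetical examples):
#
#     compatible_platforms(None, 'linux-x86_64')                           # True
#     compatible_platforms('macosx-10.9-x86_64', 'macosx-10.12-x86_64')    # True: built for 10.9, runs on 10.12
#     compatible_platforms('macosx-10.12-x86_64', 'macosx-10.9-x86_64')    # False: requires a newer OS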
def run_script(dist_spec, script_name):
"""Locate distribution `dist_spec` and run its `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
require(dist_spec)[0].run_script(script_name, ns)
# backward compatibility
run_main = run_script
def get_distribution(dist):
"""Return a current distribution object for a Requirement or string"""
if isinstance(dist, string_types):
dist = Requirement.parse(dist)
if isinstance(dist, Requirement):
dist = get_provider(dist)
if not isinstance(dist, Distribution):
raise TypeError("Expected string, Requirement, or Distribution", dist)
return dist
def load_entry_point(dist, group, name):
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return get_distribution(dist).get_entry_info(group, name)
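# Usage sketch for the entry-point helpers (assumes an installed 'setuptools'
# distribution exposing the 'easy_install' console script):
#
#     ep = get_entry_info('setuptools', 'console_scripts', 'easy_install')
#     main = load_entry_point('setuptools', 'console_scripts', 'easy_install')
#     get_entry_map('setuptools', 'console_scripts')   # {name: EntryPoint, ...}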
class IMetadataProvider:
def has_metadata(name):
"""Does the package's distribution contain the named metadata?"""
def get_metadata(name):
"""The named metadata resource as a string"""
def get_metadata_lines(name):
"""Yield named metadata resource as list of non-blank non-comment lines
Leading and trailing whitespace is stripped from each line, and lines
with ``#`` as the first non-blank character are omitted."""
def metadata_isdir(name):
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
def metadata_listdir(name):
"""List of metadata names in the directory (like ``os.listdir()``)"""
def run_script(script_name, namespace):
"""Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
"""An object that provides access to package resources"""
def get_resource_filename(manager, resource_name):
"""Return a true filesystem path for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_stream(manager, resource_name):
"""Return a readable file-like object for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_string(manager, resource_name):
"""Return a string containing the contents of `resource_name`
`manager` must be an ``IResourceManager``"""
def has_resource(resource_name):
"""Does the package contain the named resource?"""
def resource_isdir(resource_name):
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
def resource_listdir(resource_name):
"""List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
"""Create working set from list of path entries (default=sys.path)"""
self.entries = []
self.entry_keys = {}
self.by_key = {}
self.callbacks = []
if entries is None:
entries = sys.path
for entry in entries:
self.add_entry(entry)
@classmethod
def _build_master(cls):
"""
Prepare the master working set.
"""
ws = cls()
try:
from __main__ import __requires__
except ImportError:
# The main program does not list any requirements
return ws
# ensure the requirements are met
try:
ws.require(__requires__)
except VersionConflict:
return cls._build_from_requirements(__requires__)
return ws
@classmethod
def _build_from_requirements(cls, req_spec):
"""
Build a working set from a requirement spec. Rewrites sys.path.
"""
# try it without defaults already on sys.path
# by starting with an empty path
ws = cls([])
reqs = parse_requirements(req_spec)
dists = ws.resolve(reqs, Environment())
for dist in dists:
ws.add(dist)
# add any missing entries from sys.path
for entry in sys.path:
if entry not in ws.entries:
ws.add_entry(entry)
# then copy back to sys.path
sys.path[:] = ws.entries
return ws
def add_entry(self, entry):
"""Add a path item to ``.entries``, finding any distributions on it
``find_distributions(entry, True)`` is used to find distributions
corresponding to the path entry, and they are added. `entry` is
always appended to ``.entries``, even if it is already present.
(This is because ``sys.path`` can contain the same value more than
once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
equal ``sys.path``.)
"""
self.entry_keys.setdefault(entry, [])
self.entries.append(entry)
for dist in find_distributions(entry, True):
self.add(dist, entry, False)
def __contains__(self, dist):
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
def find(self, req):
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
returns it as long as it meets the version requirement specified by
`req`. But, if there is an active distribution for the project and it
does *not* meet the `req` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
"""
dist = self.by_key.get(req.key)
if dist is not None and dist not in req:
# XXX add more info
raise VersionConflict(dist, req)
return dist
def iter_entry_points(self, group, name=None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
for dist in self:
entries = dist.get_entry_map(group)
if name is None:
for ep in entries.values():
yield ep
elif name in entries:
yield entries[name]
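    # Usage sketch (illustrative): enumerating entry points in the standard
    # 'console_scripts' group on the global working set.
    #
    #     for ep in working_set.iter_entry_points('console_scripts'):
    #         func = ep.load()   # import and return the referenced object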
def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
self.require(requires)[0].run_script(script_name, ns)
def __iter__(self):
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
added to the working set.
"""
seen = {}
for item in self.entries:
if item not in self.entry_keys:
# workaround a cache issue
continue
for key in self.entry_keys[item]:
if key not in seen:
seen[key]=1
yield self.by_key[key]
def add(self, dist, entry=None, insert=True, replace=False):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
On exit from this routine, `entry` is added to the end of the working
set's ``.entries`` (if it wasn't already present).
`dist` is only added to the working set if it's for a project that
doesn't already have a distribution in the set, unless `replace=True`.
If it's added, any callbacks registered with the ``subscribe()`` method
will be called.
"""
if insert:
dist.insert_on(self.entries, entry)
if entry is None:
entry = dist.location
keys = self.entry_keys.setdefault(entry,[])
keys2 = self.entry_keys.setdefault(dist.location,[])
if not replace and dist.key in self.by_key:
# ignore hidden distros
return
self.by_key[dist.key] = dist
if dist.key not in keys:
keys.append(dist.key)
if dist.key not in keys2:
keys2.append(dist.key)
self._added_new(dist)
def resolve(self, requirements, env=None, installer=None,
replace_conflicting=False):
"""List all distributions needed to (recursively) meet `requirements`
`requirements` must be a sequence of ``Requirement`` objects. `env`,
if supplied, should be an ``Environment`` instance. If
not supplied, it defaults to all distributions available within any
entry or distribution in the working set. `installer`, if supplied,
will be invoked with each requirement that cannot be met by an
already-installed distribution; it should return a ``Distribution`` or
``None``.
Unless `replace_conflicting=True`, raises a VersionConflict exception if
any requirements are found on the path that have the correct name but
the wrong version. Otherwise, if an `installer` is supplied it will be
invoked to obtain the correct version of the requirement and activate
it.
"""
# set up the stack
requirements = list(requirements)[::-1]
# set of processed requirements
processed = {}
# key -> dist
best = {}
to_activate = []
# Mapping of requirement to set of distributions that required it;
# useful for reporting info about conflicts.
required_by = collections.defaultdict(set)
while requirements:
# process dependencies breadth-first
req = requirements.pop(0)
if req in processed:
# Ignore cyclic or redundant dependencies
continue
dist = best.get(req.key)
if dist is None:
# Find the best distribution and add it to the map
dist = self.by_key.get(req.key)
if dist is None or (dist not in req and replace_conflicting):
ws = self
if env is None:
if dist is None:
env = Environment(self.entries)
else:
# Use an empty environment and workingset to avoid
# any further conflicts with the conflicting
# distribution
env = Environment([])
ws = WorkingSet([])
dist = best[req.key] = env.best_match(req, ws, installer)
if dist is None:
requirers = required_by.get(req, None)
raise DistributionNotFound(req, requirers)
to_activate.append(dist)
if dist not in req:
# Oops, the "best" so far conflicts with a dependency
dependent_req = required_by[req]
raise VersionConflict(dist, req).with_context(dependent_req)
# push the new requirements onto the stack
new_requirements = dist.requires(req.extras)[::-1]
requirements.extend(new_requirements)
# Register the new requirements needed by req
for new_requirement in new_requirements:
required_by[new_requirement].add(req.project_name)
processed[req] = True
# return list of distros to activate
return to_activate
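    # Resolution sketch (illustrative; the requirement string is an example):
    # requirements are processed breadth-first, and DistributionNotFound or
    # VersionConflict is raised when they cannot be satisfied.
    #
    #     reqs = list(parse_requirements('setuptools>=0.6'))
    #     for dist in working_set.resolve(reqs):
    #         working_set.add(dist)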
def find_plugins(self, plugin_env, full_env=None, installer=None,
fallback=True):
"""Find all activatable distributions in `plugin_env`
Example usage::
distributions, errors = working_set.find_plugins(
Environment(plugin_dirlist)
)
# add plugins+libs to sys.path
map(working_set.add, distributions)
# display errors
print('Could not load', errors)
The `plugin_env` should be an ``Environment`` instance that contains
only distributions that are in the project's "plugin directory" or
directories. The `full_env`, if supplied, should be an ``Environment``
        that contains all currently-available distributions. If `full_env` is not
supplied, one is created automatically from the ``WorkingSet`` this
method is called on, which will typically mean that every directory on
``sys.path`` will be scanned for distributions.
`installer` is a standard installer callback as used by the
``resolve()`` method. The `fallback` flag indicates whether we should
attempt to resolve older versions of a plugin if the newest version
cannot be resolved.
This method returns a 2-tuple: (`distributions`, `error_info`), where
`distributions` is a list of the distributions found in `plugin_env`
that were loadable, along with any other distributions that are needed
to resolve their dependencies. `error_info` is a dictionary mapping
unloadable plugin distributions to an exception instance describing the
error that occurred. Usually this will be a ``DistributionNotFound`` or
``VersionConflict`` instance.
"""
plugin_projects = list(plugin_env)
# scan project names in alphabetic order
plugin_projects.sort()
error_info = {}
distributions = {}
if full_env is None:
env = Environment(self.entries)
env += plugin_env
else:
env = full_env + plugin_env
shadow_set = self.__class__([])
# put all our entries in shadow_set
list(map(shadow_set.add, self))
for project_name in plugin_projects:
for dist in plugin_env[project_name]:
req = [dist.as_requirement()]
try:
resolvees = shadow_set.resolve(req, env, installer)
except ResolutionError as v:
# save error info
error_info[dist] = v
if fallback:
# try the next older version of project
continue
else:
# give up on this project, keep going
break
else:
list(map(shadow_set.add, resolvees))
distributions.update(dict.fromkeys(resolvees))
# success, no need to try any more versions of this project
break
distributions = list(distributions)
distributions.sort()
return distributions, error_info
def require(self, *requirements):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
included, even if they were already activated in this working set.
"""
needed = self.resolve(parse_requirements(requirements))
for dist in needed:
self.add(dist)
return needed
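    # Usage sketch (illustrative requirement string):
    #
    #     for dist in working_set.require('setuptools>=0.6'):
    #         print(dist.project_name, dist.version)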
def subscribe(self, callback):
"""Invoke `callback` for all distributions (including existing ones)"""
if callback in self.callbacks:
return
self.callbacks.append(callback)
for dist in self:
callback(dist)
def _added_new(self, dist):
for callback in self.callbacks:
callback(dist)
def __getstate__(self):
return (
self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
self.callbacks[:]
)
def __setstate__(self, e_k_b_c):
entries, keys, by_key, callbacks = e_k_b_c
self.entries = entries[:]
self.entry_keys = keys.copy()
self.by_key = by_key.copy()
self.callbacks = callbacks[:]
class Environment(object):
"""Searchable snapshot of distributions on a search path"""
def __init__(self, search_path=None, platform=get_supported_platform(),
python=PY_MAJOR):
"""Snapshot distributions available on a search path
Any distributions found on `search_path` are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used.
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
optional string naming the desired version of Python (e.g. ``'3.3'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
wish to map *all* distributions, not just those compatible with the
running platform or Python version.
"""
self._distmap = {}
self.platform = platform
self.python = python
self.scan(search_path)
def can_add(self, dist):
"""Is distribution `dist` acceptable for this environment?
The distribution must match the platform and python version
requirements specified when this environment was created, or False
is returned.
"""
return (self.python is None or dist.py_version is None
or dist.py_version==self.python) \
and compatible_platforms(dist.platform, self.platform)
def remove(self, dist):
"""Remove `dist` from the environment"""
self._distmap[dist.key].remove(dist)
def scan(self, search_path=None):
"""Scan `search_path` for distributions usable in this environment
Any distributions found are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used. Only distributions conforming to
the platform/python version defined at initialization are added.
"""
if search_path is None:
search_path = sys.path
for item in search_path:
for dist in find_distributions(item):
self.add(dist)
def __getitem__(self, project_name):
"""Return a newest-to-oldest list of distributions for `project_name`
Uses case-insensitive `project_name` comparison, assuming all the
project's distributions use their project's name converted to all
lowercase as their key.
"""
distribution_key = project_name.lower()
return self._distmap.get(distribution_key, [])
def add(self, dist):
"""Add `dist` if we ``can_add()`` it and it has not already been added
"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key, [])
if dist not in dists:
dists.append(dist)
dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
def best_match(self, req, working_set, installer=None):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
"""
dist = working_set.find(req)
if dist is not None:
return dist
for dist in self[req.key]:
if dist in req:
return dist
# try to download/install
return self.obtain(req, installer)
def obtain(self, requirement, installer=None):
"""Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
``installer(requirement)``, unless `installer` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
to the `installer` argument."""
if installer is not None:
return installer(requirement)
def __iter__(self):
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]:
yield key
def __iadd__(self, other):
"""In-place addition of a distribution or environment"""
if isinstance(other, Distribution):
self.add(other)
elif isinstance(other, Environment):
for project in other:
for dist in other[project]:
self.add(dist)
else:
raise TypeError("Can't add %r to environment" % (other,))
return self
def __add__(self, other):
"""Add an environment or distribution to an environment"""
new = self.__class__([], platform=None, python=None)
for env in self, other:
new += env
return new
# XXX backward compatibility
AvailableDistributions = Environment
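# Usage sketch (illustrative; the directory and project name are hypothetical):
#
#     env = Environment(['/opt/myapp/plugins'])
#     req = Requirement.parse('MyPlugin>=1.0')
#     dist = env.best_match(req, working_set)   # None if nothing suitable is found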
class ExtractionError(RuntimeError):
"""An error occurred extracting a resource
The following attributes are available from instances of this exception:
manager
The resource manager that raised this exception
cache_path
The base directory for resource extraction
original_error
The exception instance that caused extraction to fail
"""
class ResourceManager:
"""Manage resource extraction and packages"""
extraction_path = None
def __init__(self):
self.cached_files = {}
def resource_exists(self, package_or_requirement, resource_name):
"""Does the named resource exist?"""
return get_provider(package_or_requirement).has_resource(resource_name)
def resource_isdir(self, package_or_requirement, resource_name):
"""Is the named resource an existing directory?"""
return get_provider(package_or_requirement).resource_isdir(
resource_name
)
def resource_filename(self, package_or_requirement, resource_name):
"""Return a true filesystem path for specified resource"""
return get_provider(package_or_requirement).get_resource_filename(
self, resource_name
)
def resource_stream(self, package_or_requirement, resource_name):
"""Return a readable file-like object for specified resource"""
return get_provider(package_or_requirement).get_resource_stream(
self, resource_name
)
def resource_string(self, package_or_requirement, resource_name):
"""Return specified resource as a string"""
return get_provider(package_or_requirement).get_resource_string(
self, resource_name
)
def resource_listdir(self, package_or_requirement, resource_name):
"""List the contents of the named resource directory"""
return get_provider(package_or_requirement).resource_listdir(
resource_name
)
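    # Usage sketch: callers normally go through the module-level helpers bound
    # to the global ResourceManager later in this module (the package and
    # resource names below are hypothetical):
    #
    #     data = pkg_resources.resource_string('mypkg', 'data/defaults.cfg')
    #     path = pkg_resources.resource_filename('mypkg', 'data/defaults.cfg')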
def extraction_error(self):
"""Give an error message for problems extracting file(s)"""
old_exc = sys.exc_info()[1]
cache_path = self.extraction_path or get_default_cache()
err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
cache:
%s
The Python egg cache directory is currently set to:
%s
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
)
err.manager = self
err.cache_path = cache_path
err.original_error = old_exc
raise err
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
_bypass_ensure_directory(target_path)
except:
self.extraction_error()
self._warn_unsafe_extraction_path(extract_path)
self.cached_files[target_path] = 1
return target_path
@staticmethod
def _warn_unsafe_extraction_path(path):
"""
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
See Distribute #375 for more details.
"""
if os.name == 'nt' and not path.startswith(os.environ['windir']):
# On Windows, permissions are generally restrictive by default
# and temp directories are not writable by other users, so
# bypass the warning.
return
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = ("%s is writable by group/others and vulnerable to attack "
"when "
"used with get_resource_filename. Consider a more secure "
"location (set with .set_extraction_path or the "
"PYTHON_EGG_CACHE environment variable)." % path)
warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
"""Perform any platform-specific postprocessing of `tempname`
This is where Mac header rewrites should be done; other platforms don't
have anything special they should do.
Resource providers should call this method ONLY after successfully
extracting a compressed resource. They must NOT call it on resources
that are already in the filesystem.
`tempname` is the current (temporary) name of the file, and `filename`
is the name it will be renamed to by the caller after this routine
returns.
"""
if os.name == 'posix':
# Make the resource executable
mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
os.chmod(tempname, mode)
def set_extraction_path(self, path):
"""Set the base path where resources will be extracted to, if needed.
If you do not call this routine before any extractions take place, the
path defaults to the return value of ``get_default_cache()``. (Which
is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
platform-specific fallbacks. See that routine's documentation for more
details.)
Resources are extracted to subdirectories of this path based upon
information given by the ``IResourceProvider``. You may set this to a
temporary directory, but then you must call ``cleanup_resources()`` to
delete the extracted files when done. There is no guarantee that
``cleanup_resources()`` will be able to remove all extracted files.
(Note: you may not change the extraction path for a given resource
manager once resources have been extracted, unless you first call
``cleanup_resources()``.)
"""
if self.cached_files:
raise ValueError(
"Can't change extraction path, files already extracted"
)
self.extraction_path = path
def cleanup_resources(self, force=False):
"""
Delete all extracted resource files and directories, returning a list
of the file and directory names that could not be successfully removed.
This function does not have any concurrency protection, so it should
generally only be called when the extraction path is a temporary
directory exclusive to a single process. This method is not
automatically called; you must call it explicitly or register it as an
``atexit`` function if you wish to ensure cleanup of a temporary
directory used for extractions.
"""
# XXX
def get_default_cache():
"""Determine the default cache location
This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
"Application Data" directory. On all other systems, it's "~/.python-eggs".
"""
try:
return os.environ['PYTHON_EGG_CACHE']
except KeyError:
pass
if os.name!='nt':
return os.path.expanduser('~/.python-eggs')
# XXX this may be locale-specific!
app_data = 'Application Data'
app_homes = [
# best option, should be locale-safe
(('APPDATA',), None),
(('USERPROFILE',), app_data),
(('HOMEDRIVE','HOMEPATH'), app_data),
(('HOMEPATH',), app_data),
(('HOME',), None),
# 95/98/ME
(('WINDIR',), app_data),
]
for keys, subdir in app_homes:
dirname = ''
for key in keys:
if key in os.environ:
dirname = os.path.join(dirname, os.environ[key])
else:
break
else:
if subdir:
dirname = os.path.join(dirname, subdir)
return os.path.join(dirname, 'Python-Eggs')
else:
raise RuntimeError(
"Please set the PYTHON_EGG_CACHE enviroment variable"
)
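# Illustrative override (the path is an example): point the egg extraction
# cache at a private directory before any resources are extracted.
#
#     os.environ['PYTHON_EGG_CACHE'] = '/var/cache/python-eggs'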
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""
Convert an arbitrary string to a standard version string
"""
try:
# normalize the version
return str(packaging.version.Version(version))
except packaging.version.InvalidVersion:
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
"""
return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-','_')
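# Illustrative conversions:
#
#     safe_name('my.project_name')   # -> 'my.project-name'
#     safe_extra('Feature-X')        # -> 'feature_x'
#     to_filename('my-project')      # -> 'my_project'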
class MarkerEvaluation(object):
values = {
'os_name': lambda: os.name,
'sys_platform': lambda: sys.platform,
'python_full_version': platform.python_version,
'python_version': lambda: platform.python_version()[:3],
'platform_version': platform.version,
'platform_machine': platform.machine,
'python_implementation': platform.python_implementation,
}
@classmethod
def is_invalid_marker(cls, text):
"""
Validate text as a PEP 426 environment marker; return an exception
if invalid or False otherwise.
"""
try:
cls.evaluate_marker(text)
except SyntaxError as e:
return cls.normalize_exception(e)
return False
@staticmethod
def normalize_exception(exc):
"""
Given a SyntaxError from a marker evaluation, normalize the error
message:
- Remove indications of filename and line number.
- Replace platform-specific error messages with standard error
messages.
"""
subs = {
'unexpected EOF while parsing': 'invalid syntax',
'parenthesis is never closed': 'invalid syntax',
}
exc.filename = None
exc.lineno = None
exc.msg = subs.get(exc.msg, exc.msg)
return exc
@classmethod
def and_test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
items = [
cls.interpret(nodelist[i])
for i in range(1, len(nodelist), 2)
]
return functools.reduce(operator.and_, items)
@classmethod
def test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
items = [
cls.interpret(nodelist[i])
for i in range(1, len(nodelist), 2)
]
return functools.reduce(operator.or_, items)
@classmethod
def atom(cls, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
if nodelist[2][0] == token.RPAR:
raise SyntaxError("Empty parentheses")
return cls.interpret(nodelist[2])
msg = "Language feature not supported in environment markers"
raise SyntaxError(msg)
@classmethod
def comparison(cls, nodelist):
if len(nodelist) > 4:
msg = "Chained comparison not allowed in environment markers"
raise SyntaxError(msg)
comp = nodelist[2][1]
cop = comp[1]
if comp[0] == token.NAME:
if len(nodelist[2]) == 3:
if cop == 'not':
cop = 'not in'
else:
cop = 'is not'
try:
cop = cls.get_op(cop)
except KeyError:
msg = repr(cop) + " operator not allowed in environment markers"
raise SyntaxError(msg)
return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
@classmethod
def get_op(cls, op):
ops = {
symbol.test: cls.test,
symbol.and_test: cls.and_test,
symbol.atom: cls.atom,
symbol.comparison: cls.comparison,
'not in': lambda x, y: x not in y,
'in': lambda x, y: x in y,
'==': operator.eq,
'!=': operator.ne,
'<': operator.lt,
'>': operator.gt,
'<=': operator.le,
'>=': operator.ge,
}
if hasattr(symbol, 'or_test'):
ops[symbol.or_test] = cls.test
return ops[op]
@classmethod
def evaluate_marker(cls, text, extra=None):
"""
Evaluate a PEP 426 environment marker on CPython 2.4+.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
        This implementation uses the 'parser' module, which is not implemented
        on Jython and has been superseded by the 'ast' module in Python 2.6
        and later.
"""
return cls.interpret(parser.expr(text).totuple(1)[1])
@classmethod
def _markerlib_evaluate(cls, text):
"""
Evaluate a PEP 426 environment marker using markerlib.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
"""
import _markerlib
# markerlib implements Metadata 1.2 (PEP 345) environment markers.
# Translate the variables to Metadata 2.0 (PEP 426).
env = _markerlib.default_environment()
for key in env.keys():
new_key = key.replace('.', '_')
env[new_key] = env.pop(key)
try:
result = _markerlib.interpret(text, env)
except NameError as e:
raise SyntaxError(e.args[0])
return result
if 'parser' not in globals():
# Fall back to less-complete _markerlib implementation if 'parser' module
# is not available.
evaluate_marker = _markerlib_evaluate
@classmethod
def interpret(cls, nodelist):
        while len(nodelist) == 2:
            nodelist = nodelist[1]
try:
op = cls.get_op(nodelist[0])
except KeyError:
raise SyntaxError("Comparison or logical expression expected")
return op(nodelist)
@classmethod
def evaluate(cls, nodelist):
        while len(nodelist) == 2:
            nodelist = nodelist[1]
kind = nodelist[0]
name = nodelist[1]
if kind==token.NAME:
try:
op = cls.values[name]
except KeyError:
raise SyntaxError("Unknown name %r" % name)
return op()
if kind==token.STRING:
s = nodelist[1]
if not cls._safe_string(s):
raise SyntaxError(
"Only plain strings allowed in environment markers")
return s[1:-1]
msg = "Language feature not supported in environment markers"
raise SyntaxError(msg)
@staticmethod
def _safe_string(cand):
return (
cand[:1] in "'\"" and
not cand.startswith('"""') and
not cand.startswith("'''") and
'\\' not in cand
)
invalid_marker = MarkerEvaluation.is_invalid_marker
evaluate_marker = MarkerEvaluation.evaluate_marker
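# Usage sketch (illustrative marker strings):
#
#     invalid_marker("sys_platform == 'win32'")    # False: the marker is valid
#     evaluate_marker("python_version >= '2.6'")   # True on supported interpreters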
class NullProvider:
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
egg_name = None
egg_info = None
loader = None
def __init__(self, module):
self.loader = getattr(module, '__loader__', None)
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
def get_resource_filename(self, manager, resource_name):
return self._fn(self.module_path, resource_name)
def get_resource_stream(self, manager, resource_name):
return io.BytesIO(self.get_resource_string(manager, resource_name))
def get_resource_string(self, manager, resource_name):
return self._get(self._fn(self.module_path, resource_name))
def has_resource(self, resource_name):
return self._has(self._fn(self.module_path, resource_name))
def has_metadata(self, name):
return self.egg_info and self._has(self._fn(self.egg_info, name))
if sys.version_info <= (3,):
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info, name))
else:
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info, name)).decode("utf-8")
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
def resource_isdir(self, resource_name):
return self._isdir(self._fn(self.module_path, resource_name))
def metadata_isdir(self, name):
return self.egg_info and self._isdir(self._fn(self.egg_info, name))
def resource_listdir(self, resource_name):
return self._listdir(self._fn(self.module_path, resource_name))
def metadata_listdir(self, name):
if self.egg_info:
return self._listdir(self._fn(self.egg_info, name))
return []
def run_script(self, script_name, namespace):
script = 'scripts/'+script_name
if not self.has_metadata(script):
raise ResolutionError("No script named %r" % script_name)
script_text = self.get_metadata(script).replace('\r\n', '\n')
script_text = script_text.replace('\r', '\n')
script_filename = self._fn(self.egg_info, script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
source = open(script_filename).read()
code = compile(source, script_filename, 'exec')
exec(code, namespace, namespace)
else:
from linecache import cache
cache[script_filename] = (
len(script_text), 0, script_text.split('\n'), script_filename
)
script_code = compile(script_text, script_filename,'exec')
exec(script_code, namespace, namespace)
def _has(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _isdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _listdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _fn(self, base, resource_name):
if resource_name:
return os.path.join(base, *resource_name.split('/'))
return base
def _get(self, path):
if hasattr(self.loader, 'get_data'):
return self.loader.get_data(path)
raise NotImplementedError(
"Can't perform this operation for loaders without 'get_data()'"
)
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
def __init__(self, module):
NullProvider.__init__(self, module)
self._setup_prefix()
def _setup_prefix(self):
# we assume here that our metadata may be nested inside a "basket"
# of multiple eggs; that's why we use module_path instead of .archive
path = self.module_path
old = None
while path!=old:
if path.lower().endswith('.egg'):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
break
old = path
path, base = os.path.split(path)
class DefaultProvider(EggProvider):
"""Provides access to package resources in the filesystem"""
def _has(self, path):
return os.path.exists(path)
def _isdir(self, path):
return os.path.isdir(path)
def _listdir(self, path):
return os.listdir(path)
def get_resource_stream(self, manager, resource_name):
return open(self._fn(self.module_path, resource_name), 'rb')
def _get(self, path):
with open(path, 'rb') as stream:
return stream.read()
register_loader_type(type(None), DefaultProvider)
if importlib_machinery is not None:
register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider)
class EmptyProvider(NullProvider):
"""Provider that returns nothing for all requests"""
_isdir = _has = lambda self, path: False
_get = lambda self, path: ''
_listdir = lambda self, path: []
module_path = None
def __init__(self):
pass
empty_provider = EmptyProvider()
class ZipManifests(dict):
"""
zip manifest builder
"""
@classmethod
def build(cls, path):
"""
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
Use a platform-specific path separator (os.sep) for the path keys
for compatibility with pypy on Windows.
"""
with ContextualZipFile(path) as zfile:
items = (
(
name.replace('/', os.sep),
zfile.getinfo(name),
)
for name in zfile.namelist()
)
return dict(items)
load = build
class MemoizedZipManifests(ZipManifests):
"""
Memoized zipfile manifests.
"""
manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
def load(self, path):
"""
Load a manifest at path or return a suitable manifest already loaded.
"""
path = os.path.normpath(path)
mtime = os.stat(path).st_mtime
if path not in self or self[path].mtime != mtime:
manifest = self.build(path)
self[path] = self.manifest_mod(manifest, mtime)
return self[path].manifest
class ContextualZipFile(zipfile.ZipFile):
"""
Supplement ZipFile class to support context manager for Python 2.6
"""
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def __new__(cls, *args, **kwargs):
"""
Construct a ZipFile or ContextualZipFile as appropriate
"""
if hasattr(zipfile.ZipFile, '__exit__'):
return zipfile.ZipFile(*args, **kwargs)
return super(ContextualZipFile, cls).__new__(cls)
class ZipProvider(EggProvider):
"""Resource support for zips and eggs"""
eagers = None
_zip_manifests = MemoizedZipManifests()
def __init__(self, module):
EggProvider.__init__(self, module)
self.zip_pre = self.loader.archive+os.sep
def _zipinfo_name(self, fspath):
# Convert a virtual filename (full path to file) into a zipfile subpath
# usable with the zipimport directory cache for our target archive
if fspath.startswith(self.zip_pre):
return fspath[len(self.zip_pre):]
raise AssertionError(
"%s is not a subpath of %s" % (fspath, self.zip_pre)
)
def _parts(self, zip_path):
# Convert a zipfile subpath into an egg-relative path part list.
# pseudo-fs path
fspath = self.zip_pre+zip_path
if fspath.startswith(self.egg_root+os.sep):
return fspath[len(self.egg_root)+1:].split(os.sep)
raise AssertionError(
"%s is not a subpath of %s" % (fspath, self.egg_root)
)
@property
def zipinfo(self):
return self._zip_manifests.load(self.loader.archive)
def get_resource_filename(self, manager, resource_name):
if not self.egg_name:
raise NotImplementedError(
"resource_filename() only supported for .egg, not .zip"
)
# no need to lock for extraction, since we use temp names
zip_path = self._resource_to_zip(resource_name)
eagers = self._get_eager_resources()
if '/'.join(self._parts(zip_path)) in eagers:
for name in eagers:
self._extract_resource(manager, self._eager_to_zip(name))
return self._extract_resource(manager, zip_path)
@staticmethod
def _get_date_and_size(zip_stat):
size = zip_stat.file_size
# ymdhms+wday, yday, dst
date_time = zip_stat.date_time + (0, 0, -1)
# 1980 offset already done
timestamp = time.mktime(date_time)
return timestamp, size
def _extract_resource(self, manager, zip_path):
if zip_path in self._index():
for name in self._index()[zip_path]:
last = self._extract_resource(
manager, os.path.join(zip_path, name)
)
# return the extracted directory name
return os.path.dirname(last)
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not WRITE_SUPPORT:
raise IOError('"os.rename" and "os.unlink" are not supported '
'on this platform')
try:
real_path = manager.get_cache_path(
self.egg_name, self._parts(zip_path)
)
if self._is_current(real_path, zip_path):
return real_path
outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
os.write(outf, self.loader.get_data(zip_path))
os.close(outf)
utime(tmpnam, (timestamp, timestamp))
manager.postprocess(tmpnam, real_path)
try:
rename(tmpnam, real_path)
except os.error:
if os.path.isfile(real_path):
if self._is_current(real_path, zip_path):
# the file became current since it was checked above,
# so proceed.
return real_path
# Windows, del old file and retry
elif os.name=='nt':
unlink(real_path)
rename(tmpnam, real_path)
return real_path
raise
except os.error:
# report a user-friendly error
manager.extraction_error()
return real_path
def _is_current(self, file_path, zip_path):
"""
Return True if the file_path is current for this zip_path
"""
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not os.path.isfile(file_path):
return False
stat = os.stat(file_path)
if stat.st_size!=size or stat.st_mtime!=timestamp:
return False
# check that the contents match
zip_contents = self.loader.get_data(zip_path)
with open(file_path, 'rb') as f:
file_contents = f.read()
return zip_contents == file_contents
def _get_eager_resources(self):
if self.eagers is None:
eagers = []
for name in ('native_libs.txt', 'eager_resources.txt'):
if self.has_metadata(name):
eagers.extend(self.get_metadata_lines(name))
self.eagers = eagers
return self.eagers
def _index(self):
try:
return self._dirindex
except AttributeError:
ind = {}
for path in self.zipinfo:
parts = path.split(os.sep)
while parts:
parent = os.sep.join(parts[:-1])
if parent in ind:
ind[parent].append(parts[-1])
break
else:
ind[parent] = [parts.pop()]
self._dirindex = ind
return ind
def _has(self, fspath):
zip_path = self._zipinfo_name(fspath)
return zip_path in self.zipinfo or zip_path in self._index()
def _isdir(self, fspath):
return self._zipinfo_name(fspath) in self._index()
def _listdir(self, fspath):
return list(self._index().get(self._zipinfo_name(fspath), ()))
def _eager_to_zip(self, resource_name):
return self._zipinfo_name(self._fn(self.egg_root, resource_name))
def _resource_to_zip(self, resource_name):
return self._zipinfo_name(self._fn(self.module_path, resource_name))
register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
"""Metadata handler for standalone PKG-INFO files
Usage::
metadata = FileMetadata("/path/to/PKG-INFO")
This provider rejects all data and metadata requests except for PKG-INFO,
which is treated as existing, and will be the contents of the file at
the provided location.
"""
def __init__(self, path):
self.path = path
def has_metadata(self, name):
return name=='PKG-INFO'
def get_metadata(self, name):
if name=='PKG-INFO':
with open(self.path,'rU') as f:
metadata = f.read()
return metadata
raise KeyError("No metadata except PKG-INFO is available")
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
"""Metadata provider for egg directories
Usage::
# Development eggs:
egg_info = "/path/to/PackageName.egg-info"
base_dir = os.path.dirname(egg_info)
metadata = PathMetadata(base_dir, egg_info)
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
dist = Distribution(basedir, project_name=dist_name, metadata=metadata)
# Unpacked egg directories:
egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
dist = Distribution.from_filename(egg_path, metadata=metadata)
"""
def __init__(self, path, egg_info):
self.module_path = path
self.egg_info = egg_info
class EggMetadata(ZipProvider):
"""Metadata provider for .egg files"""
def __init__(self, importer):
"""Create a metadata provider from a zipimporter"""
self.zip_pre = importer.archive+os.sep
self.loader = importer
if importer.prefix:
self.module_path = os.path.join(importer.archive, importer.prefix)
else:
self.module_path = importer.archive
self._setup_prefix()
_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
"""Register `distribution_finder` to find distributions in sys.path items
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `distribution_finder` is a callable that, passed a path
item and the importer instance, yields ``Distribution`` instances found on
that path item. See ``pkg_resources.find_on_path`` for an example."""
_distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
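# Hedged sketch (not part of the original module): find_distributions() is the
# public entry point over the registered finders; iterating it over a sys.path
# entry yields Distribution objects.
def _example_find_distributions(path_item='.'):
    return [dist.project_name for dist in find_distributions(path_item)]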
def find_eggs_in_zip(importer, path_item, only=False):
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
if importer.archive.endswith('.whl'):
# wheels are not supported with this finder
# they don't have PKG-INFO metadata, and won't ever contain eggs
return
metadata = EggMetadata(importer)
if metadata.has_metadata('PKG-INFO'):
yield Distribution.from_filename(path_item, metadata=metadata)
if only:
# don't yield nested distros
return
for subitem in metadata.resource_listdir('/'):
if subitem.endswith('.egg'):
subpath = os.path.join(path_item, subitem)
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
yield dist
register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
return ()
register_finder(object, find_nothing)
def find_on_path(importer, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
if path_item.lower().endswith('.egg'):
# unpacked egg
yield Distribution.from_filename(
path_item, metadata=PathMetadata(
path_item, os.path.join(path_item,'EGG-INFO')
)
)
else:
# scan for .egg and .egg-info in directory
for entry in os.listdir(path_item):
lower = entry.lower()
if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
fullpath = os.path.join(path_item, entry)
if os.path.isdir(fullpath):
# egg-info directory, allow getting metadata
metadata = PathMetadata(path_item, fullpath)
else:
metadata = FileMetadata(fullpath)
yield Distribution.from_location(
path_item, entry, metadata, precedence=DEVELOP_DIST
)
elif not only and lower.endswith('.egg'):
dists = find_distributions(os.path.join(path_item, entry))
for dist in dists:
yield dist
elif not only and lower.endswith('.egg-link'):
with open(os.path.join(path_item, entry)) as entry_file:
entry_lines = entry_file.readlines()
for line in entry_lines:
if not line.strip():
continue
path = os.path.join(path_item, line.rstrip())
dists = find_distributions(path)
for item in dists:
yield item
break
register_finder(pkgutil.ImpImporter, find_on_path)
if importlib_machinery is not None:
register_finder(importlib_machinery.FileFinder, find_on_path)
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
"""Register `namespace_handler` to declare namespace packages
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `namespace_handler` is a callable like this::
def namespace_handler(importer, path_entry, moduleName, module):
# return a path_entry to use for child packages
Namespace handlers are only called if the importer object has already
agreed that it can handle the relevant path item, and they should only
return a subpath if the module __path__ does not already contain an
equivalent subpath. For an example namespace handler, see
``pkg_resources.file_ns_handler``.
"""
_namespace_handlers[importer_type] = namespace_handler
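# Hedged sketch (not part of the original module): a namespace handler receives
# the importer, the sys.path entry, the package name and the module object, and
# returns a subpath to add to the package __path__ (or None); compare
# file_ns_handler and null_ns_handler below. The importer type it would be
# registered for is hypothetical.
def _example_ns_handler(importer, path_entry, packageName, module):
    # Decline to contribute a subpath for this importer type.
    return None
# register_namespace_handler(SomeImporterType, _example_ns_handler)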
def _handle_ns(packageName, path_item):
"""Ensure that named package includes a subpath of path_item (if needed)"""
importer = get_importer(path_item)
if importer is None:
return None
loader = importer.find_module(packageName)
if loader is None:
return None
module = sys.modules.get(packageName)
if module is None:
module = sys.modules[packageName] = types.ModuleType(packageName)
module.__path__ = []
_set_parent_ns(packageName)
elif not hasattr(module,'__path__'):
raise TypeError("Not a package:", packageName)
handler = _find_adapter(_namespace_handlers, importer)
subpath = handler(importer, path_item, packageName, module)
if subpath is not None:
path = module.__path__
path.append(subpath)
loader.load_module(packageName)
for path_item in path:
if path_item not in module.__path__:
module.__path__.append(path_item)
return subpath
def declare_namespace(packageName):
"""Declare that package 'packageName' is a namespace package"""
_imp.acquire_lock()
try:
if packageName in _namespace_packages:
return
path, parent = sys.path, None
if '.' in packageName:
parent = '.'.join(packageName.split('.')[:-1])
declare_namespace(parent)
if parent not in _namespace_packages:
__import__(parent)
try:
path = sys.modules[parent].__path__
except AttributeError:
raise TypeError("Not a package:", parent)
# Track what packages are namespaces, so when new path items are added,
# they can be updated
_namespace_packages.setdefault(parent,[]).append(packageName)
_namespace_packages.setdefault(packageName,[])
for path_item in path:
# Ensure all the parent's path items are reflected in the child,
# if they apply
_handle_ns(packageName, path_item)
finally:
_imp.release_lock()
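# Hedged usage sketch (not part of the original module): a namespace package's
# __init__.py conventionally makes a single call such as the one below so that
# every distribution contributing to the package shares one __path__;
# 'mycompany' is a hypothetical package name.
# __import__('pkg_resources').declare_namespace(__name__)  # in mycompany/__init__.py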
def fixup_namespace_packages(path_item, parent=None):
"""Ensure that previously-declared namespace packages include path_item"""
_imp.acquire_lock()
try:
for package in _namespace_packages.get(parent,()):
subpath = _handle_ns(package, path_item)
if subpath:
fixup_namespace_packages(subpath, package)
finally:
_imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
normalized = _normalize_cached(subpath)
for item in module.__path__:
if _normalize_cached(item)==normalized:
break
else:
# Only return the path if it's not already there
return subpath
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
if importlib_machinery is not None:
register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
return None
register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(filename))
def _normalize_cached(filename, _cache={}):
try:
return _cache[filename]
except KeyError:
_cache[filename] = result = normalize_path(filename)
return result
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
"""Yield non-empty/non-comment lines of a string or sequence"""
if isinstance(strs, string_types):
for s in strs.splitlines():
s = s.strip()
# skip blank lines/comments
if s and not s.startswith('#'):
yield s
else:
for ss in strs:
for s in yield_lines(ss):
yield s
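# Minimal sketch (not part of the original module): yield_lines() strips blank
# lines and '#' comment lines from a string, or from a nested iterable of
# strings.
def _example_yield_lines():
    text = "first line\n# a comment\n\n  second line  "
    return list(yield_lines(text))  # -> ['first line', 'second line']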
# whitespace and comment
LINE_END = re.compile(r"\s*(#.*)?$").match
# line continuation
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
# Distribution or extra
DISTRO = re.compile(r"\s*((\w|[-.])+)").match
# ver. info
VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match
# comma between items
COMMA = re.compile(r"\s*,").match
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"""
(?P<name>[^-]+) (
-(?P<ver>[^-]+) (
-py(?P<pyver>[^-]+) (
-(?P<plat>.+)
)?
)?
)?
""",
re.VERBOSE | re.IGNORECASE,
).match
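# Hedged sketch (not part of the original module): EGG_NAME decomposes an egg
# basename into its name/version/python-version/platform groups; the filename
# below is made up.
def _example_egg_name_groups():
    match = EGG_NAME('demo_pkg-1.2.3-py2.7-linux_x86_64')
    # -> ('demo_pkg', '1.2.3', '2.7', 'linux_x86_64')
    return match.group('name', 'ver', 'pyver', 'plat')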
class EntryPoint(object):
"""Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
self.module_name = module_name
self.attrs = tuple(attrs)
self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
self.dist = dist
def __str__(self):
s = "%s = %s" % (self.name, self.module_name)
if self.attrs:
s += ':' + '.'.join(self.attrs)
if self.extras:
s += ' [%s]' % ','.join(self.extras)
return s
def __repr__(self):
return "EntryPoint.parse(%r)" % str(self)
def load(self, require=True, *args, **kwargs):
"""
Require packages for this EntryPoint, then resolve it.
"""
if not require or args or kwargs:
warnings.warn(
"Parameters to load are deprecated. Call .resolve and "
".require separately.",
DeprecationWarning,
stacklevel=2,
)
if require:
self.require(*args, **kwargs)
return self.resolve()
def resolve(self):
"""
Resolve the entry point from its module and attrs.
"""
module = __import__(self.module_name, fromlist=['__name__'], level=0)
try:
return functools.reduce(getattr, self.attrs, module)
except AttributeError as exc:
raise ImportError(str(exc))
def require(self, env=None, installer=None):
if self.extras and not self.dist:
raise UnknownExtra("Can't require() without a distribution", self)
reqs = self.dist.requires(self.extras)
items = working_set.resolve(reqs, env, installer)
list(map(working_set.add, items))
pattern = re.compile(
r'\s*'
r'(?P<name>.+?)\s*'
r'=\s*'
r'(?P<module>[\w.]+)\s*'
r'(:\s*(?P<attr>[\w.]+))?\s*'
r'(?P<extras>\[.*\])?\s*$'
)
@classmethod
def parse(cls, src, dist=None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
name = some.module:some.attr [extra1, extra2]
The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional.
"""
m = cls.pattern.match(src)
if not m:
msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
raise ValueError(msg, src)
res = m.groupdict()
extras = cls._parse_extras(res['extras'])
attrs = res['attr'].split('.') if res['attr'] else ()
return cls(res['name'], res['module'], attrs, extras, dist)
@classmethod
def _parse_extras(cls, extras_spec):
if not extras_spec:
return ()
req = Requirement.parse('x' + extras_spec)
if req.specs:
raise ValueError()
return req.extras
@classmethod
def parse_group(cls, group, lines, dist=None):
"""Parse an entry point group"""
if not MODULE(group):
raise ValueError("Invalid group name", group)
this = {}
for line in yield_lines(lines):
ep = cls.parse(line, dist)
if ep.name in this:
raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
return this
@classmethod
def parse_map(cls, data, dist=None):
"""Parse a map of entry point groups"""
if isinstance(data, dict):
data = data.items()
else:
data = split_sections(data)
maps = {}
for group, lines in data:
if group is None:
if not lines:
continue
raise ValueError("Entry points must be listed in groups")
group = group.strip()
if group in maps:
raise ValueError("Duplicate group name", group)
maps[group] = cls.parse_group(group, lines, dist)
return maps
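# Hedged sketch (not part of the original module): EntryPoint.parse() turns a
# single "name = module:attrs [extras]" line into an EntryPoint without
# importing anything (the import happens only in .resolve()); the target below
# is hypothetical.
def _example_entry_point_parse():
    ep = EntryPoint.parse('main = demo_pkg.cli:run')
    return ep.name, ep.module_name, ep.attrs  # -> ('main', 'demo_pkg.cli', ('run',))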
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urlparse(location)
if parsed[-1].startswith('md5='):
return urlunparse(parsed[:-1] + ('',))
return location
class Distribution(object):
"""Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO'
def __init__(self, location=None, metadata=None, project_name=None,
version=None, py_version=PY_MAJOR, platform=None,
precedence=EGG_DIST):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
self._version = safe_version(version)
self.py_version = py_version
self.platform = platform
self.location = location
self.precedence = precedence
self._provider = metadata or empty_provider
@classmethod
def from_location(cls, location, basename, metadata=None,**kw):
project_name, version, py_version, platform = [None]*4
basename, ext = os.path.splitext(basename)
if ext.lower() in _distributionImpl:
# .dist-info gets much metadata differently
match = EGG_NAME(basename)
if match:
project_name, version, py_version, platform = match.group(
'name','ver','pyver','plat'
)
cls = _distributionImpl[ext.lower()]
return cls(
location, metadata, project_name=project_name, version=version,
py_version=py_version, platform=platform, **kw
)
@property
def hashcmp(self):
return (
self.parsed_version,
self.precedence,
self.key,
_remove_md5_fragment(self.location),
self.py_version or '',
self.platform or '',
)
def __hash__(self):
return hash(self.hashcmp)
def __lt__(self, other):
return self.hashcmp < other.hashcmp
def __le__(self, other):
return self.hashcmp <= other.hashcmp
def __gt__(self, other):
return self.hashcmp > other.hashcmp
def __ge__(self, other):
return self.hashcmp >= other.hashcmp
def __eq__(self, other):
if not isinstance(other, self.__class__):
# It's not a Distribution, so they are not equal
return False
return self.hashcmp == other.hashcmp
def __ne__(self, other):
return not self == other
# These properties have to be lazy so that we don't have to load any
# metadata until/unless it's actually needed. (i.e., some distributions
# may not know their name or version without loading PKG-INFO)
@property
def key(self):
try:
return self._key
except AttributeError:
self._key = key = self.project_name.lower()
return key
@property
def parsed_version(self):
if not hasattr(self, "_parsed_version"):
self._parsed_version = parse_version(self.version)
return self._parsed_version
def _warn_legacy_version(self):
LV = packaging.version.LegacyVersion
is_legacy = isinstance(self._parsed_version, LV)
if not is_legacy:
return
# While an empty version is technically a legacy version and
# is not a valid PEP 440 version, it's also unlikely to
# actually come from someone and instead it is more likely that
# it comes from setuptools attempting to parse a filename and
# including it in the list. So for that we'll gate this warning
# on if the version is anything at all or not.
if not self.version:
return
tmpl = textwrap.dedent("""
'{project_name} ({version})' is being parsed as a legacy,
non PEP 440,
version. You may find odd behavior and sort order.
In particular it will be sorted as less than 0.0. It
is recommended to migrate to PEP 440 compatible
versions.
""").strip().replace('\n', ' ')
warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
@property
def version(self):
try:
return self._version
except AttributeError:
for line in self._get_metadata(self.PKG_INFO):
if line.lower().startswith('version:'):
self._version = safe_version(line.split(':',1)[1].strip())
return self._version
else:
tmpl = "Missing 'Version:' header and/or %s file"
raise ValueError(tmpl % self.PKG_INFO, self)
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
dm = self.__dep_map = {None: []}
for name in 'requires.txt', 'depends.txt':
for extra, reqs in split_sections(self._get_metadata(name)):
if extra:
if ':' in extra:
extra, marker = extra.split(':', 1)
if invalid_marker(marker):
# XXX warn
reqs=[]
elif not evaluate_marker(marker):
reqs=[]
extra = safe_extra(extra) or None
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
return dm
def requires(self, extras=()):
"""List of Requirements needed for this distro if `extras` are used"""
dm = self._dep_map
deps = []
deps.extend(dm.get(None, ()))
for ext in extras:
try:
deps.extend(dm[safe_extra(ext)])
except KeyError:
raise UnknownExtra(
"%s has no such extra feature %r" % (self, ext)
)
return deps
def _get_metadata(self, name):
if self.has_metadata(name):
for line in self.get_metadata_lines(name):
yield line
def activate(self, path=None):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None:
path = sys.path
self.insert_on(path)
if path is sys.path:
fixup_namespace_packages(self.location)
for pkg in self._get_metadata('namespace_packages.txt'):
if pkg in sys.modules:
declare_namespace(pkg)
def egg_name(self):
"""Return what this distribution's standard .egg filename should be"""
filename = "%s-%s-py%s" % (
to_filename(self.project_name), to_filename(self.version),
self.py_version or PY_MAJOR
)
if self.platform:
filename += '-' + self.platform
return filename
def __repr__(self):
if self.location:
return "%s (%s)" % (self, self.location)
else:
return str(self)
def __str__(self):
try:
version = getattr(self, 'version', None)
except ValueError:
version = None
version = version or "[unknown version]"
return "%s %s" % (self.project_name, version)
def __getattr__(self, attr):
"""Delegate all unrecognized public attributes to .metadata provider"""
if attr.startswith('_'):
raise AttributeError(attr)
return getattr(self._provider, attr)
@classmethod
def from_filename(cls, filename, metadata=None, **kw):
return cls.from_location(
_normalize_cached(filename), os.path.basename(filename), metadata,
**kw
)
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
if isinstance(self.parsed_version, packaging.version.Version):
spec = "%s==%s" % (self.project_name, self.parsed_version)
else:
spec = "%s===%s" % (self.project_name, self.parsed_version)
return Requirement.parse(spec)
def load_entry_point(self, group, name):
"""Return the `name` entry point of `group` or raise ImportError"""
ep = self.get_entry_info(group, name)
if ep is None:
raise ImportError("Entry point %r not found" % ((group, name),))
return ep.load()
def get_entry_map(self, group=None):
"""Return the entry point map for `group`, or the full entry map"""
try:
ep_map = self._ep_map
except AttributeError:
ep_map = self._ep_map = EntryPoint.parse_map(
self._get_metadata('entry_points.txt'), self
)
if group is not None:
return ep_map.get(group,{})
return ep_map
def get_entry_info(self, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
def insert_on(self, path, loc = None):
"""Insert self.location in path before its nearest parent directory"""
loc = loc or self.location
if not loc:
return
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
npath= [(p and _normalize_cached(p) or p) for p in path]
for p, item in enumerate(npath):
if item == nloc:
break
elif item == bdir and self.precedence == EGG_DIST:
# if it's an .egg, give it precedence over its directory
if path is sys.path:
self.check_version_conflict()
path.insert(p, loc)
npath.insert(p, nloc)
break
else:
if path is sys.path:
self.check_version_conflict()
path.append(loc)
return
# p is the spot where we found or inserted loc; now remove duplicates
while True:
try:
np = npath.index(nloc, p+1)
except ValueError:
break
else:
del npath[np], path[np]
# ha!
p = np
return
def check_version_conflict(self):
if self.key == 'setuptools':
# ignore the inevitable setuptools self-conflicts :(
return
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
loc = normalize_path(self.location)
for modname in self._get_metadata('top_level.txt'):
if (modname not in sys.modules or modname in nsp
or modname in _namespace_packages):
continue
if modname in ('pkg_resources', 'setuptools', 'site'):
continue
fn = getattr(sys.modules[modname], '__file__', None)
if fn and (normalize_path(fn).startswith(loc) or
fn.startswith(self.location)):
continue
issue_warning(
"Module %s was already imported from %s, but %s is being added"
" to sys.path" % (modname, fn, self.location),
)
def has_version(self):
try:
self.version
except ValueError:
issue_warning("Unbuilt egg for " + repr(self))
return False
return True
def clone(self,**kw):
"""Copy this distribution, substituting in any changed keyword args"""
names = 'project_name version py_version platform location precedence'
for attr in names.split():
kw.setdefault(attr, getattr(self, attr, None))
kw.setdefault('metadata', self._provider)
return self.__class__(**kw)
@property
def extras(self):
return [dep for dep in self._dep_map if dep]
class DistInfoDistribution(Distribution):
"""Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
PKG_INFO = 'METADATA'
EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
@property
def _parsed_pkg_info(self):
"""Parse and cache metadata"""
try:
return self._pkg_info
except AttributeError:
metadata = self.get_metadata(self.PKG_INFO)
self._pkg_info = email.parser.Parser().parsestr(metadata)
return self._pkg_info
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
self.__dep_map = self._compute_dependencies()
return self.__dep_map
def _preparse_requirement(self, requires_dist):
"""Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
Split environment marker, add == prefix to version specifiers as
        necessary, and remove parentheses.
"""
parts = requires_dist.split(';', 1) + ['']
distvers = parts[0].strip()
mark = parts[1].strip()
distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
distvers = distvers.replace('(', '').replace(')', '')
return (distvers, mark)
def _compute_dependencies(self):
"""Recompute this distribution's dependencies."""
from _markerlib import compile as compile_marker
dm = self.__dep_map = {None: []}
reqs = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
distvers, mark = self._preparse_requirement(req)
parsed = next(parse_requirements(distvers))
parsed.marker_fn = compile_marker(mark)
reqs.append(parsed)
def reqs_for_extra(extra):
for req in reqs:
if req.marker_fn(override={'extra':extra}):
yield req
common = frozenset(reqs_for_extra(None))
dm[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
extra = safe_extra(extra.strip())
dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
return dm
_distributionImpl = {
'.egg': Distribution,
'.egg-info': Distribution,
'.dist-info': DistInfoDistribution,
}
def issue_warning(*args,**kw):
level = 1
g = globals()
try:
# find the first stack frame that is *not* code in
# the pkg_resources module, to use for the warning
while sys._getframe(level).f_globals is g:
level += 1
except ValueError:
pass
warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
def __str__(self):
return ' '.join(self.args)
def parse_requirements(strs):
"""Yield ``Requirement`` objects for each specification in `strs`
`strs` must be a string, or a (possibly-nested) iterable thereof.
"""
# create a steppable iterator, so we can handle \-continuations
lines = iter(yield_lines(strs))
def scan_list(ITEM, TERMINATOR, line, p, groups, item_name):
items = []
while not TERMINATOR(line, p):
if CONTINUE(line, p):
try:
line = next(lines)
p = 0
except StopIteration:
msg = "\\ must not appear on the last nonblank line"
raise RequirementParseError(msg)
match = ITEM(line, p)
if not match:
msg = "Expected " + item_name + " in"
raise RequirementParseError(msg, line, "at", line[p:])
items.append(match.group(*groups))
p = match.end()
match = COMMA(line, p)
if match:
# skip the comma
p = match.end()
elif not TERMINATOR(line, p):
msg = "Expected ',' or end-of-list in"
raise RequirementParseError(msg, line, "at", line[p:])
match = TERMINATOR(line, p)
# skip the terminator, if any
if match:
p = match.end()
return line, p, items
for line in lines:
match = DISTRO(line)
if not match:
raise RequirementParseError("Missing distribution spec", line)
project_name = match.group(1)
p = match.end()
extras = []
match = OBRACKET(line, p)
if match:
p = match.end()
line, p, extras = scan_list(
DISTRO, CBRACKET, line, p, (1,), "'extra' name"
)
line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2),
"version spec")
specs = [(op, val) for op, val in specs]
yield Requirement(project_name, specs, extras)
class Requirement:
def __init__(self, project_name, specs, extras):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
self.unsafe_name, project_name = project_name, safe_name(project_name)
self.project_name, self.key = project_name, project_name.lower()
self.specifier = packaging.specifiers.SpecifierSet(
",".join(["".join([x, y]) for x, y in specs])
)
self.specs = specs
self.extras = tuple(map(safe_extra, extras))
self.hashCmp = (
self.key,
self.specifier,
frozenset(self.extras),
)
self.__hash = hash(self.hashCmp)
def __str__(self):
extras = ','.join(self.extras)
if extras:
extras = '[%s]' % extras
return '%s%s%s' % (self.project_name, extras, self.specifier)
def __eq__(self, other):
return (
isinstance(other, Requirement) and
self.hashCmp == other.hashCmp
)
def __ne__(self, other):
return not self == other
def __contains__(self, item):
if isinstance(item, Distribution):
if item.key != self.key:
return False
item = item.version
# Allow prereleases always in order to match the previous behavior of
# this method. In the future this should be smarter and follow PEP 440
# more accurately.
return self.specifier.contains(item, prereleases=True)
def __hash__(self):
return self.__hash
    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)
@staticmethod
def parse(s):
reqs = list(parse_requirements(s))
if reqs:
if len(reqs) == 1:
return reqs[0]
raise ValueError("Expected only one requirement", s)
raise ValueError("No requirements found", s)
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls, type):
class cls(cls, object): pass
return cls.__mro__[1:]
return cls.__mro__
def _find_adapter(registry, ob):
"""Return an adapter factory for `ob` from `registry`"""
for t in _get_mro(getattr(ob, '__class__', type(ob))):
if t in registry:
return registry[t]
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def _bypass_ensure_directory(path):
"""Sandbox-bypassing version of ensure_directory()"""
if not WRITE_SUPPORT:
raise IOError('"os.mkdir" not supported on this platform.')
dirname, filename = split(path)
if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname)
mkdir(dirname, 0o755)
def split_sections(s):
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if line.startswith("["):
if line.endswith("]"):
if section or content:
yield section, content
section = line[1:-1].strip()
content = []
else:
raise ValueError("Invalid section heading", line)
else:
content.append(line)
# wrap up last segment
yield section, content
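# Minimal sketch (not part of the original module): split_sections() yields
# (section, lines) pairs, with a leading None section for any lines that appear
# before the first "[header]"; the entry point line below is hypothetical.
def _example_split_sections():
    text = "top-level\n[console_scripts]\ndemo = demo_pkg.cli:run\n"
    return list(split_sections(text))
    # -> [(None, ['top-level']), ('console_scripts', ['demo = demo_pkg.cli:run'])]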
def _mkstemp(*args,**kw):
old_open = os.open
try:
# temporarily bypass sandboxing
os.open = os_open
return tempfile.mkstemp(*args,**kw)
finally:
# and then put it back
os.open = old_open
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
f(*args, **kwargs)
return f
@_call_aside
def _initialize(g=globals()):
"Set up global resource manager (deliberately not state-saved)"
manager = ResourceManager()
g['_manager'] = manager
for name in dir(manager):
if not name.startswith('_'):
g[name] = getattr(manager, name)
@_call_aside
def _initialize_master_working_set():
"""
Prepare the master working set and make the ``require()``
API available.
This function has explicit effects on the global state
of pkg_resources. It is intended to be invoked once at
the initialization of this module.
Invocation by other packages is unsupported and done
at their own risk.
"""
working_set = WorkingSet._build_master()
_declare_state('object', working_set=working_set)
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
# backward compatibility
run_main = run_script
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]
# match order
list(map(working_set.add_entry, sys.path))
globals().update(locals())
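# Hedged usage sketch (not part of the original module): after import, module
# level helpers such as require() and iter_entry_points() are bound from the
# master working set built above; the project and group names below are
# hypothetical.
# require('demo-pkg>=1.0')
# for ep in iter_entry_points(group='demo_pkg.plugins'):
#     plugin = ep.load()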
| cc0-1.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-sql/azure/mgmt/sql/models/subscription_usage.py | 2 | 2114 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_resource import ProxyResource
class SubscriptionUsage(ProxyResource):
"""Usage Metric of a Subscription in a Location.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar display_name: User-readable name of the metric.
:vartype display_name: str
:ivar current_value: Current value of the metric.
:vartype current_value: float
:ivar limit: Boundary value of the metric.
:vartype limit: float
:ivar unit: Unit of the metric.
:vartype unit: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'display_name': {'readonly': True},
'current_value': {'readonly': True},
'limit': {'readonly': True},
'unit': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'display_name': {'key': 'properties.displayName', 'type': 'str'},
'current_value': {'key': 'properties.currentValue', 'type': 'float'},
'limit': {'key': 'properties.limit', 'type': 'float'},
'unit': {'key': 'properties.unit', 'type': 'str'},
}
def __init__(self):
super(SubscriptionUsage, self).__init__()
self.display_name = None
self.current_value = None
self.limit = None
self.unit = None
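# Illustrative sketch (not part of the generated model): SubscriptionUsage
# instances are normally returned by the service rather than constructed
# directly; the helper below merely formats the read-only fields and is an
# assumption, not part of the SDK.
def _format_subscription_usage(usage):
    # `usage` is assumed to be a SubscriptionUsage deserialized by the SDK.
    return '{0}: {1}/{2} {3}'.format(
        usage.display_name, usage.current_value, usage.limit, usage.unit)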
| mit |
vdhan/focus-booster | __main__.py | 1 | 7987 | import os
from threading import Thread
from tkinter import *
from tkinter.ttk import *
from pydub import AudioSegment
from pydub.playback import play
class Core(Tk):
def get_w(self):
return self._w
class Application(Frame):
def __init__(self, master=None, **kw):
super().__init__(master, **kw)
self.path = os.path.dirname(__file__)
self.master.title("An's focus booster v2.0")
self.master.minsize(300, 80)
self.master.maxsize(600, 80)
try:
img = PhotoImage(file=os.path.join(self.path, 'icon.png'))
self.master.tk.call('wm', 'iconphoto', self.master.get_w(), img)
except TclError:
img = PhotoImage(file=os.path.join(self.path, 'icon.gif'))
self.master.tk.call('wm', 'iconphoto', self.master.get_w(), img)
self.sec = self.itv = self.ses_num = self.break_num = self.ps_sec = self.se_sec = self.brk_sec = 0
self.minu = self.ps_min = self.ses_min = 25
self.is_ses = True
self.brk_min = 5
self.bell = AudioSegment.from_mp3(os.path.join(self.path, 'bell.mp3'))
self.time = self.master.after(0)
self.style = Style()
self.tbs = StringVar()
self.stime = StringVar()
self.t_ses_min = StringVar()
self.t_se_sec = StringVar()
self.t_brk_min = StringVar()
self.t_brk_sec = StringVar()
self.ses = StringVar()
self.win_op = Toplevel(self.master)
self.pb_time = Progressbar(self, orient=HORIZONTAL, mode='determinate', maximum=2520)
self.widgets()
def widgets(self):
themes = self.style.theme_names()
if 'xpnative' in themes:
self.style.theme_use('xpnative')
elif 'aqua' in themes:
self.style.theme_use('aqua')
elif 'alt' in themes:
self.style.theme_use('alt')
else:
self.style.theme_use('default')
self.style.configure('Horizontal.TProgressbar', background='#00f')
self.stime.set('25:00')
lb_time = Label(self, textvariable=self.stime)
lb_time.grid(column=0, row=0, sticky='s')
self.ses.set('Session {:02d}'.format(self.ses_num))
lb_ses = Label(self, textvariable=self.ses)
lb_ses.grid(column=0, row=1, sticky='w')
self.pb_time.grid(column=1, row=0, rowspan=2, padx=5, sticky='wnes')
self.tbs.set('Start')
btn_s = Button(self, textvariable=self.tbs, command=self.btn_start)
btn_s.grid(column=2, row=0, pady=2, sticky='ne')
btn_i = Button(self, text='Option', command=self.open_pref)
btn_i.grid(column=2, row=1, pady=2, sticky='se')
self.master.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.win_op.title('Preferences')
self.win_op.resizable(False, False)
self.win_op.protocol('WM_DELETE_WINDOW', self.ok_pref)
lf_ses = Labelframe(self.win_op, text='Session Time:', padding=10)
lf_ses.grid(column=0, row=0, columnspan=2)
self.t_ses_min.set(25)
sb_ses_min = Spinbox(lf_ses, from_=0, to=999, textvariable=self.t_ses_min, increment=1, state='readonly')
sb_ses_min.grid(column=0, row=0, padx=5, pady=5)
sb_se_sec = Spinbox(lf_ses, from_=0, to=59, textvariable=self.t_se_sec, increment=1, state='readonly')
sb_se_sec.grid(column=0, row=1, padx=5, pady=5)
lb_ses_min = Label(lf_ses, text='Minutes')
lb_ses_min.grid(column=1, row=0, sticky='w')
lb_se_sec = Label(lf_ses, text='Seconds')
lb_se_sec.grid(column=1, row=1, sticky='w')
lf_brk = Labelframe(self.win_op, text='Break Time:', padding=10)
lf_brk.grid(column=0, row=1, columnspan=2)
self.t_brk_min.set(5)
sb_brk_min = Spinbox(lf_brk, from_=0, to=60, textvariable=self.t_brk_min, increment=1, state='readonly')
sb_brk_min.grid(column=0, row=0, padx=5, pady=5)
sb_brk_sec = Spinbox(lf_brk, from_=0, to=59, textvariable=self.t_brk_sec, increment=1, state='readonly')
sb_brk_sec.grid(column=0, row=1, padx=5, pady=5)
lb_brk_min = Label(lf_brk, text='Minutes')
lb_brk_min.grid(column=1, row=0, sticky='w')
lb_brk_sec = Label(lf_brk, text='Seconds')
lb_brk_sec.grid(column=1, row=1, sticky='w')
        self.win_op.state('withdrawn')
def btn_start(self):
if self.tbs.get() == 'Start':
self.start()
else:
self.stop()
def open_pref(self):
self.win_op.state('normal')
def ok_pref(self):
self.ses_min = int(self.t_ses_min.get())
self.se_sec = int(self.t_se_sec.get())
self.brk_min = int(self.t_brk_min.get())
self.brk_sec = int(self.t_brk_sec.get())
self.win_op.state('withdrawn')
if self.tbs.get() == 'Start':
self.stime.set('{:02d}:{:02d}'.format(self.ses_min, self.se_sec))
def start(self):
self.minu = self.ses_min
self.sec = self.se_sec
if self.minu == 0 and self.sec == 0:
return
self.itv = self.pb_time['maximum'] / (self.minu * 60 + self.sec)
self.style.configure('Horizontal.TProgressbar', background='#00f')
self.tbs.set('Stop')
self.ses_num += 1
self.ses.set('Session {:02d}'.format(self.ses_num))
self.time = self.master.after(1000, self.update)
def stop(self):
self.pb_time['value'] = 0
self.master.after_cancel(self.time)
self.tbs.set('Start')
def update(self):
self.sec -= 1
if self.sec < 0:
self.sec = 59
self.minu -= 1
self.stime.set('{:02d}:{:02d}'.format(self.minu, self.sec))
if self.is_ses:
if self.sec == 0 and self.minu == 0:
self.minu = self.brk_min
self.sec = self.brk_sec
if self.minu == 0 and self.sec == 0:
self.stop()
return
self.break_num += 1
if self.break_num % 4 == 0:
t = (self.minu * 60 + self.sec) * 3
self.minu = t // 60
self.sec = t % 60
self.itv = self.pb_time['maximum'] / (self.minu * 60 + self.sec)
self.ses.set('Break {:02d}'.format(self.break_num))
self.pb_time['value'] = self.pb_time['maximum']
self.is_ses = False
self.style.configure('Horizontal.TProgressbar', background='#f0f')
else:
self.pb_time['value'] += self.itv
if self.minu == 0 and self.sec <= 10:
thread = Thread(target=play, args=(self.bell,))
thread.start()
if self.style.theme_use() == 'alt':
if self.pb_time['value'] / self.pb_time['maximum'] < 0.2:
pass
elif self.pb_time['value'] / self.pb_time['maximum'] < 0.4:
self.style.configure('Horizontal.TProgressbar', background='#0ff')
elif self.pb_time['value'] / self.pb_time['maximum'] < 0.6:
self.style.configure('Horizontal.TProgressbar', background='#0f0')
elif self.pb_time['value'] / self.pb_time['maximum'] < 0.8:
self.style.configure('Horizontal.TProgressbar', background='#ff0')
else:
self.style.configure('Horizontal.TProgressbar', background='#f00')
else:
if self.sec == 0 and self.minu == 0:
self.stop()
self.is_ses = True
return
else:
self.pb_time['value'] -= self.itv
self.time = self.master.after(1000, self.update)
if __name__ == '__main__':
root = Core()
app = Application(root, padding=10)
app.grid(column=0, row=0, sticky='wnes')
app.mainloop()
| gpl-3.0 |
dushu1203/chromium.src | third_party/cython/src/Cython/Compiler/CmdLine.py | 90 | 8191 | #
# Cython - Command Line Parsing
#
import os
import sys
import Options
usage = """\
Cython (http://cython.org) is a compiler for code written in the
Cython language. Cython is based on Pyrex by Greg Ewing.
Usage: cython [options] sourcefile.{pyx,py} ...
Options:
-V, --version Display version number of cython compiler
-l, --create-listing Write error messages to a listing file
-I, --include-dir <directory> Search for include files in named directory
(multiple include directories are allowed).
-o, --output-file <filename> Specify name of generated C file
-t, --timestamps Only compile newer source files
-f, --force Compile all source files (overrides implied -t)
-v, --verbose Be verbose, print file names on multiple compilation
-p, --embed-positions If specified, the positions in Cython files of each
function definition is embedded in its docstring.
--cleanup <level> Release interned objects on python exit, for memory debugging.
Level indicates aggressiveness, default 0 releases nothing.
-w, --working <directory> Sets the working directory for Cython (the directory modules
are searched from)
--gdb Output debug information for cygdb
--gdb-outdir <directory> Specify gdb debug information output directory. Implies --gdb.
-D, --no-docstrings Strip docstrings from the compiled module.
-a, --annotate Produce a colorized HTML version of the source.
--line-directives Produce #line directives pointing to the .pyx source
--cplus Output a C++ rather than C file.
--embed[=<method_name>] Generate a main() function that embeds the Python interpreter.
-2 Compile based on Python-2 syntax and code semantics.
-3 Compile based on Python-3 syntax and code semantics.
--lenient Change some compile time errors to runtime errors to
improve Python compatibility
--capi-reexport-cincludes Add cincluded headers to any auto-generated header files.
--fast-fail Abort the compilation on the first error
--warning-errors, -Werror Make all warnings into errors
--warning-extra, -Wextra Enable extra warnings
-X, --directive <name>=<value>[,<name=value,...] Overrides a compiler directive
"""
#The following experimental options are supported only on MacOSX:
# -C, --compile Compile generated .c file to .o file
# --link Link .o file to produce extension module (implies -C)
# -+, --cplus Use C++ compiler for compiling and linking
# Additional .o files to link may be supplied when using -X."""
def bad_usage():
sys.stderr.write(usage)
sys.exit(1)
def parse_command_line(args):
from Cython.Compiler.Main import \
CompilationOptions, default_options
def pop_arg():
if args:
return args.pop(0)
else:
bad_usage()
def get_param(option):
tail = option[2:]
if tail:
return tail
else:
return pop_arg()
options = CompilationOptions(default_options)
sources = []
while args:
if args[0].startswith("-"):
option = pop_arg()
if option in ("-V", "--version"):
options.show_version = 1
elif option in ("-l", "--create-listing"):
options.use_listing_file = 1
elif option in ("-+", "--cplus"):
options.cplus = 1
elif option == "--embed":
Options.embed = "main"
elif option.startswith("--embed="):
Options.embed = option[8:]
elif option.startswith("-I"):
options.include_path.append(get_param(option))
elif option == "--include-dir":
options.include_path.append(pop_arg())
elif option in ("-w", "--working"):
options.working_path = pop_arg()
elif option in ("-o", "--output-file"):
options.output_file = pop_arg()
elif option in ("-t", "--timestamps"):
options.timestamps = 1
elif option in ("-f", "--force"):
options.timestamps = 0
elif option in ("-v", "--verbose"):
options.verbose += 1
elif option in ("-p", "--embed-positions"):
Options.embed_pos_in_docstring = 1
elif option in ("-z", "--pre-import"):
Options.pre_import = pop_arg()
elif option == "--cleanup":
Options.generate_cleanup_code = int(pop_arg())
elif option in ("-D", "--no-docstrings"):
Options.docstrings = False
elif option in ("-a", "--annotate"):
Options.annotate = True
elif option == "--convert-range":
Options.convert_range = True
elif option == "--line-directives":
options.emit_linenums = True
elif option == "--no-c-in-traceback":
options.c_line_in_traceback = False
elif option == "--gdb":
options.gdb_debug = True
options.output_dir = os.curdir
elif option == "--gdb-outdir":
options.gdb_debug = True
options.output_dir = pop_arg()
elif option == "--lenient":
Options.error_on_unknown_names = False
Options.error_on_uninitialized = False
elif option == '-2':
options.language_level = 2
elif option == '-3':
options.language_level = 3
elif option == "--capi-reexport-cincludes":
options.capi_reexport_cincludes = True
elif option == "--fast-fail":
Options.fast_fail = True
elif option in ('-Werror', '--warning-errors'):
Options.warning_errors = True
elif option in ('-Wextra', '--warning-extra'):
options.compiler_directives.update(Options.extra_warnings)
elif option == "--old-style-globals":
Options.old_style_globals = True
elif option == "--directive" or option.startswith('-X'):
if option.startswith('-X') and option[2:].strip():
x_args = option[2:]
else:
x_args = pop_arg()
try:
options.compiler_directives = Options.parse_directive_list(
x_args, relaxed_bool=True,
current_settings=options.compiler_directives)
except ValueError, e:
sys.stderr.write("Error in compiler directive: %s\n" % e.args[0])
sys.exit(1)
elif option.startswith('--debug'):
option = option[2:].replace('-', '_')
import DebugFlags
if option in dir(DebugFlags):
setattr(DebugFlags, option, True)
else:
sys.stderr.write("Unknown debug flag: %s\n" % option)
bad_usage()
elif option in ('-h', '--help'):
sys.stdout.write(usage)
sys.exit(0)
else:
sys.stderr.write("Unknown compiler flag: %s\n" % option)
sys.exit(1)
else:
sources.append(pop_arg())
if options.use_listing_file and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using -l\n")
sys.exit(1)
if len(sources) == 0 and not options.show_version:
bad_usage()
if Options.embed and len(sources) > 1:
        sys.stderr.write(
            "cython: Only one source file allowed when using --embed\n")
sys.exit(1)
return options, sources
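# Hedged usage sketch (not part of the original module): parse_command_line()
# consumes a sys.argv-style list and returns the compilation options plus the
# source files; the filename below is hypothetical.
def _example_parse_command_line():
    options, sources = parse_command_line(['-2', '--cplus', 'example.pyx'])
    return options.cplus, options.language_level, sources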
| bsd-3-clause |
ooici/coi-services | ion/agents/data/test/test_external_dataset_agent_slocum.py | 1 | 19351 | #!/usr/bin/env python
"""
@package ion.agents.data.test.test_external_dataset_agent_slocum
@file ion/agents/data/test/test_external_dataset_agent_slocum.py
@author Christopher Mueller
@brief
"""
# Import pyon first for monkey patching.
from pyon.public import log, IonObject
from pyon.ion.resource import PRED, RT
from interface.services.dm.idataset_management_service import DatasetManagementServiceClient
from interface.services.sa.idata_product_management_service import DataProductManagementServiceClient
from interface.services.sa.idata_acquisition_management_service import DataAcquisitionManagementServiceClient
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from interface.objects import ExternalDatasetAgent, ExternalDatasetAgentInstance, ExternalDataProvider, DataSourceModel, ContactInformation, UpdateDescription, DatasetDescription, ExternalDataset, Institution, DataSource
from ion.services.dm.utility.granule_utils import time_series_domain
from ion.agents.data.test.test_external_dataset_agent import ExternalDatasetAgentTestBase, IonIntegrationTestCase
from nose.plugins.attrib import attr
#temp until stream defs are completed
from interface.services.dm.ipubsub_management_service import\
PubsubManagementServiceClient
from coverage_model.parameter import ParameterDictionary, ParameterContext
from coverage_model.parameter_types import QuantityType
import numpy
#DISABLED: attr('INT_LONG', group='eoi')
# these tests rely on the original handler mechanism which had several shortcomings leading to the poller/parser rewrite
class TestExternalDatasetAgent_Slocum(ExternalDatasetAgentTestBase,
IonIntegrationTestCase):
DVR_CONFIG = {
'dvr_mod': 'ion.agents.data.handlers.slocum_data_handler',
'dvr_cls': 'SlocumDataHandler', }
HIST_CONSTRAINTS_1 = {}
HIST_CONSTRAINTS_2 = {}
def _setup_resources(self):
# TODO: some or all of this (or some variation) should move to DAMS'
# Build the test resources for the dataset
dms_cli = DatasetManagementServiceClient()
dams_cli = DataAcquisitionManagementServiceClient()
dpms_cli = DataProductManagementServiceClient()
rr_cli = ResourceRegistryServiceClient()
pubsub_cli = PubsubManagementServiceClient()
eda = ExternalDatasetAgent(name='example dataset agent', handler_module=self.DVR_CONFIG['dvr_mod'],
handler_class=self.DVR_CONFIG['dvr_cls'])
eda_id = dams_cli.create_external_dataset_agent(eda)
eda_inst = ExternalDatasetAgentInstance(name='example dataset agent instance')
eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)
# Create and register the necessary resources/objects
# Create DataProvider
dprov = ExternalDataProvider(name='example data provider', institution=Institution(), contact=ContactInformation())
dprov.contact.individual_names_given = 'Christopher Mueller'
dprov.contact.email = '[email protected]'
# Create DataSource
dsrc = DataSource(name='example datasource', protocol_type='FILE', institution=Institution(), contact=ContactInformation())
dsrc.connection_params['base_data_url'] = ''
dsrc.contact.individual_names_given = 'Tim Giguere'
dsrc.contact.email = '[email protected]'
# Create ExternalDataset
ds_name = 'slocum_test_dataset'
dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
dset.dataset_description.parameters['base_url'] = 'test_data/slocum/'
dset.dataset_description.parameters['list_pattern'] = 'ru05-2012-021-0-0-sbd.dat'
dset.dataset_description.parameters['date_pattern'] = '%Y %j'
dset.dataset_description.parameters['date_extraction_pattern'] = 'ru05-([\d]{4})-([\d]{3})-\d-\d-sbd.dat'
dset.dataset_description.parameters['temporal_dimension'] = None
dset.dataset_description.parameters['zonal_dimension'] = None
dset.dataset_description.parameters['meridional_dimension'] = None
dset.dataset_description.parameters['vertical_dimension'] = None
dset.dataset_description.parameters['variables'] = [
'c_wpt_y_lmc',
'sci_water_cond',
'm_y_lmc',
'u_hd_fin_ap_inflection_holdoff',
'sci_m_present_time',
'm_leakdetect_voltage_forward',
'sci_bb3slo_b660_scaled',
'c_science_send_all',
'm_gps_status',
'm_water_vx',
'm_water_vy',
'c_heading',
'sci_fl3slo_chlor_units',
'u_hd_fin_ap_gain',
'm_vacuum',
'u_min_water_depth',
'm_gps_lat',
'm_veh_temp',
'f_fin_offset',
'u_hd_fin_ap_hardover_holdoff',
'c_alt_time',
'm_present_time',
'm_heading',
'sci_bb3slo_b532_scaled',
'sci_fl3slo_cdom_units',
'm_fin',
'x_cycle_overrun_in_ms',
'sci_water_pressure',
'u_hd_fin_ap_igain',
'sci_fl3slo_phyco_units',
'm_battpos',
'sci_bb3slo_b470_scaled',
'm_lat',
'm_gps_lon',
'sci_ctd41cp_timestamp',
'm_pressure',
'c_wpt_x_lmc',
'c_ballast_pumped',
'x_lmc_xy_source',
'm_lon',
'm_avg_speed',
'sci_water_temp',
'u_pitch_ap_gain',
'm_roll',
'm_tot_num_inflections',
'm_x_lmc',
'u_pitch_ap_deadband',
'm_final_water_vy',
'm_final_water_vx',
'm_water_depth',
'm_leakdetect_voltage',
'u_pitch_max_delta_battpos',
'm_coulomb_amphr',
'm_pitch', ]
# Create DataSourceModel
dsrc_model = DataSourceModel(name='slocum_model')
# dsrc_model.model = 'SLOCUM'
dsrc_model.data_handler_module = 'N/A'
dsrc_model.data_handler_class = 'N/A'
## Run everything through DAMS
ds_id = dams_cli.create_external_dataset(external_dataset=dset)
ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)
# Register the ExternalDataset
dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)
# Or using each method
dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
# dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)
#create temp streamdef so the data product can create the stream
pc_list = []
for pc_k, pc in self._create_parameter_dictionary().iteritems():
pc_list.append(dms_cli.create_parameter_context(pc_k, pc[1].dump()))
pdict_id = dms_cli.create_parameter_dictionary('slocum_param_dict', pc_list)
streamdef_id = pubsub_cli.create_stream_definition(name="slocum_stream_def", description="stream def for slocum testing", parameter_dictionary_id=pdict_id)
# dpms_cli.create_data_product()
# Generate the data product and associate it to the ExternalDataset
dprod = IonObject(RT.DataProduct,
name='slocum_parsed_product',
description='parsed slocum product')
dproduct_id = dpms_cli.create_data_product(data_product=dprod,
stream_definition_id=streamdef_id)
dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)
stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
stream_id = stream_id[0]
log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))
# Create the logger for receiving publications
_, stream_route, _ = self.create_stream_and_logger(name='slocum', stream_id=stream_id)
self.EDA_RESOURCE_ID = ds_id
self.EDA_NAME = ds_name
self.DVR_CONFIG['dh_cfg'] = {
'TESTING': True,
'stream_id': stream_id,
'stream_route': stream_route,
'stream_def': streamdef_id,
'external_dataset_res': dset,
'data_producer_id': dproducer_id, # CBM: Should this be put in the main body of the config - with mod & cls?
'max_records': 20,
}
def _create_parameter_dictionary(self):
pdict = ParameterDictionary()
t_ctxt = ParameterContext('c_wpt_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_water_cond', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_y_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_hd_fin_ap_inflection_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_leakdetect_voltage_forward', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_bb3slo_b660_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('c_science_send_all', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_gps_status', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('c_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_fl3slo_chlor_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_hd_fin_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_vacuum', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_min_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_gps_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_veh_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('f_fin_offset', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_hd_fin_ap_hardover_holdoff', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('c_alt_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_present_time', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_heading', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_bb3slo_b532_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_fl3slo_cdom_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_fin', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('x_cycle_overrun_in_ms', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_water_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_hd_fin_ap_igain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_fl3slo_phyco_units', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_bb3slo_b470_scaled', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_lat', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_gps_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_ctd41cp_timestamp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_pressure', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('c_wpt_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('c_ballast_pumped', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('x_lmc_xy_source', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_lon', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_avg_speed', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('sci_water_temp', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_pitch_ap_gain', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_roll', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_tot_num_inflections', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_x_lmc', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_pitch_ap_deadband', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_final_water_vy', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_final_water_vx', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_water_depth', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_leakdetect_voltage', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('u_pitch_max_delta_battpos', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_coulomb_amphr', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
t_ctxt = ParameterContext('m_pitch', param_type=QuantityType(value_encoding=numpy.dtype('float32')))
t_ctxt.uom = 'unknown'
pdict.add_context(t_ctxt)
return pdict
| bsd-2-clause |
testvidya11/ejrf | questionnaire/views/questions.py | 1 | 2642 | from django.contrib import messages
from django.core.urlresolvers import reverse, reverse_lazy
from django.http import HttpResponseRedirect
from django.views.generic import ListView, CreateView, DeleteView
from questionnaire.forms.questions import QuestionForm
from questionnaire.models import Question, Questionnaire
class QuestionList(ListView):
template_name = 'questions/index.html'
model = Question
object_list = Question.objects.all()
def get(self, *args, **kwargs):
finalized_questionnaire = Questionnaire.objects.filter(status=Questionnaire.FINALIZED)
active_questions = None
if finalized_questionnaire.exists():
active_questions = finalized_questionnaire.latest('created').get_all_questions()
context = {'request': self.request, 'questions': self.model.objects.all(), 'active_questions': active_questions}
return self.render_to_response(context)
class CreateQuestion(CreateView):
def __init__(self, **kwargs):
super(CreateQuestion, self).__init__(**kwargs)
self.template_name = 'questions/new.html'
self.object = Question
self.model = Question
self.form_class = QuestionForm
self.form = None
def get_context_data(self, **kwargs):
context = super(CreateQuestion, self).get_context_data(**kwargs)
context.update({'btn_label': 'CREATE', 'id': 'id-new-question-form'})
return context
def post(self, request, *args, **kwargs):
self.form = QuestionForm(data=request.POST)
if self.form.is_valid():
return self._form_valid()
return self._form_invalid()
def _form_valid(self):
self.form.save()
messages.success(self.request, "Question successfully created.")
return HttpResponseRedirect(reverse('list_questions_page'))
def _form_invalid(self):
messages.error(self.request, "Question NOT created. See errors below.")
context = {'form': self.form, 'btn_label': "CREATE", 'id': 'id-new-question-form'}
return self.render_to_response(context)
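# Illustrative wiring (an assumption, not part of this module): the views here
# reverse the URL name 'list_questions_page', so the project's urls.py is
# expected to contain entries along these lines (names other than
# 'list_questions_page' are hypothetical):
#   url(r'^questions/$', QuestionList.as_view(), name='list_questions_page'),
#   url(r'^questions/new/$', CreateQuestion.as_view(), name='new_question'),
#   url(r'^questions/(?P<question_id>\d+)/delete/$', DeleteQuestion.as_view(), name='delete_question'),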
class DeleteQuestion(DeleteView):
model = Question
def post(self, *args, **kwargs):
question = self.model.objects.get(pk=kwargs['question_id'])
if question.can_be_deleted():
question.delete()
messages.success(self.request, "Question was deleted successfully")
return HttpResponseRedirect(reverse_lazy('list_questions_page'))
messages.error(self.request, "Question was not deleted because it has responses")
return HttpResponseRedirect(reverse_lazy('list_questions_page')) | bsd-3-clause |
c0hen/django-venv | lib/python3.4/site-packages/django/contrib/gis/gdal/prototypes/geom.py | 48 | 4777 | from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, geom_output, int_output, srs_output,
string_output, void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
"For getting OGREnvelopes."
f.argtypes = argtypes
f.restype = None
f.errcheck = check_envelope
return f
def pnt_func(f):
"For accessing point information."
return double_output(f, [c_void_p, c_int])
def topology_func(f):
f.argtypes = [c_void_p, c_void_p]
f.restype = c_int
f.errcheck = lambda result, func, cargs: bool(result)
return f
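# Note (illustrative, not part of the original module): functions wrapped by
# topology_func take two raw OGR geometry handles (c_void_p) and return a
# Python bool, e.g. ogr_contains(g1, g2) answers whether the geometry behind
# g1 spatially contains the one behind g2.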
# ### OGR_G ctypes function prototypes ###
# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii')
to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii')
# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)
# Geometry creation routines.
from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2)
from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1)
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])
# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])
# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)
# Geometry export routines.
to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True) # special handling for WKB.
to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii')
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])
# Geometry spatial-reference related routines.
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])
# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(
lgdal.OGR_G_GetPoint,
[c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)
# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])
# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
| gpl-3.0 |
awangga/smsweb | bson/errors.py | 77 | 1156 | # Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions raised by the BSON package."""
class BSONError(Exception):
"""Base class for all BSON exceptions.
"""
class InvalidBSON(BSONError):
"""Raised when trying to create a BSON object from invalid data.
"""
class InvalidStringData(BSONError):
"""Raised when trying to encode a string containing non-UTF8 data.
"""
class InvalidDocument(BSONError):
"""Raised when trying to create a BSON object from an invalid document.
"""
class InvalidId(BSONError):
"""Raised when trying to create an ObjectId from invalid data.
"""
| agpl-3.0 |
gautam1858/tensorflow | tensorflow/contrib/graph_editor/util.py | 39 | 17675 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions for the graph_editor.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from six import iteritems
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.ops import array_ops as tf_array_ops
__all__ = [
"make_list_of_op",
"get_tensors",
"make_list_of_t",
"get_generating_ops",
"get_consuming_ops",
"ControlOutputs",
"placeholder_name",
"make_placeholder_from_tensor",
"make_placeholder_from_dtype_and_shape",
]
# The graph editor sometimes need to create placeholders, they are named
# "geph_*". "geph" stands for Graph-Editor PlaceHolder.
_DEFAULT_PLACEHOLDER_PREFIX = "geph"
def concatenate_unique(la, lb):
"""Add all the elements of `lb` to `la` if they are not there already.
The elements added to `la` maintain ordering with respect to `lb`.
Args:
la: List of Python objects.
lb: List of Python objects.
Returns:
`la`: The list `la` with missing elements from `lb`.
"""
la_set = set(la)
for l in lb:
if l not in la_set:
la.append(l)
la_set.add(l)
return la
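# Illustrative behaviour (a sketch, not part of the original module):
#   >>> concatenate_unique([1, 2], [2, 3, 1, 4])
#   [1, 2, 3, 4]    # `la` is extended in place and returned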
# TODO(fkp): very generic code, it should be moved in a more generic place.
class ListView(object):
"""Immutable list wrapper.
This class is strongly inspired by the one in tf.Operation.
"""
def __init__(self, list_):
if not isinstance(list_, list):
raise TypeError("Expected a list, got: {}.".format(type(list_)))
self._list = list_
def __iter__(self):
return iter(self._list)
def __len__(self):
return len(self._list)
def __bool__(self):
return bool(self._list)
# Python 3 wants __bool__, Python 2.7 wants __nonzero__
__nonzero__ = __bool__
def __getitem__(self, i):
return self._list[i]
def __add__(self, other):
if not isinstance(other, list):
other = list(other)
return list(self) + other
# TODO(fkp): very generic code, it should be moved in a more generic place.
def is_iterable(obj):
"""Return true if the object is iterable."""
if isinstance(obj, tf_ops.Tensor):
return False
try:
_ = iter(obj)
except Exception: # pylint: disable=broad-except
return False
return True
def flatten_tree(tree, leaves=None):
"""Flatten a tree into a list.
Args:
tree: iterable or not. If iterable, its elements (child) can also be
iterable or not.
leaves: list to which the tree leaves are appended (None by default).
Returns:
A list of all the leaves in the tree.
"""
if leaves is None:
leaves = []
if isinstance(tree, dict):
for _, child in iteritems(tree):
flatten_tree(child, leaves)
elif is_iterable(tree):
for child in tree:
flatten_tree(child, leaves)
else:
leaves.append(tree)
return leaves
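# Illustrative behaviour (a sketch, not part of the original module):
#   >>> flatten_tree([1, [2, {'a': 3}], (4,)])
#   [1, 2, 3, 4]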
def transform_tree(tree, fn, iterable_type=tuple):
"""Transform all the nodes of a tree.
Args:
tree: iterable or not. If iterable, its elements (child) can also be
iterable or not.
fn: function to apply to each leaves.
iterable_type: type use to construct the resulting tree for unknown
iterable, typically `list` or `tuple`.
Returns:
A tree whose leaves has been transformed by `fn`.
The hierarchy of the output tree mimics the one of the input tree.
"""
if is_iterable(tree):
if isinstance(tree, dict):
res = tree.__new__(type(tree))
res.__init__(
(k, transform_tree(child, fn)) for k, child in iteritems(tree))
return res
elif isinstance(tree, tuple):
# NamedTuple?
if hasattr(tree, "_asdict"):
res = tree.__new__(type(tree), **transform_tree(tree._asdict(), fn))
else:
res = tree.__new__(type(tree),
(transform_tree(child, fn) for child in tree))
return res
elif isinstance(tree, collections.Sequence):
res = tree.__new__(type(tree))
res.__init__(transform_tree(child, fn) for child in tree)
return res
else:
return iterable_type(transform_tree(child, fn) for child in tree)
else:
return fn(tree)
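# Illustrative behaviour (a sketch, not part of the original module): the
# nesting of the input (list/tuple/dict) is preserved while leaves are mapped.
#   >>> transform_tree([1, (2, 3)], lambda x: x * 10)
#   [10, (20, 30)]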
def check_graphs(*args):
"""Check that all the element in args belong to the same graph.
Args:
*args: a list of object with a obj.graph property.
Raises:
ValueError: if all the elements do not belong to the same graph.
"""
graph = None
for i, sgv in enumerate(args):
if graph is None and sgv.graph is not None:
graph = sgv.graph
elif sgv.graph is not None and sgv.graph is not graph:
raise ValueError("Argument[{}]: Wrong graph!".format(i))
def get_unique_graph(tops, check_types=None, none_if_empty=False):
"""Return the unique graph used by the all the elements in tops.
Args:
tops: list of elements to check (usually a list of tf.Operation and/or
tf.Tensor). Or a tf.Graph.
check_types: check that the element in tops are of given type(s). If None,
the types (tf.Operation, tf.Tensor) are used.
none_if_empty: don't raise an error if tops is an empty list, just return
None.
Returns:
The unique graph used by all the tops.
Raises:
TypeError: if tops is not a iterable of tf.Operation.
ValueError: if the graph is not unique.
"""
if isinstance(tops, tf_ops.Graph):
return tops
if not is_iterable(tops):
raise TypeError("{} is not iterable".format(type(tops)))
if check_types is None:
check_types = (tf_ops.Operation, tf_ops.Tensor)
elif not is_iterable(check_types):
check_types = (check_types,)
g = None
for op in tops:
if not isinstance(op, check_types):
raise TypeError("Expected a type in ({}), got: {}".format(", ".join([str(
t) for t in check_types]), type(op)))
if g is None:
g = op.graph
elif g is not op.graph:
raise ValueError("Operation {} does not belong to given graph".format(op))
if g is None and not none_if_empty:
raise ValueError("Can't find the unique graph of an empty list")
return g
def make_list_of_op(ops, check_graph=True, allow_graph=True, ignore_ts=False):
"""Convert ops to a list of `tf.Operation`.
Args:
ops: can be an iterable of `tf.Operation`, a `tf.Graph` or a single
operation.
check_graph: if `True` check if all the operations belong to the same graph.
allow_graph: if `False` a `tf.Graph` cannot be converted.
ignore_ts: if True, silently ignore `tf.Tensor`.
Returns:
A newly created list of `tf.Operation`.
Raises:
TypeError: if ops cannot be converted to a list of `tf.Operation` or,
if `check_graph` is `True`, if all the ops do not belong to the
same graph.
"""
if isinstance(ops, tf_ops.Graph):
if allow_graph:
return ops.get_operations()
else:
raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
else:
if not is_iterable(ops):
ops = [ops]
if not ops:
return []
if check_graph:
check_types = None if ignore_ts else tf_ops.Operation
get_unique_graph(ops, check_types=check_types)
return [op for op in ops if isinstance(op, tf_ops.Operation)]
# TODO(fkp): move this function in tf.Graph?
def get_tensors(graph):
"""get all the tensors which are input or output of an op in the graph.
Args:
graph: a `tf.Graph`.
Returns:
A list of `tf.Tensor`.
Raises:
TypeError: if graph is not a `tf.Graph`.
"""
if not isinstance(graph, tf_ops.Graph):
raise TypeError("Expected a graph, got: {}".format(type(graph)))
ts = []
for op in graph.get_operations():
ts += op.outputs
return ts
def make_list_of_t(ts, check_graph=True, allow_graph=True, ignore_ops=False):
"""Convert ts to a list of `tf.Tensor`.
Args:
ts: can be an iterable of `tf.Tensor`, a `tf.Graph` or a single tensor.
check_graph: if `True` check if all the tensors belong to the same graph.
allow_graph: if `False` a `tf.Graph` cannot be converted.
ignore_ops: if `True`, silently ignore `tf.Operation`.
Returns:
A newly created list of `tf.Tensor`.
Raises:
TypeError: if `ts` cannot be converted to a list of `tf.Tensor` or,
if `check_graph` is `True`, if all the ops do not belong to the same graph.
"""
if isinstance(ts, tf_ops.Graph):
if allow_graph:
return get_tensors(ts)
else:
raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
else:
if not is_iterable(ts):
ts = [ts]
if not ts:
return []
if check_graph:
check_types = None if ignore_ops else tf_ops.Tensor
get_unique_graph(ts, check_types=check_types)
return [t for t in ts if isinstance(t, tf_ops.Tensor)]
def get_generating_ops(ts):
"""Return all the generating ops of the tensors in `ts`.
Args:
ts: a list of `tf.Tensor`
Returns:
A list of all the generating `tf.Operation` of the tensors in `ts`.
Raises:
TypeError: if `ts` cannot be converted to a list of `tf.Tensor`.
"""
ts = make_list_of_t(ts, allow_graph=False)
return [t.op for t in ts]
def get_consuming_ops(ts):
"""Return all the consuming ops of the tensors in ts.
Args:
ts: a list of `tf.Tensor`
Returns:
A list of all the consuming `tf.Operation` of the tensors in `ts`.
Raises:
TypeError: if ts cannot be converted to a list of `tf.Tensor`.
"""
ts = make_list_of_t(ts, allow_graph=False)
ops = []
for t in ts:
for op in t.consumers():
if op not in ops:
ops.append(op)
return ops
class ControlOutputs(object):
"""The control outputs topology."""
def __init__(self, graph):
"""Create a dictionary of control-output dependencies.
Args:
graph: a `tf.Graph`.
Returns:
A dictionary where a key is a `tf.Operation` instance and the
corresponding value is a list of all the ops which have the key
as one of their control-input dependencies.
Raises:
TypeError: graph is not a `tf.Graph`.
"""
if not isinstance(graph, tf_ops.Graph):
raise TypeError("Expected a tf.Graph, got: {}".format(type(graph)))
self._control_outputs = {}
self._graph = graph
self._version = None
self._build()
def update(self):
"""Update the control outputs if the graph has changed."""
if self._version != self._graph.version:
self._build()
return self
def _build(self):
"""Build the control outputs dictionary."""
self._control_outputs.clear()
ops = self._graph.get_operations()
for op in ops:
for control_input in op.control_inputs:
if control_input not in self._control_outputs:
self._control_outputs[control_input] = []
if op not in self._control_outputs[control_input]:
self._control_outputs[control_input].append(op)
self._version = self._graph.version
def get_all(self):
return self._control_outputs
def get(self, op):
"""return the control outputs of op."""
if op in self._control_outputs:
return self._control_outputs[op]
else:
return ()
@property
def graph(self):
return self._graph
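# Illustrative usage (a sketch; `graph` and `op` are assumed to be an existing
# tf.Graph and one of its operations):
#   control_outputs = ControlOutputs(graph)
#   deps = control_outputs.get(op)    # ops that have `op` among their control_inputs
#   control_outputs.update()          # rebuilds only if graph.version has changed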
def scope_finalize(scope):
if scope and scope[-1] != "/":
scope += "/"
return scope
def scope_dirname(scope):
slash = scope.rfind("/")
if slash == -1:
return ""
return scope[:slash + 1]
def scope_basename(scope):
slash = scope.rfind("/")
if slash == -1:
return scope
return scope[slash + 1:]
def placeholder_name(t=None, scope=None, prefix=_DEFAULT_PLACEHOLDER_PREFIX):
"""Create placeholder name for the graph editor.
Args:
t: optional tensor on which the placeholder operation's name will be based
on
scope: absolute scope with which to prefix the placeholder's name. None
means that the scope of t is preserved. "" means the root scope.
prefix: placeholder name prefix.
Returns:
A new placeholder name prefixed by "geph". Note that "geph" stands for
Graph Editor PlaceHolder. This convention allows to quickly identify the
placeholder generated by the Graph Editor.
Raises:
TypeError: if t is not None or a tf.Tensor.
"""
if scope is not None:
scope = scope_finalize(scope)
if t is not None:
if not isinstance(t, tf_ops.Tensor):
raise TypeError("Expected a tf.Tenfor, got: {}".format(type(t)))
op_dirname = scope_dirname(t.op.name)
op_basename = scope_basename(t.op.name)
if scope is None:
scope = op_dirname
if op_basename.startswith("{}__".format(prefix)):
ph_name = op_basename
else:
ph_name = "{}__{}_{}".format(prefix, op_basename, t.value_index)
return scope + ph_name
else:
if scope is None:
scope = ""
return "{}{}".format(scope, prefix)
def make_placeholder_from_tensor(t, scope=None,
prefix=_DEFAULT_PLACEHOLDER_PREFIX):
"""Create a `tf.placeholder` for the Graph Editor.
Note that the correct graph scope must be set by the calling function.
Args:
t: a `tf.Tensor` whose name will be used to create the placeholder
(see function placeholder_name).
scope: absolute scope within which to create the placeholder. None
means that the scope of `t` is preserved. `""` means the root scope.
prefix: placeholder name prefix.
Returns:
A newly created `tf.placeholder`.
Raises:
TypeError: if `t` is not `None` or a `tf.Tensor`.
"""
return tf_array_ops.placeholder(
dtype=t.dtype, shape=t.get_shape(),
name=placeholder_name(t, scope=scope, prefix=prefix))
def make_placeholder_from_dtype_and_shape(dtype, shape=None, scope=None,
prefix=_DEFAULT_PLACEHOLDER_PREFIX):
"""Create a tf.placeholder for the Graph Editor.
Note that the correct graph scope must be set by the calling function.
The placeholder is named using the function placeholder_name (with no
tensor argument).
Args:
dtype: the tensor type.
shape: the tensor shape (optional).
scope: absolute scope within which to create the placeholder. None
means that the scope of t is preserved. "" means the root scope.
prefix: placeholder name prefix.
Returns:
A newly created tf.placeholder.
"""
return tf_array_ops.placeholder(
dtype=dtype, shape=shape,
name=placeholder_name(scope=scope, prefix=prefix))
_INTERNAL_VARIABLE_RE = re.compile(r"^__\w+__$")
def get_predefined_collection_names():
"""Return all the predefined collection names."""
return [getattr(tf_ops.GraphKeys, key) for key in dir(tf_ops.GraphKeys)
if not _INTERNAL_VARIABLE_RE.match(key)]
def find_corresponding_elem(target, dst_graph, dst_scope="", src_scope=""):
"""Find corresponding op/tensor in a different graph.
Args:
target: A `tf.Tensor` or a `tf.Operation` belonging to the original graph.
dst_graph: The graph in which the corresponding graph element must be found.
dst_scope: A scope which is prepended to the name to look for.
    src_scope: A scope which is removed from the original name of `target`.
Returns:
    The corresponding `tf.Tensor` or `tf.Operation`.
Raises:
ValueError: if `src_name` does not start with `src_scope`.
TypeError: if `target` is not a `tf.Tensor` or a `tf.Operation`
KeyError: If the corresponding graph element cannot be found.
"""
src_name = target.name
if src_scope:
src_scope = scope_finalize(src_scope)
    if not src_name.startswith(src_scope):
raise ValueError("{} does not start with {}".format(src_name, src_scope))
src_name = src_name[len(src_scope):]
dst_name = src_name
if dst_scope:
dst_scope = scope_finalize(dst_scope)
dst_name = dst_scope + dst_name
if isinstance(target, tf_ops.Tensor):
return dst_graph.get_tensor_by_name(dst_name)
if isinstance(target, tf_ops.Operation):
return dst_graph.get_operation_by_name(dst_name)
raise TypeError("Expected tf.Tensor or tf.Operation, got: {}", type(target))
def find_corresponding(targets, dst_graph, dst_scope="", src_scope=""):
"""Find corresponding ops/tensors in a different graph.
`targets` is a Python tree, that is, a nested structure of iterable
  (list, tuple, dictionary) whose leaves are instances of
`tf.Tensor` or `tf.Operation`
Args:
targets: A Python tree containing `tf.Tensor` or `tf.Operation`
belonging to the original graph.
dst_graph: The graph in which the corresponding graph element must be found.
dst_scope: A scope which is prepended to the name to look for.
    src_scope: A scope which is removed from the original name of each leaf in `targets`.
Returns:
    A Python tree containing the corresponding `tf.Tensor` or `tf.Operation`.
Raises:
ValueError: if `src_name` does not start with `src_scope`.
TypeError: if `top` is not a `tf.Tensor` or a `tf.Operation`
KeyError: If the corresponding graph element cannot be found.
"""
def func(top):
return find_corresponding_elem(top, dst_graph, dst_scope, src_scope)
return transform_tree(targets, func)
| apache-2.0 |
ekalosak/boto | boto/ec2/spotinstancerequest.py | 170 | 7288 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Spot Instance Request
"""
from boto.ec2.ec2object import TaggedEC2Object
from boto.ec2.launchspecification import LaunchSpecification
class SpotInstanceStateFault(object):
"""
The fault codes for the Spot Instance request, if any.
:ivar code: The reason code for the Spot Instance state change.
:ivar message: The message for the Spot Instance state change.
"""
def __init__(self, code=None, message=None):
self.code = code
self.message = message
def __repr__(self):
return '(%s, %s)' % (self.code, self.message)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'code':
self.code = value
elif name == 'message':
self.message = value
setattr(self, name, value)
class SpotInstanceStatus(object):
"""
Contains the status of a Spot Instance Request.
:ivar code: Status code of the request.
:ivar message: The description for the status code for the Spot request.
:ivar update_time: Time the status was stated.
"""
def __init__(self, code=None, update_time=None, message=None):
self.code = code
self.update_time = update_time
self.message = message
def __repr__(self):
return '<Status: %s>' % self.code
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'code':
self.code = value
elif name == 'message':
self.message = value
elif name == 'updateTime':
self.update_time = value
class SpotInstanceRequest(TaggedEC2Object):
"""
:ivar id: The ID of the Spot Instance Request.
:ivar price: The maximum hourly price for any Spot Instance launched to
fulfill the request.
:ivar type: The Spot Instance request type.
:ivar state: The state of the Spot Instance request.
:ivar fault: The fault codes for the Spot Instance request, if any.
:ivar valid_from: The start date of the request. If this is a one-time
request, the request becomes active at this date and time and remains
active until all instances launch, the request expires, or the request is
canceled. If the request is persistent, the request becomes active at this
date and time and remains active until it expires or is canceled.
:ivar valid_until: The end date of the request. If this is a one-time
request, the request remains active until all instances launch, the request
is canceled, or this date is reached. If the request is persistent, it
remains active until it is canceled or this date is reached.
:ivar launch_group: The instance launch group. Launch groups are Spot
Instances that launch together and terminate together.
    :ivar launched_availability_zone: The Availability Zone in which the bid
        is launched.
    :ivar product_description: The product description associated with the
        Spot Instance (for example, "Linux/UNIX").
:ivar availability_zone_group: The Availability Zone group. If you specify
the same Availability Zone group for all Spot Instance requests, all Spot
Instances are launched in the same Availability Zone.
:ivar create_time: The time stamp when the Spot Instance request was
created.
:ivar launch_specification: Additional information for launching instances.
:ivar instance_id: The instance ID, if an instance has been launched to
fulfill the Spot Instance request.
:ivar status: The status code and status message describing the Spot
Instance request.
"""
def __init__(self, connection=None):
super(SpotInstanceRequest, self).__init__(connection)
self.id = None
self.price = None
self.type = None
self.state = None
self.fault = None
self.valid_from = None
self.valid_until = None
self.launch_group = None
self.launched_availability_zone = None
self.product_description = None
self.availability_zone_group = None
self.create_time = None
self.launch_specification = None
self.instance_id = None
self.status = None
def __repr__(self):
return 'SpotInstanceRequest:%s' % self.id
def startElement(self, name, attrs, connection):
retval = super(SpotInstanceRequest, self).startElement(name, attrs,
connection)
if retval is not None:
return retval
if name == 'launchSpecification':
self.launch_specification = LaunchSpecification(connection)
return self.launch_specification
elif name == 'fault':
self.fault = SpotInstanceStateFault()
return self.fault
elif name == 'status':
self.status = SpotInstanceStatus()
return self.status
else:
return None
def endElement(self, name, value, connection):
if name == 'spotInstanceRequestId':
self.id = value
elif name == 'spotPrice':
self.price = float(value)
elif name == 'type':
self.type = value
elif name == 'state':
self.state = value
elif name == 'validFrom':
self.valid_from = value
elif name == 'validUntil':
self.valid_until = value
elif name == 'launchGroup':
self.launch_group = value
elif name == 'availabilityZoneGroup':
self.availability_zone_group = value
elif name == 'launchedAvailabilityZone':
self.launched_availability_zone = value
elif name == 'instanceId':
self.instance_id = value
elif name == 'createTime':
self.create_time = value
elif name == 'productDescription':
self.product_description = value
else:
setattr(self, name, value)
def cancel(self, dry_run=False):
self.connection.cancel_spot_instance_requests(
[self.id],
dry_run=dry_run
)
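# Illustrative usage (a sketch; the region and the overall flow are assumptions,
# while the connection methods are the standard boto EC2 calls):
#   import boto.ec2
#   conn = boto.ec2.connect_to_region('us-east-1')
#   for req in conn.get_all_spot_instance_requests():
#       if req.state == 'open':
#           req.cancel()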
| mit |
lllcho/CAPTCHA-breaking | keras-master/tests/auto/keras/test_activations.py | 79 | 2185 | import math
import keras
import theano
import theano.tensor as T
import numpy
def list_assert_equal(a, b, round_to=7):
'''
This will do a pairwise, rounded equality test across two lists of
numbers.
'''
pairs = zip(a, b)
for i, j in pairs:
assert round(i, round_to) == round(j, round_to)
def get_standard_values():
'''
These are just a set of floats used for testing the activation
functions, and are useful in multiple tests.
'''
return [0,0.1,0.5,0.9,1.0]
def test_softmax():
from keras.activations import softmax as s
# Test using a reference implementation of softmax
def softmax(values):
m = max(values)
values = numpy.array(values)
e = numpy.exp(values - m)
dist = list(e / numpy.sum(e))
return dist
x = T.vector()
exp = s(x)
f = theano.function([x], exp)
test_values=get_standard_values()
result = f(test_values)
expected = softmax(test_values)
print(str(result))
print(str(expected))
list_assert_equal(result, expected)
def test_relu():
'''
Relu implementation doesn't depend on the value being
a theano variable. Testing ints, floats and theano tensors.
'''
from keras.activations import relu as r
assert r(5) == 5
assert r(-5) == 0
assert r(-0.1) == 0
assert r(0.1) == 0.1
x = T.vector()
exp = r(x)
f = theano.function([x], exp)
test_values = get_standard_values()
result = f(test_values)
list_assert_equal(result, test_values) # because no negatives in test values
def test_tanh():
from keras.activations import tanh as t
test_values = get_standard_values()
x = T.vector()
exp = t(x)
f = theano.function([x], exp)
result = f(test_values)
expected = [math.tanh(v) for v in test_values]
print(result)
print(expected)
list_assert_equal(result, expected)
def test_linear():
'''
This function does no input validation, it just returns the thing
that was passed in.
'''
from keras.activations import linear as l
xs = [1, 5, True, None, 'foo']
for x in xs:
assert x == l(x)
| mit |
2014c2g12/c2g12 | c2wp/wsgi/static/Brython2.1.0-20140419-113919/Lib/io.py | 58 | 9385 | import builtins
open = builtins.open
# for seek()
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
r"""File-like objects that read from or write to a string buffer.
This implements (nearly) all stdio methods.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size]) # truncate file at to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
try:
from errno import EINVAL
except ImportError:
EINVAL = 22
__all__ = ["StringIO"]
def _complain_ifclosed(closed):
if closed:
raise ValueError("I/O operation on closed file")
class StringIO:
"""class StringIO([buffer])
When a StringIO object is created, it can be initialized to an existing
string by passing the string to the constructor. If no string is given,
the StringIO will start empty.
The StringIO object can accept either Unicode or 8-bit strings, but
mixing the two may take some care. If both are used, 8-bit strings that
cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
a UnicodeError to be raised when getvalue() is called.
"""
def __init__(self, buf = ''):
self.buf = buf
self.len = len(buf)
self.buflist = []
self.pos = 0
self.closed = False
self.softspace = 0
def __iter__(self):
return self
def next(self):
"""A file object is its own iterator, for example iter(f) returns f
(unless f is closed). When a file is used as an iterator, typically
in a for loop (for example, for line in f: print line), the next()
method is called repeatedly. This method returns the next input line,
or raises StopIteration when EOF is hit.
"""
_complain_ifclosed(self.closed)
r = self.readline()
if not r:
raise StopIteration
return r
def close(self):
"""Free the memory buffer.
"""
if not self.closed:
self.closed = True
del self.buf, self.pos
def isatty(self):
"""Returns False because StringIO objects are not connected to a
tty-like device.
"""
_complain_ifclosed(self.closed)
return False
def seek(self, pos, mode = 0):
"""Set the file's current position.
The mode argument is optional and defaults to 0 (absolute file
positioning); other values are 1 (seek relative to the current
position) and 2 (seek relative to the file's end).
There is no return value.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if mode == 1:
pos += self.pos
elif mode == 2:
pos += self.len
self.pos = max(0, pos)
def tell(self):
"""Return the file's current position."""
_complain_ifclosed(self.closed)
return self.pos
def read(self, n = -1):
"""Read at most size bytes from the file
(less if the read hits EOF before obtaining size bytes).
If the size argument is negative or omitted, read all data until EOF
is reached. The bytes are returned as a string object. An empty
string is returned when EOF is encountered immediately.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if n is None or n < 0:
newpos = self.len
else:
newpos = min(self.pos+n, self.len)
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readline(self, length=None):
r"""Read one entire line from the file.
A trailing newline character is kept in the string (but may be absent
when a file ends with an incomplete line). If the size argument is
present and non-negative, it is a maximum byte count (including the
trailing newline) and an incomplete line may be returned.
An empty string is returned only when EOF is encountered immediately.
Note: Unlike stdio's fgets(), the returned string contains null
characters ('\0') if they occurred in the input.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
i = self.buf.find('\n', self.pos)
if i < 0:
newpos = self.len
else:
newpos = i+1
if length is not None and length >= 0:
if self.pos + length < newpos:
newpos = self.pos + length
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readlines(self, sizehint = 0):
"""Read until EOF using readline() and return a list containing the
lines thus read.
        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes are read
        (possibly more, to accommodate a final whole line).
"""
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def truncate(self, size=None):
"""Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
"""
_complain_ifclosed(self.closed)
if size is None:
size = self.pos
elif size < 0:
raise IOError(EINVAL, "Negative size not allowed")
elif size < self.pos:
self.pos = size
self.buf = self.getvalue()[:size]
self.len = size
def write(self, s):
"""Write a string to the file.
There is no return value.
"""
_complain_ifclosed(self.closed)
if not s: return
spos = self.pos
slen = self.len
if spos == slen:
self.buflist.append(s)
self.len = self.pos = spos + len(s)
return
if spos > slen:
self.buflist.append('\0'*(spos - slen))
slen = spos
newpos = spos + len(s)
if spos < slen:
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
self.buf = ''
if newpos > slen:
slen = newpos
else:
self.buflist.append(s)
slen = newpos
self.len = slen
self.pos = newpos
def writelines(self, iterable):
"""Write a sequence of strings to the file. The sequence can be any
iterable object producing strings, typically a list of strings. There
is no return value.
(The name is intended to match readlines(); writelines() does not add
line separators.)
"""
write = self.write
for line in iterable:
write(line)
def flush(self):
"""Flush the internal buffer
"""
_complain_ifclosed(self.closed)
def getvalue(self):
"""
Retrieve the entire contents of the "file" at any time before
the StringIO object's close() method is called.
The StringIO object can accept either Unicode or 8-bit strings,
but mixing the two may take some care. If both are used, 8-bit
strings that cannot be interpreted as 7-bit ASCII (that use the
8th bit) will cause a UnicodeError to be raised when getvalue()
is called.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
return self.buf
TextIOWrapper = StringIO
class RawIOBase:
def read(self,n=-1):
pass
def readall(self):
pass
def readinto(self,b):
pass
def write(self,b):
pass
BufferedReader = RawIOBase
| gpl-2.0 |
stephan-hof/pyrocksdb | setup.py | 1 | 1112 | from setuptools import setup
from setuptools import find_packages
from distutils.extension import Extension
try:
from Cython.Build import cythonize
except ImportError:
def cythonize(extensions): return extensions
sources = ['rocksdb/_rocksdb.cpp']
else:
sources = ['rocksdb/_rocksdb.pyx']
mod1 = Extension(
'rocksdb._rocksdb',
sources,
extra_compile_args=[
'-std=c++11',
'-O3',
'-Wall',
'-Wextra',
'-Wconversion',
'-fno-strict-aliasing'
],
language='c++',
libraries=[
'rocksdb',
'snappy',
'bz2',
'z'
]
)
setup(
name="pyrocksdb",
version='0.5',
description="Python bindings for RocksDB",
keywords='rocksdb',
author='Stephan Hofmockel',
author_email="Use the github issues",
url="https://github.com/stephan-hof/pyrocksdb",
license='BSD License',
install_requires=['setuptools'],
package_dir={'rocksdb': 'rocksdb'},
packages=find_packages('.'),
ext_modules=cythonize([mod1]),
test_suite='rocksdb.tests',
include_package_data=True
)
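# Illustrative usage of the installed bindings (a sketch following the
# project's documented API; the database path is an assumption):
#   import rocksdb
#   db = rocksdb.DB('test.db', rocksdb.Options(create_if_missing=True))
#   db.put(b'key', b'value')
#   assert db.get(b'key') == b'value'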
| bsd-3-clause |
2013Commons/hue | desktop/core/ext-py/urllib2_kerberos-0.1.6/urllib2_kerberos.py | 44 | 5663 | #!/usr/bin/python
# urllib2 with kerberos proof of concept
# Copyright 2008 Lime Nest LLC
# Copyright 2008 Lime Spot LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import logging
import sys
import urllib2 as u2
import kerberos as k
LOG = logging.getLogger("http_kerberos_auth_handler")
class AbstractKerberosAuthHandler:
"""auth handler for urllib2 that does Kerberos HTTP Negotiate Authentication
"""
def negotiate_value(self, headers):
"""checks for "Negotiate" in proper auth header
"""
authreq = headers.get(self.auth_header, None)
if authreq:
rx = re.compile('(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I)
mo = rx.search(authreq)
if mo:
return mo.group(1)
else:
LOG.debug("regex failed on: %s" % authreq)
else:
LOG.debug("%s header not found" % self.auth_header)
return None
def __init__(self):
self.retried = 0
self.context = None
def generate_request_header(self, req, headers, neg_value):
self.retried += 1
LOG.debug("retry count: %d" % self.retried)
host = req.get_host()
LOG.debug("req.get_host() returned %s" % host)
# We need Python 2.4 compatibility
#tail, sep, head = host.rpartition(':')
#domain = tail or head
host_parts = host.rsplit(':', 1)
domain = host_parts[0]
result, self.context = k.authGSSClientInit("HTTP@%s" % domain)
if result < 1:
LOG.warning("authGSSClientInit returned result %d" % result)
return None
LOG.debug("authGSSClientInit() succeeded")
result = k.authGSSClientStep(self.context, neg_value)
if result < 0:
LOG.warning("authGSSClientStep returned result %d" % result)
return None
LOG.debug("authGSSClientStep() succeeded")
response = k.authGSSClientResponse(self.context)
LOG.debug("authGSSClientResponse() succeeded")
return "Negotiate %s" % response
def authenticate_server(self, headers):
neg_value = self.negotiate_value(headers)
if neg_value is None:
LOG.critical("mutual auth failed. No negotiate header")
return None
result = k.authGSSClientStep(self.context, neg_value)
if result < 1:
# this is a critical security warning
# should change to a raise --Tim
LOG.critical("mutual auth failed: authGSSClientStep returned result %d" % result)
pass
def clean_context(self):
if self.context is not None:
LOG.debug("cleaning context")
k.authGSSClientClean(self.context)
self.context = None
def http_error_auth_reqed(self, host, req, headers):
neg_value = self.negotiate_value(headers) #Check for auth_header
if neg_value is not None:
if not self.retried > 0:
return self.retry_http_kerberos_auth(req, headers, neg_value)
else:
return None
else:
self.retried = 0
def retry_http_kerberos_auth(self, req, headers, neg_value):
try:
try:
neg_hdr = self.generate_request_header(req, headers, neg_value)
if neg_hdr is None:
LOG.debug("neg_hdr was None")
return None
req.add_unredirected_header(self.authz_header, neg_hdr)
resp = self.parent.open(req)
self.authenticate_server(resp.info())
return resp
except k.GSSError, e:
LOG.critical("GSSAPI Error: %s/%s" % (e[0][0], e[1][0]))
return None
finally:
self.clean_context()
self.retried = 0
class ProxyKerberosAuthHandler(u2.BaseHandler, AbstractKerberosAuthHandler):
"""Kerberos Negotiation handler for HTTP proxy auth
"""
authz_header = 'Proxy-Authorization'
auth_header = 'proxy-authenticate'
handler_order = 480 # before Digest auth
def http_error_407(self, req, fp, code, msg, headers):
LOG.debug("inside http_error_407")
host = req.get_host()
retry = self.http_error_auth_reqed(host, req, headers)
self.retried = 0
return retry
class HTTPKerberosAuthHandler(u2.BaseHandler, AbstractKerberosAuthHandler):
"""Kerberos Negotiation handler for HTTP auth
"""
authz_header = 'Authorization'
auth_header = 'www-authenticate'
handler_order = 480 # before Digest auth
def http_error_401(self, req, fp, code, msg, headers):
LOG.debug("inside http_error_401")
host = req.get_host()
retry = self.http_error_auth_reqed(host, req, headers)
self.retried = 0
return retry
def test():
LOG.setLevel(logging.DEBUG)
LOG.info("starting test")
opener = u2.build_opener()
opener.add_handler(HTTPKerberosAuthHandler())
resp = opener.open(sys.argv[1])
print dir(resp), resp.info(), resp.code
if __name__ == '__main__':
test()
| apache-2.0 |
srajag/contrail-controller | src/config/utils/provision_database_node.py | 10 | 5513 | #!/usr/bin/python
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import time
import argparse
import ConfigParser
from vnc_api.vnc_api import *
from cfgm_common.exceptions import *
class DatabaseNodeProvisioner(object):
def __init__(self, args_str=None):
self._args = None
if not args_str:
args_str = ' '.join(sys.argv[1:])
self._parse_args(args_str)
connected = False
tries = 0
while not connected:
try:
self._vnc_lib = VncApi(
self._args.admin_user, self._args.admin_password,
self._args.admin_tenant_name,
self._args.api_server_ip,
self._args.api_server_port, '/',
auth_host=self._args.openstack_ip)
connected = True
except ResourceExhaustionError: # haproxy throws 503
if tries < 10:
tries += 1
time.sleep(3)
else:
raise
gsc_obj = self._vnc_lib.global_system_config_read(
fq_name=['default-global-system-config'])
self._global_system_config_obj = gsc_obj
if self._args.oper == 'add':
self.add_database_node()
elif self._args.oper == 'del':
self.del_database_node()
else:
print "Unknown operation %s. Only 'add' and 'del' supported"\
% (self._args.oper)
# end __init__
def _parse_args(self, args_str):
'''
Eg. python provision_database_node.py --host_name a3s30.contrail.juniper.net
--host_ip 10.1.1.1
--api_server_ip 127.0.0.1
--api_server_port 8082
--oper <add | del>
'''
# Source any specified config/ini file
# Turn off help, so we print all options in response to -h
conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("-c", "--conf_file",
help="Specify config file", metavar="FILE")
args, remaining_argv = conf_parser.parse_known_args(args_str.split())
defaults = {
'api_server_ip': '127.0.0.1',
'api_server_port': '8082',
'oper': 'add',
}
ksopts = {
'admin_user': 'user1',
'admin_password': 'password1',
'admin_tenant_name': 'default-domain'
}
if args.conf_file:
config = ConfigParser.SafeConfigParser()
config.read([args.conf_file])
defaults.update(dict(config.items("DEFAULTS")))
if 'KEYSTONE' in config.sections():
ksopts.update(dict(config.items("KEYSTONE")))
# Override with CLI options
# Don't surpress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# print script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
defaults.update(ksopts)
parser.set_defaults(**defaults)
parser.add_argument(
"--host_name", help="hostname name of database node", required=True)
parser.add_argument("--host_ip", help="IP address of database node", required=True)
parser.add_argument(
"--api_server_ip", help="IP address of api server", required=True)
parser.add_argument("--api_server_port", help="Port of api server")
parser.add_argument(
"--oper", default='add',
help="Provision operation to be done(add or del)")
parser.add_argument(
"--admin_user", help="Name of keystone admin user")
parser.add_argument(
"--admin_password", help="Password of keystone admin user")
parser.add_argument(
"--admin_tenant_name", help="Tenamt name for keystone admin user")
parser.add_argument(
"--openstack_ip", help="IP address of openstack node")
self._args = parser.parse_args(remaining_argv)
# end _parse_args
def add_database_node(self):
gsc_obj = self._global_system_config_obj
database_node_obj = DatabaseNode(
self._args.host_name, gsc_obj,
database_node_ip_address=self._args.host_ip)
database_node_exists = True
try:
database_node_obj = self._vnc_lib.database_node_read(
fq_name=database_node_obj.get_fq_name())
except NoIdError:
database_node_exists = False
if database_node_exists:
self._vnc_lib.database_node_update(database_node_obj)
else:
self._vnc_lib.database_node_create(database_node_obj)
# end add_database_node
def del_database_node(self):
gsc_obj = self._global_system_config_obj
database_node_obj = DatabaseNode(self._args.host_name, gsc_obj)
self._vnc_lib.database_node_delete(
fq_name=database_node_obj.get_fq_name())
# end del_database_node
# end class DatabaseNodeProvisioner
def main(args_str=None):
DatabaseNodeProvisioner(args_str)
# end main
if __name__ == "__main__":
main()
| apache-2.0 |
saurabh6790/test_final_med_app | patches/1311/p06_fix_report_columns.py | 30 | 1270 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
import json
def execute():
doctypes_child_tables_map = {}
# Get all saved report columns
columns = webnotes.conn.sql("""select defvalue, defkey from `tabDefaultValue` where
defkey like '_list_settings:%'""")
# Make map of doctype and child tables
for value, key in columns:
doctype = key.split(':')[-1]
child_tables = webnotes.conn.sql_list("""select options from `tabDocField`
where parent=%s and fieldtype='Table'""", doctype)
doctypes_child_tables_map.setdefault(doctype, child_tables + [doctype])
# Keep only the saved columns whose doctype is the report doctype or one of its child tables
for value, key in columns:
new_columns = []
column_doctype = key.split(':')[-1]
for field, field_doctype in json.loads(value):
if field_doctype in doctypes_child_tables_map.get(column_doctype):
new_columns.append([field, field_doctype])
if new_columns:
webnotes.conn.sql("""update `tabDefaultValue` set defvalue=%s
where defkey=%s""" % ('%s', '%s'), (json.dumps(new_columns), key))
else:
webnotes.conn.sql("""delete from `tabDefaultValue` where defkey=%s""", key) | agpl-3.0 |
hce/antlr4 | runtime/Python2/src/antlr4/RuleContext.py | 14 | 8627 | # [The "BSD license"]
# Copyright (c) 2013 Terence Parr
# Copyright (c) 2013 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#/
# A rule context is a record of a single rule invocation. It knows
# which context invoked it, if any. If there is no parent context, then
# naturally the invoking state is not valid. The parent link
# provides a chain upwards from the current rule invocation to the root
# of the invocation tree, forming a stack. We actually carry no
# information about the rule associated with this context (except
# when parsing). We keep only the state number of the invoking state from
# the ATN submachine that invoked this. Contrast this with the s
# pointer inside ParserRuleContext that tracks the current state
# being "executed" for the current rule.
#
# The parent contexts are useful for computing lookahead sets and
# getting error information.
#
# These objects are used during parsing and prediction.
# For the special case of parsers, we use the subclass
# ParserRuleContext.
#
# @see ParserRuleContext
#/
from io import StringIO
from antlr4.tree.Tree import RuleNode, INVALID_INTERVAL
from antlr4.tree.Trees import Trees
class RuleContext(RuleNode):
EMPTY = None
def __init__(self, parent=None, invokingState=-1):
super(RuleContext, self).__init__()
# What context invoked this rule?
self.parentCtx = parent
# What state invoked the rule associated with this context?
# The "return address" is the followState of invokingState
# If parent is null, this should be -1.
self.invokingState = invokingState
def depth(self):
n = 0
p = self
while p is not None:
p = p.parentCtx
n += 1
return n
# A context is empty if there is no invoking state; meaning nobody called
# the current context.
def isEmpty(self):
return self.invokingState == -1
# satisfy the ParseTree / SyntaxTree interface
def getSourceInterval(self):
return INVALID_INTERVAL
def getRuleContext(self):
return self
def getPayload(self):
return self
# Return the combined text of all child nodes. This method only considers
# tokens which have been added to the parse tree.
# <p>
# Since tokens on hidden channels (e.g. whitespace or comments) are not
# added to the parse trees, they will not appear in the output of this
# method.
#/
def getText(self):
if self.getChildCount() == 0:
return u""
with StringIO() as builder:
for child in self.getChildren():
builder.write(child.getText())
return builder.getvalue()
def getRuleIndex(self):
return -1
def getChild(self, i):
return None
def getChildCount(self):
return 0
def getChildren(self):
for c in []:
yield c
def accept(self, visitor):
return visitor.visitChildren(self)
# # Call this method to view a parse tree in a dialog box visually.#/
# public Future<JDialog> inspect(@Nullable Parser parser) {
# List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
# return inspect(ruleNames);
# }
#
# public Future<JDialog> inspect(@Nullable List<String> ruleNames) {
# TreeViewer viewer = new TreeViewer(ruleNames, this);
# return viewer.open();
# }
#
# # Save this tree in a postscript file#/
# public void save(@Nullable Parser parser, String fileName)
# throws IOException, PrintException
# {
# List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
# save(ruleNames, fileName);
# }
#
# # Save this tree in a postscript file using a particular font name and size#/
# public void save(@Nullable Parser parser, String fileName,
# String fontName, int fontSize)
# throws IOException
# {
# List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
# save(ruleNames, fileName, fontName, fontSize);
# }
#
# # Save this tree in a postscript file#/
# public void save(@Nullable List<String> ruleNames, String fileName)
# throws IOException, PrintException
# {
# Trees.writePS(this, ruleNames, fileName);
# }
#
# # Save this tree in a postscript file using a particular font name and size#/
# public void save(@Nullable List<String> ruleNames, String fileName,
# String fontName, int fontSize)
# throws IOException
# {
# Trees.writePS(this, ruleNames, fileName, fontName, fontSize);
# }
#
# # Print out a whole tree, not just a node, in LISP format
# # (root child1 .. childN). Print just a node if this is a leaf.
# # We have to know the recognizer so we can get rule names.
# #/
# @Override
# public String toStringTree(@Nullable Parser recog) {
# return Trees.toStringTree(this, recog);
# }
#
# Print out a whole tree, not just a node, in LISP format
# (root child1 .. childN). Print just a node if this is a leaf.
#
def toStringTree(self, ruleNames=None, recog=None):
return Trees.toStringTree(self, ruleNames=ruleNames, recog=recog)
# }
#
# @Override
# public String toStringTree() {
# return toStringTree((List<String>)null);
# }
#
def __unicode__(self):
return self.toString(None, None)
# @Override
# public String toString() {
# return toString((List<String>)null, (RuleContext)null);
# }
#
# public final String toString(@Nullable Recognizer<?,?> recog) {
# return toString(recog, ParserRuleContext.EMPTY);
# }
#
# public final String toString(@Nullable List<String> ruleNames) {
# return toString(ruleNames, null);
# }
#
# // recog null unless ParserRuleContext, in which case we use subclass toString(...)
# public String toString(@Nullable Recognizer<?,?> recog, @Nullable RuleContext stop) {
# String[] ruleNames = recog != null ? recog.getRuleNames() : null;
# List<String> ruleNamesList = ruleNames != null ? Arrays.asList(ruleNames) : null;
# return toString(ruleNamesList, stop);
# }
def toString(self, ruleNames, stop):
with StringIO() as buf:
p = self
buf.write(u"[")
while p is not None and p is not stop:
if ruleNames is None:
if not p.isEmpty():
buf.write(unicode(p.invokingState))
else:
ri = p.getRuleIndex()
ruleName = ruleNames[ri] if ri >= 0 and ri < len(ruleNames) else unicode(ri)
buf.write(ruleName)
if p.parentCtx is not None and (ruleNames is not None or not p.parentCtx.isEmpty()):
buf.write(u" ")
p = p.parentCtx
buf.write(u"]")
return buf.getvalue()
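# Editor's illustration (not part of the ANTLR runtime): the header comment above
# describes contexts as a parent-linked stack of rule invocations. A tiny,
# self-contained demonstration of that chain using bare RuleContext objects:
def _example_parent_chain():
    root = RuleContext()                               # no parent, invokingState == -1
    child = RuleContext(parent=root, invokingState=5)  # "invoked" from ATN state 5
    assert child.depth() == 2                          # root + child
    assert root.isEmpty() and not child.isEmpty()
    assert child.toString(None, None) == u"[5]"        # walks parentCtx back to the root
    return child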
| bsd-3-clause |
coblo/pyiscclib | src/iscclib/base.py | 1 | 6592 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from future.builtins import int, object
import re
import six
import unicodedata
import random
from abc import ABCMeta
from bitstring import BitArray
class Component(object):
"""Base class for all ISCC identifier components"""
__metaclass__ = ABCMeta
#: Base35 custom symbol table for conversion between `ident` and `code`
SYMBOLS = u"H9ITDKR83F4SV12PAXWBYG57JQ6OCNMLUEZ"
#: Number of symbols in the Base35 alphabet
BASE = len(SYMBOLS)
#: Regex for `code` validation
STR_PATT = re.compile("^([A-Z1-9]*)$", re.UNICODE)
#: Min value of internal `ident`
INT_MIN = 0
#: Min length of `code` representation
STR_MIN = 1
@property
def INT_MAX(self):
"""Max value of internal `ident` (2**64-1)"""
return 2 ** self.BIT_LENGTH - 1
@property
def STR_MAX(self):
return len(self.encode(self.INT_MAX))
def __init__(self, ident=None, code=None, normalize=True, validate=True, bits=64):
"""
:param int ident: Identifier integers value.
:param str or unicode code: Identifier string representation for display
:param bool normalize: Normalize `ident` and `code` before processing
:param bool validate: Validate the identifier at instantiation.
:param int bits: Number of bits of identifier component.
"""
self.BIT_LENGTH = bits
# Case: normalization
if normalize and code is not None:
code = self.normalize_code(code)
if normalize and ident is not None:
ident = self.normalize_ident(ident)
# Case: create random identifier
if ident is None and code is None:
ident = self.random_ident()
# Case: only `ident` passed in
if ident is not None and code is None:
code = self.encode(ident)
# Case: only `code` passed in
if code is not None and ident is None:
ident = self.decode(code)
self._int = ident
self._str = code
# Case: validation
if validate:
self.validate()
@property
def ident(self):
"""Internal integer value of identifier"""
return self._int
@property
def code(self):
"""External string representation of identifier"""
return self._str
@property
def bitstring(self):
"""String representation of bit-seqence"""
return BitArray(uint=self.ident, length=self.BIT_LENGTH).bin
@classmethod
def normalize_code(cls, code):
return unicodedata.normalize('NFKC', code).strip().upper()
@staticmethod
def normalize_ident(ident):
return int(ident)
@classmethod
def random_ident(cls, bits=64):
"""Create a random identifier.
:return int: Random identifier
"""
rand_crypt = random.SystemRandom()
rand_id = rand_crypt.randint(cls.INT_MIN, 2 ** bits - 1)
return rand_id
def __int__(self):
return self._int
def __str__(self):
return self._str
def __repr__(self):
return '{}({})'.format(self.__class__.__name__, self._int)
def __eq__(self, other):
"""Identifiers are identical if their `ident`s are equal"""
return self.ident == other.ident
def __hash__(self):
"""Override for set uniqueness."""
return self.ident
@classmethod
def encode(cls, ident):
"""
:param int ident: Integer value of identifier
:return str: String representation of identifier
"""
code = ''
while ident > 0 or not code:
ident, i = divmod(ident, cls.BASE)
code += cls.SYMBOLS[i]
return code
@classmethod
def decode(cls, code):
"""
:param str code: String representation of identifier
:return int: Integer value of identifier
"""
ident = 0
for i, digit in enumerate(code):
ident += cls.SYMBOLS.index(digit) * (cls.BASE ** i)
return ident
def hamming_distance(self, other):
x = (self.ident ^ other.ident) & ((1 << self.BIT_LENGTH) - 1)
tot = 0
while x:
tot += 1
x &= x - 1
return tot
def jaccard_similarity(self, other):
"""Bitwise jaccard coefficient of integers a, b"""
same_bits = [(bit == other.bitstring[i]) for i, bit in enumerate(self.bitstring)].count(True)
return same_bits / (2 * len(self.bitstring) - same_bits)
def is_valid(self):
"""
:return bool: True or False
"""
return all((
self._int_valid(self._int),
self._str_valid(self._str),
self.encode(self._int) == self._str,
self.decode(self._str) == self._int,
))
def validate(self):
"""
:raises ValueError: Raises ValueError with help text if invalid
:return bool: Returns True if valid or raises ValueError
"""
self._validate_int(self._int)
self._validate_str(self._str)
self._validate_match(self._str, self._int)
return True
def _validate_str(self, s):
if not isinstance(s, six.text_type):
raise ValueError(
u's must be {} not {}'.format(six.text_type, type(s))
)
if not self._str_valid_chars(s):
raise ValueError(
u'text value `{}` must only contain [1-9][A-Z]'.format(s)
)
if not self._str_valid_len(s):
raise ValueError(u'text value `{}` must be {} to {} chars'.format(
s, self.STR_MIN, self.STR_MAX
))
def _validate_int(self, n):
if not self._int_valid(n):
raise ValueError(u'number value `{}` not between {} and {}'.format(
n, self.INT_MIN, self.INT_MAX
))
def _validate_match(self, s, n):
if not self._is_match(s, n):
raise ValueError(
u'text/number representations don´t match: {}!={}'.format(
self.encode(n), s
)
)
def _int_valid(self, n):
return self.INT_MIN <= n <= self.INT_MAX
def _str_valid(self, s):
return self._str_valid_chars(s) and self._str_valid_len(s)
def _str_valid_chars(self, s):
return bool(self.STR_PATT.match(s))
def _str_valid_len(self, s):
return self.STR_MIN <= len(s) <= self.STR_MAX
def _is_match(self, s, n):
return self.encode(n) == s
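# Editor's illustration (not part of the library): round-tripping an identifier
# through the Base35 encode()/decode() pair above and comparing two identifiers.
# The numbers are arbitrary examples.
def _example_component_roundtrip():
    a = Component(ident=12345)
    assert Component.decode(a.code) == 12345      # encode() and decode() are inverses
    b = Component(code=a.code)
    assert a == b and a.is_valid()
    c = Component(ident=12344)                    # differs from `a` in a single bit
    return a.hamming_distance(c), a.jaccard_similarity(c)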
| bsd-2-clause |
rosshamish/classtime | classtime/brain/local_db/stdlocaldb.py | 1 | 4950 |
from classtime.logging import logging
logging = logging.getLogger(__name__) #pylint: disable=C0103
from classtime.core import db
from classtime.models import Term, Schedule, Course, Section
class StandardLocalDatabase(object):
"""A single institution's view of the local database
Uses a stack-based accessor idiom. Usage:
self.push_<datatype>()
... use self.cur_datatype_model() ...
self.pop_<datatype>()
"""
def __init__(self, institution):
self._institution = institution
self._model_stack = list()
self.Term = Term
self.Schedule = Schedule
self.Course = Course
self.Section = Section
def create(self):
"""Create the database, if it did not already exist
"""
db.create_all()
def push_datatype(self, datatype):
datatype = datatype.lower()
if 'term' in datatype:
self.push_terms()
elif 'schedule' in datatype:
self.push_schedules()
elif 'course' in datatype:
self.push_courses()
elif 'section' in datatype:
self.push_sections()
else:
logging.error('Cannot find datatype <{}>'.format(datatype))
return self
def push_terms(self):
"""Filter all requests to Term objects only. Returns self,
so this method should be chained with other methods.
:returns: self
:rtype: StandardLocalDatabase
"""
self._model_stack.append(Term)
return self
def push_schedules(self):
"""Filter all requests to Schedule objects only. Returns self,
so this method should be chained with other methods.
:returns: self
:rtype: StandardLocalDatabase
"""
self._model_stack.append(Schedule)
return self
def push_courses(self):
"""Filter all requests to Course objects only. Returns self,
so this method should be chained with other methods.
:returns: self
:rtype: StandardLocalDatabase
"""
self._model_stack.append(Course)
return self
def push_sections(self):
"""Filter all requests to Section objects only. Should be
the first call in every chained call to the StandardLocalDatabase.
:returns: self
:rtype: StandardLocalDatabase
"""
self._model_stack.append(Section)
return self
def pop_datatype(self):
self._model_stack.pop()
return self
def cur_datatype_model(self):
return self._model_stack[-1]
def exists(self, datatype, identifiers=None, **kwargs):
"""Checks whether an object exists with the given identifiers (primary key values).
If no identifiers are given, checks if *any* object exists.
Primary keys are specified in each models/*.py definition. Institution must
be omitted; it will be inferred from the institution of this local database instance.
:returns: whether the object(s) exist(s)
:rtype: boolean
"""
if kwargs:
retval = self.query(datatype) \
.filter_by(**kwargs) \
.first() is not None
elif identifiers is None:
retval = self.query(datatype) \
.first() is not None
else:
retval = self.get(datatype, identifiers) is not None
return retval
def get(self, datatype, identifiers):
self.push_datatype(datatype)
identifiers = (self._institution,) + identifiers
retval = self.cur_datatype_model().query.get(identifiers)
self.pop_datatype()
return retval
def query(self, datatype):
self.push_datatype(datatype)
retval = self.cur_datatype_model() \
.query \
.filter_by(institution=self._institution)
self.pop_datatype()
return retval
def add(self, model_dict, datatype):
"""Adds an 'add command' to the running transaction which will
add a new model with attributes specified by dict 'model_dict'
:param dict model_dict: dictionary of attributes to store in the
object.
"""
self.push_datatype(datatype)
model_dict['institution'] = self._institution
db.session.add(self.cur_datatype_model()(model_dict))
self.pop_datatype()
def update(self, model_dict, datatype, identifiers):
db_obj = self.get(datatype=datatype,
identifiers=identifiers)
for attr, value in model_dict.iteritems():
setattr(db_obj, attr, value)
def commit(self):
"""Commits the running transaction to the database
If the commit fails, it will be rolled back to a safe state.
"""
try:
db.session.commit()
except:
db.session.rollback()
raise
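# Editor's illustration (not part of the module): the push/pop accessor idiom
# described in the class docstring. This assumes an initialised Flask-SQLAlchemy
# application context, so it is a sketch rather than something run at import time.
def _example_push_pop_usage(local_db):
    local_db.push_courses()                            # scope the next calls to Course
    first_course = local_db.cur_datatype_model().query.first()
    local_db.pop_datatype()
    any_terms = local_db.exists('term')                # query()/get()/exists() wrap the same pattern
    return first_course, any_terms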
| mit |
QinerTech/QinerApps | openerp/addons/pos_cache/models/pos_cache.py | 13 | 3295 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
import cPickle
from openerp import models, fields, api
class pos_cache(models.Model):
_name = 'pos.cache'
cache = fields.Binary()
product_domain = fields.Text(required=True)
product_fields = fields.Text(required=True)
config_id = fields.Many2one('pos.config', ondelete='cascade', required=True)
compute_user_id = fields.Many2one('res.users', 'Cache compute user', required=True)
@api.model
def refresh_all_caches(self):
self.env['pos.cache'].search([]).refresh_cache()
@api.one
def refresh_cache(self):
products = self.env['product.product'].search(self.get_product_domain())
prod_ctx = products.with_context(pricelist=self.config_id.pricelist_id.id, display_default_code=False)
prod_ctx = prod_ctx.sudo(self.compute_user_id.id)
res = prod_ctx.read(self.get_product_fields())
datas = {
'cache': cPickle.dumps(res, protocol=cPickle.HIGHEST_PROTOCOL),
}
self.write(datas)
@api.model
def get_product_domain(self):
return literal_eval(self.product_domain)
@api.model
def get_product_fields(self):
return literal_eval(self.product_fields)
@api.model
def get_cache(self, domain, fields):
if not self.cache or domain != self.get_product_domain() or fields != self.get_product_fields():
self.product_domain = str(domain)
self.product_fields = str(fields)
self.refresh_cache()
return cPickle.loads(self.cache)
class pos_config(models.Model):
_inherit = 'pos.config'
@api.one
@api.depends('cache_ids')
def _get_oldest_cache_time(self):
pos_cache = self.env['pos.cache']
oldest_cache = pos_cache.search([('config_id', '=', self.id)], order='write_date', limit=1)
if oldest_cache:
self.oldest_cache_time = oldest_cache.write_date
# Use a related model to avoid the load of the cache when the pos load his config
cache_ids = fields.One2many('pos.cache', 'config_id')
oldest_cache_time = fields.Datetime(compute='_get_oldest_cache_time', string='Oldest cache time', readonly=True)
def _get_cache_for_user(self):
pos_cache = self.env['pos.cache']
cache_for_user = pos_cache.search([('id', 'in', self.cache_ids.ids), ('compute_user_id', '=', self.env.uid)])
if cache_for_user:
return cache_for_user[0]
else:
return None
@api.multi
def get_products_from_cache(self, fields, domain):
cache_for_user = self._get_cache_for_user()
if cache_for_user:
return cache_for_user.get_cache(domain, fields)
else:
pos_cache = self.env['pos.cache']
pos_cache.create({
'config_id': self.id,
'product_domain': str(domain),
'product_fields': str(fields),
'compute_user_id': self.env.uid
})
new_cache = self._get_cache_for_user()
return new_cache.get_cache(domain, fields)
@api.one
def delete_cache(self):
# throw away the old caches
self.cache_ids.unlink()
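# Editor's note (not part of the addon): the `cache` column above stores the
# result of product.read() serialized with cPickle, and get_cache() rebuilds it
# whenever the requested domain or field list changes. Outside of Odoo the
# stored payload is just a pickled list of dicts:
def _example_cache_payload():
    payload = cPickle.dumps([{'id': 1, 'name': 'Coffee'}],
                            protocol=cPickle.HIGHEST_PROTOCOL)
    return cPickle.loads(payload)     # -> [{'id': 1, 'name': 'Coffee'}]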
| gpl-3.0 |
jstammers/EDMSuite | NavPython/IronPython/Lib/distutils/command/build_ext.py | 71 | 32327 | """distutils.command.build_ext
Implements the Distutils 'build_ext' command, for building extension
modules (currently limited to C extensions, should accommodate C++
extensions ASAP)."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys, os, string, re
from types import *
from site import USER_BASE, USER_SITE
from distutils.core import Command
from distutils.errors import *
from distutils.sysconfig import customize_compiler, get_python_version
from distutils.dep_util import newer_group
from distutils.extension import Extension
from distutils.util import get_platform
from distutils import log
if os.name == 'nt':
from distutils.msvccompiler import get_build_version
MSVC_VERSION = int(get_build_version())
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
extension_name_re = re.compile \
(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
def show_compilers ():
from distutils.ccompiler import show_compilers
show_compilers()
class build_ext (Command):
description = "build C/C++ extensions (compile/link to build directory)"
# XXX thoughts on how to deal with complex command-line options like
# these, i.e. how to make it so fancy_getopt can suck them off the
# command line and make it look like setup.py defined the appropriate
# lists of tuples of what-have-you.
# - each command needs a callback to process its command-line options
# - Command.__init__() needs access to its share of the whole
# command line (must ultimately come from
# Distribution.parse_command_line())
# - it then calls the current command class' option-parsing
# callback to deal with weird options like -D, which have to
# parse the option text and churn out some custom data
# structure
# - that data structure (in this case, a list of 2-tuples)
# will then be present in the command object by the time
# we get to finalize_options() (i.e. the constructor
# takes care of both command-line and client options
# in between initialize_options() and finalize_options())
sep_by = " (separated by '%s')" % os.pathsep
user_options = [
('build-lib=', 'b',
"directory for compiled extension modules"),
('build-temp=', 't',
"directory for temporary files (build by-products)"),
('plat-name=', 'p',
"platform name to cross-compile for, if supported "
"(default: %s)" % get_platform()),
('inplace', 'i',
"ignore build-lib and put compiled extensions into the source " +
"directory alongside your pure Python modules"),
('include-dirs=', 'I',
"list of directories to search for header files" + sep_by),
('define=', 'D',
"C preprocessor macros to define"),
('undef=', 'U',
"C preprocessor macros to undefine"),
('libraries=', 'l',
"external C libraries to link with"),
('library-dirs=', 'L',
"directories to search for external C libraries" + sep_by),
('rpath=', 'R',
"directories to search for shared C libraries at runtime"),
('link-objects=', 'O',
"extra explicit link objects to include in the link"),
('debug', 'g',
"compile/link with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
('swig-cpp', None,
"make SWIG create C++ files (default is C)"),
('swig-opts=', None,
"list of SWIG command line options"),
('swig=', None,
"path to the SWIG executable"),
('user', None,
"add user include, library and rpath"),
]
boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options (self):
self.extensions = None
self.build_lib = None
self.plat_name = None
self.build_temp = None
self.inplace = 0
self.package = None
self.include_dirs = None
self.define = None
self.undef = None
self.libraries = None
self.library_dirs = None
self.rpath = None
self.link_objects = None
self.debug = None
self.force = None
self.compiler = None
self.swig = None
self.swig_cpp = None
self.swig_opts = None
self.user = None
def finalize_options(self):
from distutils import sysconfig
self.set_undefined_options('build',
('build_lib', 'build_lib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'),
('plat_name', 'plat_name'),
)
if self.package is None:
self.package = self.distribution.ext_package
self.extensions = self.distribution.ext_modules
# Make sure Python's include directories (for Python.h, pyconfig.h,
# etc.) are in the include search path.
py_include = sysconfig.get_python_inc()
plat_py_include = sysconfig.get_python_inc(plat_specific=1)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# Put the Python "system" include dir at the end, so that
# any local include dirs take precedence.
self.include_dirs.append(py_include)
if plat_py_include != py_include:
self.include_dirs.append(plat_py_include)
if isinstance(self.libraries, str):
self.libraries = [self.libraries]
# Life is easier if we're not forever checking for None, so
# simplify these options to empty lists if unset
if self.libraries is None:
self.libraries = []
if self.library_dirs is None:
self.library_dirs = []
elif type(self.library_dirs) is StringType:
self.library_dirs = string.split(self.library_dirs, os.pathsep)
if self.rpath is None:
self.rpath = []
elif type(self.rpath) is StringType:
self.rpath = string.split(self.rpath, os.pathsep)
# for extensions under windows use different directories
# for Release and Debug builds.
# also Python's library directory must be appended to library_dirs
if os.name == 'nt':
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
if self.debug:
self.build_temp = os.path.join(self.build_temp, "Debug")
else:
self.build_temp = os.path.join(self.build_temp, "Release")
# Append the source distribution include and library directories,
# this allows distutils on windows to work in the source tree
self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
if MSVC_VERSION == 9:
# Use the .lib files for the correct architecture
if self.plat_name == 'win32':
suffix = ''
else:
# win-amd64 or win-ia64
suffix = self.plat_name[4:]
new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
if suffix:
new_lib = os.path.join(new_lib, suffix)
self.library_dirs.append(new_lib)
elif MSVC_VERSION == 8:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS8.0'))
elif MSVC_VERSION == 7:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VS7.1'))
else:
self.library_dirs.append(os.path.join(sys.exec_prefix,
'PC', 'VC6'))
# OS/2 (EMX) doesn't support Debug vs Release builds, but has the
# import libraries in its "Config" subdirectory
if os.name == 'os2':
self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
# for extensions under Cygwin and AtheOS Python's library directory must be
# appended to library_dirs
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
self.library_dirs.append(os.path.join(sys.prefix, "lib",
"python" + get_python_version(),
"config"))
else:
# building python standard extensions
self.library_dirs.append('.')
# for extensions under Linux or Solaris with a shared Python library,
# Python's library directory must be appended to library_dirs
sysconfig.get_config_var('Py_ENABLE_SHARED')
if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
or sys.platform.startswith('sunos'))
and sysconfig.get_config_var('Py_ENABLE_SHARED')):
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
# building third party extensions
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
else:
# building python standard extensions
self.library_dirs.append('.')
# The argument parsing will result in self.define being a string, but
# it has to be a list of 2-tuples. All the preprocessor symbols
# specified by the 'define' option will be set to '1'. Multiple
# symbols can be separated with commas.
if self.define:
defines = self.define.split(',')
self.define = map(lambda symbol: (symbol, '1'), defines)
# The option for macros to undefine is also a string from the
# option parsing, but has to be a list. Multiple symbols can also
# be separated with commas here.
if self.undef:
self.undef = self.undef.split(',')
if self.swig_opts is None:
self.swig_opts = []
else:
self.swig_opts = self.swig_opts.split(' ')
# Finally add the user include and library directories if requested
if self.user:
user_include = os.path.join(USER_BASE, "include")
user_lib = os.path.join(USER_BASE, "lib")
if os.path.isdir(user_include):
self.include_dirs.append(user_include)
if os.path.isdir(user_lib):
self.library_dirs.append(user_lib)
self.rpath.append(user_lib)
def run(self):
from distutils.ccompiler import new_compiler
# 'self.extensions', as supplied by setup.py, is a list of
# Extension instances. See the documentation for Extension (in
# distutils.extension) for details.
#
# For backwards compatibility with Distutils 0.8.2 and earlier, we
# also allow the 'extensions' list to be a list of tuples:
# (ext_name, build_info)
# where build_info is a dictionary containing everything that
# Extension instances do except the name, with a few things being
# differently named. We convert these 2-tuples to Extension
# instances as needed.
if not self.extensions:
return
# If we were asked to build any C/C++ libraries, make sure that the
# directory where we put them is in the library search path for
# linking extensions.
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.libraries.extend(build_clib.get_library_names() or [])
self.library_dirs.append(build_clib.build_clib)
# Setup the CCompiler object that we'll use to do all the
# compiling and linking
self.compiler = new_compiler(compiler=self.compiler,
verbose=self.verbose,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
# If we are cross-compiling, init the compiler now (if we are not
# cross-compiling, init would not hurt, but people may rely on
# late initialization of compiler even if they shouldn't...)
if os.name == 'nt' and self.plat_name != get_platform():
self.compiler.initialize(self.plat_name)
# And make sure that any compile/link-related options (which might
# come from the command-line or from the setup script) are set in
# that CCompiler object -- that way, they automatically apply to
# all compiling and linking done here.
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name, value) in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
if self.libraries is not None:
self.compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
self.compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
self.compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
self.compiler.set_link_objects(self.link_objects)
# Now actually compile and link everything.
self.build_extensions()
def check_extensions_list(self, extensions):
"""Ensure that the list of extensions (presumably provided as a
command option 'extensions') is valid, i.e. it is a list of
Extension objects. We also support the old-style list of 2-tuples,
where the tuples are (ext_name, build_info), which are converted to
Extension instances here.
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(extensions, list):
raise DistutilsSetupError, \
"'ext_modules' option must be a list of Extension instances"
for i, ext in enumerate(extensions):
if isinstance(ext, Extension):
continue # OK! (assume type-checking done
# by Extension constructor)
if not isinstance(ext, tuple) or len(ext) != 2:
raise DistutilsSetupError, \
("each element of 'ext_modules' option must be an "
"Extension instance or 2-tuple")
ext_name, build_info = ext
log.warn(("old-style (ext_name, build_info) tuple found in "
"ext_modules for extension '%s'"
"-- please convert to Extension instance" % ext_name))
if not (isinstance(ext_name, str) and
extension_name_re.match(ext_name)):
raise DistutilsSetupError, \
("first element of each tuple in 'ext_modules' "
"must be the extension name (a string)")
if not isinstance(build_info, dict):
raise DistutilsSetupError, \
("second element of each tuple in 'ext_modules' "
"must be a dictionary (build info)")
# OK, the (ext_name, build_info) dict is type-safe: convert it
# to an Extension instance.
ext = Extension(ext_name, build_info['sources'])
# Easy stuff: one-to-one mapping from dict elements to
# instance attributes.
for key in ('include_dirs', 'library_dirs', 'libraries',
'extra_objects', 'extra_compile_args',
'extra_link_args'):
val = build_info.get(key)
if val is not None:
setattr(ext, key, val)
# Medium-easy stuff: same syntax/semantics, different names.
ext.runtime_library_dirs = build_info.get('rpath')
if 'def_file' in build_info:
log.warn("'def_file' element of build info dict "
"no longer supported")
# Non-trivial stuff: 'macros' split into 'define_macros'
# and 'undef_macros'.
macros = build_info.get('macros')
if macros:
ext.define_macros = []
ext.undef_macros = []
for macro in macros:
if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
raise DistutilsSetupError, \
("'macros' element of build info dict "
"must be 1- or 2-tuple")
if len(macro) == 1:
ext.undef_macros.append(macro[0])
elif len(macro) == 2:
ext.define_macros.append(macro)
extensions[i] = ext
def get_source_files(self):
self.check_extensions_list(self.extensions)
filenames = []
# Wouldn't it be neat if we knew the names of header files too...
for ext in self.extensions:
filenames.extend(ext.sources)
return filenames
def get_outputs(self):
# Sanity check the 'extensions' list -- can't assume this is being
# done in the same run as a 'build_extensions()' call (in fact, we
# can probably assume that it *isn't*!).
self.check_extensions_list(self.extensions)
# And build the list of output (built) filenames. Note that this
# ignores the 'inplace' flag, and assumes everything goes in the
# "build" tree.
outputs = []
for ext in self.extensions:
outputs.append(self.get_ext_fullpath(ext.name))
return outputs
def build_extensions(self):
# First, sanity-check the 'extensions' list
self.check_extensions_list(self.extensions)
for ext in self.extensions:
self.build_extension(ext)
def build_extension(self, ext):
sources = ext.sources
if sources is None or type(sources) not in (ListType, TupleType):
raise DistutilsSetupError, \
("in 'ext_modules' option (extension '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % ext.name
sources = list(sources)
ext_path = self.get_ext_fullpath(ext.name)
depends = sources + ext.depends
if not (self.force or newer_group(depends, ext_path, 'newer')):
log.debug("skipping '%s' extension (up-to-date)", ext.name)
return
else:
log.info("building '%s' extension", ext.name)
# First, scan the sources for SWIG definition files (.i), run
# SWIG on 'em to create .c files, and modify the sources list
# accordingly.
sources = self.swig_sources(sources, ext)
# Next, compile the source code to object files.
# XXX not honouring 'define_macros' or 'undef_macros' -- the
# CCompiler API needs to change to accommodate this, and I
# want to do one thing at a time!
# Two possible sources for extra compiler arguments:
# - 'extra_compile_args' in Extension object
# - CFLAGS environment variable (not particularly
# elegant, but people seem to expect it and I
# guess it's useful)
# The environment variable should take precedence, and
# any sensible compiler will give precedence to later
# command line args. Hence we combine them in order:
extra_args = ext.extra_compile_args or []
macros = ext.define_macros[:]
for undef in ext.undef_macros:
macros.append((undef,))
objects = self.compiler.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=ext.include_dirs,
debug=self.debug,
extra_postargs=extra_args,
depends=ext.depends)
# XXX -- this is a Vile HACK!
#
# The setup.py script for Python on Unix needs to be able to
# get this list so it can perform all the clean up needed to
# avoid keeping object files around when cleaning out a failed
# build of an extension module. Since Distutils does not
# track dependencies, we have to get rid of intermediates to
# ensure all the intermediates will be properly re-built.
#
self._built_objects = objects[:]
# Now link the object files together into a "shared object" --
# of course, first we have to figure out all the other things
# that go into the mix.
if ext.extra_objects:
objects.extend(ext.extra_objects)
extra_args = ext.extra_link_args or []
# Detect target language, if not provided
language = ext.language or self.compiler.detect_language(sources)
self.compiler.link_shared_object(
objects, ext_path,
libraries=self.get_libraries(ext),
library_dirs=ext.library_dirs,
runtime_library_dirs=ext.runtime_library_dirs,
extra_postargs=extra_args,
export_symbols=self.get_export_symbols(ext),
debug=self.debug,
build_temp=self.build_temp,
target_lang=language)
def swig_sources (self, sources, extension):
"""Walk the list of source files in 'sources', looking for SWIG
interface (.i) files. Run SWIG on all that are found, and
return a modified 'sources' list with SWIG source files replaced
by the generated C (or C++) files.
"""
new_sources = []
swig_sources = []
swig_targets = {}
# XXX this drops generated C/C++ files into the source tree, which
# is fine for developers who want to distribute the generated
# source -- but there should be an option to put SWIG output in
# the temp dir.
if self.swig_cpp:
log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
if self.swig_cpp or ('-c++' in self.swig_opts) or \
('-c++' in extension.swig_opts):
target_ext = '.cpp'
else:
target_ext = '.c'
for source in sources:
(base, ext) = os.path.splitext(source)
if ext == ".i": # SWIG interface file
new_sources.append(base + '_wrap' + target_ext)
swig_sources.append(source)
swig_targets[source] = new_sources[-1]
else:
new_sources.append(source)
if not swig_sources:
return new_sources
swig = self.swig or self.find_swig()
swig_cmd = [swig, "-python"]
swig_cmd.extend(self.swig_opts)
if self.swig_cpp:
swig_cmd.append("-c++")
# Do not override commandline arguments
if not self.swig_opts:
for o in extension.swig_opts:
swig_cmd.append(o)
for source in swig_sources:
target = swig_targets[source]
log.info("swigging %s to %s", source, target)
self.spawn(swig_cmd + ["-o", target, source])
return new_sources
# swig_sources ()
def find_swig (self):
"""Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.
"""
if os.name == "posix":
return "swig"
elif os.name == "nt":
# Look for SWIG in its standard installation directory on
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
if os.path.isfile(fn):
return fn
else:
return "swig.exe"
elif os.name == "os2":
# assume swig available in the PATH.
return "swig.exe"
else:
raise DistutilsPlatformError, \
("I don't know how to find (much less run) SWIG "
"on platform '%s'") % os.name
# find_swig ()
# -- Name generators -----------------------------------------------
# (extension names, filenames, whatever)
def get_ext_fullpath(self, ext_name):
"""Returns the path of the filename for a given extension.
The file is located in `build_lib` or directly in the package
(inplace option).
"""
# makes sure the extension name is only using dots
all_dots = string.maketrans('/'+os.sep, '..')
ext_name = ext_name.translate(all_dots)
fullname = self.get_ext_fullname(ext_name)
modpath = fullname.split('.')
filename = self.get_ext_filename(ext_name)
filename = os.path.split(filename)[-1]
if not self.inplace:
# no further work needed
# returning :
# build_dir/package/path/filename
filename = os.path.join(*modpath[:-1]+[filename])
return os.path.join(self.build_lib, filename)
# the inplace option requires to find the package directory
# using the build_py command for that
package = '.'.join(modpath[0:-1])
build_py = self.get_finalized_command('build_py')
package_dir = os.path.abspath(build_py.get_package_dir(package))
# returning
# package_dir/filename
return os.path.join(package_dir, filename)
def get_ext_fullname(self, ext_name):
"""Returns the fullname of a given extension name.
Adds the `package.` prefix"""
if self.package is None:
return ext_name
else:
return self.package + '.' + ext_name
def get_ext_filename(self, ext_name):
r"""Convert the name of an extension (eg. "foo.bar") into the name
of the file from which it will be loaded (eg. "foo/bar.so", or
"foo\bar.pyd").
"""
from distutils.sysconfig import get_config_var
ext_path = string.split(ext_name, '.')
# OS/2 has an 8 character module (extension) limit :-(
if os.name == "os2":
ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
# extensions in debug_mode are named 'module_d.pyd' under windows
so_ext = get_config_var('SO')
if os.name == 'nt' and self.debug:
return os.path.join(*ext_path) + '_d' + so_ext
return os.path.join(*ext_path) + so_ext
def get_export_symbols (self, ext):
"""Return the list of symbols that a shared extension has to
export. This either uses 'ext.export_symbols' or, if it's not
provided, "init" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "init" function.
"""
initfunc_name = "init" + ext.name.split('.')[-1]
if initfunc_name not in ext.export_symbols:
ext.export_symbols.append(initfunc_name)
return ext.export_symbols
def get_libraries (self, ext):
"""Return the list of libraries to link against when building a
shared extension. On most platforms, this is just 'ext.libraries';
on Windows and OS/2, we add the Python library (eg. python20.dll).
"""
# The python library is always needed on Windows. For MSVC, this
# is redundant, since the library is mentioned in a pragma in
# pyconfig.h that MSVC groks. The other Windows compilers all seem
# to need it mentioned explicitly, though, so that's what we do.
# Append '_d' to the python import library on debug builds.
if sys.platform == "win32":
from distutils.msvccompiler import MSVCCompiler
if not isinstance(self.compiler, MSVCCompiler):
template = "python%d%d"
if self.debug:
template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
else:
return ext.libraries
elif sys.platform == "os2emx":
# EMX/GCC requires the python library explicitly, and I
# believe VACPP does as well (though not confirmed) - AIM Apr01
template = "python%d%d"
# debug versions of the main DLL aren't supported, at least
# not at this time - AIM Apr01
#if self.debug:
# template = template + '_d'
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "cygwin":
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
elif sys.platform[:6] == "atheos":
from distutils import sysconfig
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
# Get SHLIBS from Makefile
extra = []
for lib in sysconfig.get_config_var('SHLIBS').split():
if lib.startswith('-l'):
extra.append(lib[2:])
else:
extra.append(lib)
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib, "m"] + extra
elif sys.platform == 'darwin':
# Don't use the default code below
return ext.libraries
elif sys.platform[:3] == 'aix':
# Don't use the default code below
return ext.libraries
else:
from distutils import sysconfig
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
template = "python%d.%d"
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
return ext.libraries + [pythonlib]
else:
return ext.libraries
# class build_ext
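# Editor's illustration (not part of distutils): the two 'ext_modules' forms this
# command accepts, per check_extensions_list() above. Module and file names are
# hypothetical.
def _example_ext_modules():
    return [
        Extension('demo._speedups', sources=['demo/_speedups.c'],
                  define_macros=[('NDEBUG', '1')]),
        # legacy (ext_name, build_info) tuple -- still accepted, but converted to
        # an Extension instance with a warning:
        ('demo._legacy', {'sources': ['demo/_legacy.c']}),
    ]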
| mit |
umlfri/umlfri2 | umlfri2/application/tablist.py | 1 | 4454 | from umlfri2.application.events.solution import CloseSolutionEvent
from .events.model import DiagramDeletedEvent
from .events.tabs import OpenTabEvent, ChangedCurrentTabEvent, ClosedTabEvent
from .tab import Tab
class TabList:
def __init__(self, application):
self.__tabs = []
self.__application = application
self.__current_tab = None
self.__locked_tabs = set()
application.event_dispatcher.subscribe(DiagramDeletedEvent, self.__diagram_deleted)
application.event_dispatcher.subscribe(CloseSolutionEvent, self.__solution_closed)
def __diagram_deleted(self, event):
tab = self.get_tab_for(event.diagram)
if tab is not None:
tab.close()
def __solution_closed(self, event):
events = []
for tab in self.__tabs:
events.append(ClosedTabEvent(tab))
self.__tabs = []
self.__application.event_dispatcher.dispatch_all(events)
self.__current_tab = None
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
def reset_lock_status(self):
self.__locked_tabs = {tab.drawing_area.diagram.save_id for tab in self.__tabs if tab.locked}
@property
def lock_status_changed(self):
new_locked_tabs = {tab.drawing_area.diagram.save_id for tab in self.__tabs if tab.locked}
return self.__locked_tabs != new_locked_tabs
def get_tab_for(self, diagram):
for tab in self.__tabs:
if tab.drawing_area.diagram is diagram:
return tab
return None
def open_new_project_tabs(self, tabs):
last_tab = None
for tab_info in tabs:
tab = Tab(self.__application, self, tab_info.diagram, locked=tab_info.locked)
self.__tabs.append(tab)
self.__application.event_dispatcher.dispatch(OpenTabEvent(tab))
last_tab = tab
if last_tab is not None:
self.__current_tab = last_tab
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(last_tab))
def select_tab(self, diagram):
if self.__current_tab is not None:
self.__current_tab.drawing_area.reset_action()
if diagram is None:
self.__current_tab = None
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
return
for tab in self.__tabs:
if tab.drawing_area.diagram is diagram:
if self.__current_tab is not tab:
self.__current_tab = tab
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(tab))
return tab
else:
tab = Tab(self.__application, self, diagram)
self.__tabs.append(tab)
self.__current_tab = tab
self.__application.event_dispatcher.dispatch(OpenTabEvent(tab))
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(tab))
return tab
def _close_tab(self, tab):
if tab.locked:
tab.unlock()
tab_id = self.__tabs.index(tab)
del self.__tabs[tab_id]
if tab_id < len(self.__tabs):
self.__current_tab = self.__tabs[tab_id]
elif self.__tabs:
self.__current_tab = self.__tabs[-1]
else:
self.__current_tab = None
self.__application.event_dispatcher.dispatch(ClosedTabEvent(tab))
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(self.__current_tab))
def close_all(self):
events = []
new_tabs = []
for tab in self.__tabs:
if tab.locked:
new_tabs.append(tab)
else:
events.append(ClosedTabEvent(tab))
self.__tabs = new_tabs
self.__application.event_dispatcher.dispatch_all(events)
if new_tabs:
self.__current_tab = new_tabs[-1]
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(new_tabs[-1]))
else:
self.__current_tab = None
self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
@property
def current_tab(self):
return self.__current_tab
def __iter__(self):
yield from self.__tabs
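# Editor's note (not part of the application): lock_status_changed above compares
# a snapshot of locked-tab save_ids (taken by reset_lock_status) with the current
# set, so "changed" simply means the two sets differ. The same idea with plain sets:
def _example_lock_snapshot():
    snapshot = {'diagram-1', 'diagram-3'}                  # captured earlier
    current = {'diagram-1', 'diagram-3', 'diagram-7'}      # another tab was locked since
    return snapshot != current                             # -> True, status changed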
| gpl-3.0 |
epitron/youtube-dl | youtube_dl/extractor/fc2.py | 24 | 5591 | # coding: utf-8
from __future__ import unicode_literals
import hashlib
import re
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
ExtractorError,
sanitized_Request,
urlencode_postdata,
)
class FC2IE(InfoExtractor):
_VALID_URL = r'^(?:https?://video\.fc2\.com/(?:[^/]+/)*content/|fc2:)(?P<id>[^/]+)'
IE_NAME = 'fc2'
_NETRC_MACHINE = 'fc2'
_TESTS = [{
'url': 'http://video.fc2.com/en/content/20121103kUan1KHs',
'md5': 'a6ebe8ebe0396518689d963774a54eb7',
'info_dict': {
'id': '20121103kUan1KHs',
'ext': 'flv',
'title': 'Boxing again with Puff',
},
}, {
'url': 'http://video.fc2.com/en/content/20150125cEva0hDn/',
'info_dict': {
'id': '20150125cEva0hDn',
'ext': 'mp4',
},
'params': {
'username': '[email protected]',
'password': '(snip)',
},
'skip': 'requires actual password',
}, {
'url': 'http://video.fc2.com/en/a/content/20130926eZpARwsF',
'only_matching': True,
}]
def _login(self):
username, password = self._get_login_info()
if username is None or password is None:
return False
# Log in
login_form_strs = {
'email': username,
'password': password,
'done': 'video',
'Submit': ' Login ',
}
login_data = urlencode_postdata(login_form_strs)
request = sanitized_Request(
'https://secure.id.fc2.com/index.php?mode=login&switch_language=en', login_data)
login_results = self._download_webpage(request, None, note='Logging in', errnote='Unable to log in')
if 'mode=redirect&login=done' not in login_results:
self.report_warning('unable to log in: bad username or password')
return False
# this is also needed
login_redir = sanitized_Request('http://id.fc2.com/?mode=redirect&login=done')
self._download_webpage(
login_redir, None, note='Login redirect', errnote='Login redirect failed')
return True
def _real_extract(self, url):
video_id = self._match_id(url)
self._login()
webpage = None
if not url.startswith('fc2:'):
webpage = self._download_webpage(url, video_id)
self._downloader.cookiejar.clear_session_cookies() # must clear
self._login()
title = 'FC2 video %s' % video_id
thumbnail = None
if webpage is not None:
title = self._og_search_title(webpage)
thumbnail = self._og_search_thumbnail(webpage)
refer = url.replace('/content/', '/a/content/') if '/a/content/' not in url else url
mimi = hashlib.md5((video_id + '_gGddgPfeaf_gzyr').encode('utf-8')).hexdigest()
info_url = (
'http://video.fc2.com/ginfo.php?mimi={1:s}&href={2:s}&v={0:s}&fversion=WIN%2011%2C6%2C602%2C180&from=2&otag=0&upid={0:s}&tk=null&'.
format(video_id, mimi, compat_urllib_request.quote(refer, safe=b'').replace('.', '%2E')))
info_webpage = self._download_webpage(
info_url, video_id, note='Downloading info page')
info = compat_urlparse.parse_qs(info_webpage)
if 'err_code' in info:
# most of the time we can still download the video even if err_code is 403 or 602
self.report_warning(
'Error code was: %s... but still trying' % info['err_code'][0])
if 'filepath' not in info:
raise ExtractorError('Cannot download file. Are you logged in?')
video_url = info['filepath'][0] + '?mid=' + info['mid'][0]
title_info = info.get('title')
if title_info:
title = title_info[0]
return {
'id': video_id,
'title': title,
'url': video_url,
'ext': 'flv',
'thumbnail': thumbnail,
}
class FC2EmbedIE(InfoExtractor):
_VALID_URL = r'https?://video\.fc2\.com/flv2\.swf\?(?P<query>.+)'
IE_NAME = 'fc2:embed'
_TEST = {
'url': 'http://video.fc2.com/flv2.swf?t=201404182936758512407645&i=20130316kwishtfitaknmcgd76kjd864hso93htfjcnaogz629mcgfs6rbfk0hsycma7shkf85937cbchfygd74&i=201403223kCqB3Ez&d=2625&sj=11&lang=ja&rel=1&from=11&cmt=1&tk=TlRBM09EQTNNekU9&tl=プリズン・ブレイク%20S1-01%20マイケル%20【吹替】',
'md5': 'b8aae5334cb691bdb1193a88a6ab5d5a',
'info_dict': {
'id': '201403223kCqB3Ez',
'ext': 'flv',
'title': 'プリズン・ブレイク S1-01 マイケル 【吹替】',
'thumbnail': r're:^https?://.*\.jpg$',
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
query = compat_parse_qs(mobj.group('query'))
video_id = query['i'][-1]
title = query.get('tl', ['FC2 video %s' % video_id])[0]
sj = query.get('sj', [None])[0]
thumbnail = None
if sj:
# See thumbnailImagePath() in ServerConst.as of flv2.swf
thumbnail = 'http://video%s-thumbnail.fc2.com/up/pic/%s.jpg' % (
sj, '/'.join((video_id[:6], video_id[6:8], video_id[-2], video_id[-1], video_id)))
return {
'_type': 'url_transparent',
'ie_key': FC2IE.ie_key(),
'url': 'fc2:%s' % video_id,
'title': title,
'thumbnail': thumbnail,
}
| unlicense |
byt3smith/CIRTKit | modules/reversing/viper/peepdf/ccitt.py | 43 | 15515 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ccitt.py
TODO
http://tools.ietf.org/pdf/rfc804.pdf
http://code.google.com/p/origami-pdf/source/browse/lib/origami/filters/ccitt.rb
"""
__author__ = 'Binjo'
__version__ = '0.1'
__date__ = '2012-04-08 14:30:05'
class BitWriterException(Exception):
pass
class BitWriter(object):
"""
"""
def __init__(self, ):
"""
"""
self._data = ''
self._last_byte = None
self._bit_ptr = 0
@property
def data(self):
"""
"""
return self._data
def write(self, data, length):
"""
"""
if not ( length >= 0 and (1 << length) > data ):
raise BitWriterException, "Invalid data length"
if length == 8 and not self._last_byte and self._bit_ptr == 0:
self._data += chr(data)
return
while length > 0:
if length >= 8 - self._bit_ptr:
length -= 8 - self._bit_ptr
if not self._last_byte:
self._last_byte = 0
self._last_byte |= (data >> length) & ((1 << (8 - self._bit_ptr)) - 1)
data &= (1 << length) - 1
self._data += chr(self._last_byte)
self._last_byte = None
self._bit_ptr = 0
else:
if not self._last_byte:
self._last_byte = 0
self._last_byte |= (data & ((1 << length) - 1)) << (8 - self._bit_ptr - length)
self._bit_ptr += length
if self._bit_ptr == 8:
self._data += chr(self._last_byte)
self._last_byte = None
self._bit_ptr = 0
length = 0
class BitReaderException(Exception):
pass
class BitReader(object):
"""
"""
def __init__(self, data):
"""
"""
self._data = data
self._byte_ptr, self._bit_ptr = 0, 0
def reset(self):
"""
"""
self._byte_ptr, self._bit_ptr = 0, 0
@property
def eod_p(self):
"""
"""
return self._byte_ptr >= len(self._data)
@property
def pos(self):
"""
"""
return (self._byte_ptr << 3) + self._bit_ptr
@property
def size(self):
"""
"""
return len(self._data) << 3
@pos.setter
def pos(self, bits):
"""
"""
if bits > self.size:
raise BitReaderException, "Pointer position out of data"
pbyte = bits >> 3
pbit = bits - (pbyte <<3)
self._byte_ptr, self._bit_ptr = pbyte, pbit
def peek(self, length):
"""
"""
if length <= 0:
raise BitReaderException, "Invalid read length"
elif ( self.pos + length ) > self.size:
raise BitReaderException, "Insufficient data"
n = 0
byte_ptr, bit_ptr = self._byte_ptr, self._bit_ptr
while length > 0:
byte = ord( self._data[byte_ptr] )
if length > 8 - bit_ptr:
length -= 8 - bit_ptr
n |= ( byte & ((1 << (8 - bit_ptr)) - 1) ) << length
byte_ptr += 1
bit_ptr = 0
else:
n |= (byte >> (8 - bit_ptr - length)) & ((1 << length) - 1)
length = 0
return n
def read(self, length):
"""
"""
n = self.peek(length)
self.pos += length
return n
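# Editor's illustration (not part of the original module): packing a few
# variable-width fields with BitWriter and reading them back with BitReader.
# BitWriter only emits whole bytes, so the fields below add up to 8 bits.
def _example_bit_roundtrip():
    w = BitWriter()
    w.write(0b101, 3)
    w.write(0b01, 2)
    w.write(0b110, 3)
    r = BitReader(w.data)                     # w.data == '\xae'
    return r.read(3), r.read(2), r.read(3)    # -> (5, 1, 6)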
def codeword(bits):
"""return tuple rather than list, since list is not hashable...
"""
return ( int(bits, 2), len(bits) )
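# Editor's note: a codeword is a (value, bit_length) pair, which lets the encode
# tables below be inverted into decode tables keyed by codeword. For example,
# codeword('0111') == (7, 4) is the white run-length-2 terminal code, and
# codeword('000000000001') == (1, 12) is the EOL marker.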
class CCITTFax(object):
"""
"""
EOL = codeword('000000000001')
RTC = codeword('000000000001' * 6)
WHITE_TERMINAL_ENCODE_TABLE = {
0 : codeword('00110101'),
1 : codeword('000111'),
2 : codeword('0111'),
3 : codeword('1000'),
4 : codeword('1011'),
5 : codeword('1100'),
6 : codeword('1110'),
7 : codeword('1111'),
8 : codeword('10011'),
9 : codeword('10100'),
10 : codeword('00111'),
11 : codeword('01000'),
12 : codeword('001000'),
13 : codeword('000011'),
14 : codeword('110100'),
15 : codeword('110101'),
16 : codeword('101010'),
17 : codeword('101011'),
18 : codeword('0100111'),
19 : codeword('0001100'),
20 : codeword('0001000'),
21 : codeword('0010111'),
22 : codeword('0000011'),
23 : codeword('0000100'),
24 : codeword('0101000'),
25 : codeword('0101011'),
26 : codeword('0010011'),
27 : codeword('0100100'),
28 : codeword('0011000'),
29 : codeword('00000010'),
30 : codeword('00000011'),
31 : codeword('00011010'),
32 : codeword('00011011'),
33 : codeword('00010010'),
34 : codeword('00010011'),
35 : codeword('00010100'),
36 : codeword('00010101'),
37 : codeword('00010110'),
38 : codeword('00010111'),
39 : codeword('00101000'),
40 : codeword('00101001'),
41 : codeword('00101010'),
42 : codeword('00101011'),
43 : codeword('00101100'),
44 : codeword('00101101'),
45 : codeword('00000100'),
46 : codeword('00000101'),
47 : codeword('00001010'),
48 : codeword('00001011'),
49 : codeword('01010010'),
50 : codeword('01010011'),
51 : codeword('01010100'),
52 : codeword('01010101'),
53 : codeword('00100100'),
54 : codeword('00100101'),
55 : codeword('01011000'),
56 : codeword('01011001'),
57 : codeword('01011010'),
58 : codeword('01011011'),
59 : codeword('01001010'),
60 : codeword('01001011'),
61 : codeword('00110010'),
62 : codeword('00110011'),
63 : codeword('00110100')
}
WHITE_TERMINAL_DECODE_TABLE = dict( (v, k) for k, v in WHITE_TERMINAL_ENCODE_TABLE.iteritems() )
BLACK_TERMINAL_ENCODE_TABLE = {
0 : codeword('0000110111'),
1 : codeword('010'),
2 : codeword('11'),
3 : codeword('10'),
4 : codeword('011'),
5 : codeword('0011'),
6 : codeword('0010'),
7 : codeword('00011'),
8 : codeword('000101'),
9 : codeword('000100'),
10 : codeword('0000100'),
11 : codeword('0000101'),
12 : codeword('0000111'),
13 : codeword('00000100'),
14 : codeword('00000111'),
15 : codeword('000011000'),
16 : codeword('0000010111'),
17 : codeword('0000011000'),
18 : codeword('0000001000'),
19 : codeword('00001100111'),
20 : codeword('00001101000'),
21 : codeword('00001101100'),
22 : codeword('00000110111'),
23 : codeword('00000101000'),
24 : codeword('00000010111'),
25 : codeword('00000011000'),
26 : codeword('000011001010'),
27 : codeword('000011001011'),
28 : codeword('000011001100'),
29 : codeword('000011001101'),
30 : codeword('000001101000'),
31 : codeword('000001101001'),
32 : codeword('000001101010'),
33 : codeword('000001101011'),
34 : codeword('000011010010'),
35 : codeword('000011010011'),
36 : codeword('000011010100'),
37 : codeword('000011010101'),
38 : codeword('000011010110'),
39 : codeword('000011010111'),
40 : codeword('000001101100'),
41 : codeword('000001101101'),
42 : codeword('000011011010'),
43 : codeword('000011011011'),
44 : codeword('000001010100'),
45 : codeword('000001010101'),
46 : codeword('000001010110'),
47 : codeword('000001010111'),
48 : codeword('000001100100'),
49 : codeword('000001100101'),
50 : codeword('000001010010'),
51 : codeword('000001010011'),
52 : codeword('000000100100'),
53 : codeword('000000110111'),
54 : codeword('000000111000'),
55 : codeword('000000100111'),
56 : codeword('000000101000'),
57 : codeword('000001011000'),
58 : codeword('000001011001'),
59 : codeword('000000101011'),
60 : codeword('000000101100'),
61 : codeword('000001011010'),
62 : codeword('000001100110'),
63 : codeword('000001100111')
}
BLACK_TERMINAL_DECODE_TABLE = dict( (v, k) for k, v in BLACK_TERMINAL_ENCODE_TABLE.iteritems() )
WHITE_CONFIGURATION_ENCODE_TABLE = {
64 : codeword('11011'),
128 : codeword('10010'),
192 : codeword('010111'),
256 : codeword('0110111'),
320 : codeword('00110110'),
384 : codeword('00110111'),
448 : codeword('01100100'),
512 : codeword('01100101'),
576 : codeword('01101000'),
640 : codeword('01100111'),
704 : codeword('011001100'),
768 : codeword('011001101'),
832 : codeword('011010010'),
896 : codeword('011010011'),
960 : codeword('011010100'),
1024 : codeword('011010101'),
1088 : codeword('011010110'),
1152 : codeword('011010111'),
1216 : codeword('011011000'),
1280 : codeword('011011001'),
1344 : codeword('011011010'),
1408 : codeword('011011011'),
1472 : codeword('010011000'),
1536 : codeword('010011001'),
1600 : codeword('010011010'),
1664 : codeword('011000'),
1728 : codeword('010011011'),
1792 : codeword('00000001000'),
1856 : codeword('00000001100'),
1920 : codeword('00000001001'),
1984 : codeword('000000010010'),
2048 : codeword('000000010011'),
2112 : codeword('000000010100'),
2176 : codeword('000000010101'),
2240 : codeword('000000010110'),
        2304 : codeword('000000010111'),
2368 : codeword('000000011100'),
2432 : codeword('000000011101'),
2496 : codeword('000000011110'),
2560 : codeword('000000011111')
}
WHITE_CONFIGURATION_DECODE_TABLE = dict( (v, k) for k, v in WHITE_CONFIGURATION_ENCODE_TABLE.iteritems() )
BLACK_CONFIGURATION_ENCODE_TABLE = {
64 : codeword('0000001111'),
128 : codeword('000011001000'),
192 : codeword('000011001001'),
256 : codeword('000001011011'),
320 : codeword('000000110011'),
384 : codeword('000000110100'),
448 : codeword('000000110101'),
512 : codeword('0000001101100'),
576 : codeword('0000001101101'),
640 : codeword('0000001001010'),
704 : codeword('0000001001011'),
768 : codeword('0000001001100'),
832 : codeword('0000001001101'),
896 : codeword('0000001110010'),
960 : codeword('0000001110011'),
1024 : codeword('0000001110100'),
1088 : codeword('0000001110101'),
1152 : codeword('0000001110110'),
1216 : codeword('0000001110111'),
1280 : codeword('0000001010010'),
1344 : codeword('0000001010011'),
1408 : codeword('0000001010100'),
1472 : codeword('0000001010101'),
1536 : codeword('0000001011010'),
1600 : codeword('0000001011011'),
1664 : codeword('0000001100100'),
1728 : codeword('0000001100101'),
1792 : codeword('00000001000'),
1856 : codeword('00000001100'),
1920 : codeword('00000001001'),
1984 : codeword('000000010010'),
2048 : codeword('000000010011'),
2112 : codeword('000000010100'),
2176 : codeword('000000010101'),
2240 : codeword('000000010110'),
        2304 : codeword('000000010111'),
2368 : codeword('000000011100'),
2432 : codeword('000000011101'),
2496 : codeword('000000011110'),
2560 : codeword('000000011111')
}
BLACK_CONFIGURATION_DECODE_TABLE = dict( (v, k) for k, v in BLACK_CONFIGURATION_ENCODE_TABLE.iteritems() )
def __init__(self, ):
"""
"""
self._decoded = []
def decode(self, stream, k = 0, eol = False, byteAlign = False, columns = 1728, rows = 0, eob = True, blackIs1 = False, damagedRowsBeforeError = 0):
"""
"""
        # FIXME: this does not seem to follow the spec; the default is False, but unless byteAlign is forced to True, sample 6cc2a162e08836f7d50d461a9fc136fe does not decode correctly
byteAlign = True
if blackIs1:
white, black = 0,1
else:
white, black = 1,0
bitr = BitReader( stream )
bitw = BitWriter()
while not ( bitr.eod_p or rows == 0 ):
current_color = white
if byteAlign and bitr.pos % 8 != 0:
bitr.pos += 8 - (bitr.pos % 8)
if eob and bitr.peek(self.RTC[1]) == self.RTC[0]:
                bitr.pos += self.RTC[1]
break
if bitr.peek(self.EOL[1]) != self.EOL[0]:
if eol:
raise Exception, "No end-of-line pattern found (at bit pos %d/%d)" % (bitr.pos, bitr.size)
else:
bitr.pos += self.EOL[1]
line_length = 0
while line_length < columns:
if current_color == white:
bit_length = self.get_white_bits(bitr)
else:
bit_length = self.get_black_bits(bitr)
                if bit_length is None:
                    raise Exception("Unfinished line (at bit pos %d/%d), %s" % (bitr.pos, bitr.size, bitw.data))
line_length += bit_length
if line_length > columns:
raise Exception, "Line is too long (at bit pos %d/%d)" % (bitr.pos, bitr.size)
bitw.write( (current_color << bit_length) - current_color, bit_length )
current_color ^= 1
rows -= 1
return bitw.data
def get_white_bits(self, bitr):
"""
"""
return self.get_color_bits( bitr, self.WHITE_CONFIGURATION_DECODE_TABLE, self.WHITE_TERMINAL_DECODE_TABLE )
def get_black_bits(self, bitr):
"""
"""
return self.get_color_bits( bitr, self.BLACK_CONFIGURATION_DECODE_TABLE, self.BLACK_TERMINAL_DECODE_TABLE )
def get_color_bits(self, bitr, config_words, term_words):
"""
"""
bits = 0
check_conf = True
while check_conf:
check_conf = False
for i in xrange(2, 14):
codeword = bitr.peek(i)
config_value = config_words.get((codeword, i), None)
if config_value is not None:
bitr.pos += i
bits += config_value
if config_value == 2560:
check_conf = True
break
for i in xrange(2, 14):
codeword = bitr.peek(i)
term_value = term_words.get((codeword, i), None)
if term_value is not None:
bitr.pos += i
bits += term_value
return bits
return None | mit |
jhayworth/config | .emacs.d/elpy/rpc-venv/lib/python2.7/site-packages/pip/_internal/commands/uninstall.py | 10 | 2983 | # The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import install_req_from_line
from pip._internal.utils.misc import protect_pip_from_modification_on_windows
class UninstallCommand(Command, SessionCommandMixin):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
def __init__(self, *args, **kw):
super(UninstallCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements '
'file. This option can be used multiple times.',
)
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
session = self.get_default_session(options)
reqs_to_uninstall = {}
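        # Gather requirements from positional arguments and any -r files, keyed by canonical project name.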
for name in args:
req = install_req_from_line(
name, isolated=options.isolated_mode,
)
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
for filename in options.requirements:
for req in parse_requirements(
filename,
options=options,
session=session):
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
if not reqs_to_uninstall:
raise InstallationError(
'You must give at least one requirement to %(name)s (see '
'"pip help %(name)s")' % dict(name=self.name)
)
protect_pip_from_modification_on_windows(
modifying_pip="pip" in reqs_to_uninstall
)
for req in reqs_to_uninstall.values():
uninstall_pathset = req.uninstall(
auto_confirm=options.yes, verbose=self.verbosity > 0,
)
if uninstall_pathset:
uninstall_pathset.commit()
| gpl-3.0 |
jamesblunt/edx-platform | lms/djangoapps/commerce/api/v1/tests/test_models.py | 127 | 1169 | """ Tests for models. """
import ddt
from django.test import TestCase
from commerce.api.v1.models import Course
from course_modes.models import CourseMode
@ddt.ddt
class CourseTests(TestCase):
""" Tests for Course model. """
def setUp(self):
super(CourseTests, self).setUp()
self.course = Course('a/b/c', [])
@ddt.unpack
@ddt.data(
('credit', 'Credit'),
('professional', 'Professional Education'),
('no-id-professional', 'Professional Education'),
('verified', 'Verified Certificate'),
('honor', 'Honor Certificate'),
('audit', 'Audit'),
)
def test_get_mode_display_name(self, slug, expected_display_name):
""" Verify the method properly maps mode slugs to display names. """
mode = CourseMode(mode_slug=slug)
self.assertEqual(self.course.get_mode_display_name(mode), expected_display_name)
def test_get_mode_display_name_unknown_slug(self):
""" Verify the method returns the slug if it has no known mapping. """
mode = CourseMode(mode_slug='Blah!')
self.assertEqual(self.course.get_mode_display_name(mode), mode.mode_slug)
| agpl-3.0 |
leighpauls/k2cro4 | third_party/python_26/Lib/test/test_print.py | 51 | 3867 | """Test correct operation of the print function.
"""
# In 2.6, this gives us the behavior we want. In 3.0, it has
# no function, but it still must parse correctly.
from __future__ import print_function
import unittest
from test import test_support
import sys
if sys.version_info[0] == 3:
# 3.x
from io import StringIO
else:
# 2.x
from StringIO import StringIO
NotDefined = object()
# A dispatch table for all 8 combinations of providing
# sep, end, and file
# I use this machinery so that I'm not just passing default
# values to print, I'm either passing or not passing in the
# arguments
dispatch = {
(False, False, False):
lambda args, sep, end, file: print(*args),
(False, False, True):
lambda args, sep, end, file: print(file=file, *args),
(False, True, False):
lambda args, sep, end, file: print(end=end, *args),
(False, True, True):
lambda args, sep, end, file: print(end=end, file=file, *args),
(True, False, False):
lambda args, sep, end, file: print(sep=sep, *args),
(True, False, True):
lambda args, sep, end, file: print(sep=sep, file=file, *args),
(True, True, False):
lambda args, sep, end, file: print(sep=sep, end=end, *args),
(True, True, True):
lambda args, sep, end, file: print(sep=sep, end=end, file=file, *args),
}
# Class used to test __str__ and print
class ClassWith__str__:
def __init__(self, x):
self.x = x
def __str__(self):
return self.x
class TestPrint(unittest.TestCase):
def check(self, expected, args,
sep=NotDefined, end=NotDefined, file=NotDefined):
# Capture sys.stdout in a StringIO. Call print with args,
# and with sep, end, and file, if they're defined. Result
# must match expected.
# Look up the actual function to call, based on if sep, end, and file
# are defined
fn = dispatch[(sep is not NotDefined,
end is not NotDefined,
file is not NotDefined)]
with test_support.captured_stdout() as t:
fn(args, sep, end, file)
self.assertEqual(t.getvalue(), expected)
def test_print(self):
def x(expected, args, sep=NotDefined, end=NotDefined):
# Run the test 2 ways: not using file, and using
# file directed to a StringIO
self.check(expected, args, sep=sep, end=end)
# When writing to a file, stdout is expected to be empty
o = StringIO()
self.check('', args, sep=sep, end=end, file=o)
# And o will contain the expected output
self.assertEqual(o.getvalue(), expected)
x('\n', ())
x('a\n', ('a',))
x('None\n', (None,))
x('1 2\n', (1, 2))
x('1 2\n', (1, ' ', 2))
x('1*2\n', (1, 2), sep='*')
x('1 s', (1, 's'), end='')
x('a\nb\n', ('a', 'b'), sep='\n')
x('1.01', (1.0, 1), sep='', end='')
x('1*a*1.3+', (1, 'a', 1.3), sep='*', end='+')
x('a\n\nb\n', ('a\n', 'b'), sep='\n')
x('\0+ +\0\n', ('\0', ' ', '\0'), sep='+')
x('a\n b\n', ('a\n', 'b'))
x('a\n b\n', ('a\n', 'b'), sep=None)
x('a\n b\n', ('a\n', 'b'), end=None)
x('a\n b\n', ('a\n', 'b'), sep=None, end=None)
x('*\n', (ClassWith__str__('*'),))
x('abc 1\n', (ClassWith__str__('abc'), 1))
# 2.x unicode tests
x(u'1 2\n', ('1', u'2'))
x(u'u\1234\n', (u'u\1234',))
x(u' abc 1\n', (' ', ClassWith__str__(u'abc'), 1))
# errors
self.assertRaises(TypeError, print, '', sep=3)
self.assertRaises(TypeError, print, '', end=3)
self.assertRaises(AttributeError, print, '', file='')
def test_main():
test_support.run_unittest(TestPrint)
if __name__ == "__main__":
test_main()
| bsd-3-clause |
ozburo/youtube-dl | youtube_dl/extractor/usatoday.py | 20 | 2703 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
get_element_by_attribute,
parse_duration,
try_get,
update_url_query,
)
from ..compat import compat_str
class USATodayIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?usatoday\.com/(?:[^/]+/)*(?P<id>[^?/#]+)'
_TESTS = [{
# Brightcove Partner ID = 29906170001
'url': 'http://www.usatoday.com/media/cinematic/video/81729424/us-france-warn-syrian-regime-ahead-of-new-peace-talks/',
'md5': '033587d2529dc3411a1ab3644c3b8827',
'info_dict': {
'id': '4799374959001',
'ext': 'mp4',
'title': 'US, France warn Syrian regime ahead of new peace talks',
'timestamp': 1457891045,
'description': 'md5:7e50464fdf2126b0f533748d3c78d58f',
'uploader_id': '29906170001',
'upload_date': '20160313',
}
}, {
# ui-video-data[asset_metadata][items][brightcoveaccount] = 28911775001
'url': 'https://www.usatoday.com/story/tech/science/2018/08/21/yellowstone-supervolcano-eruption-stop-worrying-its-blow/973633002/',
'info_dict': {
'id': '5824495846001',
'ext': 'mp4',
'title': 'Yellowstone more likely to crack rather than explode',
'timestamp': 1534790612,
'description': 'md5:3715e7927639a4f16b474e9391687c62',
'uploader_id': '28911775001',
'upload_date': '20180820',
}
}]
BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/%s/default_default/index.html?videoId=%s'
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(update_url_query(url, {'ajax': 'true'}), display_id)
ui_video_data = get_element_by_attribute('class', 'ui-video-data', webpage)
if not ui_video_data:
raise ExtractorError('no video on the webpage', expected=True)
video_data = self._parse_json(ui_video_data, display_id)
item = try_get(video_data, lambda x: x['asset_metadata']['items'], dict) or {}
return {
'_type': 'url_transparent',
'url': self.BRIGHTCOVE_URL_TEMPLATE % (item.get('brightcoveaccount', '29906170001'), item.get('brightcoveid') or video_data['brightcove_id']),
'id': compat_str(video_data['id']),
'title': video_data['title'],
'thumbnail': video_data.get('thumbnail'),
'description': video_data.get('description'),
'duration': parse_duration(video_data.get('length')),
'ie_key': 'BrightcoveNew',
}
| unlicense |
rockstor/rockstor-core | src/rockstor/smart_manager/views/samba_service.py | 2 | 6882 | """
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import re
import shutil
from tempfile import mkstemp
from django.conf import settings
from django.db import transaction
from rest_framework.response import Response
from base_service import BaseServiceDetailView
from smart_manager.models import Service
from smart_manager.serializers import ServiceStatusSerializer
from storageadmin.util import handle_exception
from system.osi import md5sum
from system.samba import update_global_config, restart_samba, get_global_config
from system.services import systemctl, service_status
logger = logging.getLogger(__name__)
class SambaServiceView(BaseServiceDetailView):
service_name = "smb"
@transaction.atomic
def get(self, request, *args, **kwargs):
with self._handle_exception(self.request, msg=None):
so = Service.objects.get(name=self.service_name)
config = get_global_config()
self._save_config(so, config)
sd = ServiceStatusSerializer(self._get_or_create_sso(so))
return Response(sd.data)
@transaction.atomic
def post(self, request, command):
"""
execute a command on the service
"""
service = Service.objects.get(name=self.service_name)
if command == "config":
try:
config = request.data.get("config", {})
global_config = {}
if "global_config" in config:
gc_lines = config["global_config"].split("\n")
for l in gc_lines:
gc_param = l.strip().split(" = ")
if len(gc_param) == 2:
if "=" in gc_param[0]:
raise Exception(
"Syntax error, one param has wrong "
"spaces around equal signs, "
"please check syntax of "
"'%s'" % "".join(gc_param)
)
global_config[gc_param[0].strip().lower()] = gc_param[
1
                            ].strip()  # noqa E501
                    # Default: set the current workgroup to the one obtained via the samba config page
global_config["workgroup"] = config["workgroup"]
else:
global_config = config
                # Check Active Directory config and status: if AD is configured
                # and running, set the workgroup to the one retrieved from AD;
                # otherwise AD is not running, so keep the workgroup chosen by the user.
adso = Service.objects.get(name="active-directory")
adconfig = None
adso_status = 1
if adso.config is not None:
adconfig = self._get_config(adso)
adso_out, adso_err, adso_status = service_status(
"active-directory", adconfig
)
if adso_status == 0:
global_config["workgroup"] = adconfig["workgroup"]
else:
adconfig = None
self._save_config(service, global_config)
update_global_config(global_config, adconfig)
_, _, smb_rc = service_status(self.service_name)
# Restart samba only if already ON
# rc == 0 if service is ON, rc != 0 otherwise
if smb_rc == 0:
restart_samba(hard=True)
except Exception as e:
e_msg = (
"Samba could not be configured. Try again. "
"Exception: %s" % e.__str__()
)
handle_exception(Exception(e_msg), request)
else:
try:
if command == "stop":
systemctl("smb", "disable")
systemctl("nmb", "disable")
else:
systemd_name = "{}.service".format(self.service_name)
distro_id = settings.OS_DISTRO_ID
self._write_smb_service(systemd_name, distro_id)
systemctl("smb", "enable")
systemctl("nmb", "enable")
systemctl("nmb", command)
systemctl("smb", command)
except Exception as e:
e_msg = "Failed to {} samba due to a system error: {}".format(
command, e.__str__()
)
handle_exception(Exception(e_msg), request)
return Response()
def _write_smb_service(self, systemd_name, distro_id):
"""
Customize smb.service file in a distro-dependent manner.
In rockstor-based systems, source from settings.CONFROOT.
In opensuse-based systems, source from package default.
Check for differences before final copy.
:param systemd_name:
:param distro_id:
:return:
"""
ss_dest = "/etc/systemd/system/{}".format(systemd_name)
# BEGIN CentOS section
if distro_id == "rockstor":
ss_src = "{}/{}".format(settings.CONFROOT, systemd_name)
sum1 = md5sum(ss_dest)
sum2 = md5sum(ss_src)
if sum1 != sum2:
shutil.copy(ss_src, ss_dest)
# END CentOS section
else:
ss_src = "/usr/lib/systemd/system/smb.service"
# Customize package's default
fo, npath = mkstemp()
with open(ss_src) as ino, open(npath, "w") as tfo:
for l in ino.readlines():
if re.match("After=", l) is not None:
tfo.write(
"{} {}\n".format(l.strip(), "rockstor-bootstrap.service")
)
else:
tfo.write(l)
# Check for diff and then move file from temp to final location
sum1 = md5sum(ss_dest)
sum2 = md5sum(npath)
if sum1 != sum2:
shutil.move(npath, ss_dest)
| gpl-3.0 |
Loodoor/UrWorld-Alpha-3.x | src/compressor.py | 1 | 2667 | import pickle
import re
from itertools import groupby
class RLECompress:
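    """Run-length encodes a 2-D map (a list of lists of tiles) and pickles the (count, value) pairs to ``file``."""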
def __init__(self, file):
self.file = file
def verifier(self, objet):
if isinstance(objet, list):
return all(map(lambda elt: isinstance(elt, list), objet))
return False
def dump(self, objet):
temp = list()
total_element = 0
if self.verifier(objet):
for i in objet:
for j in i:
temp.append(j)
total_element += 1
print(total_element)
total_element = 0
count = 0
array = []
last = temp[0]
for k in temp:
if k == last:
count += 1
total_element += 1
else:
array.append((count, last))
last = k
count = 1
total_element += 1
            array.append((count, last))  # flush the final run, which the loop above would otherwise drop
            print(total_element)
#self.file.write(str(array))
pickle.Pickler(self.file).dump(array)
class RLEUncompress:
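    """Unpickles (count, value) pairs written by RLECompress and rebuilds the 2-D map."""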
def __init__(self, file):
self.file = file
def load(self):
carte_str = pickle.Unpickler(self.file).load()
temp = []
temp_element = 0
for i in carte_str:
temp += [i[1]] * i[0]
temp_element += 1 * i[0]
print(temp_element)
liste = []
carte_lst = []
count = 1
total_element = 0
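        # Rebuild the 2-D map row by row; a row is flushed every 4096 tiles (the assumed fixed row length).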
for j in temp:
if count == 4096:
count = 1
carte_lst.append(liste)
liste = []
liste.append(j)
count += 1
total_element += 1
print(total_element)
return carte_lst
RLE_BLOCK_FORMAT = r'\'(\w+)\'(\d+)'
def valid(obj):
if isinstance(obj, list):
return all(map(lambda elt: isinstance(elt, list), obj))
return False
def dump(file, obj):
    group_count = lambda g: len(list(g))
cpt = 0
if valid(obj):
dumped = ''
for row in obj:
for tile, group in groupby(row):
dumped += "'" + tile + "'" + str(group_count(group))
cpt += int(group_count(group))
dumped += '\n'
print(cpt)
file.write(dumped)
else:
raise ValueError("Invalid object format")
def load(file):
loaded = []
cpt = 0
for line in file.readlines():
row = []
for tile, count in re.findall(RLE_BLOCK_FORMAT, line):
row += [tile] * int(count)
cpt += int(count)
loaded.append(row)
print(cpt)
return loaded | gpl-2.0 |
agx/libvirt-sandbox-debian | libvirt-sandbox/image/cli.py | 1 | 9075 | #!/usr/bin/python -Es
# -*- coding: utf-8 -*-
# Authors: Daniel P. Berrange <[email protected]>
# Eren Yagdiran <[email protected]>
#
# Copyright (C) 2013-2015 Red Hat, Inc.
# Copyright (C) 2015 Universitat Politècnica de Catalunya.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import argparse
import gettext
import hashlib
import json
import os
import os.path
import shutil
import sys
import urllib2
import subprocess
import random
import string
from libvirt_sandbox.image import template
if os.geteuid() == 0:
default_template_dir = "/var/lib/libvirt/templates"
default_image_dir = "/var/lib/libvirt/images"
else:
default_template_dir = os.environ['HOME'] + "/.local/share/libvirt/templates"
default_image_dir = os.environ['HOME'] + "/.local/share/libvirt/images"
debug = False
verbose = False
gettext.bindtextdomain("libvirt-sandbox", "/usr/share/locale")
gettext.textdomain("libvirt-sandbox")
try:
gettext.install("libvirt-sandbox",
localedir="/usr/share/locale",
unicode=False,
codeset = 'utf-8')
except IOError:
import __builtin__
__builtin__.__dict__['_'] = unicode
def debug(msg):
sys.stderr.write(msg)
def info(msg):
sys.stdout.write(msg)
def get_template_dir(args):
tmpl = template.Template.from_uri(args.template)
return "%s/%s" % (args.template_dir, tmpl.source)
def purge(args):
tmpl = template.Template.from_uri(args.template)
source = tmpl.get_source_impl()
source.delete_template(template=tmpl,
templatedir=get_template_dir(args))
def prepare(args):
tmpl = template.Template.from_uri(args.template)
source = tmpl.get_source_impl()
source.create_template(template=tmpl,
templatedir=get_template_dir(args),
connect=args.connect)
def run(args):
if args.connect is not None:
check_connect(args.connect)
tmpl = template.Template.from_uri(args.template)
source = tmpl.get_source_impl()
template_dir = get_template_dir(args)
# Create the template image if needed
if not source.has_template(tmpl, template_dir):
prepare(args)
name = args.name
if name is None:
randomid = ''.join(random.choice(string.lowercase) for i in range(10))
name = tmpl.path[1:] + ":" + randomid
diskfile = source.get_disk(template=tmpl,
templatedir=template_dir,
imagedir=args.image_dir,
sandboxname=name)
commandToRun = source.get_command(tmpl, template_dir, args.args)
if len(commandToRun) == 0:
commandToRun = ["/bin/sh"]
cmd = ['virt-sandbox', '--name', name]
if args.connect is not None:
cmd.append("-c")
cmd.append(args.connect)
params = ['-m','host-image:/=%s,format=qcow2' % diskfile]
networkArgs = args.network
if networkArgs is not None:
params.append('-N')
params.append(networkArgs)
allEnvs = source.get_env(tmpl, template_dir)
envArgs = args.env
if envArgs is not None:
allEnvs = allEnvs + envArgs
for env in allEnvs:
envsplit = env.split("=")
envlen = len(envsplit)
if envlen == 2:
params.append("--env")
params.append(env)
else:
pass
cmd = cmd + params + ['--'] + commandToRun
subprocess.call(cmd)
os.unlink(diskfile)
source.post_run(tmpl, template_dir, name)
def list_cached(args):
tmpls = []
if args.source is not None:
tmpls.extend(template.Template.get_all(args.source,
"%s/%s" % (args.template_dir, args.source)))
else:
for source in ["docker", "virt-builder"]:
tmpls.extend(template.Template.get_all(source,
"%s/%s" % (args.template_dir, source)))
for tmpl in tmpls:
print tmpl
def requires_template(parser):
parser.add_argument("template",
help=_("URI of the template"))
def requires_name(parser):
parser.add_argument("-n","--name",
help=_("Name of the running sandbox"))
def requires_debug(parser):
parser.add_argument("-d","--debug",
default=False, action="store_true",
help=_("Run in debug mode"))
def check_connect(connectstr):
supportedDrivers = ['lxc:///','qemu:///session','qemu:///system']
if not connectstr in supportedDrivers:
raise ValueError("URI '%s' is not supported by virt-sandbox-image" % connectstr)
return True
def requires_connect(parser):
parser.add_argument("-c","--connect",
help=_("Connect string for libvirt"))
def requires_template_dir(parser):
global default_template_dir
parser.add_argument("-t","--template-dir",
default=default_template_dir,
help=_("Template directory for saving templates"))
def requires_image_dir(parser):
global default_image_dir
parser.add_argument("-I","--image-dir",
default=default_image_dir,
help=_("Image directory for saving images"))
def gen_command_parser(subparser, name, helptext):
parser = subparser.add_parser(
name, help=helptext,
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Example supported URI formats:
docker:///ubuntu?tag=15.04
docker://username:[email protected]/private/image
docker://registry.access.redhat.com/rhel6
virt-builder:///fedora-20
""")
return parser
def gen_purge_args(subparser):
parser = gen_command_parser(subparser, "purge",
_("Purge cached template"))
requires_debug(parser)
requires_template(parser)
requires_template_dir(parser)
parser.set_defaults(func=purge)
def gen_prepare_args(subparser):
parser = gen_command_parser(subparser, "prepare",
_("Prepare local template"))
requires_debug(parser)
requires_template(parser)
requires_connect(parser)
requires_template_dir(parser)
parser.set_defaults(func=prepare)
def gen_run_args(subparser):
parser = gen_command_parser(subparser, "run",
_("Run an instance of a template"))
requires_debug(parser)
requires_name(parser)
requires_template(parser)
requires_connect(parser)
requires_template_dir(parser)
requires_image_dir(parser)
parser.add_argument("args",
nargs=argparse.REMAINDER,
help=_("command arguments to run"))
parser.add_argument("-N","--network",
help=_("Network params for running template"))
parser.add_argument("-e","--env",action="append",
help=_("Environment params for running template"))
parser.set_defaults(func=run)
def gen_list_args(subparser):
parser = gen_command_parser(subparser, "list",
_("List locally cached images"))
requires_debug(parser)
requires_template_dir(parser)
parser.add_argument("-s","--source",
help=_("Name of the template source"))
parser.set_defaults(func=list_cached)
def main():
parser = argparse.ArgumentParser(description="Sandbox Container Image Tool")
subparser = parser.add_subparsers(help=_("commands"))
gen_purge_args(subparser)
gen_prepare_args(subparser)
gen_run_args(subparser)
gen_list_args(subparser)
args = parser.parse_args()
if args.debug:
args.func(args)
sys.exit(0)
else:
try:
args.func(args)
sys.exit(0)
except KeyboardInterrupt, e:
sys.exit(0)
except ValueError, e:
sys.stderr.write("%s: %s\n" % (sys.argv[0], e))
sys.stderr.flush()
sys.exit(1)
except IOError, e:
sys.stderr.write("%s: %s: %s\n" % (sys.argv[0], e.filename, e.reason))
sys.stderr.flush()
sys.exit(1)
except OSError, e:
sys.stderr.write("%s: %s\n" % (sys.argv[0], e))
sys.stderr.flush()
sys.exit(1)
except Exception, e:
print e.message
sys.exit(1)
| lgpl-2.1 |
OS2World/APP-INTERNET-torpak_2 | Tools/scripts/md5sum.py | 102 | 2386 | #! /usr/bin/env python
"""Python utility to print MD5 checksums of argument files.
"""
bufsize = 8096
fnfilter = None
rmode = 'rb'
usage = """
usage: sum5 [-b] [-t] [-l] [-s bufsize] [file ...]
-b : read files in binary mode (default)
-t : read files in text mode (you almost certainly don't want this!)
-l : print last pathname component only
-s bufsize: read buffer size (default %d)
file ... : files to sum; '-' or no files means stdin
""" % bufsize
import sys
import os
import getopt
import md5
def sum(*files):
sts = 0
if files and isinstance(files[-1], file):
out, files = files[-1], files[:-1]
else:
out = sys.stdout
if len(files) == 1 and not isinstance(files[0], str):
files = files[0]
for f in files:
if isinstance(f, str):
if f == '-':
sts = printsumfp(sys.stdin, '<stdin>', out) or sts
else:
sts = printsum(f, out) or sts
else:
sts = sum(f, out) or sts
return sts
def printsum(filename, out=sys.stdout):
try:
fp = open(filename, rmode)
except IOError, msg:
sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg))
return 1
if fnfilter:
filename = fnfilter(filename)
sts = printsumfp(fp, filename, out)
fp.close()
return sts
def printsumfp(fp, filename, out=sys.stdout):
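    """Read fp in bufsize chunks and write "<hexdigest> <filename>" to out; return 0 on success, 1 on I/O error."""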
m = md5.new()
try:
while 1:
data = fp.read(bufsize)
if not data:
break
m.update(data)
except IOError, msg:
sys.stderr.write('%s: I/O error: %s\n' % (filename, msg))
return 1
out.write('%s %s\n' % (m.hexdigest(), filename))
return 0
def main(args = sys.argv[1:], out=sys.stdout):
global fnfilter, rmode, bufsize
try:
opts, args = getopt.getopt(args, 'blts:')
except getopt.error, msg:
sys.stderr.write('%s: %s\n%s' % (sys.argv[0], msg, usage))
return 2
for o, a in opts:
if o == '-l':
fnfilter = os.path.basename
elif o == '-b':
rmode = 'rb'
elif o == '-t':
rmode = 'r'
elif o == '-s':
bufsize = int(a)
if not args:
args = ['-']
return sum(args, out)
if __name__ == '__main__' or __name__ == sys.argv[0]:
sys.exit(main(sys.argv[1:], sys.stdout))
| mit |
CTSRD-SOAAP/chromium-42.0.2311.135 | third_party/mesa/src/src/gallium/auxiliary/util/u_format_parse.py | 13 | 8935 | #!/usr/bin/env python
'''
/**************************************************************************
*
* Copyright 2009 VMware, Inc.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
**************************************************************************/
'''
VOID, UNSIGNED, SIGNED, FIXED, FLOAT = range(5)
SWIZZLE_X, SWIZZLE_Y, SWIZZLE_Z, SWIZZLE_W, SWIZZLE_0, SWIZZLE_1, SWIZZLE_NONE, = range(7)
PLAIN = 'plain'
RGB = 'rgb'
SRGB = 'srgb'
YUV = 'yuv'
ZS = 'zs'
def is_pot(x):
return (x & (x - 1)) == 0
VERY_LARGE = 99999999999999999999999
class Channel:
    '''Describe one channel of a color format.'''
def __init__(self, type, norm, pure, size, name = ''):
self.type = type
self.norm = norm
self.pure = pure
self.size = size
self.sign = type in (SIGNED, FIXED, FLOAT)
self.name = name
def __str__(self):
s = str(self.type)
if self.norm:
s += 'n'
if self.pure:
s += 'p'
s += str(self.size)
return s
def __eq__(self, other):
return self.type == other.type and self.norm == other.norm and self.pure == other.pure and self.size == other.size
def max(self):
'''Maximum representable number.'''
if self.type == FLOAT:
return VERY_LARGE
if self.type == FIXED:
return (1 << (self.size/2)) - 1
if self.norm:
return 1
if self.type == UNSIGNED:
return (1 << self.size) - 1
if self.type == SIGNED:
return (1 << (self.size - 1)) - 1
assert False
def min(self):
'''Minimum representable number.'''
if self.type == FLOAT:
return -VERY_LARGE
if self.type == FIXED:
return -(1 << (self.size/2))
if self.type == UNSIGNED:
return 0
if self.norm:
return -1
if self.type == SIGNED:
return -(1 << (self.size - 1))
assert False
class Format:
'''Describe a pixel format.'''
def __init__(self, name, layout, block_width, block_height, channels, swizzles, colorspace):
self.name = name
self.layout = layout
self.block_width = block_width
self.block_height = block_height
self.channels = channels
self.swizzles = swizzles
self.name = name
self.colorspace = colorspace
def __str__(self):
return self.name
def short_name(self):
        '''Make up a short name for a format, suitable to be used as a suffix in
function names.'''
name = self.name
if name.startswith('PIPE_FORMAT_'):
name = name[len('PIPE_FORMAT_'):]
name = name.lower()
return name
def block_size(self):
size = 0
for channel in self.channels:
size += channel.size
return size
def nr_channels(self):
nr_channels = 0
for channel in self.channels:
if channel.size:
nr_channels += 1
return nr_channels
def is_array(self):
if self.layout != PLAIN:
return False
ref_channel = self.channels[0]
for channel in self.channels[1:]:
if channel.size and (channel.size != ref_channel.size or channel.size % 8):
return False
return True
def is_mixed(self):
if self.layout != PLAIN:
return False
ref_channel = self.channels[0]
if ref_channel.type == VOID:
ref_channel = self.channels[1]
for channel in self.channels[1:]:
if channel.type != VOID:
if channel.type != ref_channel.type:
return True
if channel.norm != ref_channel.norm:
return True
if channel.pure != ref_channel.pure:
return True
return False
def is_pot(self):
return is_pot(self.block_size())
def is_int(self):
if self.layout != PLAIN:
return False
for channel in self.channels:
if channel.type not in (VOID, UNSIGNED, SIGNED):
return False
return True
def is_float(self):
if self.layout != PLAIN:
return False
for channel in self.channels:
if channel.type not in (VOID, FLOAT):
return False
return True
def is_bitmask(self):
if self.layout != PLAIN:
return False
if self.block_size() not in (8, 16, 32):
return False
for channel in self.channels:
if channel.type not in (VOID, UNSIGNED, SIGNED):
return False
return True
def inv_swizzles(self):
        '''Return an array[4] of inverse swizzle terms.

        Only pick the first matching value to avoid l8 getting blue and i8 getting alpha.
        '''
inv_swizzle = [None]*4
for i in range(4):
swizzle = self.swizzles[i]
            if swizzle < 4 and inv_swizzle[swizzle] is None:
inv_swizzle[swizzle] = i
return inv_swizzle
def stride(self):
return self.block_size()/8
_type_parse_map = {
'': VOID,
'x': VOID,
'u': UNSIGNED,
's': SIGNED,
'h': FIXED,
'f': FLOAT,
}
_swizzle_parse_map = {
'x': SWIZZLE_X,
'y': SWIZZLE_Y,
'z': SWIZZLE_Z,
'w': SWIZZLE_W,
'0': SWIZZLE_0,
'1': SWIZZLE_1,
'_': SWIZZLE_NONE,
}
def parse(filename):
    '''Parse the format description in CSV format in terms of the
Channel and Format classes above.'''
stream = open(filename)
formats = []
for line in stream:
try:
comment = line.index('#')
except ValueError:
pass
else:
line = line[:comment]
line = line.strip()
if not line:
continue
fields = [field.strip() for field in line.split(',')]
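        # CSV field layout: name, layout, block width, block height, four channel specs, swizzles, colorspace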
name = fields[0]
layout = fields[1]
block_width, block_height = map(int, fields[2:4])
swizzles = [_swizzle_parse_map[swizzle] for swizzle in fields[8]]
colorspace = fields[9]
if layout == PLAIN:
names = ['']*4
if colorspace in (RGB, SRGB):
for i in range(4):
swizzle = swizzles[i]
if swizzle < 4:
names[swizzle] += 'rgba'[i]
elif colorspace == ZS:
for i in range(4):
swizzle = swizzles[i]
if swizzle < 4:
names[swizzle] += 'zs'[i]
else:
assert False
for i in range(4):
if names[i] == '':
names[i] = 'x'
else:
names = ['x', 'y', 'z', 'w']
channels = []
for i in range(0, 4):
field = fields[4 + i]
if field:
type = _type_parse_map[field[0]]
if field[1] == 'n':
norm = True
pure = False
size = int(field[2:])
elif field[1] == 'p':
pure = True
norm = False
size = int(field[2:])
else:
norm = False
pure = False
size = int(field[1:])
else:
type = VOID
norm = False
pure = False
size = 0
channel = Channel(type, norm, pure, size, names[i])
channels.append(channel)
format = Format(name, layout, block_width, block_height, channels, swizzles, colorspace)
formats.append(format)
return formats
| bsd-3-clause |