| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..100 | stringlengths 4..299 | stringclasses 990 values | stringlengths 4..7 | stringlengths 666..1.03M | stringclasses 15 values | int64 -9,223,351,895,964,839,000..9,223,297,778B | float64 3.17..100 | int64 7..1k | float64 0.25..0.98 | bool 1 class |
eayunstack/nova | nova/tests/objects/test_instance.py | 4 | 54406 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import iso8601
import mock
import mox
import netaddr
from nova.cells import rpcapi as cells_rpcapi
from nova.compute import flavors
from nova import db
from nova import exception
from nova.network import model as network_model
from nova import notifications
from nova.objects import instance
from nova.objects import instance_info_cache
from nova.objects import instance_numa_topology
from nova.objects import pci_device
from nova.objects import security_group
from nova.openstack.common import timeutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.objects import test_instance_fault
from nova.tests.objects import test_instance_info_cache
from nova.tests.objects import test_instance_numa_topology
from nova.tests.objects import test_objects
from nova.tests.objects import test_security_group
from nova import utils
class _TestInstanceObject(object):
@property
def fake_instance(self):
fake_instance = fakes.stub_instance(id=2,
access_ipv4='1.2.3.4',
access_ipv6='::1')
fake_instance['cell_name'] = 'api!child'
fake_instance['scheduled_at'] = None
fake_instance['terminated_at'] = None
fake_instance['deleted_at'] = None
fake_instance['created_at'] = None
fake_instance['updated_at'] = None
fake_instance['launched_at'] = (
fake_instance['launched_at'].replace(
tzinfo=iso8601.iso8601.Utc(), microsecond=0))
fake_instance['deleted'] = False
fake_instance['info_cache']['instance_uuid'] = fake_instance['uuid']
fake_instance['security_groups'] = []
fake_instance['pci_devices'] = []
fake_instance['user_id'] = self.context.user_id
fake_instance['project_id'] = self.context.project_id
return fake_instance
def test_datetime_deserialization(self):
red_letter_date = timeutils.parse_isotime(
timeutils.isotime(datetime.datetime(1955, 11, 5)))
inst = instance.Instance(uuid='fake-uuid', launched_at=red_letter_date)
primitive = inst.obj_to_primitive()
expected = {'nova_object.name': 'Instance',
'nova_object.namespace': 'nova',
'nova_object.version': '1.15',
'nova_object.data':
{'uuid': 'fake-uuid',
'launched_at': '1955-11-05T00:00:00Z'},
'nova_object.changes': ['launched_at', 'uuid']}
self.assertEqual(primitive, expected)
inst2 = instance.Instance.obj_from_primitive(primitive)
self.assertIsInstance(inst2.launched_at, datetime.datetime)
self.assertEqual(inst2.launched_at, red_letter_date)
def test_ip_deserialization(self):
inst = instance.Instance(uuid='fake-uuid', access_ip_v4='1.2.3.4',
access_ip_v6='::1')
primitive = inst.obj_to_primitive()
expected = {'nova_object.name': 'Instance',
'nova_object.namespace': 'nova',
'nova_object.version': '1.15',
'nova_object.data':
{'uuid': 'fake-uuid',
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '::1'},
'nova_object.changes': ['uuid', 'access_ip_v6',
'access_ip_v4']}
self.assertEqual(primitive, expected)
inst2 = instance.Instance.obj_from_primitive(primitive)
self.assertIsInstance(inst2.access_ip_v4, netaddr.IPAddress)
self.assertIsInstance(inst2.access_ip_v6, netaddr.IPAddress)
self.assertEqual(inst2.access_ip_v4, netaddr.IPAddress('1.2.3.4'))
self.assertEqual(inst2.access_ip_v6, netaddr.IPAddress('::1'))
def test_get_without_expected(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, 'uuid',
columns_to_join=[],
use_slave=False
).AndReturn(self.fake_instance)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, 'uuid',
expected_attrs=[])
for attr in instance.INSTANCE_OPTIONAL_ATTRS:
self.assertFalse(inst.obj_attr_is_set(attr))
self.assertRemotes()
def test_get_with_expected(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
self.mox.StubOutWithMock(
db, 'instance_extra_get_by_instance_uuid')
exp_cols = instance.INSTANCE_OPTIONAL_ATTRS[:]
exp_cols.remove('fault')
exp_cols.remove('numa_topology')
db.instance_get_by_uuid(
self.context, 'uuid',
columns_to_join=exp_cols,
use_slave=False
).AndReturn(self.fake_instance)
fake_faults = test_instance_fault.fake_faults
db.instance_fault_get_by_instance_uuids(
self.context, [self.fake_instance['uuid']]
).AndReturn(fake_faults)
fake_topology = test_instance_numa_topology.fake_db_topology
db.instance_extra_get_by_instance_uuid(
self.context, self.fake_instance['uuid']
).AndReturn(fake_topology)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(
self.context, 'uuid',
expected_attrs=instance.INSTANCE_OPTIONAL_ATTRS)
for attr in instance.INSTANCE_OPTIONAL_ATTRS:
self.assertTrue(inst.obj_attr_is_set(attr))
self.assertRemotes()
def test_get_by_id(self):
self.mox.StubOutWithMock(db, 'instance_get')
db.instance_get(self.context, 'instid',
columns_to_join=['info_cache',
'security_groups']
).AndReturn(self.fake_instance)
self.mox.ReplayAll()
inst = instance.Instance.get_by_id(self.context, 'instid')
self.assertEqual(inst.uuid, self.fake_instance['uuid'])
self.assertRemotes()
def test_load(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
fake_uuid = self.fake_instance['uuid']
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(self.fake_instance)
fake_inst2 = dict(self.fake_instance,
system_metadata=[{'key': 'foo', 'value': 'bar'}])
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['system_metadata'],
use_slave=False
).AndReturn(fake_inst2)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
self.assertFalse(hasattr(inst, '_system_metadata'))
sys_meta = inst.system_metadata
self.assertEqual(sys_meta, {'foo': 'bar'})
self.assertTrue(hasattr(inst, '_system_metadata'))
# Make sure we don't run load again
sys_meta2 = inst.system_metadata
self.assertEqual(sys_meta2, {'foo': 'bar'})
self.assertRemotes()
def test_load_invalid(self):
inst = instance.Instance(context=self.context, uuid='fake-uuid')
self.assertRaises(exception.ObjectActionError,
inst.obj_load_attr, 'foo')
def test_get_remote(self):
# isotime doesn't have microseconds and is always UTC
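        # Illustrative sketch (added; not part of the original test) of the
        # normalization mentioned above, assuming the oslo-era timeutils API
        # imported at the top of this file:
        #
        #   >>> timeutils.isotime(datetime.datetime(1955, 11, 5, 6, 15, 30, 123456))
        #   '1955-11-05T06:15:30Z'   # microseconds dropped, trailing 'Z' for UTC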
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
fake_instance = self.fake_instance
db.instance_get_by_uuid(self.context, 'fake-uuid',
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_instance)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, 'fake-uuid')
self.assertEqual(inst.id, fake_instance['id'])
self.assertEqual(inst.launched_at, fake_instance['launched_at'])
self.assertEqual(str(inst.access_ip_v4),
fake_instance['access_ip_v4'])
self.assertEqual(str(inst.access_ip_v6),
fake_instance['access_ip_v6'])
self.assertRemotes()
def test_refresh(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
fake_uuid = self.fake_instance['uuid']
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(dict(self.fake_instance,
host='orig-host'))
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(dict(self.fake_instance,
host='new-host'))
self.mox.StubOutWithMock(instance_info_cache.InstanceInfoCache,
'refresh')
instance_info_cache.InstanceInfoCache.refresh()
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
self.assertEqual(inst.host, 'orig-host')
inst.refresh()
self.assertEqual(inst.host, 'new-host')
self.assertRemotes()
self.assertEqual(set([]), inst.obj_what_changed())
def test_refresh_does_not_recurse(self):
inst = instance.Instance(context=self.context, uuid='fake-uuid',
metadata={})
inst_copy = instance.Instance()
inst_copy.uuid = inst.uuid
self.mox.StubOutWithMock(instance.Instance, 'get_by_uuid')
instance.Instance.get_by_uuid(self.context, uuid=inst.uuid,
expected_attrs=['metadata'],
use_slave=False
).AndReturn(inst_copy)
self.mox.ReplayAll()
self.assertRaises(exception.OrphanedObjectError, inst.refresh)
def _save_test_helper(self, cell_type, save_kwargs):
"""Common code for testing save() for cells/non-cells."""
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
old_ref = dict(self.fake_instance, host='oldhost', user_data='old',
vm_state='old', task_state='old')
fake_uuid = old_ref['uuid']
expected_updates = dict(vm_state='meow', task_state='wuff',
user_data='new')
new_ref = dict(old_ref, host='newhost', **expected_updates)
exp_vm_state = save_kwargs.get('expected_vm_state')
exp_task_state = save_kwargs.get('expected_task_state')
admin_reset = save_kwargs.get('admin_state_reset', False)
if exp_vm_state:
expected_updates['expected_vm_state'] = exp_vm_state
if exp_task_state:
if (exp_task_state == 'image_snapshot' and
'instance_version' in save_kwargs and
save_kwargs['instance_version'] == '1.9'):
expected_updates['expected_task_state'] = [
'image_snapshot', 'image_snapshot_pending']
else:
expected_updates['expected_task_state'] = exp_task_state
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
self.mox.StubOutWithMock(db, 'instance_info_cache_update')
cells_api_mock = self.mox.CreateMock(cells_rpcapi.CellsAPI)
self.mox.StubOutWithMock(cells_api_mock,
'instance_update_at_top')
self.mox.StubOutWithMock(cells_api_mock,
'instance_update_from_api')
self.mox.StubOutWithMock(cells_rpcapi, 'CellsAPI',
use_mock_anything=True)
self.mox.StubOutWithMock(notifications, 'send_update')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(old_ref)
db.instance_update_and_get_original(
self.context, fake_uuid, expected_updates,
update_cells=False,
columns_to_join=['info_cache', 'security_groups',
'system_metadata']
).AndReturn((old_ref, new_ref))
if cell_type == 'api':
cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
cells_api_mock.instance_update_from_api(
self.context, mox.IsA(instance.Instance),
exp_vm_state, exp_task_state, admin_reset)
elif cell_type == 'compute':
cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
cells_api_mock.instance_update_at_top(self.context, new_ref)
notifications.send_update(self.context, mox.IgnoreArg(),
mox.IgnoreArg())
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'])
if 'instance_version' in save_kwargs:
inst.VERSION = save_kwargs.pop('instance_version')
self.assertEqual('old', inst.task_state)
self.assertEqual('old', inst.vm_state)
self.assertEqual('old', inst.user_data)
inst.vm_state = 'meow'
inst.task_state = 'wuff'
inst.user_data = 'new'
inst.save(**save_kwargs)
self.assertEqual('newhost', inst.host)
self.assertEqual('meow', inst.vm_state)
self.assertEqual('wuff', inst.task_state)
self.assertEqual('new', inst.user_data)
self.assertEqual(set([]), inst.obj_what_changed())
def test_save(self):
self._save_test_helper(None, {})
def test_save_in_api_cell(self):
self._save_test_helper('api', {})
def test_save_in_compute_cell(self):
self._save_test_helper('compute', {})
def test_save_exp_vm_state(self):
self._save_test_helper(None, {'expected_vm_state': ['meow']})
def test_save_exp_task_state(self):
self._save_test_helper(None, {'expected_task_state': ['meow']})
def test_save_exp_task_state_havana(self):
self._save_test_helper(None, {
'expected_task_state': 'image_snapshot',
'instance_version': '1.9'})
def test_save_exp_vm_state_api_cell(self):
self._save_test_helper('api', {'expected_vm_state': ['meow']})
def test_save_exp_task_state_api_cell(self):
self._save_test_helper('api', {'expected_task_state': ['meow']})
def test_save_exp_task_state_api_cell_admin_reset(self):
self._save_test_helper('api', {'admin_state_reset': True})
def test_save_rename_sends_notification(self):
# Tests that simply changing the 'display_name' on the instance
# will send a notification.
self.flags(enable=False, group='cells')
old_ref = dict(self.fake_instance, display_name='hello')
fake_uuid = old_ref['uuid']
expected_updates = dict(display_name='goodbye')
new_ref = dict(old_ref, **expected_updates)
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
self.mox.StubOutWithMock(notifications, 'send_update')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(old_ref)
db.instance_update_and_get_original(
self.context, fake_uuid, expected_updates, update_cells=False,
columns_to_join=['info_cache', 'security_groups',
'system_metadata']
).AndReturn((old_ref, new_ref))
notifications.send_update(self.context, mox.IgnoreArg(),
mox.IgnoreArg())
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'],
use_slave=False)
self.assertEqual('hello', inst.display_name)
inst.display_name = 'goodbye'
inst.save()
self.assertEqual('goodbye', inst.display_name)
self.assertEqual(set([]), inst.obj_what_changed())
@mock.patch('nova.db.instance_update_and_get_original')
@mock.patch('nova.objects.Instance._from_db_object')
def test_save_does_not_refresh_pci_devices(self, mock_fdo, mock_update):
# NOTE(danms): This tests that we don't update the pci_devices
# field from the contents of the database. This is not because we
# don't necessarily want to, but because the way pci_devices is
# currently implemented it causes versioning issues. When that is
# resolved, this test should go away.
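        # Clarifying comment (added): concretely, save() below must not list
        # 'pci_devices' in the expected_attrs it hands to _from_db_object(),
        # so the field is never reloaded from the database row.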
mock_update.return_value = None, None
inst = instance.Instance(context=self.context, id=123)
inst.uuid = 'foo'
inst.pci_devices = pci_device.PciDeviceList()
inst.save()
self.assertNotIn('pci_devices',
mock_fdo.call_args_list[0][1]['expected_attrs'])
def test_get_deleted(self):
fake_inst = dict(self.fake_instance, id=123, deleted=123)
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
# NOTE(danms): Make sure it's actually a bool
self.assertEqual(inst.deleted, True)
def test_get_not_cleaned(self):
fake_inst = dict(self.fake_instance, id=123, cleaned=None)
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
# NOTE(mikal): Make sure it's actually a bool
self.assertEqual(inst.cleaned, False)
def test_get_cleaned(self):
fake_inst = dict(self.fake_instance, id=123, cleaned=1)
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
# NOTE(mikal): Make sure it's actually a bool
self.assertEqual(inst.cleaned, True)
def test_with_info_cache(self):
fake_inst = dict(self.fake_instance)
fake_uuid = fake_inst['uuid']
nwinfo1 = network_model.NetworkInfo.hydrate([{'address': 'foo'}])
nwinfo2 = network_model.NetworkInfo.hydrate([{'address': 'bar'}])
nwinfo1_json = nwinfo1.json()
nwinfo2_json = nwinfo2.json()
fake_inst['info_cache'] = dict(
test_instance_info_cache.fake_info_cache,
network_info=nwinfo1_json,
instance_uuid=fake_uuid)
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
self.mox.StubOutWithMock(db, 'instance_info_cache_update')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
db.instance_info_cache_update(self.context, fake_uuid,
{'network_info': nwinfo2_json})
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
self.assertEqual(inst.info_cache.network_info, nwinfo1)
self.assertEqual(inst.info_cache.instance_uuid, fake_uuid)
inst.info_cache.network_info = nwinfo2
inst.save()
def test_with_info_cache_none(self):
fake_inst = dict(self.fake_instance, info_cache=None)
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
['info_cache'])
self.assertIsNone(inst.info_cache)
def test_with_security_groups(self):
fake_inst = dict(self.fake_instance)
fake_uuid = fake_inst['uuid']
fake_inst['security_groups'] = [
{'id': 1, 'name': 'secgroup1', 'description': 'fake-desc',
'user_id': 'fake-user', 'project_id': 'fake_project',
'created_at': None, 'updated_at': None, 'deleted_at': None,
'deleted': False},
{'id': 2, 'name': 'secgroup2', 'description': 'fake-desc',
'user_id': 'fake-user', 'project_id': 'fake_project',
'created_at': None, 'updated_at': None, 'deleted_at': None,
'deleted': False},
]
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
self.mox.StubOutWithMock(db, 'security_group_update')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
db.security_group_update(self.context, 1, {'description': 'changed'}
).AndReturn(fake_inst['security_groups'][0])
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
self.assertEqual(len(inst.security_groups), 2)
for index, group in enumerate(fake_inst['security_groups']):
for key in group:
self.assertEqual(group[key],
inst.security_groups[index][key])
self.assertIsInstance(inst.security_groups[index],
security_group.SecurityGroup)
self.assertEqual(inst.security_groups.obj_what_changed(), set())
inst.security_groups[0].description = 'changed'
inst.save()
self.assertEqual(inst.security_groups.obj_what_changed(), set())
def test_with_empty_security_groups(self):
fake_inst = dict(self.fake_instance, security_groups=[])
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['info_cache',
'security_groups'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
self.assertEqual(0, len(inst.security_groups))
def test_with_empty_pci_devices(self):
fake_inst = dict(self.fake_instance, pci_devices=[])
fake_uuid = fake_inst['uuid']
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['pci_devices'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
['pci_devices'])
self.assertEqual(len(inst.pci_devices), 0)
def test_with_pci_devices(self):
fake_inst = dict(self.fake_instance)
fake_uuid = fake_inst['uuid']
fake_inst['pci_devices'] = [
{'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': None,
'id': 2,
'compute_node_id': 1,
'address': 'a1',
'vendor_id': 'v1',
'product_id': 'p1',
'dev_type': 't',
'status': 'allocated',
'dev_id': 'i',
'label': 'l',
'instance_uuid': fake_uuid,
'request_id': None,
'extra_info': '{}'},
{
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': None,
'id': 1,
'compute_node_id': 1,
'address': 'a',
'vendor_id': 'v',
'product_id': 'p',
'dev_type': 't',
'status': 'allocated',
'dev_id': 'i',
'label': 'l',
'instance_uuid': fake_uuid,
'request_id': None,
'extra_info': '{}'},
]
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=['pci_devices'],
use_slave=False
).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
['pci_devices'])
self.assertEqual(len(inst.pci_devices), 2)
self.assertEqual(inst.pci_devices[0].instance_uuid, fake_uuid)
self.assertEqual(inst.pci_devices[1].instance_uuid, fake_uuid)
def test_with_fault(self):
fake_inst = dict(self.fake_instance)
fake_uuid = fake_inst['uuid']
fake_faults = [dict(x, instance_uuid=fake_uuid)
for x in test_instance_fault.fake_faults['fake-uuid']]
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
db.instance_get_by_uuid(self.context, fake_uuid,
columns_to_join=[],
use_slave=False
).AndReturn(self.fake_instance)
db.instance_fault_get_by_instance_uuids(
self.context, [fake_uuid]).AndReturn({fake_uuid: fake_faults})
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
expected_attrs=['fault'])
self.assertEqual(fake_faults[0], dict(inst.fault.items()))
self.assertRemotes()
def test_iteritems_with_extra_attrs(self):
self.stubs.Set(instance.Instance, 'name', 'foo')
inst = instance.Instance(uuid='fake-uuid')
self.assertEqual(inst.items(),
{'uuid': 'fake-uuid',
'name': 'foo',
}.items())
def _test_metadata_change_tracking(self, which):
inst = instance.Instance(uuid='fake-uuid')
setattr(inst, which, {})
inst.obj_reset_changes()
getattr(inst, which)['foo'] = 'bar'
self.assertEqual(set([which]), inst.obj_what_changed())
inst.obj_reset_changes()
self.assertEqual(set(), inst.obj_what_changed())
def test_metadata_change_tracking(self):
self._test_metadata_change_tracking('metadata')
def test_system_metadata_change_tracking(self):
self._test_metadata_change_tracking('system_metadata')
def test_create_stubbed(self):
self.mox.StubOutWithMock(db, 'instance_create')
vals = {'host': 'foo-host',
'memory_mb': 128,
'system_metadata': {'foo': 'bar'}}
fake_inst = fake_instance.fake_db_instance(**vals)
db.instance_create(self.context, vals).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance(host='foo-host', memory_mb=128,
system_metadata={'foo': 'bar'})
inst.create(self.context)
def test_create(self):
self.mox.StubOutWithMock(db, 'instance_create')
db.instance_create(self.context, {}).AndReturn(self.fake_instance)
self.mox.ReplayAll()
inst = instance.Instance()
inst.create(self.context)
self.assertEqual(self.fake_instance['id'], inst.id)
def test_create_with_values(self):
inst1 = instance.Instance(user_id=self.context.user_id,
project_id=self.context.project_id,
host='foo-host')
inst1.create(self.context)
self.assertEqual(inst1.host, 'foo-host')
inst2 = instance.Instance.get_by_uuid(self.context, inst1.uuid)
self.assertEqual(inst2.host, 'foo-host')
def test_create_with_numa_topology(self):
inst = instance.Instance(uuid=self.fake_instance['uuid'],
numa_topology=instance_numa_topology.InstanceNUMATopology
.obj_from_topology(
test_instance_numa_topology.fake_numa_topology))
inst.create(self.context)
self.assertIsNotNone(inst.numa_topology)
got_numa_topo = (
instance_numa_topology.InstanceNUMATopology
.get_by_instance_uuid(self.context, inst.uuid))
self.assertEqual(inst.numa_topology.id, got_numa_topo.id)
def test_recreate_fails(self):
inst = instance.Instance(user_id=self.context.user_id,
project_id=self.context.project_id,
host='foo-host')
inst.create(self.context)
self.assertRaises(exception.ObjectActionError, inst.create,
self.context)
def test_create_with_special_things(self):
self.mox.StubOutWithMock(db, 'instance_create')
fake_inst = fake_instance.fake_db_instance()
db.instance_create(self.context,
{'host': 'foo-host',
'security_groups': ['foo', 'bar'],
'info_cache': {'network_info': '[]'},
}
).AndReturn(fake_inst)
self.mox.ReplayAll()
secgroups = security_group.SecurityGroupList()
secgroups.objects = []
for name in ('foo', 'bar'):
secgroup = security_group.SecurityGroup()
secgroup.name = name
secgroups.objects.append(secgroup)
info_cache = instance_info_cache.InstanceInfoCache()
info_cache.network_info = network_model.NetworkInfo()
inst = instance.Instance(host='foo-host', security_groups=secgroups,
info_cache=info_cache)
inst.create(self.context)
def test_destroy_stubbed(self):
self.mox.StubOutWithMock(db, 'instance_destroy')
deleted_at = datetime.datetime(1955, 11, 6)
fake_inst = fake_instance.fake_db_instance(deleted_at=deleted_at,
deleted=True)
db.instance_destroy(self.context, 'fake-uuid',
constraint=None).AndReturn(fake_inst)
self.mox.ReplayAll()
inst = instance.Instance(id=1, uuid='fake-uuid', host='foo')
inst.destroy(self.context)
self.assertEqual(timeutils.normalize_time(inst.deleted_at),
timeutils.normalize_time(deleted_at))
self.assertTrue(inst.deleted)
def test_destroy(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance(id=db_inst['id'], uuid=db_inst['uuid'])
inst.destroy(self.context)
self.assertRaises(exception.InstanceNotFound,
db.instance_get_by_uuid, self.context,
db_inst['uuid'])
def test_destroy_host_constraint(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'host': 'foo'}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
inst.host = None
self.assertRaises(exception.ObjectActionError,
inst.destroy)
def test_name_does_not_trigger_lazy_loads(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'host': 'foo'}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
self.assertFalse(inst.obj_attr_is_set('fault'))
self.flags(instance_name_template='foo-%(uuid)s')
self.assertEqual('foo-%s' % db_inst['uuid'], inst.name)
self.assertFalse(inst.obj_attr_is_set('fault'))
def test_from_db_object_not_overwrite_info_cache(self):
info_cache = instance_info_cache.InstanceInfoCache()
inst = instance.Instance(context=self.context,
info_cache=info_cache)
db_inst = fake_instance.fake_db_instance()
db_inst['info_cache'] = dict(
test_instance_info_cache.fake_info_cache)
inst._from_db_object(self.context, inst, db_inst,
expected_attrs=['info_cache'])
self.assertIs(info_cache, inst.info_cache)
def test_compat_strings(self):
unicode_attributes = ['user_id', 'project_id', 'image_ref',
'kernel_id', 'ramdisk_id', 'hostname',
'key_name', 'key_data', 'host', 'node',
'user_data', 'availability_zone',
'display_name', 'display_description',
'launched_on', 'locked_by', 'os_type',
'architecture', 'vm_mode', 'root_device_name',
'default_ephemeral_device',
'default_swap_device', 'config_drive',
'cell_name']
inst = instance.Instance()
expected = {}
for key in unicode_attributes:
inst[key] = u'\u2603'
expected[key] = '?'
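            # (added note) u'\u2603' is the snowman character; backporting to
            # the pre-unicode 1.6 format coerces it to '?', hence `expected`.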
primitive = inst.obj_to_primitive(target_version='1.6')
self.assertEqual(expected, primitive['nova_object.data'])
self.assertEqual('1.6', primitive['nova_object.version'])
def test_compat_pci_devices(self):
inst = instance.Instance()
inst.pci_devices = pci_device.PciDeviceList()
primitive = inst.obj_to_primitive(target_version='1.5')
self.assertNotIn('pci_devices', primitive)
def test_compat_info_cache(self):
inst = instance.Instance()
inst.info_cache = instance_info_cache.InstanceInfoCache()
primitive = inst.obj_to_primitive(target_version='1.9')
self.assertEqual(
'1.4',
primitive['nova_object.data']['info_cache']['nova_object.version'])
def _test_get_flavor(self, namespace):
prefix = '%s_' % namespace if namespace is not None else ''
db_inst = db.instance_create(self.context, {
'user_id': self.context.user_id,
'project_id': self.context.project_id,
'system_metadata': flavors.save_flavor_info(
{}, flavors.get_default_flavor(), prefix)})
db_flavor = flavors.extract_flavor(db_inst, prefix)
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
flavor = inst.get_flavor(namespace)
self.assertEqual(db_flavor['flavorid'], flavor.flavorid)
def test_get_flavor(self):
self._test_get_flavor(None)
self._test_get_flavor('foo')
def _test_set_flavor(self, namespace):
prefix = '%s_' % namespace if namespace is not None else ''
db_inst = db.instance_create(self.context, {
'user_id': self.context.user_id,
'project_id': self.context.project_id,
})
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
db_flavor = flavors.get_default_flavor()
inst.set_flavor(db_flavor, namespace)
db_inst = db.instance_get(self.context, db_inst['id'])
self.assertEqual(
db_flavor['flavorid'], flavors.extract_flavor(
db_inst, prefix)['flavorid'])
def test_set_flavor(self):
self._test_set_flavor(None)
self._test_set_flavor('foo')
def test_delete_flavor(self):
namespace = 'foo'
prefix = '%s_' % namespace
db_inst = db.instance_create(self.context, {
'user_id': self.context.user_id,
'project_id': self.context.project_id,
'system_metadata': flavors.save_flavor_info(
{}, flavors.get_default_flavor(), prefix)})
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
inst.delete_flavor(namespace)
db_inst = db.instance_get(self.context, db_inst['id'])
self.assertEqual({}, utils.instance_sys_meta(db_inst))
def test_delete_flavor_no_namespace_fails(self):
inst = instance.Instance(system_metadata={})
self.assertRaises(KeyError, inst.delete_flavor, None)
self.assertRaises(KeyError, inst.delete_flavor, '')
@mock.patch.object(db, 'instance_metadata_delete')
def test_delete_metadata_key(self, db_delete):
inst = instance.Instance(context=self.context,
id=1, uuid='fake-uuid')
inst.metadata = {'foo': '1', 'bar': '2'}
inst.obj_reset_changes()
inst.delete_metadata_key('foo')
self.assertEqual({'bar': '2'}, inst.metadata)
self.assertEqual({}, inst.obj_get_changes())
db_delete.assert_called_once_with(self.context, inst.uuid, 'foo')
def test_reset_changes(self):
inst = instance.Instance()
inst.metadata = {'1985': 'present'}
inst.system_metadata = {'1955': 'past'}
self.assertEqual({}, inst._orig_metadata)
inst.obj_reset_changes(['metadata'])
self.assertEqual({'1985': 'present'}, inst._orig_metadata)
self.assertEqual({}, inst._orig_system_metadata)
def test_load_generic_calls_handler(self):
inst = instance.Instance(context=self.context,
uuid='fake-uuid')
with mock.patch.object(inst, '_load_generic') as mock_load:
def fake_load(name):
inst.system_metadata = {}
mock_load.side_effect = fake_load
inst.system_metadata
mock_load.assert_called_once_with('system_metadata')
def test_load_fault_calls_handler(self):
inst = instance.Instance(context=self.context,
uuid='fake-uuid')
with mock.patch.object(inst, '_load_fault') as mock_load:
def fake_load():
inst.fault = None
mock_load.side_effect = fake_load
inst.fault
mock_load.assert_called_once_with()
@mock.patch('nova.objects.Instance.get_by_uuid')
def test_load_generic(self, mock_get):
inst2 = instance.Instance(metadata={'foo': 'bar'})
mock_get.return_value = inst2
inst = instance.Instance(context=self.context,
uuid='fake-uuid')
inst.metadata
self.assertEqual({'foo': 'bar'}, inst.metadata)
mock_get.assert_called_once_with(self.context,
uuid='fake-uuid',
expected_attrs=['metadata'])
self.assertNotIn('metadata', inst.obj_what_changed())
@mock.patch('nova.db.instance_fault_get_by_instance_uuids')
def test_load_fault(self, mock_get):
fake_fault = test_instance_fault.fake_faults['fake-uuid'][0]
mock_get.return_value = {'fake': [fake_fault]}
inst = instance.Instance(context=self.context, uuid='fake')
fault = inst.fault
mock_get.assert_called_once_with(self.context, ['fake'])
self.assertEqual(fake_fault['id'], fault.id)
self.assertNotIn('metadata', inst.obj_what_changed())
class TestInstanceObject(test_objects._LocalTest,
_TestInstanceObject):
pass
class TestRemoteInstanceObject(test_objects._RemoteTest,
_TestInstanceObject):
pass
class _TestInstanceListObject(object):
def fake_instance(self, id, updates=None):
fake_instance = fakes.stub_instance(id=2,
access_ipv4='1.2.3.4',
access_ipv6='::1')
fake_instance['scheduled_at'] = None
fake_instance['terminated_at'] = None
fake_instance['deleted_at'] = None
fake_instance['created_at'] = None
fake_instance['updated_at'] = None
fake_instance['launched_at'] = (
fake_instance['launched_at'].replace(
tzinfo=iso8601.iso8601.Utc(), microsecond=0))
fake_instance['info_cache'] = {'network_info': '[]',
'instance_uuid': fake_instance['uuid']}
fake_instance['security_groups'] = []
fake_instance['deleted'] = 0
if updates:
fake_instance.update(updates)
return fake_instance
def test_get_all_by_filters(self):
fakes = [self.fake_instance(1), self.fake_instance(2)]
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
db.instance_get_all_by_filters(self.context, {'foo': 'bar'}, 'uuid',
'asc', limit=None, marker=None,
columns_to_join=['metadata'],
use_slave=False).AndReturn(fakes)
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_by_filters(
self.context, {'foo': 'bar'}, 'uuid', 'asc',
expected_attrs=['metadata'], use_slave=False)
for i in range(0, len(fakes)):
self.assertIsInstance(inst_list.objects[i], instance.Instance)
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
self.assertRemotes()
def test_get_all_by_filters_works_for_cleaned(self):
fakes = [self.fake_instance(1),
self.fake_instance(2, updates={'deleted': 2,
'cleaned': None})]
self.context.read_deleted = 'yes'
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
db.instance_get_all_by_filters(self.context,
{'deleted': True, 'cleaned': False},
'uuid', 'asc', limit=None, marker=None,
columns_to_join=['metadata'],
use_slave=False).AndReturn(
[fakes[1]])
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_by_filters(
self.context, {'deleted': True, 'cleaned': False}, 'uuid', 'asc',
expected_attrs=['metadata'], use_slave=False)
self.assertEqual(1, len(inst_list))
self.assertIsInstance(inst_list.objects[0], instance.Instance)
self.assertEqual(inst_list.objects[0].uuid, fakes[1]['uuid'])
self.assertRemotes()
def test_get_by_host(self):
fakes = [self.fake_instance(1),
self.fake_instance(2)]
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
db.instance_get_all_by_host(self.context, 'foo',
columns_to_join=None,
use_slave=False).AndReturn(fakes)
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_by_host(self.context, 'foo')
for i in range(0, len(fakes)):
self.assertIsInstance(inst_list.objects[i], instance.Instance)
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
self.assertEqual(inst_list.objects[i]._context, self.context)
self.assertEqual(inst_list.obj_what_changed(), set())
self.assertRemotes()
def test_get_by_host_and_node(self):
fakes = [self.fake_instance(1),
self.fake_instance(2)]
self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_node')
db.instance_get_all_by_host_and_node(self.context, 'foo', 'bar'
).AndReturn(fakes)
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_by_host_and_node(self.context,
'foo', 'bar')
for i in range(0, len(fakes)):
self.assertIsInstance(inst_list.objects[i], instance.Instance)
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
self.assertRemotes()
def test_get_by_host_and_not_type(self):
fakes = [self.fake_instance(1),
self.fake_instance(2)]
self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_not_type')
db.instance_get_all_by_host_and_not_type(self.context, 'foo',
type_id='bar').AndReturn(
fakes)
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_by_host_and_not_type(
self.context, 'foo', 'bar')
for i in range(0, len(fakes)):
self.assertIsInstance(inst_list.objects[i], instance.Instance)
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
self.assertRemotes()
def test_get_hung_in_rebooting(self):
fakes = [self.fake_instance(1),
self.fake_instance(2)]
dt = timeutils.isotime()
self.mox.StubOutWithMock(db, 'instance_get_all_hung_in_rebooting')
db.instance_get_all_hung_in_rebooting(self.context, dt).AndReturn(
fakes)
self.mox.ReplayAll()
inst_list = instance.InstanceList.get_hung_in_rebooting(self.context,
dt)
for i in range(0, len(fakes)):
self.assertIsInstance(inst_list.objects[i], instance.Instance)
self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
self.assertRemotes()
def test_get_active_by_window_joined(self):
fakes = [self.fake_instance(1), self.fake_instance(2)]
# NOTE(mriedem): Send in a timezone-naive datetime since the
# InstanceList.get_active_by_window_joined method should convert it
# to tz-aware for the DB API call, which we'll assert with our stub.
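        # Hedged sketch of the naive/aware distinction asserted below (added
        # for clarity; these are plain stdlib datetime facts, not original
        # code from this test):
        #
        #   naive = datetime.datetime(2014, 1, 1)   # naive.utcoffset() is None
        #   aware = naive.replace(tzinfo=iso8601.iso8601.Utc())
        #   aware.utcoffset()                       # -> datetime.timedelta(0)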
dt = timeutils.utcnow()
def fake_instance_get_active_by_window_joined(context, begin, end,
project_id, host):
# make sure begin is tz-aware
self.assertIsNotNone(begin.utcoffset())
self.assertIsNone(end)
return fakes
with mock.patch.object(db, 'instance_get_active_by_window_joined',
fake_instance_get_active_by_window_joined):
inst_list = instance.InstanceList.get_active_by_window_joined(
self.context, dt)
for fake, obj in zip(fakes, inst_list.objects):
self.assertIsInstance(obj, instance.Instance)
self.assertEqual(obj.uuid, fake['uuid'])
self.assertRemotes()
def test_with_fault(self):
fake_insts = [
fake_instance.fake_db_instance(uuid='fake-uuid', host='host'),
fake_instance.fake_db_instance(uuid='fake-inst2', host='host'),
]
fake_faults = test_instance_fault.fake_faults
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
db.instance_get_all_by_host(self.context, 'host',
columns_to_join=[],
use_slave=False
).AndReturn(fake_insts)
db.instance_fault_get_by_instance_uuids(
self.context, [x['uuid'] for x in fake_insts]
).AndReturn(fake_faults)
self.mox.ReplayAll()
instances = instance.InstanceList.get_by_host(self.context, 'host',
expected_attrs=['fault'],
use_slave=False)
self.assertEqual(2, len(instances))
self.assertEqual(fake_faults['fake-uuid'][0],
dict(instances[0].fault.iteritems()))
self.assertIsNone(instances[1].fault)
def test_fill_faults(self):
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
inst1 = instance.Instance(uuid='uuid1')
inst2 = instance.Instance(uuid='uuid2')
insts = [inst1, inst2]
for inst in insts:
inst.obj_reset_changes()
db_faults = {
'uuid1': [{'id': 123,
'instance_uuid': 'uuid1',
'code': 456,
'message': 'Fake message',
'details': 'No details',
'host': 'foo',
'deleted': False,
'deleted_at': None,
'updated_at': None,
'created_at': None,
}
]}
db.instance_fault_get_by_instance_uuids(self.context,
[x.uuid for x in insts],
).AndReturn(db_faults)
self.mox.ReplayAll()
inst_list = instance.InstanceList()
inst_list._context = self.context
inst_list.objects = insts
faulty = inst_list.fill_faults()
self.assertEqual(faulty, ['uuid1'])
self.assertEqual(inst_list[0].fault.message,
db_faults['uuid1'][0]['message'])
self.assertIsNone(inst_list[1].fault)
for inst in inst_list:
self.assertEqual(inst.obj_what_changed(), set())
def test_get_by_security_group(self):
fake_secgroup = dict(test_security_group.fake_secgroup)
fake_secgroup['instances'] = [
fake_instance.fake_db_instance(id=1,
system_metadata={'foo': 'bar'}),
fake_instance.fake_db_instance(id=2),
]
with mock.patch.object(db, 'security_group_get') as sgg:
sgg.return_value = fake_secgroup
secgroup = security_group.SecurityGroup()
secgroup.id = fake_secgroup['id']
instances = instance.InstanceList.get_by_security_group(
self.context, secgroup)
self.assertEqual(2, len(instances))
self.assertEqual([1, 2], [x.id for x in instances])
self.assertTrue(instances[0].obj_attr_is_set('system_metadata'))
self.assertEqual({'foo': 'bar'}, instances[0].system_metadata)
class TestInstanceListObject(test_objects._LocalTest,
_TestInstanceListObject):
pass
class TestRemoteInstanceListObject(test_objects._RemoteTest,
_TestInstanceListObject):
pass
class TestInstanceObjectMisc(test.NoDBTestCase):
def test_expected_cols(self):
self.stubs.Set(instance, '_INSTANCE_OPTIONAL_JOINED_FIELDS', ['bar'])
self.assertEqual(['bar'], instance._expected_cols(['foo', 'bar']))
self.assertIsNone(instance._expected_cols(None))
| apache-2.0 | 718,426,613,512,638,200 | 45.302979 | 79 | 0.553248 | false |
Vagab0nd/SiCKRAGE | sickchill/oldbeard/providers/kat.py | 1 | 7549 | import re
import traceback
import urllib
from collections import OrderedDict
from urllib.parse import urljoin
import validators
from sickchill import logger
from sickchill.helper.common import convert_size, try_int
from sickchill.oldbeard import tvcache
from sickchill.oldbeard.bs4_parser import BS4Parser
from sickchill.providers.torrent.TorrentProvider import TorrentProvider
class Provider(TorrentProvider):
def __init__(self):
super().__init__("KickAssTorrents")
self.public = True
self.confirmed = True
self.minseed = 0
self.minleech = 0
self.confirmed = True
self.mirrors = []
self.disabled_mirrors = []
# https://kickasskat.org/tv?field=time_add&sorder=desc
# https://kickasskat.org/usearch/{query}/?category=tv&field=seeders&sorder=desc
self.url = "https://kickasskat.org"
self.urls = None
self.custom_url = None
self.cache = tvcache.TVCache(self)
self.rows_selector = dict(class_=re.compile(r"even|odd"), id=re.compile(r"torrent_.*_torrents"))
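        # Clarifying note (added): this selector matches result rows whose
        # CSS class is "even"/"odd" and whose id looks like
        # "torrent_<something>_torrents", which is how the KAT listing pages
        # appear to mark torrent entries. The exact page markup is an
        # assumption inferred from the regexes above.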
def search(self, search_strings, age=0, ep_obj=None):
results = []
if not (self.url and self.urls):
self.find_domain()
if not (self.url and self.urls):
return results
anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False
search_params = {
"field": "seeders",
"sorder": "desc",
"category": ("tv", "anime")[anime]
}
for mode in search_strings:
items = []
logger.debug(_("Search Mode: {mode}".format(mode=mode)))
for search_string in {*search_strings[mode]}:
# search_params["q"] = (search_string, None)[mode == "RSS"]
search_params["field"] = ("seeders", "time_add")[mode == "RSS"]
if mode != "RSS":
if anime:
continue
logger.debug(_("Search String: {search_string}".format(search_string=search_string)))
search_url = self.urls["search"].format(q=search_string)
else:
search_url = self.urls["rss"]
if self.custom_url:
if not validators.url(self.custom_url):
logger.warning("Invalid custom url: {0}".format(self.custom_url))
return results
search_url = urljoin(self.custom_url, search_url.split(self.url)[1])
data = self.get_url(search_url, params=OrderedDict(sorted(list(search_params.items()), key=lambda x: x[0])), returns="text")
if not data:
logger.info("{url} did not return any data, it may be disabled. Trying to get a new domain".format(url=self.url))
self.disabled_mirrors.append(self.url)
self.find_domain()
if self.url in self.disabled_mirrors:
logger.info("Could not find a better mirror to try.")
logger.info("The search did not return data, if the results are on the site maybe try a custom url, or a different one")
return results
                    # Recurse with the next mirror; each failed mirror was
                    # appended to self.disabled_mirrors above, so the
                    # recursion terminates once every mirror has been tried.
return self.search(search_strings, age, ep_obj)
with BS4Parser(data, "html5lib") as html:
labels = [cell.get_text() for cell in html.find(class_="firstr")("th")]
logger.info("Found {} results".format(len(html("tr", **self.rows_selector))))
for result in html("tr", **self.rows_selector):
try:
download_url = urllib.parse.unquote_plus(result.find(title="Torrent magnet link")["href"].split("url=")[1]) + self._custom_trackers
parsed_magnet = urllib.parse.parse_qs(download_url)
torrent_hash = self.hash_from_magnet(download_url)
title = result.find(class_="torrentname").find(class_="cellMainLink").get_text(strip=True)
if title.endswith("..."):
title = parsed_magnet['dn'][0]
if not (title and download_url):
if mode != "RSS":
logger.debug("Discarding torrent because We could not parse the title and url")
continue
seeders = try_int(result.find(class_="green").get_text(strip=True))
leechers = try_int(result.find(class_="red").get_text(strip=True))
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != "RSS":
logger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
(title, seeders, leechers))
continue
if self.confirmed and not result.find(class_="ka-green"):
if mode != "RSS":
logger.debug("Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it")
continue
torrent_size = result("td")[labels.index("size")].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': torrent_hash}
if mode != "RSS":
logger.debug("Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers))
items.append(item)
except (AttributeError, TypeError, KeyError, ValueError, Exception):
logger.info(traceback.format_exc())
continue
# For each search mode sort all the items by seeders if available
items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
results += items
return results
def find_domain(self):
data = self.get_url("https://kickass2.help")
if data:
with BS4Parser(data, "html5lib") as html:
mirrors = html(class_='domainLink')
if mirrors:
self.mirrors = []
for mirror in mirrors:
domain = mirror["href"]
if domain not in self.disabled_mirrors:
self.mirrors.append(mirror["href"])
if self.mirrors:
self.url = self.mirrors[0]
logger.info("Setting mirror to use to {url}".format(url=self.url))
else:
logger.warning("Unable to get a working mirror for kickasstorrents, you might need to enable another provider and disable KAT until KAT starts working "
"again.")
self.urls = {"search": urljoin(self.url, "/usearch/{q}/"), "rss": urljoin(self.url, "/tv/")}
return self.url
| gpl-3.0 | 2,564,928,592,693,131,300 | 43.934524 | 164 | 0.522851 | false |
jackrzhang/zulip | zerver/tests/test_timestamp.py | 14 | 1871 |
from django.utils.timezone import utc as timezone_utc
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import floor_to_hour, floor_to_day, ceiling_to_hour, \
ceiling_to_day, timestamp_to_datetime, datetime_to_timestamp, \
TimezoneNotUTCException, convert_to_UTC
from datetime import datetime, timedelta
from dateutil import parser
import pytz
class TestTimestamp(ZulipTestCase):
def test_datetime_and_timestamp_conversions(self) -> None:
timestamp = 1483228800
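        # 1483228800 is 2017-01-01T00:00:00Z, matching the parsed datetimes
        # below (comment added for clarity).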
for dt in [
parser.parse('2017-01-01 00:00:00.123 UTC'),
parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=pytz.utc)]:
self.assertEqual(timestamp_to_datetime(timestamp), dt-timedelta(microseconds=123000))
self.assertEqual(datetime_to_timestamp(dt), timestamp)
for dt in [
parser.parse('2017-01-01 00:00:00.123+01:00'),
parser.parse('2017-01-01 00:00:00.123')]:
with self.assertRaises(TimezoneNotUTCException):
datetime_to_timestamp(dt)
def test_convert_to_UTC(self) -> None:
utc_datetime = parser.parse('2017-01-01 00:00:00.123 UTC')
for dt in [
parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
parser.parse('2017-01-01 00:00:00.123'),
parser.parse('2017-01-01 05:00:00.123+05')]:
self.assertEqual(convert_to_UTC(dt), utc_datetime)
def test_enforce_UTC(self) -> None:
non_utc_datetime = parser.parse('2017-01-01 00:00:00.123')
for function in [floor_to_hour, floor_to_day, ceiling_to_hour, ceiling_to_hour]:
with self.assertRaises(TimezoneNotUTCException):
function(non_utc_datetime)
| apache-2.0 | -4,594,811,745,338,252,300 | 44.634146 | 97 | 0.643506 | false |
lnielsen/invenio | invenio/legacy/bibedit/webinterface.py | 2 | 13635 | ## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0103
"""Invenio BibEdit Administrator Interface."""
__revision__ = "$Id"
__lastupdated__ = """$Date: 2008/08/12 09:26:46 $"""
import cProfile
import cStringIO
import pstats
from flask.ext.login import current_user
from invenio.utils.json import json, json_unicode_to_utf8, CFG_JSON_AVAILABLE
from invenio.modules.access.engine import acc_authorize_action
from invenio.legacy.bibedit.engine import perform_request_ajax, perform_request_init, \
perform_request_newticket, perform_request_compare, \
perform_request_init_template_interface, \
perform_request_ajax_template_interface
from invenio.legacy.bibedit.utils import user_can_edit_record_collection
from invenio.config import CFG_SITE_LANG, CFG_SITE_SECURE_URL, CFG_SITE_RECORD
from invenio.base.i18n import gettext_set_language
from invenio.utils.url import redirect_to_url
from invenio.ext.legacy.handler import WebInterfaceDirectory, wash_urlargd
from invenio.legacy.webpage import page
from invenio.legacy.webuser import page_not_authorized
navtrail = (' <a class="navtrail" href=\"%s/help/admin\">Admin Area</a> '
) % CFG_SITE_SECURE_URL
navtrail_bibedit = (' <a class="navtrail" href=\"%s/help/admin\">Admin Area</a> ' + \
' > <a class="navtrail" href=\"%s/%s/edit\">Record Editor</a>'
) % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, CFG_SITE_RECORD)
def wrap_json_req_profiler(func):
def json_req_profiler(self, req, form):
if "ajaxProfile" in form:
profiler = cProfile.Profile()
return_val = profiler.runcall(func, self, req, form)
results = cStringIO.StringIO()
stats = pstats.Stats(profiler, stream=results)
stats.sort_stats('cumulative')
stats.print_stats(100)
json_in = json.loads(str(form['jsondata']))
# Deunicode all strings (Invenio doesn't have unicode
# support).
json_in = json_unicode_to_utf8(json_in)
json_data = json.loads(return_val)
json_data.update({"profilerStats": "<pre style='overflow: scroll'>" + json_in['requestType'] + results.getvalue() + "</pre>"})
return json.dumps(json_data)
else:
return func(self, req, form)
return json_req_profiler
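# Usage note (added for clarity): any handler wrapped with the decorator
# above returns profiling output when the submitted form carries an
# "ajaxProfile" field; the JSON response then gains a "profilerStats" entry
# rendered as preformatted pstats text. Only the form key and response key
# come from the code above; how a client sets the field is not shown here.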
class WebInterfaceEditPages(WebInterfaceDirectory):
"""Defines the set of /edit pages."""
_exports = ['', 'new_ticket', 'compare_revisions', 'templates']
def __init__(self, recid=None):
"""Initialize."""
self.recid = recid
@wrap_json_req_profiler
def index(self, req, form):
"""Handle all BibEdit requests.
        The responsibilities of this function are:
* JSON decoding and encoding.
* Redirection, if necessary.
* Authorization.
* Calling the appropriate function from the engine.
"""
uid = current_user.get_id()
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
# Abort if the simplejson module isn't available
if not CFG_JSON_AVAILABLE:
title = 'Record Editor'
body = '''Sorry, the record editor cannot operate when the
`simplejson' module is not installed. Please see the INSTALL
file.'''
return page(title = title,
body = body,
errors = [],
warnings = [],
uid = uid,
language = argd['ln'],
navtrail = navtrail,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
# If it is an Ajax request, extract any JSON data.
ajax_request, recid = False, None
if 'jsondata' in form:
json_data = json.loads(str(form['jsondata']))
# Deunicode all strings (Invenio doesn't have unicode
# support).
json_data = json_unicode_to_utf8(json_data)
ajax_request = True
if 'recID' in json_data:
recid = json_data['recID']
json_response = {'resultCode': 0, 'ID': json_data['ID']}
# Authorization.
if current_user.is_guest:
# User is not logged in.
if not ajax_request:
# Do not display the introductory recID selection box to guest
# users (as it used to be with v0.99.0):
auth_code, auth_message = acc_authorize_action(req,
'runbibedit')
referer = '/edit/'
if self.recid:
referer = '/%s/%s/edit/' % (CFG_SITE_RECORD, self.recid)
return page_not_authorized(req=req, referer=referer,
text=auth_message, navtrail=navtrail)
else:
# Session has most likely timed out.
json_response.update({'resultCode': 100})
return json.dumps(json_response)
elif self.recid:
# Handle RESTful calls from logged in users by redirecting to
# generic URL.
redirect_to_url(req, '%s/%s/edit/#state=edit&recid=%s&recrev=%s' % (
CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, ""))
elif recid is not None:
json_response.update({'recID': recid})
if json_data['requestType'] == "getRecord":
# Authorize access to record.
if not user_can_edit_record_collection(req, recid):
json_response.update({'resultCode': 101})
return json.dumps(json_response)
# Handle request.
if not ajax_request:
# Show BibEdit start page.
body, errors, warnings = perform_request_init(uid, argd['ln'], req, __lastupdated__)
title = 'Record Editor'
return page(title = title,
body = body,
errors = errors,
warnings = warnings,
uid = uid,
language = argd['ln'],
navtrail = navtrail,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
else:
# Handle AJAX request.
json_response.update(perform_request_ajax(req, recid, uid,
json_data))
return json.dumps(json_response)
def compare_revisions(self, req, form):
"""Handle the compare revisions request"""
argd = wash_urlargd(form, { \
'ln': (str, CFG_SITE_LANG), \
'rev1' : (str, ''), \
'rev2' : (str, ''), \
'recid': (int, 0)})
ln = argd['ln']
uid = current_user.get_id()
_ = gettext_set_language(ln)
# Checking if currently logged user has permission to perform this request
auth_code, auth_message = acc_authorize_action(req, 'runbibedit')
if auth_code != 0:
return page_not_authorized(req=req, referer="/edit",
text=auth_message, navtrail=navtrail)
recid = argd['recid']
rev1 = argd['rev1']
rev2 = argd['rev2']
ln = argd['ln']
body, errors, warnings = perform_request_compare(ln, recid, rev1, rev2)
return page(title = _("Comparing two record revisions"),
body = body,
errors = errors,
warnings = warnings,
uid = uid,
language = ln,
navtrail = navtrail,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
def new_ticket(self, req, form):
"""handle a edit/new_ticket request"""
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG), 'recid': (int, 0)})
ln = argd['ln']
_ = gettext_set_language(ln)
auth_code, auth_message = acc_authorize_action(req, 'runbibedit')
if auth_code != 0:
return page_not_authorized(req=req, referer="/edit",
text=auth_message, navtrail=navtrail)
uid = current_user.get_id()
if argd['recid']:
(errmsg, url) = perform_request_newticket(argd['recid'], uid)
if errmsg:
return page(title = _("Failed to create a ticket"),
body = _("Error")+": "+errmsg,
errors = [],
warnings = [],
uid = uid,
language = ln,
navtrail = navtrail,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
else:
#redirect..
redirect_to_url(req, url)
def templates(self, req, form):
"""handle a edit/templates request"""
uid = current_user.get_id()
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
# Abort if the simplejson module isn't available
if not CFG_JSON_AVAILABLE:
title = 'Record Editor Template Manager'
body = '''Sorry, the record editor cannot operate when the
`simplejson' module is not installed. Please see the INSTALL
file.'''
return page(title = title,
body = body,
errors = [],
warnings = [],
uid = uid,
language = argd['ln'],
navtrail = navtrail_bibedit,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
# If it is an Ajax request, extract any JSON data.
ajax_request = False
if 'jsondata' in form:
json_data = json.loads(str(form['jsondata']))
# Deunicode all strings (Invenio doesn't have unicode
# support).
json_data = json_unicode_to_utf8(json_data)
ajax_request = True
json_response = {'resultCode': 0}
# Authorization.
if current_user.is_guest:
# User is not logged in.
if not ajax_request:
# Do not display the introductory recID selection box to guest
# users (as it used to be with v0.99.0):
dummy_auth_code, auth_message = acc_authorize_action(req,
'runbibedit')
referer = '/edit'
return page_not_authorized(req=req, referer=referer,
text=auth_message, navtrail=navtrail)
else:
# Session has most likely timed out.
json_response.update({'resultCode': 100})
return json.dumps(json_response)
# Handle request.
if not ajax_request:
# Show BibEdit template management start page.
body, errors, warnings = perform_request_init_template_interface()
title = 'Record Editor Template Manager'
return page(title = title,
body = body,
errors = errors,
warnings = warnings,
uid = uid,
language = argd['ln'],
navtrail = navtrail_bibedit,
lastupdated = __lastupdated__,
req = req,
body_css_classes = ['bibedit'])
else:
# Handle AJAX request.
json_response.update(perform_request_ajax_template_interface(json_data))
return json.dumps(json_response)
def __call__(self, req, form):
"""Redirect calls without final slash."""
if self.recid:
redirect_to_url(req, '%s/%s/%s/edit/' % (CFG_SITE_SECURE_URL,
CFG_SITE_RECORD,
self.recid))
else:
redirect_to_url(req, '%s/%s/edit/' % (CFG_SITE_SECURE_URL, CFG_SITE_RECORD))
| gpl-2.0 | -1,200,456,052,752,920,800 | 42.148734 | 138 | 0.512578 | false |
hbrls/weixin-api-mockup | appl/jssdk/views.py | 1 | 1060 | # -*- coding: utf-8 -*-
import logging
from uuid import uuid4
from flask import Blueprint, request, current_app, jsonify
_logger = logging.getLogger(__name__)
mod = Blueprint('jssdk', __name__, template_folder='templates')
@mod.route('/cgi-bin/ticket/getticket', methods=['GET'])
def getticket():
"""
    Fetch the jsapi_ticket.
http://mp.weixin.qq.com/wiki/7/aaa137b55fb2e0456bf8dd9148dd613f.html#.E9.99.84.E5.BD.951-JS-SDK.E4.BD.BF.E7.94.A8.E6.9D.83.E9.99.90.E7.AD.BE.E5.90.8D.E7.AE.97.E6.B3.95
"""
access_token = request.args.get('access_token', None)
if not access_token:
return jsonify({
'errcode': 40014,
            'errmsg': 'invalid access_token'
})
auth_type = request.args.get('type', None)
if not auth_type or auth_type != 'jsapi':
return jsonify({
'errcode': 40014,
            'errmsg': 'invalid type'
})
return jsonify({
'errcode': 0,
'errmsg': 'ok',
'ticket': 'FAKE_JSAPI_TICKET',
'expires_in': 7200,
})
| mit | -823,091,781,175,057,400 | 25 | 171 | 0.586538 | false |
rahiel/shellstats | shellstats.py | 1 | 3629 | # -*- coding: utf-8 -*-
from __future__ import division
from os import getenv
from os.path import isfile
from sys import exit
import click
@click.command()
@click.option("--n", default=10, help="How many commands to show.")
@click.option("--plot", is_flag=True, help="Plot command usage in pie chart.")
@click.option("--command", default=None,
help="Most frequent subcommands for command, e.g. sudo, git.")
@click.option("--history-file", type=click.Path(exists=True, readable=True),
default=None, help="Read shell history from history-file.")
@click.option("--shell", default=None,
help="Specify shell history format: bash, fish or zsh.")
def main(n, plot, command, history_file, shell):
"""Print the most frequently used shell commands."""
history = get_history(history_file, shell, command)
commands = {}
for line in history:
cmd = line.split()
if cmd[0] in commands:
commands[cmd[0]] += 1
else:
commands[cmd[0]] = 1
total = len(history)
    # counts :: [(command, num_occurrence)]
counts = sorted(commands.items(), key=lambda x: x[1], reverse=True)
print_top(n, counts, total)
if plot:
pie_top(n, counts, command)
return counts
def pie_top(n, counts, command):
"""Show a pie chart of n most used commands."""
try:
import matplotlib.pyplot as plt
except ImportError:
click.echo(click.style("Please install matplotlib for plotting.", fg="red"))
exit()
label, x = zip(*counts[:n])
fig = plt.figure()
fig.canvas.set_window_title("ShellStats")
plt.axes(aspect=1)
if command:
title = "Top {0} used {1} subcommands.".format(min(n, len(counts)), command)
else:
title = "Top {0} used shell commands.".format(min(n, len(counts)))
plt.title(title)
plt.pie(x, labels=label)
plt.show()
def print_top(n, counts, total):
"""Print the top n used commands."""
click.echo("{:>3} {:<20} {:<10} {:<3}"
.format('', "Command", "Count", "Percentage"))
# min for when history is too small
for i in min(range(n), range(len(counts)), key=len):
cmd, count = counts[i]
click.echo("{i:>3} {cmd:<20} {count:<10} {percent:<3.3}%"
.format(i=i+1, cmd=cmd, count=count,
percent=count / total * 100))
def get_history(history_file, shell, command):
"""Get usage history for the shell in use."""
    shell = shell or getenv("SHELL", "").split('/')[-1]  # "" avoids a crash if SHELL is unset
if history_file is None:
home = getenv("HOME") + '/'
hist_files = {"bash": [".bash_history"],
"fish": [".config/fish/fish_history"],
"zsh": [".zhistory", ".zsh_history"]}
if shell in hist_files:
for hist_file in hist_files[shell]:
if isfile(home + hist_file):
history_file = home + hist_file
if not history_file:
click.echo(click.style("Shell history file not found.", fg="red"))
exit()
with open(history_file, 'r') as h:
history = [l.strip() for l in h.readlines() if l.strip()]
if shell == "fish":
history = [l[7:] for l in history if l.startswith("- cmd:")]
elif shell == "zsh":
hist = []
for l in history:
if l.startswith(": "):
hist.append(l.split(';', 1)[-1])
else:
hist.append(l)
history = hist
if command:
history = [l[len(command) + 1:] for l in history if l.startswith(str(command))]
return history
| mit | 8,060,386,650,571,835,000 | 34.930693 | 87 | 0.567925 | false |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/core/tests/test_eval_power.py | 9 | 11820 | from sympy.core import (Rational, Symbol, S, Float, Integer, Number, Pow,
Basic, I, nan, pi, symbols)
from sympy.core.tests.test_evalf import NS
from sympy.functions.elementary.miscellaneous import sqrt, cbrt
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.trigonometric import sin, cos
from sympy.series.order import O
def test_rational():
a = Rational(1, 5)
r = sqrt(5)/5
assert sqrt(a) == r
assert 2*sqrt(a) == 2*r
r = a*a**Rational(1, 2)
assert a**Rational(3, 2) == r
assert 2*a**Rational(3, 2) == 2*r
r = a**5*a**Rational(2, 3)
assert a**Rational(17, 3) == r
assert 2 * a**Rational(17, 3) == 2*r
def test_large_rational():
e = (Rational(123712**12 - 1, 7) + Rational(1, 7))**Rational(1, 3)
assert e == 234232585392159195136 * (Rational(1, 7)**Rational(1, 3))
def test_negative_real():
def feq(a, b):
return abs(a - b) < 1E-10
assert feq(S.One / Float(-0.5), -Integer(2))
def test_expand():
x = Symbol('x')
assert (2**(-1 - x)).expand() == Rational(1, 2)*2**(-x)
def test_issue_3449():
    # test that powers are simplified correctly
    # see also issue 3995
x = Symbol('x')
assert ((x**Rational(1, 3))**Rational(2)) == x**Rational(2, 3)
assert (
(x**Rational(3))**Rational(2, 5)) == (x**Rational(3))**Rational(2, 5)
a = Symbol('a', real=True)
b = Symbol('b', real=True)
assert (a**2)**b == (abs(a)**b)**2
assert sqrt(1/a) != 1/sqrt(a) # e.g. for a = -1
assert (a**3)**Rational(1, 3) != a
assert (x**a)**b != x**(a*b) # e.g. x = -1, a=2, b=1/2
assert (x**.5)**b == x**(.5*b)
assert (x**.5)**.5 == x**.25
assert (x**2.5)**.5 != x**1.25 # e.g. for x = 5*I
k = Symbol('k', integer=True)
m = Symbol('m', integer=True)
assert (x**k)**m == x**(k*m)
assert Number(5)**Rational(2, 3) == Number(25)**Rational(1, 3)
assert (x**.5)**2 == x**1.0
assert (x**2)**k == (x**k)**2 == x**(2*k)
a = Symbol('a', positive=True)
assert (a**3)**Rational(2, 5) == a**Rational(6, 5)
assert (a**2)**b == (a**b)**2
assert (a**Rational(2, 3))**x == (a**(2*x/3)) != (a**x)**Rational(2, 3)
def test_issue_3866():
assert --sqrt(sqrt(5) - 1) == sqrt(sqrt(5) - 1)
def test_negative_one():
x = Symbol('x', complex=True)
y = Symbol('y', complex=True)
assert 1/x**y == x**(-y)
def test_issue_4362():
neg = Symbol('neg', negative=True)
nonneg = Symbol('nonneg', nonnegative=True)
any = Symbol('any')
num, den = sqrt(1/neg).as_numer_denom()
assert num == sqrt(-1)
assert den == sqrt(-neg)
num, den = sqrt(1/nonneg).as_numer_denom()
assert num == 1
assert den == sqrt(nonneg)
num, den = sqrt(1/any).as_numer_denom()
assert num == sqrt(1/any)
assert den == 1
def eqn(num, den, pow):
return (num/den)**pow
npos = 1
nneg = -1
dpos = 2 - sqrt(3)
dneg = 1 - sqrt(3)
assert dpos > 0 and dneg < 0 and npos > 0 and nneg < 0
# pos or neg integer
eq = eqn(npos, dpos, 2)
assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
eq = eqn(npos, dneg, 2)
assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
eq = eqn(nneg, dpos, 2)
assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
eq = eqn(nneg, dneg, 2)
assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
eq = eqn(npos, dpos, -2)
assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
eq = eqn(npos, dneg, -2)
assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
eq = eqn(nneg, dpos, -2)
assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
eq = eqn(nneg, dneg, -2)
assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
# pos or neg rational
pow = S.Half
eq = eqn(npos, dpos, pow)
assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
eq = eqn(npos, dneg, pow)
assert eq.is_Pow is False and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
eq = eqn(nneg, dpos, pow)
assert not eq.is_Pow or eq.as_numer_denom() == (nneg**pow, dpos**pow)
eq = eqn(nneg, dneg, pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
eq = eqn(npos, dpos, -pow)
assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, npos**pow)
eq = eqn(npos, dneg, -pow)
assert eq.is_Pow is False and eq.as_numer_denom() == (-(-npos)**pow*(-dneg)**pow, npos)
eq = eqn(nneg, dpos, -pow)
assert not eq.is_Pow or eq.as_numer_denom() == (dpos**pow, nneg**pow)
eq = eqn(nneg, dneg, -pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)
# unknown exponent
pow = 2*any
eq = eqn(npos, dpos, pow)
assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
eq = eqn(npos, dneg, pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
eq = eqn(nneg, dpos, pow)
assert eq.is_Pow and eq.as_numer_denom() == (nneg**pow, dpos**pow)
eq = eqn(nneg, dneg, pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
eq = eqn(npos, dpos, -pow)
assert eq.as_numer_denom() == (dpos**pow, npos**pow)
eq = eqn(npos, dneg, -pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-npos)**pow)
eq = eqn(nneg, dpos, -pow)
assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, nneg**pow)
eq = eqn(nneg, dneg, -pow)
assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)
x = Symbol('x')
y = Symbol('y')
assert ((1/(1 + x/3))**(-S.One)).as_numer_denom() == (3 + x, 3)
notp = Symbol('notp', positive=False) # not positive does not imply real
b = ((1 + x/notp)**-2)
assert (b**(-y)).as_numer_denom() == (1, b**y)
assert (b**(-S.One)).as_numer_denom() == ((notp + x)**2, notp**2)
nonp = Symbol('nonp', nonpositive=True)
assert (((1 + x/nonp)**-2)**(-S.One)).as_numer_denom() == ((-nonp -
x)**2, nonp**2)
n = Symbol('n', negative=True)
assert (x**n).as_numer_denom() == (1, x**-n)
assert sqrt(1/n).as_numer_denom() == (S.ImaginaryUnit, sqrt(-n))
n = Symbol('0 or neg', nonpositive=True)
# if x and n are split up without negating each term and n is negative
# then the answer might be wrong; if n is 0 it won't matter since
# 1/oo and 1/zoo are both zero as is sqrt(0)/sqrt(-x) unless x is also
# zero (in which case the negative sign doesn't matter):
# 1/sqrt(1/-1) = -I but sqrt(-1)/sqrt(1) = I
assert (1/sqrt(x/n)).as_numer_denom() == (sqrt(-n), sqrt(-x))
c = Symbol('c', complex=True)
e = sqrt(1/c)
assert e.as_numer_denom() == (e, 1)
i = Symbol('i', integer=True)
assert (((1 + x/y)**i)).as_numer_denom() == ((x + y)**i, y**i)
def test_Pow_signs():
"""Cf. issues 4595 and 5250"""
x = Symbol('x')
y = Symbol('y')
n = Symbol('n', even=True)
assert (3 - y)**2 != (y - 3)**2
assert (3 - y)**n != (y - 3)**n
assert (-3 + y - x)**2 != (3 - y + x)**2
assert (y - 3)**3 != -(3 - y)**3
def test_power_with_noncommutative_mul_as_base():
x = Symbol('x', commutative=False)
y = Symbol('y', commutative=False)
assert not (x*y)**3 == x**3*y**3
assert (2*x*y)**3 == 8*(x*y)**3
def test_zero():
x = Symbol('x')
y = Symbol('y')
assert 0**x != 0
assert 0**(2*x) == 0**x
assert 0**(1.0*x) == 0**x
assert 0**(2.0*x) == 0**x
assert (0**(2 - x)).as_base_exp() == (0, 2 - x)
assert 0**(x - 2) != S.Infinity**(2 - x)
assert 0**(2*x*y) == 0**(x*y)
assert 0**(-2*x*y) == S.ComplexInfinity**(x*y)
assert 0**I == nan
i = Symbol('i', imaginary=True)
assert 0**i == nan
def test_pow_as_base_exp():
x = Symbol('x')
assert (S.Infinity**(2 - x)).as_base_exp() == (S.Infinity, 2 - x)
assert (S.Infinity**(x - 2)).as_base_exp() == (S.Infinity, x - 2)
p = S.Half**x
    assert p.as_base_exp() == (S(2), -x)
# issue 8344:
assert Pow(1, 2, evaluate=False).as_base_exp() == (S(1), S(2))
def test_issue_6100():
x = Symbol('x')
y = Symbol('y')
assert x**1.0 == x
assert x == x**1.0
assert True != x**1.0
assert x**1.0 is not True
assert x is not True
assert x*y == (x*y)**1.0
assert (x**1.0)**1.0 == x
assert (x**1.0)**2.0 == x**2
b = Basic()
assert Pow(b, 1.0, evaluate=False) == b
# if the following gets distributed as a Mul (x**1.0*y**1.0 then
# __eq__ methods could be added to Symbol and Pow to detect the
# power-of-1.0 case.
assert ((x*y)**1.0).func is Pow
def test_issue_6208():
from sympy import root, Rational
I = S.ImaginaryUnit
assert sqrt(33**(9*I/10)) == -33**(9*I/20)
assert root((6*I)**(2*I), 3).as_base_exp()[1] == Rational(1, 3) # != 2*I/3
assert root((6*I)**(I/3), 3).as_base_exp()[1] == I/9
assert sqrt(exp(3*I)) == exp(3*I/2)
assert sqrt(-sqrt(3)*(1 + 2*I)) == sqrt(sqrt(3))*sqrt(-1 - 2*I)
assert sqrt(exp(5*I)) == -exp(5*I/2)
assert root(exp(5*I), 3).exp == Rational(1, 3)
def test_issue_6990():
x = Symbol('x')
a = Symbol('a')
b = Symbol('b')
assert (sqrt(a + b*x + x**2)).series(x, 0, 3).removeO() == \
b*x/(2*sqrt(a)) + x**2*(1/(2*sqrt(a)) - \
b**2/(8*a**(S(3)/2))) + sqrt(a)
def test_issue_6068():
x = Symbol('x')
assert sqrt(sin(x)).series(x, 0, 7) == \
sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
x**(S(13)/2)/24192 + O(x**7)
assert sqrt(sin(x)).series(x, 0, 9) == \
sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
x**(S(13)/2)/24192 - 67*x**(S(17)/2)/29030400 + O(x**9)
assert sqrt(sin(x**3)).series(x, 0, 19) == \
x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 + O(x**19)
assert sqrt(sin(x**3)).series(x, 0, 20) == \
x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 - \
x**(S(39)/2)/24192 + O(x**20)
def test_issue_6782():
x = Symbol('x')
assert sqrt(sin(x**3)).series(x, 0, 7) == x**(S(3)/2) + O(x**7)
assert sqrt(sin(x**4)).series(x, 0, 3) == x**2 + O(x**3)
def test_issue_6653():
x = Symbol('x')
assert (1 / sqrt(1 + sin(x**2))).series(x, 0, 3) == 1 - x**2/2 + O(x**3)
def test_issue_6429():
x = Symbol('x')
c = Symbol('c')
f = (c**2 + x)**(0.5)
assert f.series(x, x0=0, n=1) == (c**2)**0.5 + O(x)
assert f.taylor_term(0, x) == (c**2)**0.5
assert f.taylor_term(1, x) == 0.5*x*(c**2)**(-0.5)
assert f.taylor_term(2, x) == -0.125*x**2*(c**2)**(-1.5)
def test_issue_7638():
f = pi/log(sqrt(2))
assert ((1 + I)**(I*f/2))**0.3 == (1 + I)**(0.15*I*f)
# if 1/3 -> 1.0/3 this should fail since it cannot be shown that the
# sign will be +/-1; for the previous "small arg" case, it didn't matter
# that this could not be proved
assert (1 + I)**(4*I*f) == ((1 + I)**(12*I*f))**(S(1)/3)
assert (((1 + I)**(I*(1 + 7*f)))**(S(1)/3)).exp == S(1)/3
r = symbols('r', real=True)
assert sqrt(r**2) == abs(r)
assert cbrt(r**3) != r
assert sqrt(Pow(2*I, 5*S.Half)) != (2*I)**(5/S(4))
p = symbols('p', positive=True)
assert cbrt(p**2) == p**(2/S(3))
assert NS(((0.2 + 0.7*I)**(0.7 + 1.0*I))**(0.5 - 0.1*I), 1) == '0.4 + 0.2*I'
assert sqrt(1/(1 + I)) == sqrt((1 - I)/2) # or 1/sqrt(1 + I)
e = 1/(1 - sqrt(2))
assert sqrt(e) == I/sqrt(-1 + sqrt(2))
assert e**-S.Half == -I*sqrt(-1 + sqrt(2))
assert sqrt((cos(1)**2 + sin(1)**2 - 1)**(3 + I)).exp == S.Half
assert sqrt(r**(4/S(3))) != r**(2/S(3))
assert sqrt((p + I)**(4/S(3))) == (p + I)**(2/S(3))
assert sqrt((p - p**2*I)**2) == p - p**2*I
assert sqrt((p + r*I)**2) != p + r*I
e = (1 + I/5)
assert sqrt(e**5) == e**(5*S.Half)
assert sqrt(e**6) == e**3
assert sqrt((1 + I*r)**6) != (1 + I*r)**3
| mit | -7,808,279,482,758,563,000 | 34.178571 | 91 | 0.521997 | false |
woltage/ansible-modules-core | web_infrastructure/django_manage.py | 5 | 11099 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Scott Anderson <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: django_manage
short_description: Manages a Django application.
description:
- Manages a Django application using the I(manage.py) application frontend to I(django-admin). With the I(virtualenv) parameter, all management commands will be executed by the given I(virtualenv) installation.
version_added: "1.1"
options:
command:
choices: [ 'cleanup', 'collectstatic', 'flush', 'loaddata', 'migrate', 'runfcgi', 'syncdb', 'test', 'validate', ]
description:
      - The name of the Django management command to run. Built-in commands are cleanup, collectstatic, flush, loaddata, migrate, runfcgi, syncdb, test, and validate.
- Other commands can be entered, but will fail if they're unknown to Django. Other commands that may prompt for user input should be run with the I(--noinput) flag.
required: true
app_path:
description:
- The path to the root of the Django application where B(manage.py) lives.
required: true
settings:
description:
- The Python path to the application's settings module, such as 'myapp.settings'.
required: false
pythonpath:
description:
- A directory to add to the Python path. Typically used to include the settings module if it is located external to the application directory.
required: false
virtualenv:
description:
- An optional path to a I(virtualenv) installation to use while running the manage application.
required: false
apps:
description:
- A list of space-delimited apps to target. Used by the 'test' command.
required: false
cache_table:
description:
- The name of the table used for database-backed caching. Used by the 'createcachetable' command.
required: false
database:
description:
- The database to target. Used by the 'createcachetable', 'flush', 'loaddata', and 'syncdb' commands.
required: false
failfast:
description:
- Fail the command immediately if a test fails. Used by the 'test' command.
required: false
default: "no"
choices: [ "yes", "no" ]
fixtures:
description:
- A space-delimited list of fixture file names to load in the database. B(Required) by the 'loaddata' command.
required: false
skip:
description:
- Will skip over out-of-order missing migrations, you can only use this parameter with I(migrate)
required: false
version_added: "1.3"
merge:
description:
- Will run out-of-order or missing migrations as they are not rollback migrations, you can only use this parameter with 'migrate' command
required: false
version_added: "1.3"
link:
description:
- Will create links to the files instead of copying them, you can only use this parameter with 'collectstatic' command
required: false
version_added: "1.3"
notes:
- I(virtualenv) (U(http://www.virtualenv.org)) must be installed on the remote host if the virtualenv parameter is specified.
- This module will create a virtualenv if the virtualenv parameter is specified and a virtualenv does not already exist at the given location.
- This module assumes English error messages for the 'createcachetable' command to detect table existence, unfortunately.
- To be able to use the migrate command, you must have south installed and added as an app in your settings
- To be able to use the collectstatic command, you must have enabled staticfiles in your settings
requirements: [ "virtualenv", "django" ]
author: "Scott Anderson (@tastychutney)"
'''
EXAMPLES = """
# Run cleanup on the application installed in 'django_dir'.
- django_manage: command=cleanup app_path={{ django_dir }}
# Load the initial_data fixture into the application
- django_manage: command=loaddata app_path={{ django_dir }} fixtures={{ initial_data }}
# Run syncdb on the application
- django_manage: >
command=syncdb
app_path={{ django_dir }}
settings={{ settings_app_name }}
pythonpath={{ settings_dir }}
virtualenv={{ virtualenv_dir }}
# Run the SmokeTest test case from the main app. Useful for testing deploys.
- django_manage: command=test app_path={{ django_dir }} apps=main.SmokeTest
# Create an initial superuser.
- django_manage: command="createsuperuser --noinput --username=admin [email protected]" app_path={{ django_dir }}
"""
import os
def _fail(module, cmd, out, err, **kwargs):
msg = ''
if out:
msg += "stdout: %s" % (out, )
if err:
msg += "\n:stderr: %s" % (err, )
module.fail_json(cmd=cmd, msg=msg, **kwargs)
def _ensure_virtualenv(module):
venv_param = module.params['virtualenv']
if venv_param is None:
return
vbin = os.path.join(os.path.expanduser(venv_param), 'bin')
activate = os.path.join(vbin, 'activate')
if not os.path.exists(activate):
virtualenv = module.get_bin_path('virtualenv', True)
        vcmd = [virtualenv, venv_param]
rc, out_venv, err_venv = module.run_command(vcmd)
if rc != 0:
_fail(module, vcmd, out_venv, err_venv)
os.environ["PATH"] = "%s:%s" % (vbin, os.environ["PATH"])
os.environ["VIRTUAL_ENV"] = venv_param
def createcachetable_filter_output(line):
return "Already exists" not in line
def flush_filter_output(line):
return "Installed" in line and "Installed 0 object" not in line
def loaddata_filter_output(line):
return "Installed" in line and "Installed 0 object" not in line
def syncdb_filter_output(line):
return ("Creating table " in line) or ("Installed" in line and "Installed 0 object" not in line)
def migrate_filter_output(line):
return ("Migrating forwards " in line) or ("Installed" in line and "Installed 0 object" not in line)
def main():
command_allowed_param_map = dict(
cleanup=(),
createcachetable=('cache_table', 'database', ),
flush=('database', ),
loaddata=('database', 'fixtures', ),
syncdb=('database', ),
test=('failfast', 'testrunner', 'liveserver', 'apps', ),
validate=(),
migrate=('apps', 'skip', 'merge', 'database',),
collectstatic=('link', ),
)
command_required_param_map = dict(
loaddata=('fixtures', ),
createcachetable=('cache_table', ),
)
# forces --noinput on every command that needs it
noinput_commands = (
'flush',
'syncdb',
'migrate',
'test',
'collectstatic',
)
# These params are allowed for certain commands only
specific_params = ('apps', 'database', 'failfast', 'fixtures', 'liveserver', 'testrunner')
# These params are automatically added to the command if present
general_params = ('settings', 'pythonpath', 'database',)
specific_boolean_params = ('failfast', 'skip', 'merge', 'link')
end_of_command_params = ('apps', 'cache_table', 'fixtures')
module = AnsibleModule(
argument_spec=dict(
command = dict(default=None, required=True),
app_path = dict(default=None, required=True),
settings = dict(default=None, required=False),
pythonpath = dict(default=None, required=False, aliases=['python_path']),
virtualenv = dict(default=None, required=False, aliases=['virtual_env']),
apps = dict(default=None, required=False),
cache_table = dict(default=None, required=False),
database = dict(default=None, required=False),
failfast = dict(default='no', required=False, type='bool', aliases=['fail_fast']),
fixtures = dict(default=None, required=False),
liveserver = dict(default=None, required=False, aliases=['live_server']),
testrunner = dict(default=None, required=False, aliases=['test_runner']),
skip = dict(default=None, required=False, type='bool'),
merge = dict(default=None, required=False, type='bool'),
link = dict(default=None, required=False, type='bool'),
),
)
command = module.params['command']
app_path = os.path.expanduser(module.params['app_path'])
virtualenv = module.params['virtualenv']
for param in specific_params:
value = module.params[param]
if param in specific_boolean_params:
value = module.boolean(value)
if value and param not in command_allowed_param_map[command]:
module.fail_json(msg='%s param is incompatible with command=%s' % (param, command))
for param in command_required_param_map.get(command, ()):
if not module.params[param]:
module.fail_json(msg='%s param is required for command=%s' % (param, command))
_ensure_virtualenv(module)
cmd = "./manage.py %s" % (command, )
if command in noinput_commands:
cmd = '%s --noinput' % cmd
for param in general_params:
if module.params[param]:
cmd = '%s --%s=%s' % (cmd, param, module.params[param])
for param in specific_boolean_params:
if module.boolean(module.params[param]):
cmd = '%s --%s' % (cmd, param)
# these params always get tacked on the end of the command
for param in end_of_command_params:
if module.params[param]:
cmd = '%s %s' % (cmd, module.params[param])
rc, out, err = module.run_command(cmd, cwd=os.path.expanduser(app_path))
if rc != 0:
if command == 'createcachetable' and 'table' in err and 'already exists' in err:
out = 'Already exists.'
else:
if "Unknown command:" in err:
_fail(module, cmd, err, "Unknown django command: %s" % command)
_fail(module, cmd, out, err, path=os.environ["PATH"], syspath=sys.path)
changed = False
lines = out.split('\n')
filt = globals().get(command + "_filter_output", None)
if filt:
filtered_output = filter(filt, out.split('\n'))
if len(filtered_output):
changed = filtered_output
module.exit_json(changed=changed, out=out, cmd=cmd, app_path=app_path, virtualenv=virtualenv,
settings=module.params['settings'], pythonpath=module.params['pythonpath'])
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 | 5,044,081,640,488,415,000 | 38.219081 | 215 | 0.655104 | false |
mdavid/cherokee-webserver-svnclone | admin/plugins/ssi.py | 3 | 1122 | # -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
import Handler
HELPS = [('modules_handlers_ssi', _("Server Side Includes"))]
class Plugin_ssi (Handler.PluginHandler):
def __init__ (self, key, **kwargs):
kwargs['show_document_root'] = False
Handler.PluginHandler.__init__ (self, key, **kwargs)
Handler.PluginHandler.AddCommon (self)
| gpl-2.0 | -3,857,819,819,776,940,000 | 32 | 67 | 0.717469 | false |
snakeleon/YouCompleteMe-x86 | python/ycm/tests/event_notification_test.py | 1 | 18402 | # coding: utf-8
#
# Copyright (C) 2015-2016 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from ycm.tests.test_utils import ( CurrentWorkingDirectory, ExtendedMock,
MockVimBuffers, MockVimModule, VimBuffer )
MockVimModule()
import contextlib
import os
from ycm.tests import PathToTestFile, YouCompleteMeInstance
from ycmd.responses import ( BuildDiagnosticData, Diagnostic, Location, Range,
UnknownExtraConf, ServerError )
from hamcrest import assert_that, contains, has_entries, has_item
from mock import call, MagicMock, patch
from nose.tools import eq_, ok_
def PresentDialog_Confirm_Call( message ):
"""Return a mock.call object for a call to vimsupport.PresentDialog, as called
  by vimsupport.Confirm with the supplied confirmation message."""
return call( message, [ 'Ok', 'Cancel' ] )
def PlaceSign_Call( sign_id, line_num, buffer_num, is_error ):
sign_name = 'YcmError' if is_error else 'YcmWarning'
return call( 'sign place {0} line={1} name={2} buffer={3}'
.format( sign_id, line_num, sign_name, buffer_num ) )
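# For illustration, PlaceSign_Call( 1, 2, 3, True ) builds the mock.call for
# 'sign place 1 line=2 name=YcmError buffer=3'.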
def UnplaceSign_Call( sign_id, buffer_num ):
return call( 'try | exec "sign unplace {0} buffer={1}" |'
' catch /E158/ | endtry'.format( sign_id, buffer_num ) )
@contextlib.contextmanager
def MockArbitraryBuffer( filetype ):
"""Used via the with statement, set up a single buffer with an arbitrary name
and no contents. Its filetype is set to the supplied filetype."""
# Arbitrary, but valid, single buffer open.
current_buffer = VimBuffer( os.path.realpath( 'TEST_BUFFER' ),
window = 1,
filetype = filetype )
with MockVimBuffers( [ current_buffer ], current_buffer ):
yield
@contextlib.contextmanager
def MockEventNotification( response_method, native_filetype_completer = True ):
"""Mock out the EventNotification client request object, replacing the
Response handler's JsonFromFuture with the supplied |response_method|.
Additionally mock out YouCompleteMe's FiletypeCompleterExistsForFiletype
method to return the supplied |native_filetype_completer| parameter, rather
than querying the server"""
# We don't want the event to actually be sent to the server, just have it
# return success
with patch( 'ycm.client.base_request.BaseRequest.PostDataToHandlerAsync',
return_value = MagicMock( return_value=True ) ):
    # We set up a fake Response (as called by EventNotification.Response)
    # which calls the supplied callback method. Generally this callback just
    # raises an appropriate exception, otherwise it would have to return a mock
    # future object.
#
# Note: JsonFromFuture is actually part of ycm.client.base_request, but we
# must patch where an object is looked up, not where it is defined.
# See https://docs.python.org/dev/library/unittest.mock.html#where-to-patch
# for details.
with patch( 'ycm.client.event_notification.JsonFromFuture',
side_effect = response_method ):
      # Filetype availability information comes from the server, so rather than
# relying on that request, we mock out the check. The caller decides if
# filetype completion is available
with patch(
'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
return_value = native_filetype_completer ):
yield
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_Error_test(
ycm, post_vim_message ):
# This test validates the behaviour of YouCompleteMe.HandleFileParseRequest
# in combination with YouCompleteMe.OnFileReadyToParse when the completer
# raises an exception handling FileReadyToParse event notification
ERROR_TEXT = 'Some completer response text'
def ErrorResponse( *args ):
raise ServerError( ERROR_TEXT )
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( ErrorResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
# The first call raises a warning
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True )
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True )
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True ),
call( ERROR_TEXT, truncate = True )
] )
@patch( 'vim.command' )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_Error_NonNative_test(
ycm, vim_command ):
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( None, False ):
ycm.OnFileReadyToParse()
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
@patch( 'ycm.client.base_request._LoadExtraConfFile',
new_callable = ExtendedMock )
@patch( 'ycm.client.base_request._IgnoreExtraConfFile',
new_callable = ExtendedMock )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_ConfirmExtraConf_test(
ycm, ignore_extra_conf, load_extra_conf ):
# This test validates the behaviour of YouCompleteMe.HandleFileParseRequest
# in combination with YouCompleteMe.OnFileReadyToParse when the completer
# raises the (special) UnknownExtraConf exception
FILE_NAME = 'a_file'
MESSAGE = ( 'Found ' + FILE_NAME + '. Load? \n\n(Question can be '
'turned off with options, see YCM docs)' )
def UnknownExtraConfResponse( *args ):
raise UnknownExtraConf( FILE_NAME )
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( UnknownExtraConfResponse ):
# When the user accepts the extra conf, we load it
with patch( 'ycm.vimsupport.PresentDialog',
return_value = 0,
new_callable = ExtendedMock ) as present_dialog:
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE )
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
PresentDialog_Confirm_Call( MESSAGE ),
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
call( FILE_NAME ),
] )
# When the user rejects the extra conf, we reject it
with patch( 'ycm.vimsupport.PresentDialog',
return_value = 1,
new_callable = ExtendedMock ) as present_dialog:
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE )
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
PresentDialog_Confirm_Call( MESSAGE ),
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
call( FILE_NAME ),
] )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_Diagnostic_Error_Native_test( ycm ):
_Check_FileReadyToParse_Diagnostic_Error( ycm )
_Check_FileReadyToParse_Diagnostic_Warning( ycm )
_Check_FileReadyToParse_Diagnostic_Clean( ycm )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Error( ycm, vim_command ):
# Tests Vim sign placement and error/warning count python API
# when one error is returned.
def DiagnosticResponse( *args ):
start = Location( 1, 2, 'TEST_BUFFER' )
end = Location( 1, 4, 'TEST_BUFFER' )
extent = Range( start, end )
diagnostic = Diagnostic( [], start, extent, 'expected ;', 'ERROR' )
return [ BuildDiagnosticData( diagnostic ) ]
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( DiagnosticResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
PlaceSign_Call( 1, 1, 1, True )
] )
eq_( ycm.GetErrorCount(), 1 )
eq_( ycm.GetWarningCount(), 0 )
      # Subsequent calls to HandleFileParseRequest shouldn't mess with
      # existing diagnostics, when there is no new parse request.
vim_command.reset_mock()
ok_( not ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
eq_( ycm.GetErrorCount(), 1 )
eq_( ycm.GetWarningCount(), 0 )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Warning( ycm, vim_command ):
# Tests Vim sign placement/unplacement and error/warning count python API
# when one warning is returned.
# Should be called after _Check_FileReadyToParse_Diagnostic_Error
def DiagnosticResponse( *args ):
start = Location( 2, 2, 'TEST_BUFFER' )
end = Location( 2, 4, 'TEST_BUFFER' )
extent = Range( start, end )
diagnostic = Diagnostic( [], start, extent, 'cast', 'WARNING' )
return [ BuildDiagnosticData( diagnostic ) ]
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( DiagnosticResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
PlaceSign_Call( 2, 2, 1, False ),
UnplaceSign_Call( 1, 1 )
] )
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 1 )
      # Subsequent calls to HandleFileParseRequest shouldn't mess with
      # existing diagnostics, when there is no new parse request.
vim_command.reset_mock()
ok_( not ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 1 )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Clean( ycm, vim_command ):
# Tests Vim sign unplacement and error/warning count python API
# when there are no errors/warnings left.
# Should be called after _Check_FileReadyToParse_Diagnostic_Warning
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( MagicMock( return_value = [] ) ):
ycm.OnFileReadyToParse()
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
UnplaceSign_Call( 2, 1 )
] )
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 0 )
@patch( 'ycm.youcompleteme.YouCompleteMe._AddUltiSnipsDataIfNeeded' )
@YouCompleteMeInstance( { 'collect_identifiers_from_tags_files': 1 } )
def EventNotification_FileReadyToParse_TagFiles_UnicodeWorkingDirectory_test(
ycm, *args ):
  unicode_dir = PathToTestFile( 'uni¢𐍈d€' )
  current_buffer_file = PathToTestFile( 'uni¢𐍈d€', 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype' )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with CurrentWorkingDirectory( unicode_dir ):
with MockVimBuffers( [ current_buffer ], current_buffer, ( 6, 5 ) ):
ycm.OnFileReadyToParse()
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': current_buffer_file,
'line_num': 6,
'column_num': 6,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'FileReadyToParse',
            'tag_files': has_item( PathToTestFile( 'uni¢𐍈d€', 'tags' ) )
} ),
'event_notification'
)
)
@patch( 'ycm.youcompleteme.YouCompleteMe._AddUltiSnipsDataIfNeeded' )
@YouCompleteMeInstance()
def EventNotification_BufferVisit_BuildRequestForCurrentAndUnsavedBuffers_test(
ycm, *args ):
current_buffer_file = os.path.realpath( 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
number = 1,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
modified_buffer_file = os.path.realpath( 'modified_buffer' )
modified_buffer = VimBuffer( name = modified_buffer_file,
number = 2,
contents = [ 'modified_buffer_contents' ],
filetype = 'some_filetype',
modified = True )
unmodified_buffer_file = os.path.realpath( 'unmodified_buffer' )
unmodified_buffer = VimBuffer( name = unmodified_buffer_file,
number = 3,
contents = [ 'unmodified_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with MockVimBuffers( [ current_buffer, modified_buffer, unmodified_buffer ],
current_buffer,
( 3, 5 ) ):
ycm.OnBufferVisit()
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': current_buffer_file,
'line_num': 3,
'column_num': 6,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} ),
modified_buffer_file: has_entries( {
'contents': 'modified_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'BufferVisit'
} ),
'event_notification'
)
)
@YouCompleteMeInstance()
def EventNotification_BufferUnload_BuildRequestForDeletedAndUnsavedBuffers_test(
ycm ):
current_buffer_file = os.path.realpath( 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
number = 1,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype',
modified = True )
deleted_buffer_file = os.path.realpath( 'deleted_buffer' )
deleted_buffer = VimBuffer( name = deleted_buffer_file,
number = 2,
contents = [ 'deleted_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with MockVimBuffers( [ current_buffer, deleted_buffer ], current_buffer ):
ycm.OnBufferUnload( deleted_buffer_file )
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': deleted_buffer_file,
'line_num': 1,
'column_num': 1,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} ),
deleted_buffer_file: has_entries( {
'contents': 'deleted_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'BufferUnload'
} ),
'event_notification'
)
)
| gpl-3.0 | -7,903,755,591,903,361,000 | 36.365854 | 80 | 0.63713 | false |
partofthething/home-assistant | tests/components/zwave/test_climate.py | 16 | 32535 | """Test Z-Wave climate devices."""
import pytest
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
PRESET_AWAY,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.components.zwave import climate, const
from homeassistant.components.zwave.climate import (
AUX_HEAT_ZWAVE_MODE,
DEFAULT_HVAC_MODES,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from tests.mock.zwave import MockEntityValues, MockNode, MockValue, value_changed
@pytest.fixture
def device(hass, mock_openzwave):
"""Fixture to provide a precreated climate device."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_zxt_120(hass, mock_openzwave):
"""Fixture to provide a precreated climate device."""
node = MockNode(manufacturer_id="5254", product_id="8377")
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
fan_action=MockValue(data=7, node=node),
zxt_120_swing_mode=MockValue(data="test3", data_items=[6, 7, 8], node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_mapping(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Test state mapping."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data="Heat",
data_items=["Off", "Cool", "Heat", "Full Power", "Auto"],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="heating", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_unknown(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Test state unknown."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data="Heat",
data_items=["Off", "Cool", "Heat", "heat_cool", "Abcdefg"],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_heat_cool(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Test state heat only."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
"Heat Eco",
"Cool Eco",
],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_heat_cool_range(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Target range mode."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT_COOL,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_heat_cool_away(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Target range mode."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT_COOL,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
PRESET_AWAY,
],
node=node,
),
setpoint_heating=MockValue(data=2, node=node),
setpoint_cooling=MockValue(data=9, node=node),
setpoint_away_heating=MockValue(data=1, node=node),
setpoint_away_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_heat_eco(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. heat/heat eco."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT, "heat econ"],
node=node,
),
setpoint_heating=MockValue(data=2, node=node),
setpoint_eco_heating=MockValue(data=1, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_aux_heat(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. aux heat."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT, "Aux Heat"],
node=node,
),
setpoint_heating=MockValue(data=2, node=node),
setpoint_eco_heating=MockValue(data=1, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="test4", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_single_setpoint(hass, mock_openzwave):
"""Fixture to provide a precreated climate device.
SETPOINT_THERMOSTAT device class.
"""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_SETPOINT, data=1, node=node
),
mode=None,
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_single_setpoint_with_mode(hass, mock_openzwave):
"""Fixture to provide a precreated climate device.
    SETPOINT_THERMOSTAT device class with the COMMAND_CLASS_THERMOSTAT_MODE command class.
"""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_SETPOINT, data=1, node=node
),
mode=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT],
node=node,
),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
def test_get_device_detects_none(hass, mock_openzwave):
"""Test get_device returns None."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockEntityValues(primary=value)
device = climate.get_device(hass, node=node, values=values, node_config={})
assert device is None
def test_get_device_detects_multiple_setpoint_device(device):
"""Test get_device returns a Z-Wave multiple setpoint device."""
assert isinstance(device, climate.ZWaveClimateMultipleSetpoint)
def test_get_device_detects_single_setpoint_device(device_single_setpoint):
"""Test get_device returns a Z-Wave single setpoint device."""
assert isinstance(device_single_setpoint, climate.ZWaveClimateSingleSetpoint)
def test_default_hvac_modes():
"""Test whether all hvac modes are included in default_hvac_modes."""
for hvac_mode in HVAC_MODES:
assert hvac_mode in DEFAULT_HVAC_MODES
def test_supported_features(device):
"""Test supported features flags."""
assert (
device.supported_features
== SUPPORT_FAN_MODE
+ SUPPORT_TARGET_TEMPERATURE
+ SUPPORT_TARGET_TEMPERATURE_RANGE
)
def test_supported_features_temp_range(device_heat_cool_range):
"""Test supported features flags with target temp range."""
device = device_heat_cool_range
assert (
device.supported_features
== SUPPORT_FAN_MODE
+ SUPPORT_TARGET_TEMPERATURE
+ SUPPORT_TARGET_TEMPERATURE_RANGE
)
def test_supported_features_preset_mode(device_mapping):
"""Test supported features flags with swing mode."""
device = device_mapping
assert (
device.supported_features
== SUPPORT_FAN_MODE
+ SUPPORT_TARGET_TEMPERATURE
+ SUPPORT_TARGET_TEMPERATURE_RANGE
+ SUPPORT_PRESET_MODE
)
def test_supported_features_preset_mode_away(device_heat_cool_away):
"""Test supported features flags with swing mode."""
device = device_heat_cool_away
assert (
device.supported_features
== SUPPORT_FAN_MODE
+ SUPPORT_TARGET_TEMPERATURE
+ SUPPORT_TARGET_TEMPERATURE_RANGE
+ SUPPORT_PRESET_MODE
)
def test_supported_features_swing_mode(device_zxt_120):
"""Test supported features flags with swing mode."""
device = device_zxt_120
assert (
device.supported_features
== SUPPORT_FAN_MODE
+ SUPPORT_TARGET_TEMPERATURE
+ SUPPORT_TARGET_TEMPERATURE_RANGE
+ SUPPORT_SWING_MODE
)
def test_supported_features_aux_heat(device_aux_heat):
"""Test supported features flags with aux heat."""
device = device_aux_heat
assert (
device.supported_features
== SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE + SUPPORT_AUX_HEAT
)
def test_supported_features_single_setpoint(device_single_setpoint):
"""Test supported features flags for SETPOINT_THERMOSTAT."""
device = device_single_setpoint
assert device.supported_features == SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE
def test_supported_features_single_setpoint_with_mode(device_single_setpoint_with_mode):
"""Test supported features flags for SETPOINT_THERMOSTAT."""
device = device_single_setpoint_with_mode
assert device.supported_features == SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE
def test_zxt_120_swing_mode(device_zxt_120):
"""Test operation of the zxt 120 swing mode."""
device = device_zxt_120
assert device.swing_modes == [6, 7, 8]
assert device._zxt_120 == 1
# Test set mode
assert device.values.zxt_120_swing_mode.data == "test3"
device.set_swing_mode("test_swing_set")
assert device.values.zxt_120_swing_mode.data == "test_swing_set"
# Test mode changed
value_changed(device.values.zxt_120_swing_mode)
assert device.swing_mode == "test_swing_set"
device.values.zxt_120_swing_mode.data = "test_swing_updated"
value_changed(device.values.zxt_120_swing_mode)
assert device.swing_mode == "test_swing_updated"
def test_temperature_unit(device):
"""Test temperature unit."""
assert device.temperature_unit == TEMP_CELSIUS
device.values.temperature.units = "F"
value_changed(device.values.temperature)
assert device.temperature_unit == TEMP_FAHRENHEIT
device.values.temperature.units = "C"
value_changed(device.values.temperature)
assert device.temperature_unit == TEMP_CELSIUS
def test_data_lists(device):
"""Test data lists from zwave value items."""
assert device.fan_modes == [3, 4, 5]
assert device.hvac_modes == [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
]
assert device.preset_modes == []
device.values.primary = None
assert device.preset_modes == []
def test_data_lists_single_setpoint(device_single_setpoint):
"""Test data lists from zwave value items."""
device = device_single_setpoint
assert device.fan_modes == [3, 4, 5]
assert device.hvac_modes == []
assert device.preset_modes == []
def test_data_lists_single_setpoint_with_mode(device_single_setpoint_with_mode):
"""Test data lists from zwave value items."""
device = device_single_setpoint_with_mode
assert device.fan_modes == [3, 4, 5]
assert device.hvac_modes == [HVAC_MODE_OFF, HVAC_MODE_HEAT]
assert device.preset_modes == []
def test_data_lists_mapping(device_mapping):
"""Test data lists from zwave value items."""
device = device_mapping
assert device.hvac_modes == ["off", "cool", "heat", "heat_cool"]
assert device.preset_modes == ["boost", "none"]
device.values.primary = None
assert device.preset_modes == []
def test_target_value_set(device):
"""Test values changed for climate device."""
assert device.values.setpoint_heating.data == 1
assert device.values.setpoint_cooling.data == 10
device.set_temperature()
assert device.values.setpoint_heating.data == 1
assert device.values.setpoint_cooling.data == 10
device.set_temperature(**{ATTR_TEMPERATURE: 2})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 10
device.set_hvac_mode(HVAC_MODE_COOL)
value_changed(device.values.primary)
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 10
device.set_temperature(**{ATTR_TEMPERATURE: 9})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 9
def test_target_value_set_range(device_heat_cool_range):
"""Test values changed for climate device."""
device = device_heat_cool_range
assert device.values.setpoint_heating.data == 1
assert device.values.setpoint_cooling.data == 10
device.set_temperature()
assert device.values.setpoint_heating.data == 1
assert device.values.setpoint_cooling.data == 10
device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 2})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 10
device.set_temperature(**{ATTR_TARGET_TEMP_HIGH: 9})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 9
device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 3, ATTR_TARGET_TEMP_HIGH: 8})
assert device.values.setpoint_heating.data == 3
assert device.values.setpoint_cooling.data == 8
def test_target_value_set_range_away(device_heat_cool_away):
"""Test values changed for climate device."""
device = device_heat_cool_away
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 9
assert device.values.setpoint_away_heating.data == 1
assert device.values.setpoint_away_cooling.data == 10
device.set_preset_mode(PRESET_AWAY)
device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 0, ATTR_TARGET_TEMP_HIGH: 11})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_cooling.data == 9
assert device.values.setpoint_away_heating.data == 0
assert device.values.setpoint_away_cooling.data == 11
def test_target_value_set_eco(device_heat_eco):
"""Test values changed for climate device."""
device = device_heat_eco
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_eco_heating.data == 1
device.set_preset_mode("heat econ")
device.set_temperature(**{ATTR_TEMPERATURE: 0})
assert device.values.setpoint_heating.data == 2
assert device.values.setpoint_eco_heating.data == 0
def test_target_value_set_single_setpoint(device_single_setpoint):
"""Test values changed for climate device."""
device = device_single_setpoint
assert device.values.primary.data == 1
device.set_temperature(**{ATTR_TEMPERATURE: 2})
assert device.values.primary.data == 2
def test_operation_value_set(device):
"""Test values changed for climate device."""
assert device.values.primary.data == HVAC_MODE_HEAT
device.set_hvac_mode(HVAC_MODE_COOL)
assert device.values.primary.data == HVAC_MODE_COOL
device.set_preset_mode(PRESET_ECO)
assert device.values.primary.data == PRESET_ECO
device.set_preset_mode(PRESET_NONE)
assert device.values.primary.data == HVAC_MODE_HEAT_COOL
device.values.primary = None
device.set_hvac_mode("test_set_failes")
assert device.values.primary is None
device.set_preset_mode("test_set_failes")
assert device.values.primary is None
def test_operation_value_set_mapping(device_mapping):
"""Test values changed for climate device. Mapping."""
device = device_mapping
assert device.values.primary.data == "Heat"
device.set_hvac_mode(HVAC_MODE_COOL)
assert device.values.primary.data == "Cool"
device.set_hvac_mode(HVAC_MODE_OFF)
assert device.values.primary.data == "Off"
device.set_preset_mode(PRESET_BOOST)
assert device.values.primary.data == "Full Power"
device.set_preset_mode(PRESET_ECO)
assert device.values.primary.data == "eco"
def test_operation_value_set_unknown(device_unknown):
"""Test values changed for climate device. Unknown."""
device = device_unknown
assert device.values.primary.data == "Heat"
device.set_preset_mode("Abcdefg")
assert device.values.primary.data == "Abcdefg"
device.set_preset_mode(PRESET_NONE)
assert device.values.primary.data == HVAC_MODE_HEAT_COOL
def test_operation_value_set_heat_cool(device_heat_cool):
"""Test values changed for climate device. Heat/Cool only."""
device = device_heat_cool
assert device.values.primary.data == HVAC_MODE_HEAT
device.set_preset_mode("Heat Eco")
assert device.values.primary.data == "Heat Eco"
device.set_preset_mode(PRESET_NONE)
assert device.values.primary.data == HVAC_MODE_HEAT
device.set_preset_mode("Cool Eco")
assert device.values.primary.data == "Cool Eco"
device.set_preset_mode(PRESET_NONE)
assert device.values.primary.data == HVAC_MODE_COOL
def test_fan_mode_value_set(device):
"""Test values changed for climate device."""
assert device.values.fan_mode.data == "test2"
device.set_fan_mode("test_fan_set")
assert device.values.fan_mode.data == "test_fan_set"
device.values.fan_mode = None
device.set_fan_mode("test_fan_set_failes")
assert device.values.fan_mode is None
def test_target_value_changed(device):
"""Test values changed for climate device."""
assert device.target_temperature == 1
device.values.setpoint_heating.data = 2
value_changed(device.values.setpoint_heating)
assert device.target_temperature == 2
device.values.primary.data = HVAC_MODE_COOL
value_changed(device.values.primary)
assert device.target_temperature == 10
device.values.setpoint_cooling.data = 9
value_changed(device.values.setpoint_cooling)
assert device.target_temperature == 9
def test_target_range_changed(device_heat_cool_range):
"""Test values changed for climate device."""
device = device_heat_cool_range
assert device.target_temperature_low == 1
assert device.target_temperature_high == 10
device.values.setpoint_heating.data = 2
value_changed(device.values.setpoint_heating)
assert device.target_temperature_low == 2
assert device.target_temperature_high == 10
device.values.setpoint_cooling.data = 9
value_changed(device.values.setpoint_cooling)
assert device.target_temperature_low == 2
assert device.target_temperature_high == 9
def test_target_changed_preset_range(device_heat_cool_away):
"""Test values changed for climate device."""
device = device_heat_cool_away
assert device.target_temperature_low == 2
assert device.target_temperature_high == 9
device.values.primary.data = PRESET_AWAY
value_changed(device.values.primary)
assert device.target_temperature_low == 1
assert device.target_temperature_high == 10
device.values.setpoint_away_heating.data = 0
value_changed(device.values.setpoint_away_heating)
device.values.setpoint_away_cooling.data = 11
value_changed(device.values.setpoint_away_cooling)
assert device.target_temperature_low == 0
assert device.target_temperature_high == 11
device.values.primary.data = HVAC_MODE_HEAT_COOL
value_changed(device.values.primary)
assert device.target_temperature_low == 2
assert device.target_temperature_high == 9
def test_target_changed_eco(device_heat_eco):
"""Test values changed for climate device."""
device = device_heat_eco
assert device.target_temperature == 2
device.values.primary.data = "heat econ"
value_changed(device.values.primary)
assert device.target_temperature == 1
device.values.setpoint_eco_heating.data = 0
value_changed(device.values.setpoint_eco_heating)
assert device.target_temperature == 0
device.values.primary.data = HVAC_MODE_HEAT
value_changed(device.values.primary)
assert device.target_temperature == 2
def test_target_changed_with_mode(device):
"""Test values changed for climate device."""
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.target_temperature == 1
device.values.primary.data = HVAC_MODE_COOL
value_changed(device.values.primary)
assert device.target_temperature == 10
device.values.primary.data = HVAC_MODE_HEAT_COOL
value_changed(device.values.primary)
assert device.target_temperature_low == 1
assert device.target_temperature_high == 10
def test_target_value_changed_single_setpoint(device_single_setpoint):
"""Test values changed for climate device."""
device = device_single_setpoint
assert device.target_temperature == 1
device.values.primary.data = 2
value_changed(device.values.primary)
assert device.target_temperature == 2
def test_temperature_value_changed(device):
"""Test values changed for climate device."""
assert device.current_temperature == 5
device.values.temperature.data = 3
value_changed(device.values.temperature)
assert device.current_temperature == 3
def test_operation_value_changed(device):
"""Test values changed for climate device."""
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = HVAC_MODE_COOL
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_COOL
assert device.preset_mode == PRESET_NONE
device.values.primary.data = HVAC_MODE_OFF
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_OFF
assert device.preset_mode == PRESET_NONE
device.values.primary = None
assert device.hvac_mode == HVAC_MODE_HEAT_COOL
assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_preset(device_mapping):
"""Test preset changed for climate device."""
device = device_mapping
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = PRESET_ECO
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_HEAT_COOL
assert device.preset_mode == PRESET_ECO
def test_operation_value_changed_mapping(device_mapping):
"""Test values changed for climate device. Mapping."""
device = device_mapping
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = "Off"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_OFF
assert device.preset_mode == PRESET_NONE
device.values.primary.data = "Cool"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_COOL
assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_mapping_preset(device_mapping):
"""Test values changed for climate device. Mapping with presets."""
device = device_mapping
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = "Full Power"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_HEAT_COOL
assert device.preset_mode == PRESET_BOOST
device.values.primary = None
assert device.hvac_mode == HVAC_MODE_HEAT_COOL
assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_unknown(device_unknown):
"""Test preset changed for climate device. Unknown."""
device = device_unknown
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = "Abcdefg"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_HEAT_COOL
assert device.preset_mode == "Abcdefg"
def test_operation_value_changed_heat_cool(device_heat_cool):
"""Test preset changed for climate device. Heat/Cool only."""
device = device_heat_cool
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == PRESET_NONE
device.values.primary.data = "Cool Eco"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_COOL
assert device.preset_mode == "Cool Eco"
device.values.primary.data = "Heat Eco"
value_changed(device.values.primary)
assert device.hvac_mode == HVAC_MODE_HEAT
assert device.preset_mode == "Heat Eco"
def test_fan_mode_value_changed(device):
"""Test values changed for climate device."""
assert device.fan_mode == "test2"
device.values.fan_mode.data = "test_updated_fan"
value_changed(device.values.fan_mode)
assert device.fan_mode == "test_updated_fan"
def test_hvac_action_value_changed(device):
"""Test values changed for climate device."""
assert device.hvac_action == CURRENT_HVAC_HEAT
device.values.operating_state.data = CURRENT_HVAC_COOL
value_changed(device.values.operating_state)
assert device.hvac_action == CURRENT_HVAC_COOL
def test_hvac_action_value_changed_mapping(device_mapping):
"""Test values changed for climate device."""
device = device_mapping
assert device.hvac_action == CURRENT_HVAC_HEAT
device.values.operating_state.data = "cooling"
value_changed(device.values.operating_state)
assert device.hvac_action == CURRENT_HVAC_COOL
def test_hvac_action_value_changed_unknown(device_unknown):
"""Test values changed for climate device."""
device = device_unknown
assert device.hvac_action == "test4"
device.values.operating_state.data = "another_hvac_action"
value_changed(device.values.operating_state)
assert device.hvac_action == "another_hvac_action"
def test_fan_action_value_changed(device):
"""Test values changed for climate device."""
assert device.device_state_attributes[climate.ATTR_FAN_ACTION] == 7
device.values.fan_action.data = 9
value_changed(device.values.fan_action)
assert device.device_state_attributes[climate.ATTR_FAN_ACTION] == 9
def test_aux_heat_unsupported_set(device):
"""Test aux heat for climate device."""
assert device.values.primary.data == HVAC_MODE_HEAT
device.turn_aux_heat_on()
assert device.values.primary.data == HVAC_MODE_HEAT
device.turn_aux_heat_off()
assert device.values.primary.data == HVAC_MODE_HEAT
def test_aux_heat_unsupported_value_changed(device):
"""Test aux heat for climate device."""
assert device.is_aux_heat is None
device.values.primary.data = HVAC_MODE_HEAT
value_changed(device.values.primary)
assert device.is_aux_heat is None
def test_aux_heat_set(device_aux_heat):
"""Test aux heat for climate device."""
device = device_aux_heat
assert device.values.primary.data == HVAC_MODE_HEAT
device.turn_aux_heat_on()
assert device.values.primary.data == AUX_HEAT_ZWAVE_MODE
device.turn_aux_heat_off()
assert device.values.primary.data == HVAC_MODE_HEAT
def test_aux_heat_value_changed(device_aux_heat):
"""Test aux heat for climate device."""
device = device_aux_heat
assert device.is_aux_heat is False
device.values.primary.data = AUX_HEAT_ZWAVE_MODE
value_changed(device.values.primary)
assert device.is_aux_heat is True
device.values.primary.data = HVAC_MODE_HEAT
value_changed(device.values.primary)
assert device.is_aux_heat is False
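# A hedged consolidation sketch (illustrative only, not part of the suite):
# the aux-heat state checks above could be table-driven with
# ``pytest.mark.parametrize``; the ``pytest`` import is assumed.
#
#     @pytest.mark.parametrize(
#         "mode, expected",
#         [(AUX_HEAT_ZWAVE_MODE, True), (HVAC_MODE_HEAT, False)],
#     )
#     def test_aux_heat_states(device_aux_heat, mode, expected):
#         device_aux_heat.values.primary.data = mode
#         value_changed(device_aux_heat.values.primary)
#         assert device_aux_heat.is_aux_heat is expected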
| mit | 5,659,001,596,047,276,000 | 35.55618 | 88 | 0.678684 | false |
inspirehep/sqlalchemy | lib/sqlalchemy/ext/associationproxy.py | 45 | 33253 | # ext/associationproxy.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Contain the ``AssociationProxy`` class.
The ``AssociationProxy`` is a Python property object which provides
transparent proxied access to the endpoint of an association object.
See the example ``examples/association/proxied_association.py``.
"""
import itertools
import operator
import weakref
from .. import exc, orm, util
from ..orm import collections, interfaces
from ..sql import not_, or_
def association_proxy(target_collection, attr, **kw):
"""Return a Python property implementing a view of a target
attribute which references an attribute on members of the
target.
The returned value is an instance of :class:`.AssociationProxy`.
Implements a Python property representing a relationship as a collection
of simpler values, or a scalar value. The proxied property will mimic
the collection type of the target (list, dict or set), or, in the case of
a one to one relationship, a simple scalar value.
:param target_collection: Name of the attribute we'll proxy to.
This attribute is typically mapped by
:func:`~sqlalchemy.orm.relationship` to link to a target collection, but
can also be a many-to-one or non-scalar relationship.
:param attr: Attribute on the associated instance or instances we'll
proxy for.
For example, given a target collection of [obj1, obj2], a list created
by this proxy property would look like [getattr(obj1, *attr*),
getattr(obj2, *attr*)]
If the relationship is one-to-one or otherwise uselist=False, then
simply: getattr(obj, *attr*)
:param creator: optional.
When new items are added to this proxied collection, new instances of
the class collected by the target collection will be created. For list
and set collections, the target class constructor will be called with
the 'value' for the new instance. For dict types, two arguments are
passed: key and value.
If you want to construct instances differently, supply a *creator*
function that takes arguments as above and returns instances.
For scalar relationships, creator() will be called if the target is None.
If the target is present, set operations are proxied to setattr() on the
associated object.
If you have an associated object with multiple attributes, you may set
up multiple association proxies mapping to different attributes. See
the unit tests for examples, and for examples of how creator() functions
can be used to construct the scalar relationship on-demand in this
situation.
:param \*\*kw: Passes along any other keyword arguments to
:class:`.AssociationProxy`.
"""
return AssociationProxy(target_collection, attr, **kw)
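def _association_proxy_usage_sketch():
    # Hedged usage sketch, not part of the public module: the ``User`` /
    # ``Keyword`` mapping below is hypothetical and exists only to show the
    # typical shape of an association_proxy() declaration.
    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    Base = declarative_base()

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(64))

        def __init__(self, keyword):
            self.keyword = keyword

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kw = orm.relationship(Keyword)
        # Proxies the 'keyword' column of each related Keyword, so
        # ``user.keywords`` reads and writes plain strings.
        keywords = association_proxy('kw', 'keyword')

    user = User()
    # Appending a plain string invokes the default creator, Keyword('...').
    user.keywords.append('cheese inspector')
    return user.kw[0].keyword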
ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.AssociationProxy`.
Is assigned to the :attr:`.InspectionAttr.extension_type`
attribute.
"""
class AssociationProxy(interfaces.InspectionAttrInfo):
"""A descriptor that presents a read/write view of an object attribute."""
is_attribute = False
extension_type = ASSOCIATION_PROXY
def __init__(self, target_collection, attr, creator=None,
getset_factory=None, proxy_factory=None,
proxy_bulk_set=None):
"""Construct a new :class:`.AssociationProxy`.
The :func:`.association_proxy` function is provided as the usual
entrypoint here, though :class:`.AssociationProxy` can be instantiated
and/or subclassed directly.
:param target_collection: Name of the collection we'll proxy to,
usually created with :func:`.relationship`.
:param attr: Attribute on the collected instances we'll proxy
for. For example, given a target collection of [obj1, obj2], a
list created by this proxy property would look like
[getattr(obj1, attr), getattr(obj2, attr)]
:param creator: Optional. When new items are added to this proxied
collection, new instances of the class collected by the target
collection will be created. For list and set collections, the
target class constructor will be called with the 'value' for the
new instance. For dict types, two arguments are passed:
key and value.
If you want to construct instances differently, supply a 'creator'
function that takes arguments as above and returns instances.
:param getset_factory: Optional. Proxied attribute access is
automatically handled by routines that get and set values based on
the `attr` argument for this proxy.
If you would like to customize this behavior, you may supply a
`getset_factory` callable that produces a tuple of `getter` and
`setter` functions. The factory is called with two arguments, the
abstract type of the underlying collection and this proxy instance.
:param proxy_factory: Optional. The type of collection to emulate is
determined by sniffing the target collection. If your collection
type can't be determined by duck typing or you'd like to use a
different collection implementation, you may supply a factory
function to produce those collections. Only applicable to
non-scalar relationships.
:param proxy_bulk_set: Optional, use with proxy_factory. See
the _set() method for details.
"""
self.target_collection = target_collection
self.value_attr = attr
self.creator = creator
self.getset_factory = getset_factory
self.proxy_factory = proxy_factory
self.proxy_bulk_set = proxy_bulk_set
self.owning_class = None
self.key = '_%s_%s_%s' % (
type(self).__name__, target_collection, id(self))
self.collection_class = None
@property
def remote_attr(self):
"""The 'remote' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.local_attr`
"""
return getattr(self.target_class, self.value_attr)
@property
def local_attr(self):
"""The 'local' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.remote_attr`
"""
return getattr(self.owning_class, self.target_collection)
@property
def attr(self):
"""Return a tuple of ``(local_attr, remote_attr)``.
This attribute is convenient when specifying a join
using :meth:`.Query.join` across two relationships::
sess.query(Parent).join(*Parent.proxied.attr)
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.local_attr`
:attr:`.AssociationProxy.remote_attr`
"""
return (self.local_attr, self.remote_attr)
def _get_property(self):
return (orm.class_mapper(self.owning_class).
get_property(self.target_collection))
@util.memoized_property
def target_class(self):
"""The intermediary class handled by this :class:`.AssociationProxy`.
Intercepted append/set/assignment events will result
in the generation of new instances of this class.
"""
return self._get_property().mapper.class_
@util.memoized_property
def scalar(self):
"""Return ``True`` if this :class:`.AssociationProxy` proxies a scalar
relationship on the local side."""
scalar = not self._get_property().uselist
if scalar:
self._initialize_scalar_accessors()
return scalar
@util.memoized_property
def _value_is_scalar(self):
return not self._get_property().\
mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
return getattr(self.target_class, self.value_attr).impl.uses_objects
def __get__(self, obj, class_):
if self.owning_class is None:
self.owning_class = class_ and class_ or type(obj)
if obj is None:
return self
if self.scalar:
target = getattr(obj, self.target_collection)
return self._scalar_get(target)
else:
try:
# If the owning instance is reborn (orm session resurrect,
# etc.), refresh the proxy cache.
creator_id, proxy = getattr(obj, self.key)
if id(obj) == creator_id:
return proxy
except AttributeError:
pass
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy
def __set__(self, obj, values):
if self.owning_class is None:
self.owning_class = type(obj)
if self.scalar:
creator = self.creator and self.creator or self.target_class
target = getattr(obj, self.target_collection)
if target is None:
setattr(obj, self.target_collection, creator(values))
else:
self._scalar_set(target, values)
else:
proxy = self.__get__(obj, None)
if proxy is not values:
proxy.clear()
self._set(proxy, values)
def __delete__(self, obj):
if self.owning_class is None:
self.owning_class = type(obj)
delattr(obj, self.key)
def _initialize_scalar_accessors(self):
if self.getset_factory:
get, set = self.getset_factory(None, self)
else:
get, set = self._default_getset(None)
self._scalar_get, self._scalar_set = get, set
def _default_getset(self, collection_class):
attr = self.value_attr
_getter = operator.attrgetter(attr)
getter = lambda target: _getter(target) if target is not None else None
if collection_class is dict:
setter = lambda o, k, v: setattr(o, attr, v)
else:
setter = lambda o, v: setattr(o, attr, v)
return getter, setter
def _new(self, lazy_collection):
creator = self.creator and self.creator or self.target_class
self.collection_class = util.duck_type_collection(lazy_collection())
if self.proxy_factory:
return self.proxy_factory(
lazy_collection, creator, self.value_attr, self)
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
if self.collection_class is list:
return _AssociationList(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
return _AssociationDict(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is set:
return _AssociationSet(
lazy_collection, creator, getter, setter, self)
else:
raise exc.ArgumentError(
'could not guess which interface to use for '
'collection_class "%s" backing "%s"; specify a '
'proxy_factory and proxy_bulk_set manually' %
(self.collection_class.__name__, self.target_collection))
def _inflate(self, proxy):
creator = self.creator and self.creator or self.target_class
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
proxy.creator = creator
proxy.getter = getter
proxy.setter = setter
def _set(self, proxy, values):
if self.proxy_bulk_set:
self.proxy_bulk_set(proxy, values)
elif self.collection_class is list:
proxy.extend(values)
elif self.collection_class is dict:
proxy.update(values)
elif self.collection_class is set:
proxy.update(values)
else:
raise exc.ArgumentError(
'no proxy_bulk_set supplied for custom '
'collection_class implementation')
@property
def _comparator(self):
return self._get_property().comparator
def any(self, criterion=None, **kwargs):
"""Produce a proxied 'any' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._target_is_object:
if self._value_is_scalar:
value_expr = getattr(
self.target_class, self.value_attr).has(
criterion, **kwargs)
else:
value_expr = getattr(
self.target_class, self.value_attr).any(
criterion, **kwargs)
else:
value_expr = criterion
# check _value_is_scalar here, otherwise
# we're scalar->scalar - call .any() so that
# the "can't call any() on a scalar" msg is raised.
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
value_expr
)
else:
return self._comparator.any(
value_expr
)
def has(self, criterion=None, **kwargs):
"""Produce a proxied 'has' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._target_is_object:
return self._comparator.has(
getattr(self.target_class, self.value_attr).
has(criterion, **kwargs)
)
else:
if criterion is not None or kwargs:
raise exc.ArgumentError(
"Non-empty has() not allowed for "
"column-targeted association proxy; use ==")
return self._comparator.has()
def contains(self, obj):
"""Produce a proxied 'contains' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
, :meth:`.RelationshipProperty.Comparator.has`,
and/or :meth:`.RelationshipProperty.Comparator.contains`
operators of the underlying proxied attributes.
"""
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
getattr(self.target_class, self.value_attr).contains(obj)
)
else:
return self._comparator.any(**{self.value_attr: obj})
def __eq__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
if obj is None:
return or_(
self._comparator.has(**{self.value_attr: obj}),
self._comparator == None
)
else:
return self._comparator.has(**{self.value_attr: obj})
def __ne__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
return self._comparator.has(
getattr(self.target_class, self.value_attr) != obj)
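# Hedged query sketch (``User``, ``Keyword`` and ``session`` are the
# hypothetical names from the sketch after association_proxy() above): the
# comparator methods defined on this class compose into EXISTS subqueries,
# for example
#
#     session.query(User).filter(User.keywords.contains('chef'))
#     session.query(User).filter(User.keywords.any(Keyword.keyword == 'chef'))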
class _lazy_collection(object):
def __init__(self, obj, target):
self.ref = weakref.ref(obj)
self.target = target
def __call__(self):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
"stale association proxy, parent object has gone out of "
"scope")
return getattr(obj, self.target)
def __getstate__(self):
return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
lazy_collection
A callable returning a list-based collection of entities (usually an
object attribute managed by a SQLAlchemy relationship())
creator
A function that creates new target entities. Given one parameter:
value. This assertion is assumed::
obj = creator(somevalue)
assert getter(obj) == somevalue
getter
A function. Given an associated object, return the 'value'.
setter
A function. Given an associated object and a value, store that
value on the object.
"""
self.lazy_collection = lazy_collection
self.creator = creator
self.getter = getter
self.setter = setter
self.parent = parent
col = property(lambda self: self.lazy_collection())
def __len__(self):
return len(self.col)
def __bool__(self):
return bool(self.col)
__nonzero__ = __bool__
def __getstate__(self):
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
def _create(self, value):
return self.creator(value)
def _get(self, object):
return self.getter(object)
def _set(self, object, value):
return self.setter(object, value)
def __getitem__(self, index):
if not isinstance(index, slice):
return self._get(self.col[index])
else:
return [self._get(member) for member in self.col[index]]
def __setitem__(self, index, value):
if not isinstance(index, slice):
self._set(self.col[index], value)
else:
if index.stop is None:
stop = len(self)
elif index.stop < 0:
stop = len(self) + index.stop
else:
stop = index.stop
step = index.step or 1
start = index.start or 0
rng = list(range(start, stop, step))
if step == 1:
for i in rng:
del self[start]
i = start
for item in value:
self.insert(i, item)
i += 1
else:
if len(value) != len(rng):
raise ValueError(
"attempt to assign sequence of size %s to "
"extended slice of size %s" % (len(value),
len(rng)))
for i, item in zip(rng, value):
self._set(self.col[i], item)
def __delitem__(self, index):
del self.col[index]
def __contains__(self, value):
for member in self.col:
# testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
def __getslice__(self, start, end):
return [self._get(member) for member in self.col[start:end]]
def __setslice__(self, start, end, values):
members = [self._create(v) for v in values]
self.col[start:end] = members
def __delslice__(self, start, end):
del self.col[start:end]
def __iter__(self):
"""Iterate over proxied values.
For the actual domain objects, iterate over .col instead or
just use the underlying collection directly from its property
on the parent.
"""
for member in self.col:
yield self._get(member)
return
def append(self, value):
item = self._create(value)
self.col.append(item)
def count(self, value):
return sum([1 for _ in
util.itertools_filter(lambda v: v == value, iter(self))])
def extend(self, values):
for v in values:
self.append(v)
def insert(self, index, value):
self.col[index:index] = [self._create(value)]
def pop(self, index=-1):
return self.getter(self.col.pop(index))
def remove(self, value):
for i, val in enumerate(self):
if val == value:
del self.col[i]
return
raise ValueError("value not in list")
def reverse(self):
"""Not supported, use reversed(mylist)"""
raise NotImplementedError
def sort(self):
"""Not supported, use sorted(mylist)"""
raise NotImplementedError
def clear(self):
del self.col[0:len(self.col)]
def __eq__(self, other):
return list(self) == other
def __ne__(self, other):
return list(self) != other
def __lt__(self, other):
return list(self) < other
def __le__(self, other):
return list(self) <= other
def __gt__(self, other):
return list(self) > other
def __ge__(self, other):
return list(self) >= other
def __cmp__(self, other):
return cmp(list(self), other)
def __add__(self, iterable):
try:
other = list(iterable)
except TypeError:
return NotImplemented
return list(self) + other
def __radd__(self, iterable):
try:
other = list(iterable)
except TypeError:
return NotImplemented
return other + list(self)
def __mul__(self, n):
if not isinstance(n, int):
return NotImplemented
return list(self) * n
__rmul__ = __mul__
def __iadd__(self, iterable):
self.extend(iterable)
return self
def __imul__(self, n):
# unlike a regular list *=, proxied __imul__ will generate unique
# backing objects for each copy. *= on proxied lists is a bit of
# a stretch anyhow, and this interpretation of the __imul__ contract
# is more plausibly useful than copying the backing objects.
if not isinstance(n, int):
return NotImplemented
if n == 0:
self.clear()
elif n > 1:
self.extend(list(self) * (n - 1))
return self
def copy(self):
return list(self)
def __repr__(self):
return repr(list(self))
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
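# Because of the doc-copying loop above, the proxy mirrors the built-in
# ``list`` API. A hedged illustration (``user.keywords`` as created in the
# sketch near the top of this module):
#
#     user.keywords[0:2]            # slices convert each member via getter
#     user.keywords.append('x')     # creator('x') builds the backing object
#     'x' in user.keywords          # __contains__ compares proxied values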
_NotProvided = util.symbol('_NotProvided')
class _AssociationDict(_AssociationCollection):
"""Generic, converting, dict-to-dict proxy."""
def _create(self, key, value):
return self.creator(key, value)
def _get(self, object):
return self.getter(object)
def _set(self, object, key, value):
return self.setter(object, key, value)
def __getitem__(self, key):
return self._get(self.col[key])
def __setitem__(self, key, value):
if key in self.col:
self._set(self.col[key], key, value)
else:
self.col[key] = self._create(key, value)
def __delitem__(self, key):
del self.col[key]
def __contains__(self, key):
# testlib.pragma exempt:__hash__
return key in self.col
def has_key(self, key):
# testlib.pragma exempt:__hash__
return key in self.col
def __iter__(self):
return iter(self.col.keys())
def clear(self):
self.col.clear()
def __eq__(self, other):
return dict(self) == other
def __ne__(self, other):
return dict(self) != other
def __lt__(self, other):
return dict(self) < other
def __le__(self, other):
return dict(self) <= other
def __gt__(self, other):
return dict(self) > other
def __ge__(self, other):
return dict(self) >= other
def __cmp__(self, other):
return cmp(dict(self), other)
def __repr__(self):
return repr(dict(self.items()))
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def setdefault(self, key, default=None):
if key not in self.col:
self.col[key] = self._create(key, default)
return default
else:
return self[key]
def keys(self):
return self.col.keys()
if util.py2k:
def iteritems(self):
return ((key, self._get(self.col[key])) for key in self.col)
def itervalues(self):
return (self._get(self.col[key]) for key in self.col)
def iterkeys(self):
return self.col.iterkeys()
def values(self):
return [self._get(member) for member in self.col.values()]
def items(self):
return [(k, self._get(self.col[k])) for k in self]
else:
def items(self):
return ((key, self._get(self.col[key])) for key in self.col)
def values(self):
return (self._get(self.col[key]) for key in self.col)
def pop(self, key, default=_NotProvided):
if default is _NotProvided:
member = self.col.pop(key)
else:
member = self.col.pop(key, default)
return self._get(member)
def popitem(self):
item = self.col.popitem()
return (item[0], self._get(item[1]))
def update(self, *a, **kw):
if len(a) > 1:
raise TypeError('update expected at most 1 arguments, got %i' %
len(a))
elif len(a) == 1:
seq_or_map = a[0]
# discern dict from sequence - took the advice from
# http://www.voidspace.org.uk/python/articles/duck_typing.shtml
# still not perfect :(
if hasattr(seq_or_map, 'keys'):
for item in seq_or_map:
self[item] = seq_or_map[item]
else:
try:
for k, v in seq_or_map:
self[k] = v
except ValueError:
raise ValueError(
"dictionary update sequence "
"requires 2-element tuples")
for key, value in kw.items():
self[key] = value
def copy(self):
return dict(self.items())
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(dict, func_name)):
func.__doc__ = getattr(dict, func_name).__doc__
del func_name, func
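# Hedged illustration of the dict proxy's write path (``user.prefs`` is a
# hypothetical dict-based association proxy): assignment routes through
# ``creator(key, value)`` for new keys and through ``setter`` for existing
# ones, mirroring __setitem__ above.
#
#     user.prefs['theme'] = 'dark'         # new key -> creator('theme', 'dark')
#     user.prefs['theme'] = 'light'        # existing key -> setter(obj, 'theme', 'light')
#     user.prefs.setdefault('lang', 'en')  # only creates when 'lang' is absent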
class _AssociationSet(_AssociationCollection):
"""Generic, converting, set-to-set proxy."""
def _create(self, value):
return self.creator(value)
def _get(self, object):
return self.getter(object)
def _set(self, object, value):
return self.setter(object, value)
def __len__(self):
return len(self.col)
def __bool__(self):
if self.col:
return True
else:
return False
__nonzero__ = __bool__
def __contains__(self, value):
for member in self.col:
# testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
def __iter__(self):
"""Iterate over proxied values.
For the actual domain objects, iterate over .col instead or just use
the underlying collection directly from its property on the parent.
"""
for member in self.col:
yield self._get(member)
return
def add(self, value):
if value not in self:
self.col.add(self._create(value))
# for discard and remove, choosing a more expensive check strategy rather
# than call self.creator()
def discard(self, value):
for member in self.col:
if self._get(member) == value:
self.col.discard(member)
break
def remove(self, value):
for member in self.col:
if self._get(member) == value:
self.col.discard(member)
return
raise KeyError(value)
def pop(self):
if not self.col:
raise KeyError('pop from an empty set')
member = self.col.pop()
return self._get(member)
def update(self, other):
for value in other:
self.add(value)
def __ior__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
for value in other:
self.add(value)
return self
def _set(self):
return set(iter(self))
def union(self, other):
return set(self).union(other)
__or__ = union
def difference(self, other):
return set(self).difference(other)
__sub__ = difference
def difference_update(self, other):
for value in other:
self.discard(value)
def __isub__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
for value in other:
self.discard(value)
return self
def intersection(self, other):
return set(self).intersection(other)
__and__ = intersection
def intersection_update(self, other):
want, have = self.intersection(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
def __iand__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
want, have = self.intersection(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
return self
def symmetric_difference(self, other):
return set(self).symmetric_difference(other)
__xor__ = symmetric_difference
def symmetric_difference_update(self, other):
want, have = self.symmetric_difference(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
def __ixor__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
want, have = self.symmetric_difference(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
return self
def issubset(self, other):
return set(self).issubset(other)
def issuperset(self, other):
return set(self).issuperset(other)
def clear(self):
self.col.clear()
def copy(self):
return set(self)
def __eq__(self, other):
return set(self) == other
def __ne__(self, other):
return set(self) != other
def __lt__(self, other):
return set(self) < other
def __le__(self, other):
return set(self) <= other
def __gt__(self, other):
return set(self) > other
def __ge__(self, other):
return set(self) >= other
def __repr__(self):
return repr(set(self))
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(set, func_name)):
func.__doc__ = getattr(set, func_name).__doc__
del func_name, func
| mit | -2,829,490,381,578,768,000 | 30.341188 | 79 | 0.581812 | false |
gautam1858/tensorflow | tensorflow/python/keras/applications/nasnet.py | 17 | 1814 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""NASNet-A models for Keras.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import nasnet
from tensorflow.python.keras.applications import keras_modules_injection
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.applications.nasnet.NASNetMobile',
'keras.applications.NASNetMobile')
@keras_modules_injection
def NASNetMobile(*args, **kwargs):
return nasnet.NASNetMobile(*args, **kwargs)
@keras_export('keras.applications.nasnet.NASNetLarge',
'keras.applications.NASNetLarge')
@keras_modules_injection
def NASNetLarge(*args, **kwargs):
return nasnet.NASNetLarge(*args, **kwargs)
@keras_export('keras.applications.nasnet.decode_predictions')
@keras_modules_injection
def decode_predictions(*args, **kwargs):
return nasnet.decode_predictions(*args, **kwargs)
@keras_export('keras.applications.nasnet.preprocess_input')
@keras_modules_injection
def preprocess_input(*args, **kwargs):
return nasnet.preprocess_input(*args, **kwargs)
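def _nasnet_usage_sketch():
    # Hedged usage sketch (not part of the public API): classifies a random
    # tensor just to show how the wrappers above fit together. Weights are
    # downloaded on first use; 224x224 is NASNetMobile's default input size.
    import numpy as np
    model = NASNetMobile(weights='imagenet')
    batch = np.random.uniform(0, 255, size=(1, 224, 224, 3)).astype('float32')
    preds = model.predict(preprocess_input(batch))
    return decode_predictions(preds, top=3)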
| apache-2.0 | -1,192,352,238,814,521,300 | 34.568627 | 80 | 0.728776 | false |
aadrian/w2ui | server/python/django_w2ui/django_w2ui/demo/migrations/0001_initial.py | 25 | 4991 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Tipo_User'
db.create_table(u'demo_tipo_user', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('codice', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='codice', blank=True)),
('descri', self.gf('django.db.models.fields.CharField')(max_length=30L, db_column='descri', blank=True)),
))
db.send_create_signal(u'demo', ['Tipo_User'])
# Adding model 'Users'
db.create_table('users', (
('userid', self.gf('django.db.models.fields.IntegerField')(primary_key=True, db_column='userid')),
('fname', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, db_column='fname', blank=True)),
('lname', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, db_column='lname', blank=True)),
('email', self.gf('django.db.models.fields.CharField')(max_length=75, null=True, db_column='email', blank=True)),
('login', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, db_column='login', blank=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, db_column='password', blank=True)),
('date_birthday', self.gf('django.db.models.fields.DateField')(null=True, db_column='date_birthday', blank=True)),
('date_registration', self.gf('django.db.models.fields.DateField')(null=True, db_column='date_registration', blank=True)),
('importo_registrato', self.gf('django.db.models.fields.DecimalField')(blank=True, null=True, db_column='importo_registrato', decimal_places=3, max_digits=15)),
('text', self.gf('django.db.models.fields.CharField')(default='', max_length=512, null=True, db_column='text', blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(null=True, db_column='timestamp', blank=True)),
('tipo_user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['demo.Tipo_User'], null=True, db_column='tipo_user', blank=True)),
))
db.send_create_signal(u'demo', ['Users'])
def backwards(self, orm):
# Deleting model 'Tipo_User'
db.delete_table(u'demo_tipo_user')
# Deleting model 'Users'
db.delete_table('users')
models = {
u'demo.tipo_user': {
'Meta': {'ordering': "['descri']", 'object_name': 'Tipo_User'},
'codice': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'codice'", 'blank': 'True'}),
'descri': ('django.db.models.fields.CharField', [], {'max_length': '30L', 'db_column': "'descri'", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'demo.users': {
'Meta': {'object_name': 'Users', 'db_table': "'users'"},
'date_birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_birthday'", 'blank': 'True'}),
'date_registration': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_registration'", 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '75', 'null': 'True', 'db_column': "'email'", 'blank': 'True'}),
'fname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_column': "'fname'", 'blank': 'True'}),
'importo_registrato': ('django.db.models.fields.DecimalField', [], {'blank': 'True', 'null': 'True', 'db_column': "'importo_registrato'", 'decimal_places': '3', 'max_digits': '15'}),
'lname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_column': "'lname'", 'blank': 'True'}),
'login': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'login'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'password'", 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'null': 'True', 'db_column': "'text'", 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_column': "'timestamp'", 'blank': 'True'}),
'tipo_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['demo.Tipo_User']", 'null': 'True', 'db_column': "'tipo_user'", 'blank': 'True'}),
'userid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'userid'"})
}
}
complete_apps = ['demo'] | mit | 4,548,978,994,194,011,000 | 71.347826 | 194 | 0.58866 | false |
reinhrst/hooks | checks/compile.py | 1 | 4934 | import os
import subprocess
from . import base
from . import filetype
from . import status
class CompileCheck(base.PerFileCheck):
COMPILECOMMAND = []
ONLY_IF_OLDFILE_COMPILES = True
def prepareOldFileDir(self, dirname):
return dirname
def checkOldFile(self, changedFile):
with base.TempDir() as dirname:
dirname = self.prepareOldFileDir(dirname)
tempfilename = os.path.join(
dirname,
os.path.basename(changedFile.filename))
with open(tempfilename, "w") as f:
f.write("\n".join(changedFile.oldlines))
cmd = list(self.COMPILECOMMAND)
cmd.append(tempfilename)
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
return False
return True
def checkFile(self, changedFile):
if changedFile.status != status.ADDED:
if (self.ONLY_IF_OLDFILE_COMPILES and
not self.checkOldFile(changedFile)):
# nothing to check, old file didn't compile
return []
cmd = list(self.COMPILECOMMAND)
cmd.append(changedFile.filename)
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as calledprocesserror:
return [base.CheckError(changedFile, self.__class__,
calledprocesserror.output)]
except OSError as e:
error = (
"Trying to execute:\n%s\n. This failed (%s), possibly "
"executable is not installed on your system." % (
repr(cmd)[1:-1], str(e)))
return [base.CheckError(changedFile, self.__class__, error)]
return []
class PythonCompileCheck(CompileCheck):
INTERESTED_IN_FILETYPES = [filetype.PYTHON]
COMPILECOMMAND = ['python', '-m', 'py_compile']
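# Hedged sketch of wiring another tool into the same pattern; the filetype
# constant and the ``jshint`` binary below are assumptions, not part of
# this repository.
#
# class JsHintCheck(CompileCheck):
#     INTERESTED_IN_FILETYPES = [filetype.JAVASCRIPT]  # hypothetical constant
#     COMPILECOMMAND = ['jshint']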
class Pep8Check(CompileCheck):
INTERESTED_IN_FILETYPES = [filetype.PYTHON]
COMPILECOMMAND = ['flake8']
def prepareOldFileDir(self, dirname):
if os.path.exists("setup.cfg"):
subprocess.check_call([
"cp", "setup.cfg", dirname])
return dirname
def check_file_get_error_numbers(self, filename):
cmd = list(self.COMPILECOMMAND) + [filename]
try:
output = subprocess.check_output(
cmd, stderr=subprocess.STDOUT).decode("UTF-8")
except subprocess.CalledProcessError as e:
errornos = set()
for line in e.output.decode("UTF-8").split("\n"):
if line == "":
continue
filenameandline, errorno, error = line.split(" ", 2)
errornos.add(errorno)
return (False, errornos, e.output.decode("UTF-8"), e.returncode)
return (True, set(), output, 0)
def checkFile(self, changedFile):
if changedFile.status != status.ADDED:
with base.TempDir() as dirname:
dirname = self.prepareOldFileDir(dirname)
tempfilename = os.path.join(
dirname,
os.path.basename(changedFile.filename))
with open(tempfilename, "w") as f:
f.write("\n".join(changedFile.oldlines))
_, old_errornos, _, _ = \
self.check_file_get_error_numbers(tempfilename)
else:
old_errornos = set()
_, new_errornos, output, returncode = \
self.check_file_get_error_numbers(changedFile.filename)
cmd = list(self.COMPILECOMMAND) + [changedFile.filename]
if returncode == 127:
return [base.CheckError(
changedFile, self.__class__,
"Could not run %s, is it installed on the system?" % (
cmd, ))]
extra_errornos = new_errornos - old_errornos
if extra_errornos:
return [base.CheckError(
changedFile, self.__class__,
"Running %s resulted in new errors, number %s:\n%s" % (
cmd, ", ".join(extra_errornos), output))]
killed_errornos = old_errornos - new_errornos
if killed_errornos:
if new_errornos:
print((
"You got rid of errors %s in %s, you deserve stars: " +
("\U00002B50" * len(killed_errornos))) % (
", ".join(killed_errornos),
changedFile.filename)) # noqa
else:
print((
"You got rid of all errors (%s) in %s, you deserve stars: "
"" + ("\U0001F31F" * len(killed_errornos))) % (
", ".join(killed_errornos),
changedFile.filename)) # noqa
return []
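# Design note on Pep8Check: rather than failing on any flake8 output, it
# diffs the *set of error codes* between the old and new revisions of the
# file, so a commit only fails when it introduces error codes the old
# version did not have -- and it prints star rewards when codes disappear.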
| mit | -2,194,930,266,072,547,600 | 36.953846 | 79 | 0.541548 | false |
davehunt/kuma | vendor/packages/nose/plugins/xunit.py | 48 | 11667 | """This plugin provides test results in the standard XUnit XML format.
It's designed for the `Jenkins`_ (previously Hudson) continuous build
system, but will probably work for anything else that understands an
XUnit-formatted XML representation of test results.
Add this shell command to your builder ::
nosetests --with-xunit
And by default a file named nosetests.xml will be written to the
working directory.
In a Jenkins builder, tick the box named "Publish JUnit test result report"
under the Post-build Actions and enter this value for Test report XMLs::
**/nosetests.xml
If you need to change the name or location of the file, you can set the
``--xunit-file`` option.
If you need to change the name of the test suite, you can set the
``--xunit-testsuite-name`` option.
Here is an abbreviated version of what an XML test report might look like::
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="nosetests" tests="1" errors="1" failures="0" skip="0">
<testcase classname="path_to_test_suite.TestSomething"
name="test_it" time="0">
<error type="exceptions.TypeError" message="oops, wrong type">
Traceback (most recent call last):
...
TypeError: oops, wrong type
</error>
</testcase>
</testsuite>
.. _Jenkins: http://jenkins-ci.org/
"""
import codecs
import doctest
import os
import sys
import traceback
import re
import inspect
from StringIO import StringIO
from time import time
from xml.sax import saxutils
from nose.plugins.base import Plugin
from nose.exc import SkipTest
from nose.pyversion import force_unicode, format_exception
# Invalid XML characters, control characters 0-31 sans \t, \n and \r
CONTROL_CHARACTERS = re.compile(r"[\000-\010\013\014\016-\037]")
TEST_ID = re.compile(r'^(.*?)(\(.*\))$')
def xml_safe(value):
"""Replaces invalid XML characters with '?'."""
return CONTROL_CHARACTERS.sub('?', value)
def escape_cdata(cdata):
"""Escape a string for an XML CDATA section."""
return xml_safe(cdata).replace(']]>', ']]>]]><![CDATA[')
def id_split(idval):
m = TEST_ID.match(idval)
if m:
name, fargs = m.groups()
head, tail = name.rsplit(".", 1)
return [head, tail+fargs]
else:
return idval.rsplit(".", 1)
def nice_classname(obj):
"""Returns a nice name for class object or class instance.
>>> nice_classname(Exception()) # doctest: +ELLIPSIS
'...Exception'
>>> nice_classname(Exception) # doctest: +ELLIPSIS
'...Exception'
"""
if inspect.isclass(obj):
cls_name = obj.__name__
else:
cls_name = obj.__class__.__name__
mod = inspect.getmodule(obj)
if mod:
name = mod.__name__
# jython
if name.startswith('org.python.core.'):
name = name[len('org.python.core.'):]
return "%s.%s" % (name, cls_name)
else:
return cls_name
def exc_message(exc_info):
"""Return the exception's message."""
exc = exc_info[1]
if exc is None:
# str exception
result = exc_info[0]
else:
try:
result = str(exc)
except UnicodeEncodeError:
try:
result = unicode(exc)
except UnicodeError:
# Fallback to args as neither str nor
# unicode(Exception(u'\xe6')) work in Python < 2.6
result = exc.args[0]
result = force_unicode(result, 'UTF-8')
return xml_safe(result)
class Tee(object):
def __init__(self, encoding, *args):
self._encoding = encoding
self._streams = args
def write(self, data):
data = force_unicode(data, self._encoding)
for s in self._streams:
s.write(data)
def writelines(self, lines):
for line in lines:
self.write(line)
def flush(self):
for s in self._streams:
s.flush()
def isatty(self):
return False
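def _tee_usage_sketch():
    # Hedged sketch of how the plugin uses Tee in _startCapture below:
    # everything written to the tee lands both in the capture buffer and on
    # the real stream.
    buf = StringIO()
    tee = Tee('UTF-8', buf, sys.stdout)
    tee.write('captured and echoed\n')
    return buf.getvalue()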
class Xunit(Plugin):
"""This plugin provides test results in the standard XUnit XML format."""
name = 'xunit'
score = 1500
encoding = 'UTF-8'
error_report_file = None
def __init__(self):
super(Xunit, self).__init__()
self._capture_stack = []
self._currentStdout = None
self._currentStderr = None
def _timeTaken(self):
if hasattr(self, '_timer'):
taken = time() - self._timer
else:
# test died before it ran (probably error in setup())
# or success/failure added before test started probably
# due to custom TestResult munging
taken = 0.0
return taken
def _quoteattr(self, attr):
"""Escape an XML attribute. Value can be unicode."""
attr = xml_safe(attr)
return saxutils.quoteattr(attr)
def options(self, parser, env):
"""Sets additional command line options."""
Plugin.options(self, parser, env)
parser.add_option(
'--xunit-file', action='store',
dest='xunit_file', metavar="FILE",
default=env.get('NOSE_XUNIT_FILE', 'nosetests.xml'),
help=("Path to xml file to store the xunit report in. "
"Default is nosetests.xml in the working directory "
"[NOSE_XUNIT_FILE]"))
parser.add_option(
'--xunit-testsuite-name', action='store',
dest='xunit_testsuite_name', metavar="PACKAGE",
default=env.get('NOSE_XUNIT_TESTSUITE_NAME', 'nosetests'),
help=("Name of the testsuite in the xunit xml, generated by plugin. "
"Default test suite name is nosetests."))
def configure(self, options, config):
"""Configures the xunit plugin."""
Plugin.configure(self, options, config)
self.config = config
if self.enabled:
self.stats = {'errors': 0,
'failures': 0,
'passes': 0,
'skipped': 0
}
self.errorlist = []
self.error_report_file_name = os.path.realpath(options.xunit_file)
self.xunit_testsuite_name = options.xunit_testsuite_name
def report(self, stream):
"""Writes an Xunit-formatted XML file
The file includes a report of test errors and failures.
"""
self.error_report_file = codecs.open(self.error_report_file_name, 'w',
self.encoding, 'replace')
self.stats['encoding'] = self.encoding
self.stats['testsuite_name'] = self.xunit_testsuite_name
self.stats['total'] = (self.stats['errors'] + self.stats['failures']
+ self.stats['passes'] + self.stats['skipped'])
self.error_report_file.write(
u'<?xml version="1.0" encoding="%(encoding)s"?>'
u'<testsuite name="%(testsuite_name)s" tests="%(total)d" '
u'errors="%(errors)d" failures="%(failures)d" '
u'skip="%(skipped)d">' % self.stats)
self.error_report_file.write(u''.join([force_unicode(e, self.encoding)
for e in self.errorlist]))
self.error_report_file.write(u'</testsuite>')
self.error_report_file.close()
if self.config.verbosity > 1:
stream.writeln("-" * 70)
stream.writeln("XML: %s" % self.error_report_file.name)
def _startCapture(self):
self._capture_stack.append((sys.stdout, sys.stderr))
self._currentStdout = StringIO()
self._currentStderr = StringIO()
sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)
def startContext(self, context):
self._startCapture()
def stopContext(self, context):
self._endCapture()
def beforeTest(self, test):
"""Initializes a timer before starting a test."""
self._timer = time()
self._startCapture()
def _endCapture(self):
if self._capture_stack:
sys.stdout, sys.stderr = self._capture_stack.pop()
def afterTest(self, test):
self._endCapture()
self._currentStdout = None
self._currentStderr = None
def finalize(self, test):
while self._capture_stack:
self._endCapture()
def _getCapturedStdout(self):
if self._currentStdout:
value = self._currentStdout.getvalue()
if value:
return '<system-out><![CDATA[%s]]></system-out>' % escape_cdata(
value)
return ''
def _getCapturedStderr(self):
if self._currentStderr:
value = self._currentStderr.getvalue()
if value:
return '<system-err><![CDATA[%s]]></system-err>' % escape_cdata(
value)
return ''
def addError(self, test, err, capt=None):
"""Add error output to Xunit report.
"""
taken = self._timeTaken()
if issubclass(err[0], SkipTest):
type = 'skipped'
self.stats['skipped'] += 1
else:
type = 'error'
self.stats['errors'] += 1
tb = format_exception(err, self.encoding)
id = test.id()
self.errorlist.append(
u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
u'<%(type)s type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
u'</%(type)s>%(systemout)s%(systemerr)s</testcase>' %
{'cls': self._quoteattr(id_split(id)[0]),
'name': self._quoteattr(id_split(id)[-1]),
'taken': taken,
'type': type,
'errtype': self._quoteattr(nice_classname(err[0])),
'message': self._quoteattr(exc_message(err)),
'tb': escape_cdata(tb),
'systemout': self._getCapturedStdout(),
'systemerr': self._getCapturedStderr(),
})
def addFailure(self, test, err, capt=None, tb_info=None):
"""Add failure output to Xunit report.
"""
taken = self._timeTaken()
tb = format_exception(err, self.encoding)
self.stats['failures'] += 1
id = test.id()
self.errorlist.append(
u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
u'<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
u'</failure>%(systemout)s%(systemerr)s</testcase>' %
{'cls': self._quoteattr(id_split(id)[0]),
'name': self._quoteattr(id_split(id)[-1]),
'taken': taken,
'errtype': self._quoteattr(nice_classname(err[0])),
'message': self._quoteattr(exc_message(err)),
'tb': escape_cdata(tb),
'systemout': self._getCapturedStdout(),
'systemerr': self._getCapturedStderr(),
})
def addSuccess(self, test, capt=None):
"""Add success output to Xunit report.
"""
taken = self._timeTaken()
self.stats['passes'] += 1
id = test.id()
self.errorlist.append(
'<testcase classname=%(cls)s name=%(name)s '
'time="%(taken).3f">%(systemout)s%(systemerr)s</testcase>' %
{'cls': self._quoteattr(id_split(id)[0]),
'name': self._quoteattr(id_split(id)[-1]),
'taken': taken,
'systemout': self._getCapturedStdout(),
'systemerr': self._getCapturedStderr(),
})
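# Editor's usage note (added comment, not part of nose): the plugin is
# driven from the command line via the options registered in options()
# above; nose derives the enabling switch from the plugin name, e.g.
#
#   nosetests --with-xunit --xunit-file=report.xml \
#             --xunit-testsuite-name=mysuite
#
# The resulting XML contains one <testcase> element per test, emitted by
# addSuccess/addFailure/addError.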
| mpl-2.0 | 3,531,498,034,428,540,400 | 33.214076 | 81 | 0.563898 | false |
codeaudit/pattern-1 | pattern/web/json/encoder.py | 26 | 21771 | """Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
try:
import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from decoder import PosInf
ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
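# Editor's note (added comment): ESCAPE also matches U+2028 and U+2029.
# Those two characters are legal inside JSON strings but terminate string
# literals in JavaScript, so escaping them keeps output safe to embed in
# <script> blocks.  ESCAPE_ASCII additionally matches everything outside
# printable ASCII, for the ensure_ascii code path below.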
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
u'\u2028': '\\u2028',
u'\u2029': '\\u2029',
}
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
return ESCAPE_DCT[match.group(0)]
return u'"' + ESCAPE.sub(replace, s) + u'"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
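                # (editor's note) code points above U+FFFF do not fit in a
                # single \uXXXX escape, so they are split into a UTF-16
                # surrogate pair: subtract 0x10000, put the high 10 bits in
                # the 0xD800-0xDBFF range and the low 10 bits in the
                # 0xDC00-0xDFFF range.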
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
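# Editor's illustrative sketch (the _demo_* name is invented, not part of
# the library): a worked example of the two escaping paths defined above.
def _demo_basestring_escaping():
    s = u'snowman: \u2603'
    ascii_form = encode_basestring_ascii(s)  # '"snowman: \\u2603"'
    unicode_form = encode_basestring(s)      # u'"snowman: \u2603"'
    return ascii_form, unicode_form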
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict, namedtuple | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method that returns a serializable object for ``o`` if
    possible, and otherwise calls the superclass implementation (to raise
    ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None,
use_decimal=True, namedtuple_as_object=True,
tuple_as_array=True, bigint_as_string=False,
item_sort_key=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
        If use_decimal is true (the default in this version, as the
        signature above shows), ``decimal.Decimal`` will be supported
        directly by the encoder. For the inverse, decode JSON with
        ``parse_float=decimal.Decimal``.
If namedtuple_as_object is true (the default), objects with
``_asdict()`` methods will be encoded as JSON objects.
If tuple_as_array is true (the default), tuple (and subclasses) will
be encoded as JSON arrays.
If bigint_as_string is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If specified, item_sort_key is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.use_decimal = use_decimal
self.namedtuple_as_object = namedtuple_as_object
self.tuple_as_array = tuple_as_array
self.bigint_as_string = bigint_as_string
self.item_sort_key = item_sort_key
if indent is not None and not isinstance(indent, basestring):
indent = indent * ' '
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
elif indent is not None:
self.item_separator = ','
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> from simplejson import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on
# the internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
key_memo = {}
if (_one_shot and c_make_encoder is not None
and self.indent is None):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
self.namedtuple_as_object, self.tuple_as_array,
self.bigint_as_string, self.item_sort_key,
Decimal)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot, self.use_decimal,
self.namedtuple_as_object, self.tuple_as_array,
self.bigint_as_string, self.item_sort_key,
Decimal=Decimal)
try:
return _iterencode(o, 0)
finally:
key_memo.clear()
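# Editor's illustrative sketch (the _demo_* name is invented): exercises two
# constructor options documented above -- use_decimal (enabled by default in
# this version) and bigint_as_string, which quotes integers outside
# JavaScript's exactly-representable range of +/- 2**53.
def _demo_encoder_options():
    enc = JSONEncoder(sort_keys=True, bigint_as_string=True)
    return enc.encode({'pi': Decimal('3.14'), 'big': 1 << 60})
    # -> '{"big": "1152921504606846976", "pi": 3.14}'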
class JSONEncoderForHTML(JSONEncoder):
"""An encoder that produces JSON safe to embed in HTML.
To embed JSON content in, say, a script tag on a web page, the
characters &, < and > should be escaped. They cannot be escaped
with the usual entities (e.g. &) because they are not expanded
within <script> tags.
"""
def encode(self, o):
# Override JSONEncoder.encode because it has hacks for
# performance that make things more complicated.
chunks = self.iterencode(o, True)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
for chunk in chunks:
chunk = chunk.replace('&', '\\u0026')
chunk = chunk.replace('<', '\\u003c')
chunk = chunk.replace('>', '\\u003e')
yield chunk
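# Editor's illustrative sketch (the _demo_* name is invented): the HTML-safe
# encoder escapes &, < and > as \u00XX sequences so its output can be
# embedded directly inside a <script> tag.
def _demo_html_safe_encoding():
    return JSONEncoderForHTML().encode('</script>')
    # -> '"\\u003c/script\\u003e"'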
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
_use_decimal, _namedtuple_as_object, _tuple_as_array,
_bigint_as_string, _item_sort_key,
## HACK: hand-optimized bytecode; turn globals into locals
False=False,
True=True,
ValueError=ValueError,
basestring=basestring,
Decimal=Decimal,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
if _item_sort_key and not callable(_item_sort_key):
raise TypeError("item_sort_key must be None or callable")
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield ((buf + str(value))
if (not _bigint_as_string or
(-1 << 53) < value < (1 << 53))
else (buf + '"' + str(value) + '"'))
elif isinstance(value, float):
yield buf + _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield buf + str(value)
else:
yield buf
if isinstance(value, list):
chunks = _iterencode_list(value, _current_indent_level)
else:
_asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
if _asdict and callable(_asdict):
chunks = _iterencode_dict(_asdict(),
_current_indent_level)
elif _tuple_as_array and isinstance(value, tuple):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _item_sort_key:
items = dct.items()
items.sort(key=_item_sort_key)
elif _sort_keys:
items = dct.items()
items.sort(key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield (str(value)
if (not _bigint_as_string or
(-1 << 53) < value < (1 << 53))
else ('"' + str(value) + '"'))
elif isinstance(value, float):
yield _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield str(value)
else:
if isinstance(value, list):
chunks = _iterencode_list(value, _current_indent_level)
else:
_asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
if _asdict and callable(_asdict):
chunks = _iterencode_dict(_asdict(),
_current_indent_level)
elif _tuple_as_array and isinstance(value, tuple):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield (str(o)
if (not _bigint_as_string or
(-1 << 53) < o < (1 << 53))
else ('"' + str(o) + '"'))
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, list):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
else:
_asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
if _asdict and callable(_asdict):
for chunk in _iterencode_dict(_asdict(), _current_indent_level):
yield chunk
elif (_tuple_as_array and isinstance(o, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
elif _use_decimal and isinstance(o, Decimal):
yield str(o)
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
| bsd-3-clause | -2,631,113,945,259,697,000 | 37.396825 | 87 | 0.527123 | false |
mkhuthir/learnPython | Book_pythonlearn_com/code3/bs4/__init__.py | 20 | 18244 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.
Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""
__author__ = "Leonard Richardson ([email protected])"
__version__ = "4.4.1"
__copyright__ = "Copyright (c) 2004-2015 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import os
import re
import warnings
from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
CData,
Comment,
DEFAULT_OUTPUT_ENCODING,
Declaration,
Doctype,
NavigableString,
PageElement,
ProcessingInstruction,
ResultSet,
SoupStrainer,
Tag,
)
# In the original Python 2 source the statement below used the `<>`
# operator, so importing the unconverted package under Python 3 raised an
# immediate SyntaxError whose message told the user how to convert it.
# In this 2to3-converted copy the operator became `!=`, so the line is now
# just a harmless no-op comparison expression.
'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
class BeautifulSoup(Tag):
"""
This class defines the basic interface called by the tree builders.
These methods will be called by the parser:
reset()
feed(markup)
The tree builder may call these methods from its feed() implementation:
handle_starttag(name, attrs) # See note about return value
handle_endtag(name)
handle_data(data) # Appends to the current data node
endData(containerClass=NavigableString) # Ends the current data node
No matter how complicated the underlying parser is, you should be
able to build a tree using 'start tag' events, 'end tag' events,
'data' events, and "done with data" events.
If you encounter an empty-element tag (aka a self-closing tag,
like HTML's <br> tag), call handle_starttag and then
handle_endtag.
"""
ROOT_TAG_NAME = '[document]'
# If the end-user gives no indication which tree builder they
# want, look for one with these features.
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n"
def __init__(self, markup="", features=None, builder=None,
parse_only=None, from_encoding=None, exclude_encodings=None,
**kwargs):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser."""
if 'convertEntities' in kwargs:
warnings.warn(
"BS4 does not respect the convertEntities argument to the "
"BeautifulSoup constructor. Entities are always converted "
"to Unicode characters.")
if 'markupMassage' in kwargs:
del kwargs['markupMassage']
warnings.warn(
"BS4 does not respect the markupMassage argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for any necessary markup massage.")
if 'smartQuotesTo' in kwargs:
del kwargs['smartQuotesTo']
warnings.warn(
"BS4 does not respect the smartQuotesTo argument to the "
"BeautifulSoup constructor. Smart quotes are always converted "
"to Unicode characters.")
if 'selfClosingTags' in kwargs:
del kwargs['selfClosingTags']
warnings.warn(
"BS4 does not respect the selfClosingTags argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for understanding self-closing tags.")
if 'isHTML' in kwargs:
del kwargs['isHTML']
warnings.warn(
"BS4 does not respect the isHTML argument to the "
"BeautifulSoup constructor. Suggest you use "
"features='lxml' for HTML and features='lxml-xml' for "
"XML.")
def deprecated_argument(old_name, new_name):
if old_name in kwargs:
warnings.warn(
'The "%s" argument to the BeautifulSoup constructor '
'has been renamed to "%s."' % (old_name, new_name))
value = kwargs[old_name]
del kwargs[old_name]
return value
return None
parse_only = parse_only or deprecated_argument(
"parseOnlyThese", "parse_only")
from_encoding = from_encoding or deprecated_argument(
"fromEncoding", "from_encoding")
if len(kwargs) > 0:
arg = list(kwargs.keys()).pop()
raise TypeError(
"__init__() got an unexpected keyword argument '%s'" % arg)
if builder is None:
original_features = features
if isinstance(features, str):
features = [features]
if features is None or len(features) == 0:
features = self.DEFAULT_BUILDER_FEATURES
builder_class = builder_registry.lookup(*features)
if builder_class is None:
raise FeatureNotFound(
"Couldn't find a tree builder with the features you "
"requested: %s. Do you need to install a parser library?"
% ",".join(features))
builder = builder_class()
if not (original_features == builder.NAME or
original_features in builder.ALTERNATE_NAMES):
if builder.is_xml:
markup_type = "XML"
else:
markup_type = "HTML"
warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict(
parser=builder.NAME,
markup_type=markup_type))
self.builder = builder
self.is_xml = builder.is_xml
self.builder.soup = self
self.parse_only = parse_only
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
elif len(markup) <= 256:
# Print out warnings for a couple beginner problems
# involving passing non-markup to Beautiful Soup.
# Beautiful Soup will still parse the input as markup,
# just in case that's what the user really wants.
if (isinstance(markup, str)
and not os.path.supports_unicode_filenames):
possible_filename = markup.encode("utf8")
else:
possible_filename = markup
is_file = False
try:
is_file = os.path.exists(possible_filename)
except Exception as e:
# This is almost certainly a problem involving
# characters not valid in filenames on this
# system. Just let it go.
pass
if is_file:
if isinstance(markup, str):
markup = markup.encode("utf8")
warnings.warn(
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
if markup[:5] == "http:" or markup[:6] == "https:":
# TODO: This is ugly but I couldn't get it to work in
# Python 3 otherwise.
if ((isinstance(markup, bytes) and not b' ' in markup)
or (isinstance(markup, str) and not ' ' in markup)):
if isinstance(markup, str):
markup = markup.encode("utf8")
warnings.warn(
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
for (self.markup, self.original_encoding, self.declared_html_encoding,
self.contains_replacement_characters) in (
self.builder.prepare_markup(
markup, from_encoding, exclude_encodings=exclude_encodings)):
self.reset()
try:
self._feed()
break
except ParserRejectedMarkup:
pass
# Clear out the markup and remove the builder's circular
# reference to this object.
self.markup = None
self.builder.soup = None
def __copy__(self):
return type(self)(self.encode(), builder=self.builder)
def __getstate__(self):
# Frequently a tree builder can't be pickled.
d = dict(self.__dict__)
if 'builder' in d and not self.builder.picklable:
del d['builder']
return d
def _feed(self):
# Convert the document to Unicode.
self.builder.reset()
self.builder.feed(self.markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def reset(self):
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
self.hidden = 1
self.builder.reset()
self.current_data = []
self.currentTag = None
self.tagStack = []
self.preserve_whitespace_tag_stack = []
self.pushTag(self)
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
"""Create a new tag associated with this soup."""
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
def new_string(self, s, subclass=NavigableString):
"""Create a new NavigableString associated with this soup."""
return subclass(s)
def insert_before(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
def insert_after(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
def popTag(self):
tag = self.tagStack.pop()
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
self.preserve_whitespace_tag_stack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
if tag.name in self.builder.preserve_whitespace_tags:
self.preserve_whitespace_tag_stack.append(tag)
def endData(self, containerClass=NavigableString):
if self.current_data:
current_data = ''.join(self.current_data)
# If whitespace is not preserved, and this string contains
# nothing but ASCII spaces, replace it with a single space
# or newline.
if not self.preserve_whitespace_tag_stack:
strippable = True
for i in current_data:
if i not in self.ASCII_SPACES:
strippable = False
break
if strippable:
if '\n' in current_data:
current_data = '\n'
else:
current_data = ' '
# Reset the data collector.
self.current_data = []
# Should we add this string to the tree at all?
if self.parse_only and len(self.tagStack) <= 1 and \
(not self.parse_only.text or \
not self.parse_only.search(current_data)):
return
o = containerClass(current_data)
self.object_was_parsed(o)
def object_was_parsed(self, o, parent=None, most_recent_element=None):
"""Add an object to the parse tree."""
parent = parent or self.currentTag
previous_element = most_recent_element or self._most_recent_element
next_element = previous_sibling = next_sibling = None
if isinstance(o, Tag):
next_element = o.next_element
next_sibling = o.next_sibling
previous_sibling = o.previous_sibling
if not previous_element:
previous_element = o.previous_element
o.setup(parent, previous_element, next_element, previous_sibling, next_sibling)
self._most_recent_element = o
parent.contents.append(o)
if parent.next_sibling:
# This node is being inserted into an element that has
# already been parsed. Deal with any dangling references.
index = parent.contents.index(o)
if index == 0:
previous_element = parent
previous_sibling = None
else:
previous_element = previous_sibling = parent.contents[index-1]
if index == len(parent.contents)-1:
next_element = parent.next_sibling
next_sibling = None
else:
next_element = next_sibling = parent.contents[index+1]
o.previous_element = previous_element
if previous_element:
previous_element.next_element = o
o.next_element = next_element
if next_element:
next_element.previous_element = o
o.next_sibling = next_sibling
if next_sibling:
next_sibling.previous_sibling = o
o.previous_sibling = previous_sibling
if previous_sibling:
previous_sibling.next_sibling = o
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instqance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
# The BeautifulSoup object itself can never be popped.
return
most_recently_popped = None
stack_size = len(self.tagStack)
for i in range(stack_size - 1, 0, -1):
t = self.tagStack[i]
if (name == t.name and nsprefix == t.prefix):
if inclusivePop:
most_recently_popped = self.popTag()
break
most_recently_popped = self.popTag()
return most_recently_popped
def handle_starttag(self, name, namespace, nsprefix, attrs):
"""Push a start tag on to the stack.
If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occurred
in the document. For instance, if this was a self-closing tag,
don't call handle_endtag.
"""
# print "Start tag %s: %s" % (name, attrs)
self.endData()
if (self.parse_only and len(self.tagStack) <= 1
and (self.parse_only.text
or not self.parse_only.search_tag(name, attrs))):
return None
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
self.currentTag, self._most_recent_element)
if tag is None:
return tag
if self._most_recent_element:
self._most_recent_element.next_element = tag
self._most_recent_element = tag
self.pushTag(tag)
return tag
def handle_endtag(self, name, nsprefix=None):
#print "End tag: " + name
self.endData()
self._popToTag(name, nsprefix)
def handle_data(self, data):
self.current_data.append(data)
def decode(self, pretty_print=False,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Returns a string or Unicode representation of this document.
To get Unicode, pass None for encoding."""
if self.is_xml:
# Print the XML declaration
encoding_part = ''
if eventual_encoding != None:
encoding_part = ' encoding="%s"' % eventual_encoding
prefix = '<?xml version="1.0"%s?>\n' % encoding_part
else:
prefix = ''
if not pretty_print:
indent_level = None
else:
indent_level = 0
return prefix + super(BeautifulSoup, self).decode(
indent_level, eventual_encoding, formatter)
# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup
class BeautifulStoneSoup(BeautifulSoup):
"""Deprecated interface to an XML parser."""
def __init__(self, *args, **kwargs):
kwargs['features'] = 'xml'
warnings.warn(
'The BeautifulStoneSoup class is deprecated. Instead of using '
'it, pass features="xml" into the BeautifulSoup constructor.')
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
class StopParsing(Exception):
pass
class FeatureNotFound(ValueError):
pass
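# Editor's illustrative sketch, not part of bs4 (the _demo_* name is
# invented): a minimal round trip through the public API defined above --
# parse markup, build new nodes with new_tag()/new_string(), and render.
# 'html.parser' is the stdlib builder, so no third-party parser is needed.
def _demo_build_soup():
    soup = BeautifulSoup('<p>Hello</p>', 'html.parser')
    tag = soup.new_tag('b')
    tag.append(soup.new_string('world'))
    soup.p.append(tag)
    return soup.decode()  # '<p>Hello<b>world</b></p>'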
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
print(soup.prettify())
| mit | 7,125,214,579,258,331,000 | 37.982906 | 467 | 0.591373 | false |
pbaesse/Sissens | lib/python2.7/site-packages/sqlalchemy/orm/path_registry.py | 28 | 7751 | # orm/path_registry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.
"""
from .. import inspection
from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper
import logging
log = logging.getLogger(__name__)
def _unreduce_path(path):
return PathRegistry.deserialize(path)
_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"
class PathRegistry(object):
"""Represent query load paths and registry functions.
Basically represents structures like:
(<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
These structures are generated by things like
query options (joinedload(), subqueryload(), etc.) and are
used to compose keys stored in the query._attributes dictionary
for various options.
They are then re-composed at query compile/result row time as
the query is formed and as rows are fetched, where they again
serve to compose keys to look up options in the context.attributes
dictionary, which is copied from query._attributes.
The path structure has a limited amount of caching, where each
"root" ultimately pulls from a fixed registry associated with
the first mapper, that also contains elements for each of its
property keys. However paths longer than two elements, which
are the exception rather than the rule, are generated on an
as-needed basis.
"""
is_token = False
is_root = False
def __eq__(self, other):
return other is not None and \
self.path == other.path
def set(self, attributes, key, value):
log.debug("set '%s' on path '%s' to '%s'", key, self, value)
attributes[(key, self.path)] = value
def setdefault(self, attributes, key, value):
log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
attributes.setdefault((key, self.path), value)
def get(self, attributes, key, value=None):
key = (key, self.path)
if key in attributes:
return attributes[key]
else:
return value
def __len__(self):
return len(self.path)
@property
def length(self):
return len(self.path)
def pairs(self):
path = self.path
for i in range(0, len(path), 2):
yield path[i], path[i + 1]
def contains_mapper(self, mapper):
for path_mapper in [
self.path[i] for i in range(0, len(self.path), 2)
]:
if path_mapper.is_mapper and \
path_mapper.isa(mapper):
return True
else:
return False
def contains(self, attributes, key):
return (key, self.path) in attributes
def __reduce__(self):
return _unreduce_path, (self.serialize(), )
def serialize(self):
path = self.path
return list(zip(
[m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
[path[i].key for i in range(1, len(path), 2)] + [None]
))
@classmethod
def deserialize(cls, path):
if path is None:
return None
p = tuple(chain(*[(class_mapper(mcls),
class_mapper(mcls).attrs[key]
if key is not None else None)
for mcls, key in path]))
if p and p[-1] is None:
p = p[0:-1]
return cls.coerce(p)
@classmethod
def per_mapper(cls, mapper):
return EntityRegistry(
cls.root, mapper
)
@classmethod
def coerce(cls, raw):
return util.reduce(lambda prev, next: prev[next], raw, cls.root)
def token(self, token):
if token.endswith(':' + _WILDCARD_TOKEN):
return TokenRegistry(self, token)
elif token.endswith(":" + _DEFAULT_TOKEN):
return TokenRegistry(self.root, token)
else:
raise exc.ArgumentError("invalid token: %s" % token)
def __add__(self, other):
return util.reduce(
lambda prev, next: prev[next],
other.path, self)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.path, )
class RootRegistry(PathRegistry):
"""Root registry, defers to mappers so that
paths are maintained per-root-mapper.
"""
path = ()
has_entity = False
is_aliased_class = False
is_root = True
def __getitem__(self, entity):
return entity._path_registry
PathRegistry.root = RootRegistry()
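# Editor's illustrative sketch (the _demo_* name is invented): token paths
# hang off the shared root registry.  Per the check in token() above, only
# wildcard-style tokens are accepted.
def _demo_token_path():
    t = PathRegistry.root.token('relationship:' + _WILDCARD_TOKEN)
    return t.path  # ('relationship:*',)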
class TokenRegistry(PathRegistry):
def __init__(self, parent, token):
self.token = token
self.parent = parent
self.path = parent.path + (token,)
has_entity = False
is_token = True
def generate_for_superclasses(self):
if not self.parent.is_aliased_class and not self.parent.is_root:
for ent in self.parent.mapper.iterate_to_root():
yield TokenRegistry(self.parent.parent[ent], self.token)
else:
yield self
def __getitem__(self, entity):
raise NotImplementedError()
class PropRegistry(PathRegistry):
def __init__(self, parent, prop):
# restate this path in terms of the
# given MapperProperty's parent.
insp = inspection.inspect(parent[-1])
if not insp.is_aliased_class or insp._use_mapper_path:
parent = parent.parent[prop.parent]
elif insp.is_aliased_class and insp.with_polymorphic_mappers:
if prop.parent is not insp.mapper and \
prop.parent in insp.with_polymorphic_mappers:
subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
parent = parent.parent[subclass_entity]
self.prop = prop
self.parent = parent
self.path = parent.path + (prop,)
self._wildcard_path_loader_key = (
"loader",
self.parent.path + self.prop._wildcard_token
)
self._default_path_loader_key = self.prop._default_path_loader_key
self._loader_key = ("loader", self.path)
def __str__(self):
return " -> ".join(
str(elem) for elem in self.path
)
@util.memoized_property
def has_entity(self):
return hasattr(self.prop, "mapper")
@util.memoized_property
def entity(self):
return self.prop.mapper
@property
def mapper(self):
return self.entity
@property
def entity_path(self):
return self[self.entity]
def __getitem__(self, entity):
if isinstance(entity, (int, slice)):
return self.path[entity]
else:
return EntityRegistry(
self, entity
)
class EntityRegistry(PathRegistry, dict):
is_aliased_class = False
has_entity = True
def __init__(self, parent, entity):
self.key = entity
self.parent = parent
self.is_aliased_class = entity.is_aliased_class
self.entity = entity
self.path = parent.path + (entity,)
self.entity_path = self
@property
def mapper(self):
return inspection.inspect(self.entity).mapper
def __bool__(self):
return True
__nonzero__ = __bool__
def __getitem__(self, entity):
if isinstance(entity, (int, slice)):
return self.path[entity]
else:
return dict.__getitem__(self, entity)
def __missing__(self, key):
self[key] = item = PropRegistry(self, key)
return item
| gpl-3.0 | 832,123,739,482,167,700 | 27.601476 | 76 | 0.595536 | false |
EntityFXCode/arsenalsuite | cpp/lib/PyQt4/examples/demos/embeddeddialogs/embeddeddialogs.py | 15 | 9138 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial Usage
## Licensees holding valid Qt Commercial licenses may use this file in
## accordance with the Qt Commercial License Agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Nokia.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Nokia gives you certain additional
## rights. These rights are described in the Nokia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU General Public License version 3.0 requirements will be
## met: http://www.gnu.org/copyleft/gpl.html.
##
## If you have questions regarding the use of this file, please contact
## Nokia at [email protected].
## $QT_END_LICENSE$
##
#############################################################################
# This is only needed for Python v2 but is harmless for Python v3.
import sip
sip.setapi('QString', 2)
from PyQt4 import QtCore, QtGui
from embeddeddialog import Ui_embeddedDialog
from embeddeddialogs_rc import *
class CustomProxy(QtGui.QGraphicsProxyWidget):
def __init__(self, parent=None, wFlags=0):
super(CustomProxy, self).__init__(parent, wFlags)
self.popupShown = False
self.timeLine = QtCore.QTimeLine(250, self)
self.timeLine.valueChanged.connect(self.updateStep)
self.timeLine.stateChanged.connect(self.stateChanged)
def boundingRect(self):
return QtGui.QGraphicsProxyWidget.boundingRect(self).adjusted(0, 0, 10, 10)
def paintWindowFrame(self, painter, option, widget):
color = QtGui.QColor(0, 0, 0, 64)
r = self.windowFrameRect()
right = QtCore.QRectF(r.right(), r.top()+10, 10, r.height()-10)
bottom = QtCore.QRectF(r.left()+10, r.bottom(), r.width(), 10)
intersectsRight = right.intersects(option.exposedRect)
intersectsBottom = bottom.intersects(option.exposedRect)
if intersectsRight and intersectsBottom:
            path = QtGui.QPainterPath()
path.addRect(right)
path.addRect(bottom)
painter.setPen(QtCore.Qt.NoPen)
painter.setBrush(color)
painter.drawPath(path)
elif intersectsBottom:
painter.fillRect(bottom, color)
elif intersectsRight:
painter.fillRect(right, color)
super(CustomProxy, self).paintWindowFrame(painter, option, widget)
def hoverEnterEvent(self, event):
super(CustomProxy, self).hoverEnterEvent(event)
self.scene().setActiveWindow(self)
if self.timeLine.currentValue != 1:
self.zoomIn()
def hoverLeaveEvent(self, event):
super(CustomProxy, self).hoverLeaveEvent(event)
if not self.popupShown and (self.timeLine.direction() != QtCore.QTimeLine.Backward or self.timeLine.currentValue() != 0):
self.zoomOut()
def sceneEventFilter(self, watched, event):
if watched.isWindow() and (event.type() == QtCore.QEvent.UngrabMouse or event.type() == QtCore.QEvent.GrabMouse):
self.popupShown = watched.isVisible()
if not self.popupShown and not self.isUnderMouse():
self.zoomOut()
return super(CustomProxy, self).sceneEventFilter(watched, event)
def itemChange(self, change, value):
        if change == self.ItemChildAddedChange or change == self.ItemChildRemovedChange:
            # In the C++ original the child arrives wrapped in a QVariant
            # and is extracted with
            #     QGraphicsItem *item = qVariantValue<QGraphicsItem *>(value);
            # PyQt passes the QGraphicsItem in directly, so no unwrapping
            # is needed here.
            item = value
try:
if change == self.ItemChildAddedChange:
item.installSceneEventFilter(self)
else:
item.removeSceneEventFilter(self)
except:
pass
return super(CustomProxy, self).itemChange(change, value)
def updateStep(self, step):
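        # (editor's note) step runs from 0.0 to 1.0 as the QTimeLine above
        # advances; the transform below pivots about the proxy's centre and
        # combines small per-axis rotations with a zoom of up to 2.5x.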
        r = self.boundingRect()
self.setTransform( QtGui.QTransform() \
.translate(r.width() / 2, r.height() / 2)\
.rotate(step * 30, QtCore.Qt.XAxis)\
.rotate(step * 10, QtCore.Qt.YAxis)\
.rotate(step * 5, QtCore.Qt.ZAxis)\
.scale(1 + 1.5 * step, 1 + 1.5 * step)\
.translate(-r.width() / 2, -r.height() / 2))
def stateChanged(self, state):
if state == QtCore.QTimeLine.Running:
if self.timeLine.direction() == QtCore.QTimeLine.Forward:
self.setCacheMode(self.NoCache)
elif state == QtCore.QTimeLine.NotRunning:
if self.timeLine.direction() == QtCore.QTimeLine.Backward:
self.setCacheMode(self.DeviceCoordinateCache)
def zoomIn(self):
if self.timeLine.direction() != QtCore.QTimeLine.Forward:
self.timeLine.setDirection(QtCore.QTimeLine.Forward)
if self.timeLine.state() == QtCore.QTimeLine.NotRunning:
self.timeLine.start()
def zoomOut(self):
if self.timeLine.direction() != QtCore.QTimeLine.Backward:
self.timeLine.setDirection(QtCore.QTimeLine.Backward)
if self.timeLine.state() == QtCore.QTimeLine.NotRunning:
self.timeLine.start()
class EmbeddedDialog(QtGui.QDialog):
def __init__(self, parent=None):
super(EmbeddedDialog, self).__init__(parent)
self.ui = Ui_embeddedDialog()
self.ui.setupUi(self)
self.ui.layoutDirection.setCurrentIndex(self.layoutDirection() != QtCore.Qt.LeftToRight)
for styleName in QtGui.QStyleFactory.keys():
self.ui.style.addItem(styleName)
if self.style().objectName().lower() == styleName.lower():
self.ui.style.setCurrentIndex(self.ui.style.count() -1)
self.ui.layoutDirection.activated.connect(self.layoutDirectionChanged)
self.ui.spacing.valueChanged.connect(self.spacingChanged)
self.ui.fontComboBox.currentFontChanged.connect(self.fontChanged)
self.ui.style.activated[str].connect(self.styleChanged)
def layoutDirectionChanged(self, index):
if index == 0:
self.setLayoutDirection(QtCore.Qt.LeftToRight)
else:
self.setLayoutDirection(QtCore.Qt.RightToLeft)
def spacingChanged(self, spacing):
self.layout().setSpacing(spacing)
self.adjustSize()
def fontChanged(self, font):
self.setFont(font)
def setStyleHelper(self, widget, style):
widget.setStyle(style)
widget.setPalette(style.standardPalette())
for child in widget.children():
if isinstance(child, QtGui.QWidget):
self.setStyleHelper(child, style)
def styleChanged(self, styleName):
        style = QtGui.QStyleFactory.create(styleName)
if style:
self.setStyleHelper(self, style)
# Keep a reference to the style.
self._style = style
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
scene = QtGui.QGraphicsScene()
for y in range(10):
for x in range(10):
proxy = CustomProxy(None, QtCore.Qt.Window)
proxy.setWidget(EmbeddedDialog())
rect = proxy.boundingRect()
proxy.setPos( x * rect.width()*1.05, y*rect.height()*1.05 )
proxy.setCacheMode(QtGui.QGraphicsItem.DeviceCoordinateCache)
scene.addItem(proxy)
scene.setSceneRect(scene.itemsBoundingRect())
view = QtGui.QGraphicsView(scene)
view.scale(0.5, 0.5)
view.setRenderHints(view.renderHints() | QtGui.QPainter.Antialiasing | QtGui.QPainter.SmoothPixmapTransform)
view.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/No-Ones-Laughing-3.jpg')))
view.setCacheMode(QtGui.QGraphicsView.CacheBackground)
view.setViewportUpdateMode(QtGui.QGraphicsView.BoundingRectViewportUpdate)
view.show()
view.setWindowTitle("Embedded Dialogs Demo")
sys.exit(app.exec_())
| gpl-2.0 | -6,895,222,502,349,413,000 | 38.387931 | 129 | 0.646312 | false |
yencarnacion/jaikuengine | .google_appengine/lib/django-1.2/django/db/backends/sqlite3/introspection.py | 65 | 5857 | import re
from django.db.backends import BaseDatabaseIntrospection
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict:
# Maps SQL types to Django Field types. Some of the SQL types have multiple
# entries here because SQLite allows for anything and doesn't normalize the
# field type; it uses whatever was given.
base_data_types_reverse = {
'bool': 'BooleanField',
'boolean': 'BooleanField',
'smallint': 'SmallIntegerField',
'smallint unsigned': 'PositiveSmallIntegerField',
'smallinteger': 'SmallIntegerField',
'int': 'IntegerField',
'integer': 'IntegerField',
'bigint': 'BigIntegerField',
'integer unsigned': 'PositiveIntegerField',
'decimal': 'DecimalField',
'real': 'FloatField',
'text': 'TextField',
'char': 'CharField',
'date': 'DateField',
'datetime': 'DateTimeField',
'time': 'TimeField',
}
def __getitem__(self, key):
key = key.lower()
try:
return self.base_data_types_reverse[key]
except KeyError:
import re
m = re.search(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$', key)
if m:
return ('CharField', {'max_length': int(m.group(1))})
raise KeyError
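# Editor's illustrative sketch (the _demo_* name is invented): plain type
# names resolve directly, while parametrised (var)char types are parsed by
# the regex in __getitem__ above.
def _demo_type_lookup():
    lookup = FlexibleFieldLookupDict()
    return lookup['integer'], lookup['varchar(30)']
    # -> ('IntegerField', ('CharField', {'max_length': 30}))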
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = FlexibleFieldLookupDict()
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
# Skip the sqlite_sequence system table used for autoincrement key
# generation.
cursor.execute("""
SELECT name FROM sqlite_master
WHERE type='table' AND NOT name='sqlite_sequence'
ORDER BY name""")
return [row[0] for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
return [(info['name'], info['type'], None, None, None, None,
info['null_ok']) for info in self._table_info(cursor, table_name)]
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
# Dictionary of relations to return
relations = {}
# Schema for this table
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
results = cursor.fetchone()[0].strip()
results = results[results.index('(')+1:results.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
for field_index, field_desc in enumerate(results.split(',')):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search('references (.*) \(["|](.*)["|]\)', field_desc, re.I)
if not m:
continue
table, column = [s.strip('"') for s in m.groups()]
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
result = cursor.fetchone()
if not result:
continue
other_table_results = result[0].strip()
li, ri = other_table_results.index('('), other_table_results.rindex(')')
other_table_results = other_table_results[li+1:ri]
for other_index, other_desc in enumerate(other_table_results.split(',')):
other_desc = other_desc.strip()
if other_desc.startswith('UNIQUE'):
continue
name = other_desc.split(' ', 1)[0].strip('"')
if name == column:
relations[field_index] = (other_index, table)
break
return relations
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
indexes = {}
for info in self._table_info(cursor, table_name):
indexes[info['name']] = {'primary_key': info['pk'] != 0,
'unique': False}
cursor.execute('PRAGMA index_list(%s)' % self.connection.ops.quote_name(table_name))
# seq, name, unique
for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
if not unique:
continue
cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
info = cursor.fetchall()
# Skip indexes across multiple fields
if len(info) != 1:
continue
name = info[0][2] # seqno, cid, name
indexes[name]['unique'] = True
return indexes
def _table_info(self, cursor, name):
cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(name))
# cid, name, type, notnull, dflt_value, pk
return [{'name': field[1],
'type': field[2],
'null_ok': not field[3],
'pk': field[5] # undocumented
} for field in cursor.fetchall()]
| apache-2.0 | -8,169,105,904,665,678,000 | 40.539007 | 112 | 0.575209 | false |
mrbandrews/bitcoin | qa/rpc-tests/netutil.py | 8 | 4561 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Linux network utilities
import sys
import socket
import fcntl
import struct
import array
import os
import binascii
# Roughly based on http://voorloopnul.com/blog/a-python-netstat-in-less-than-100-lines-of-code/ by Ricardo Pascal
STATE_ESTABLISHED = '01'
STATE_SYN_SENT = '02'
STATE_SYN_RECV = '03'
STATE_FIN_WAIT1 = '04'
STATE_FIN_WAIT2 = '05'
STATE_TIME_WAIT = '06'
STATE_CLOSE = '07'
STATE_CLOSE_WAIT = '08'
STATE_LAST_ACK = '09'
STATE_LISTEN = '0A'
STATE_CLOSING = '0B'
def get_socket_inodes(pid):
'''
Get list of socket inodes for process pid.
'''
base = '/proc/%i/fd' % pid
inodes = []
for item in os.listdir(base):
target = os.readlink(os.path.join(base, item))
if target.startswith('socket:'):
inodes.append(int(target[8:-1]))
return inodes
def _remove_empty(array):
return [x for x in array if x !='']
def _convert_ip_port(array):
host,port = array.split(':')
# convert host from mangled-per-four-bytes form as used by kernel
host = binascii.unhexlify(host)
host_out = ''
for x in range(0, len(host)/4):
(val,) = struct.unpack('=I', host[x*4:(x+1)*4])
host_out += '%08x' % val
return host_out,int(port,16)
def netstat(typ='tcp'):
'''
    Return a list with the status of TCP connections on Linux systems.
    To get the pids of all networked processes on the system, you must run
    this script as superuser.
'''
with open('/proc/net/'+typ,'r') as f:
content = f.readlines()
content.pop(0)
result = []
for line in content:
line_array = _remove_empty(line.split(' ')) # Split lines and remove empty spaces.
tcp_id = line_array[0]
l_addr = _convert_ip_port(line_array[1])
r_addr = _convert_ip_port(line_array[2])
state = line_array[3]
inode = int(line_array[9]) # Need the inode to match with process pid.
nline = [tcp_id, l_addr, r_addr, state, inode]
result.append(nline)
return result
def get_bind_addrs(pid):
'''
Get bind addresses as (host,port) tuples for process pid.
'''
inodes = get_socket_inodes(pid)
bind_addrs = []
for conn in netstat('tcp') + netstat('tcp6'):
if conn[3] == STATE_LISTEN and conn[4] in inodes:
bind_addrs.append(conn[1])
return bind_addrs
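# Editor's illustrative sketch (the _demo_* name is invented; Linux only,
# since the helpers above read /proc): list the listening sockets of the
# current process as (hex_host, port) tuples.
def _demo_own_bind_addrs():
    return get_bind_addrs(os.getpid())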
# from: http://code.activestate.com/recipes/439093/
def all_interfaces():
'''
Return all interfaces that are up
'''
is_64bits = sys.maxsize > 2**32
struct_size = 40 if is_64bits else 32
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
max_possible = 8 # initial value
while True:
bytes = max_possible * struct_size
names = array.array('B', '\0' * bytes)
outbytes = struct.unpack('iL', fcntl.ioctl(
s.fileno(),
0x8912, # SIOCGIFCONF
struct.pack('iL', bytes, names.buffer_info()[0])
))[0]
if outbytes == bytes:
max_possible *= 2
else:
break
namestr = names.tostring()
return [(namestr[i:i+16].split('\0', 1)[0],
socket.inet_ntoa(namestr[i+20:i+24]))
for i in range(0, outbytes, struct_size)]
def addr_to_hex(addr):
'''
Convert string IPv4 or IPv6 address to binary address as returned by
get_bind_addrs.
Very naive implementation that certainly doesn't work for all IPv6 variants.
'''
if '.' in addr: # IPv4
addr = [int(x) for x in addr.split('.')]
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
addr = sub[0] + ([0] * nullbytes) + sub[1]
else:
raise ValueError('Could not parse address %s' % addr)
return binascii.hexlify(bytearray(addr))
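# Illustrative examples:
#   addr_to_hex('127.0.0.1') == '7f000001'
#   addr_to_hex('::1') == '00000000000000000000000000000001'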
| mit | -5,939,139,010,465,586,000 | 31.81295 | 113 | 0.583644 | false |
chacoroot/planetary | addons/hr_expense/__init__.py | 436 | 1079 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_expense
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,422,844,497,721,098,000 | 40.5 | 78 | 0.617238 | false |
chaubold/opengm | src/interfaces/python/test.py | 2 | 52311 | import numpy
import opengm
import os
import sys
import random
import opengm.learning
class TestAllExamples:
def test_run(self):
for r, d, f in os.walk("examples"):
for files in f:
if files.endswith(".py"):
if(not str(files).endswith('gui.py')):
pass
# execfile(filePath)
# subprocess.call([filePath, arg1, arg2])
#execfile("examples/" + files)
def lenOfGen(gen):
    """Return the number of items produced by a generator."""
    return sum(1 for _ in gen)
def generate_grid(dimx, dimy, labels, beta1, beta2, operator="adder"):
nos = numpy.ones(dimx * dimy, dtype=numpy.uint64) * labels
gm = opengm.gm(nos, operator, 0)
for vi in range(dimx * dimy):
f1 = numpy.random.random((labels,)).astype(numpy.float64) * 0.6 + 0.2
assert len(f1.shape) == 1
assert f1.shape[0] == labels
fid1 = gm.addFunction(f1)
gm.addFactor(fid1, (vi,))
f2 = numpy.ones([labels, labels], dtype=numpy.float64)
for l in range(labels):
f2[l, l] = beta1
fid2 = gm.addFunction(f2)
for y in range(dimy):
for x in range(dimx):
if x + 1 < dimx:
vis = [x + y * dimx, x + 1 + y * dimx]
                vis.sort()  # keep the variable indices ordered, as opengm expects
gm.addFactor(fid2, vis)
if y + 1 < dimy:
vis = [x + y * dimx, x + (y + 1) * dimx]
vis.sort()
gm.addFactor(fid2, vis)
return gm
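# Illustrative usage: a 2x2 binary grid has 4 unary factors plus 4 pairwise
# factors (2 horizontal and 2 vertical edges):
#   gm = generate_grid(dimx=2, dimy=2, labels=2, beta1=0.1, beta2=0.2)
#   assert gm.numberOfVariables == 4 and gm.numberOfFactors == 8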
def generate_mc_grid(dimx, dimy, operator="adder"):
labels=dimx*dimy
nos = numpy.ones(labels, dtype=numpy.uint64) * labels
gm = opengm.gm(nos, operator, 0)
for y in range(dimy):
for x in range(dimx):
if x + 1 < dimx:
vis = [x + y * dimx, x + 1 + y * dimx]
                vis.sort()  # keep the variable indices ordered, as opengm expects
l=random.random()*2.0 - 1.0
fr=opengm.pottsFunction([labels,labels],0.0,l)
fid2=gm.addFunction(fr)
gm.addFactor(fid2, vis)
if y + 1 < dimy:
vis = [x + y * dimx, x + (y + 1) * dimx]
vis.sort()
l=random.random()*2.0 - 1.0
fr=opengm.pottsFunction([labels,labels],0.0,l)
fid2=gm.addFunction(fr)
gm.addFactor(fid2, vis)
return gm
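# Note: generate_mc_grid builds a multicut-style model; every variable can take
# as many labels as there are grid nodes, and each edge carries a Potts term
# with a random coupling drawn from [-1, 1).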
def makeGrid(dimx, dimy, labels, beta, acc="min"):
nos = numpy.ones(dimx * dimy, dtype=numpy.uint64) * labels
if acc == "min":
gm = opengm.adder.GraphicalModel(nos)
else:
gm = opengm.multiplier.GraphicalModel(nos)
for vi in range(dimx * dimy):
f1 = numpy.random.random((labels,)).astype(numpy.float64)
fid1 = gm.addFunction(f1)
gm.addFactor(fid1, (vi,))
f2 = numpy.ones(labels * labels, dtype=numpy.float64).reshape(
labels, labels) * beta
for l in range(labels):
f2[l, l] = 0
fid2 = gm.addFunction(f2)
for y in range(dimy):
for x in range(dimx):
if x + 1 < dimx - 1:
gm.addFactor(fid2, [x + y * dimx, x + 1 + y * dimx])
if y + 1 < dimy - 1:
gm.addFactor(fid2, [x + y * dimx, x + (y + 1) * dimx])
return gm
def checkSolution(gm, argOpt, arg, acc="min", tolerance=None, check=True):
valOpt = gm.evaluate(argOpt)
val = gm.evaluate(arg)
numtol = 0.00000000001
if check:
if acc == "min":
if tolerance is None:
tol = numtol
assert(val - tol <= valOpt)
else:
tol = valOpt * tolerance
assert(val - tol <= valOpt)
if acc == "max":
if tolerance is None:
tol = numtol
assert(val - tol >= valOpt)
else:
tol = valOpt * tolerance + numtol
assert(val - tol >= valOpt)
def checkInference(gm, solver, argOpt, optimal=False, tolerance=None,
acc="min"):
solver.infer()
arg = solver.arg()
checkSolution(gm, argOpt, arg, acc, tolerance, optimal)
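# Illustrative combination of the helpers above (not run as a test): compute a
# reference optimum with brute force, then validate another solver against it.
#   gm = generate_grid(dimx=3, dimy=3, labels=2, beta1=0.1, beta2=0.2)
#   bf = opengm.inference.Bruteforce(gm=gm, accumulator='minimizer')
#   bf.infer()
#   icm = opengm.inference.Icm(gm=gm, accumulator='minimizer')
#   checkInference(gm, icm, bf.arg(), optimal=False)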
class TestUtilities:
def test_vector(self):
assert(True)
def test_enums(self):
assert(True)
def test_is_build_in_simple_parameter(self):
class MyClass(object):
def __init__(self):
pass
assert(not opengm._to_native_converter.is_build_in_simple_parameter(
classType=MyClass))
assert(not opengm._to_native_converter.is_build_in_simple_parameter(
instanceType=MyClass()))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
classType=bool))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
instanceType=bool()))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
instanceType=1))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
instanceType=1.0))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
instanceType='1.0'))
simple_types = [int, long, float, bool, str]
for st in simple_types:
assert(opengm._to_native_converter.is_build_in_simple_parameter(
classType=st))
assert(opengm._to_native_converter.is_build_in_simple_parameter(
instanceType=st()))
def test_is_tribool(self):
assert(opengm._to_native_converter.is_tribool(
classType=opengm.Tribool))
assert(opengm._to_native_converter.is_tribool(
instanceType=opengm.Tribool(0)))
assert(not opengm._to_native_converter.is_tribool(classType=bool))
assert(not opengm._to_native_converter.is_tribool(
instanceType=True))
class TestSparseFunction:
def test_constructor(self):
functions = []
functions.append(opengm.SparseFunction([2, 3, 4], 1))
functions.append(opengm.SparseFunction((2, 3, 4), 1))
for f in functions:
assert(f.defaultValue == 1)
assert(f.dimension == 3)
assert(f.shape[0] == 2)
assert(f.shape[1] == 3)
assert(f.shape[2] == 4)
assert(len(f.shape) == 3)
assert(f.size == 2 * 3 * 4)
def test_key_to_coordinate(self):
f = opengm.SparseFunction([2, 3, 4], 0)
c = numpy.ones(3, dtype=numpy.uint64)
for key, cTrue in enumerate(opengm.shapeWalker(f.shape)):
f.keyToCoordinate(key, c)
for ct, cOwn in zip(cTrue, c):
assert ct == cOwn
def test_dense_assignment(self):
f = opengm.SparseFunction()
fDense = numpy.zeros([3, 4])
fDense[0, 1] = 1
fDense[0, 2] = 2
f.assignDense(fDense, 0)
assert f.dimension == 2
assert f.shape[0] == 3
assert f.shape[1] == 4
assert f[[0, 0]] == 0
assert f[[0, 1]] == 1
assert f[[0, 2]] == 2
for c in opengm.shapeWalker(f.shape):
assert f[c] == fDense[c[0], c[1]]
assert len(f.container) == 2
class TestFunctions:
def test_potts(self):
nl1 = numpy.ones(10, dtype=numpy.uint64) * 2
nl2 = numpy.ones(5, dtype=numpy.uint64) * 3
veq = numpy.zeros(1, dtype=numpy.float64)
vnew = numpy.arange(0, 10, dtype=numpy.float64)
pottsFunctionVector = opengm.PottsFunctionVector(nl1, nl2, veq, vnew)
assert len(pottsFunctionVector) == 10
for i, f in enumerate(pottsFunctionVector):
assert f.shape[0] == 2
assert f.shape[1] == 3
assert f[0, 0] == 0
assert f[[1, 1]] == 0
assert f[[0, 1]] == vnew[i]
class TestGm:
def test_constructor_generic(self):
def mygen():
yield 2
yield 3
yield 4
nos_list = [
numpy.arange(2, 5, dtype=numpy.uint64),
[2, 3, 4],
(2, 3, 4),
(x for x in xrange(2, 5)),
mygen(),
opengm.IndexVector(x for x in xrange(2, 5))
]
for i, nos in enumerate(nos_list):
if(type(nos) != type(mygen())):
pass
# assert(len(nos)==3)
gm = opengm.gm(nos, operator='adder')
assert(gm.numberOfVariables == 3)
assert(gm.numberOfLabels(0) == 2)
assert(gm.numberOfLabels(1) == 3)
assert(gm.numberOfLabels(2) == 4)
assert(gm.space().numberOfVariables == 3)
assert(gm.space()[0] == 2)
assert(gm.space()[1] == 3)
assert(gm.space()[2] == 4)
nos_list = [
numpy.arange(2, 5, dtype=numpy.uint64),
[2, 3, 4],
(2, 3, 4),
(x for x in xrange(2, 5)),
mygen(),
opengm.IndexVector(x for x in xrange(2, 5))
]
for i, nos in enumerate(nos_list):
if(type(nos) != type(mygen())):
pass # assert(len(nos)==3)
gm = opengm.adder.GraphicalModel()
gm.assign(nos)
assert(gm.numberOfVariables == 3)
assert(gm.numberOfLabels(0) == 2)
assert(gm.numberOfLabels(1) == 3)
assert(gm.numberOfLabels(2) == 4)
assert(gm.space().numberOfVariables == 3)
assert(gm.space()[0] == 2)
assert(gm.space()[1] == 3)
assert(gm.space()[2] == 4)
def test_add_factors_generic(self):
def mygen():
yield 0
yield 1
gm = opengm.gm([2, 4])
f = opengm.PottsFunction([2, 4], 0.0, 1.0)
fid = gm.addFunction(f)
vis_list = [
[0, 1],
(0, 1),
(x for x in xrange(2)),
mygen(),
opengm.IndexVector(x for x in xrange(0, 2)),
numpy.arange(0, 2, dtype=numpy.uint64)
]
for i, vis in enumerate(vis_list):
fIndex = gm.addFactor(fid, vis)
assert(gm.numberOfFactors == i + 1)
assert(fIndex == i)
assert(gm[fIndex].numberOfVariables == 2)
assert(gm[fIndex].shape[0] == 2)
assert(gm[fIndex].shape[1] == 4)
assert(gm[fIndex].variableIndices[0] == 0)
assert(gm[fIndex].variableIndices[1] == 1)
def test_add_function(self):
numberOfStates = [2, 3, 4]
gm = opengm.adder.GraphicalModel(numberOfStates)
f1 = numpy.ones(6 * 4, numpy.float64)
p = 1
for i in range(2 * 3 * 4):
f1[i] = i
p *= i
f1 = f1.reshape(2, 3, 4)
idf = gm.addFunction(f1)
gm.addFactor(idf, (0, 1, 2))
assert(gm[0].min() == 0)
assert(gm[0].max() == 2 * 3 * 4 - 1)
assert(gm[0].sum() == sum(range(2 * 3 * 4)))
assert(gm[0].product() == p)
nf1 = gm[0].__array__()
assert(len(f1.shape) == len(nf1.shape))
for i in range(len(f1.shape)):
assert(f1.shape[i] == nf1.shape[i])
for k in range(f1.shape[2]):
for j in range(f1.shape[1]):
for i in range(f1.shape[0]):
assert(gm[0][numpy.array(
[i, j, k], dtype=numpy.uint64)] == f1[i, j, k])
assert(gm[0][(i, j, k)] == f1[i, j, k])
assert(gm[0][(i, j, k)] == nf1[i, j, k])
def test_add_multiple_functions(self):
nVar = 10
nLabels = 2
for nFunctions in [1, 10]:
for order in [1, 2, 3, 4]:
gm = opengm.gm([nLabels] * nVar)
# add functionS
fShape = [nFunctions] + [nLabels] * order
f = numpy.ones(fShape, dtype=opengm.value_type).reshape(-1)
f[:] = numpy.random.rand(f.size)[:]
f = f.reshape(fShape)
fids = gm.addFunctions(f)
# assertions
assert len(fids) == nFunctions
def test_add_multiple_functions_order1(self):
nVar = 4
nLabels = 2
gm = opengm.gm([nLabels] * nVar)
# add functionS
fShape =[4,2]
f = opengm.randomFunction(fShape)
vis=numpy.ones([4,1])
vis[0,0]=0
vis[1,0]=1
vis[2,0]=2
vis[3,0]=3
fids = gm.addFunctions(f)
gm.addFactors(fids,vis)
assert gm[1][(0,)]==f[1,0]
assert gm[1][(1,)]==f[1,1]
for x in xrange(4):
assert gm[x][(0,)]==f[x,0]
assert gm[x][(1,)]==f[x,1]
def test_add_multiple_functions_order2a(self):
nVar = 4
nLabels = 2
gm = opengm.gm([nLabels] * nVar)
# add functionS
fShape =[2,2,2]
f = opengm.randomFunction(fShape)
vis=numpy.ones([4,2])
vis[0,0]=0
vis[0,1]=1
vis[1,0]=1
vis[1,1]=2
fid = gm.addFunction(f)
gm.addFactor(fid,[0,1,2])
assert gm[0][0,0,0]==f[0,0,0]
assert gm[0][1,0,0]==f[1,0,0]
assert gm[0][0,1,0]==f[0,1,0]
assert gm[0][1,1,0]==f[1,1,0]
def test_add_multiple_functions_order2(self):
nVar = 4
nLabels = 2
gm = opengm.gm([nLabels] * nVar)
# add functionS
fShape =[4,2,2]
f = opengm.randomFunction(fShape)
vis=numpy.ones([4,2])
vis[0,0]=0
vis[0,1]=1
vis[1,0]=1
vis[1,1]=2
vis[2,0]=2
vis[2,1]=3
vis[3,0]=0
vis[3,1]=3
fids = gm.addFunctions(f)
gm.addFactors(fids,vis)
assert gm[1][0,0]==f[1,0,0]
assert gm[1][1,1]==f[1,1,1]
assert gm[1][1,0]==f[1,1,0]
assert gm[1][0,1]==f[1,0,1]
for x in xrange(4):
assert gm[x][0,0]==f[x,0,0]
assert gm[x][1,1]==f[x,1,1]
assert gm[x][1,0]==f[x,1,0]
assert gm[x][0,1]==f[x,0,1]
def test_add_multiple_functions_with_map(self):
gm = opengm.gm([2] * 10)
def add_a_function(w):
return gm.addFunction(opengm.differenceFunction(shape=[2, 2],
weight=w))
weights = [0.2, 0.3, 0.4]
fidList = map(add_a_function, weights)
assert isinstance(fidList, list)
assert len(fidList) == len(weights)
gm.addFactors(fidList, [[0, 1], [1, 2], [3, 4]])
def test_evaluate(self):
numberOfStates = [2, 2, 2, 2]
gm = opengm.adder.GraphicalModel(numberOfStates)
f1 = numpy.ones(2, dtype=numpy.float64).reshape(2)
f2 = numpy.ones(4, dtype=numpy.float64).reshape(2, 2)
for i in range(3):
gm.addFactor(gm.addFunction(f1), [i])
for i in range(2):
gm.addFactor(gm.addFunction(f2), [i, i + 1])
sequenceList = [0, 1, 0, 1]
valueList = gm.evaluate(sequenceList)
assert(float(valueList) == float(gm.numberOfFactors))
sequenceNumpy = numpy.array([0, 1, 0, 1], dtype=numpy.uint64)
valueNumpy = gm.evaluate(sequenceNumpy)
assert(float(valueNumpy) == float(gm.numberOfFactors))
assert(float(valueNumpy) == float(valueList))
def test_variables_generator(self):
nos = [2, 3, 4, 5, 6]
gm = opengm.adder.GraphicalModel(nos)
truevis = [0, 1, 2, 3, 4]
myvis = [vi for vi in gm.variables()]
assert (len(truevis) == len(myvis))
for a, b in zip(truevis, myvis):
assert a == b
truevis = [2]
myvis = [vi for vi in gm.variables(labels=4)]
assert (len(truevis) == len(myvis))
for a, b in zip(truevis, myvis):
assert a == b
truevis = [1, 2, 3, 4]
myvis = [vi for vi in gm.variables(minLabels=3)]
assert (len(truevis) == len(myvis))
for a, b in zip(truevis, myvis):
assert a == b
truevis = [0, 1, 2]
myvis = [vi for vi in gm.variables(maxLabels=4)]
assert (len(truevis) == len(myvis))
for a, b in zip(truevis, myvis):
assert a == b
truevis = [1, 2]
myvis = [vi for vi in gm.variables(minLabels=3, maxLabels=4)]
assert (len(truevis) == len(myvis))
for a, b in zip(truevis, myvis):
assert a == b
def test_factor_generators(self):
numberOfStates = [2, 2, 2, 2, 2]
gm = opengm.adder.GraphicalModel(numberOfStates)
functions = [numpy.ones(2, dtype=numpy.float64).reshape(2),
numpy.ones(4, dtype=numpy.float64).reshape(2, 2),
numpy.ones(8, dtype=numpy.float64).reshape(2, 2, 2),
numpy.ones(16, dtype=numpy.float64).reshape(2, 2, 2, 2),
numpy.ones(32,
dtype=numpy.float64).reshape(2, 2, 2, 2, 2)]
for f in functions:
fid = gm.addFunction(f)
vis = [i for i in xrange(len(f.shape))]
gm.addFactor(fid, vis)
assert gm.numberOfVariables == 5
# test generators
for i, factor in enumerate(gm.factors(), start=1):
assert factor.numberOfVariables == i
for i, fId in enumerate(gm.factorIds()):
assert fId == i
for i, (factor, fId) in enumerate(gm.factorsAndIds()):
assert fId == i
assert factor.numberOfVariables == i + 1
# with order
for order in xrange(1, 6):
gens = []
gens.append(gm.factors(order=order))
gens.append(gm.factorIds(order=order))
gens.append(gm.factorsAndIds(order=order))
for gen in gens:
assert lenOfGen(gen) == 1
gens = []
gens.append(gm.factors(order=order))
gens.append(gm.factorIds(order=order))
gens.append(gm.factorsAndIds(order=order))
for factor in gens[0]:
assert factor.numberOfVariables == order
for fId in gens[1]:
assert gm[fId].numberOfVariables == order
for factor, fId in gens[2]:
assert factor.numberOfVariables == order
assert gm[fId].numberOfVariables == order
# with order
for order in xrange(1, 6):
orderSets = [set(), set(), set()]
gens = [gm.factors(minOrder=order), gm.factorIds(
minOrder=order), gm.factorsAndIds(minOrder=order)]
assert(len(gens) == 3)
for gen in gens:
print "len assert"
assert lenOfGen(gen) == 6 - order
gens = [gm.factors(minOrder=order), gm.factorIds(
minOrder=order), gm.factorsAndIds(minOrder=order)]
for factor in gens[0]:
assert factor.numberOfVariables >= order
orderSets[0].add(factor.numberOfVariables)
for fId in gens[1]:
assert gm[fId].numberOfVariables >= order
orderSets[1].add(gm[fId].numberOfVariables)
for factor, fId in gens[2]:
assert factor.numberOfVariables >= order
assert gm[fId].numberOfVariables >= order
orderSets[2].add(factor.numberOfVariables)
for oset in orderSets:
assert len(oset) == 6 - order
for order in xrange(2, 6):
orderSets = [set(), set(), set()]
gens = [gm.factors(maxOrder=order), gm.factorIds(
maxOrder=order), gm.factorsAndIds(maxOrder=order)]
assert(len(gens) == 3)
for gen in gens:
print "len assert"
assert lenOfGen(gen) == order
gens = [gm.factors(maxOrder=order), gm.factorIds(
maxOrder=order), gm.factorsAndIds(maxOrder=order)]
for factor in gens[0]:
assert factor.numberOfVariables <= order
orderSets[0].add(factor.numberOfVariables)
for fId in gens[1]:
assert gm[fId].numberOfVariables <= order
orderSets[1].add(gm[fId].numberOfVariables)
for factor, fId in gens[2]:
assert factor.numberOfVariables <= order
assert gm[fId].numberOfVariables <= order
orderSets[2].add(factor.numberOfVariables)
for oset in orderSets:
assert len(oset) == order
for order in xrange(1, 6):
orderSets = [set(), set(), set()]
gens = [gm.factors(minOrder=order, maxOrder=4),
gm.factorIds(minOrder=order, maxOrder=4),
gm.factorsAndIds(minOrder=order, maxOrder=4)]
assert(len(gens) == 3)
for gen in gens:
print "len assert"
assert lenOfGen(gen) == 6 - order - 1
gens = [gm.factors(minOrder=order, maxOrder=4),
gm.factorIds(minOrder=order, maxOrder=4),
gm.factorsAndIds(minOrder=order, maxOrder=4)]
for factor in gens[0]:
assert (factor.numberOfVariables >= order
and factor.numberOfVariables <= 4)
orderSets[0].add(factor.numberOfVariables)
for fId in gens[1]:
assert gm[fId].numberOfVariables >= order and gm[
fId].numberOfVariables <= 4
orderSets[1].add(gm[fId].numberOfVariables)
for factor, fId in gens[2]:
assert(factor.numberOfVariables >= order
and factor.numberOfVariables <= 4)
assert gm[fId].numberOfVariables >= order and gm[
fId].numberOfVariables <= 4
orderSets[2].add(factor.numberOfVariables)
for oset in orderSets:
assert len(oset) == 6 - order - 1
class TestFactor:
def test_factor_shape(self):
numberOfStates = [2, 3, 4]
gm = opengm.adder.GraphicalModel(numberOfStates)
f1 = numpy.ones(6 * 4, numpy.float64).reshape(2, 3, 4)
idf = gm.addFunction(f1)
gm.addFactor(idf, (0, 1, 2))
nf1 = gm[0].__array__() # not used?
for i in range(3):
assert(gm[0].shape[i] == numberOfStates[i])
assert(gm[0].shape.__array__()[i] == numberOfStates[i])
assert(gm[0].shape.__list__()[i] == numberOfStates[i])
assert(gm[0].shape.__tuple__()[i] == numberOfStates[i])
def test_factor_vi(self):
numberOfStates = [2, 3, 4]
gm = opengm.adder.GraphicalModel(numberOfStates)
f1 = numpy.ones(6 * 4, numpy.float64).reshape(2, 3, 4)
idf = gm.addFunction(f1)
gm.addFactor(idf, (0, 1, 2))
nf1 = gm[0].__array__() # not used?
for i in range(3):
assert(gm[0].variableIndices[i] == i)
assert(gm[0].variableIndices.__array__()[i] == i)
assert(gm[0].variableIndices.__list__()[i] == i)
assert(gm[0].variableIndices.__tuple__()[i] == i)
def test_factor_properties(self):
numberOfStates = [2, 2, 2, 2]
gm = opengm.adder.GraphicalModel(numberOfStates)
assert(gm.space().numberOfVariables == 4)
assert(gm.numberOfFactors == 0)
f1 = numpy.array([2, 3], numpy.float64)
f2 = numpy.array([1, 2, 3, 4], numpy.float64).reshape(2, 2)
if1 = gm.addFunction(f1)
if2 = gm.addFunction(f2)
gm.addFactor(if1, (0,))
gm.addFactor(if2, (0, 1))
nf0 = gm[0].__array__()
nf1 = gm[1].__array__()
for i in range(f1.shape[0]):
assert(nf0[i] == gm[0][(i,)])
assert(nf0[i] == f1[i])
for i in range(f2.shape[0]):
for j in range(f2.shape[1]):
assert(nf1[i, j] == gm[1][(i, j)])
assert(nf1[i, j] == f2[i, j])
assert(gm[0].min() == 2)
assert(gm[0].max() == 3)
assert(gm[0].sum() == 5)
assert(gm[0].product() == 6)
assert(gm[0][(0,)] == 2)
assert(gm[0][(1,)] == 3)
assert(gm[1].min() == 1)
assert(gm[1].max() == 4)
assert(gm[1].sum() == 1 + 2 + 3 + 4)
assert(gm[1].product() == 1 * 2 * 3 * 4)
def genericSolverCheck(solverClass, params, gms, semiRings,checkPartial=False,checkMarginals=False,testPythonVisitor=True,testLpInterface=False):
class PyCallback(object):
def __init__(self):
self.inBegin=False
self.inEnd=False
self.inVisit=False
def begin(self,inference):
self.inBegin=True
def end(self,inference):
self.inEnd=True
def visit(self,inference):
self.inVisit=True
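    # PyCallback above is a minimal visitor used to verify that the begin,
    # visit and end callbacks fire; the loop below exercises every requested
    # (operator, accumulator) semiring on every model.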
for operator, accumulator in semiRings:
for gmGen in gms:
gm = gmGen[operator]
for param in params:
# start inference
solver = solverClass(gm=gm, accumulator=accumulator, parameter=param)
solver2 = solverClass(gm=gm, accumulator=accumulator, parameter=param)
if (testLpInterface==True):
c=0
for vi in range(gm.numberOfVariables):
nl = gm.numberOfLabels(vi)
for l in range(nl):
assert c==solver.lpNodeVariableIndex(vi,l)
c+=1
cv=c
for fi in range(gm.numberOfFactors):
if gm[fi].numberOfVariables>1:
s = gm[fi].size
for l in range(nl):
assert solver.lpFactorVariableIndex(fi,s)>0 or cv==0
sw = opengm.shapeWalker(gm[fi].shape)
for c in sw:
assert solver.lpFactorVariableIndex(fi,c)>0 or cv==0
solver2.addConstraint(lpVariableIndices=[0,1],coefficients=[1,1],lowerBound=0.0,upperBound=1.0)
solver2.addConstraints(lpVariableIndices=[ [0,1],[0,2] ,[1,2]],coefficients=[ [1,1],[2,2],[1,2]],lowerBounds=[0,0,0],upperBounds=[1,1,1])
solver.infer()
                arg = solver.arg()  # currently unused; kept to exercise the call
value = solver.value()
bound = solver.bound()
if testPythonVisitor==True:
solver = solverClass(gm=gm, accumulator=accumulator, parameter=param)
callback=PyCallback()
pvisitor=solver.pythonVisitor(callback,1)
solver.infer(pvisitor)
assert callback.inBegin == True
assert callback.inEnd == True
if checkPartial :
pOptimal = solver.partialOptimality()
assert len(pOptimal)==gm.numberOfVariables
#assert len(numpy.where(pOptimal==True)[0]) == gm.numberOfVariables
if checkMarginals :
visRange=numpy.arange(gm.numberOfVariables)
marginal = solver.marginals(visRange)
assert marginal.shape[0]==gm.numberOfVariables
assert marginal.shape[1]==gm.numberOfLabels(0)
fis1 = gm.factorSubset(order=1).factorIndices
fis2 = gm.factorSubset(order=2).factorIndices
assert len(fis1)!=0
assert len(fis2)!=0
factorMarginal1 = solver.factorMarginals(fis1)
assert factorMarginal1.ndim==2
assert factorMarginal1.shape[0]==len(fis1)
assert factorMarginal1.shape[1]==gm.numberOfLabels(0)
factorMarginal2 = solver.factorMarginals(fis2)
assert factorMarginal2.ndim==3
assert factorMarginal2.shape[0]==len(fis2)
assert factorMarginal2.shape[1]==gm.numberOfLabels(0)
assert factorMarginal2.shape[2]==gm.numberOfLabels(0)
class Test_Inference():
def __init__(self):
self.gridGm = {
'adder': generate_grid(dimx=2, dimy=2, labels=2, beta1=0.1,
beta2=0.2, operator='adder'),
'multiplier': generate_grid(dimx=2, dimy=2, labels=2, beta1=0.1,
beta2=0.2, operator='multiplier'),
}
self.gridGm3 = {
'adder': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.1,
beta2=0.2, operator='adder'),
'multiplier': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.1,
beta2=0.2, operator='multiplier'),
}
self.gridGm30 = {
'adder': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.0,
beta2=0.2, operator='adder'),
'multiplier': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.0,
beta2=0.2, operator='multiplier'),
}
self.chainGm = {
'adder': generate_grid(dimx=4, dimy=1, labels=2, beta1=0.1,
beta2=0.2, operator='adder'),
'multiplier': generate_grid(dimx=4, dimy=1, labels=2, beta1=0.1,
beta2=0.2, operator='multiplier')
}
self.chainGm3 = {
'adder': generate_grid(dimx=4, dimy=1, labels=3, beta1=0.1,
beta2=0.2, operator='adder'),
'multiplier': generate_grid(dimx=4, dimy=1, labels=3, beta1=0.1,
beta2=0.2, operator='multiplier')
}
self.mcGm={
'adder' : generate_mc_grid(dimx=5,dimy=5,operator='adder'),
'multiplier' : generate_mc_grid(dimx=5,dimy=5,operator='multiplier')
}
self.all = [('adder', 'minimizer'), ('adder', 'maximizer'), (
'multiplier', 'minimizer'), ('multiplier', 'maximizer')]
self.minSum = [('adder', 'minimizer')]
self.minSumMaxSum = [('adder', 'minimizer'),('adder', 'maximizer')]
self.minSumMaxProd = [('adder', 'minimizer'), (
'multiplier', 'maximizer')]
self.allAndIntegrator= [('adder', 'minimizer'), ('adder', 'maximizer'),
('multiplier', 'minimizer'), ('multiplier', 'maximizer'), ('multiplier','integrator')]
def test_bruteforce(self):
solverClass = opengm.inference.Bruteforce
params = [None, opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.all)
def test_astar_fast(self):
solverClass = opengm.inference.AStar
params = [None, opengm.InfParam(heuristic='fast')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.all)
def test_icm(self):
solverClass = opengm.inference.Icm
params = [None, opengm.InfParam(moveType='variable'), opengm.InfParam(
moveType='factor'), opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.all)
def test_lazyflipper(self):
solverClass = opengm.inference.LazyFlipper
params = [None, opengm.InfParam(
maxSubgraphSize=2), opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.all)
def test_loc(self):
if opengm.configuration.withAd3:
solverClass = opengm.inference.Loc
params = [None]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSumMaxSum)
def test_dualdecompostion_subgradient(self):
solverClass = opengm.inference.DualDecompositionSubgradient
params = [opengm.InfParam()]
try:
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.gridGm3, self.chainGm,
self.chainGm3],
semiRings=self.minSum)
except RuntimeError as detail:
raise RuntimeError("Error In C++ Impl. of "
"DualDecompositionSubgradient:\n\nReason: %s"
% (str(detail),))
def test_dualdecompostion_subgradient_dynamic_programming(self):
solverClass = opengm.inference.DualDecompositionSubgradient
params = [opengm.InfParam(
subInference='dynamic-programming', subInfParam=opengm.InfParam()),
opengm.InfParam(subInference='dynamic-programming',
decompositionId='tree',
subInfParam=opengm.InfParam())
]
try:
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.gridGm3, self.chainGm,
self.chainGm3],
semiRings=self.minSum)
except RuntimeError as detail:
raise RuntimeError("Error In C++ Impl. of "
"DualDecompositionSubgradient:\n\nReason: %s"
% (str(detail),))
"""
def test_dualdecompostion_subgradient_graph_cut(self):
solverClass = opengm.inference.DualDecompositionSubgradient
params = [opengm.InfParam(subInference='graph-cut',
decompositionId='blocks',
subInfParam=opengm.InfParam())]
try:
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm],
semiRings=self.minSum)
except RuntimeError as detail:
raise RuntimeError("Error In C++ Impl. of "
"DualDecompositionSubgradient:\n\nReason: %s" %
(str(detail),))
"""
#def test_gibbs(self):
# solverClass = opengm.inference.Gibbs
# params = [opengm.InfParam(steps=10000)]
# genericSolverCheck(solverClass, params=params,
# gms=[self.gridGm, self.chainGm, self.gridGm3,
# self.chainGm3],
# semiRings=self.minSumMaxProd)
def test_bp(self):
solverClass = opengm.inference.BeliefPropagation
params = [opengm.InfParam(steps=10)]
genericSolverCheck(solverClass,
params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,self.chainGm3],
semiRings=self.allAndIntegrator,checkMarginals=True)
def test_trwbp(self):
solverClass = opengm.inference.TreeReweightedBp
params = [opengm.InfParam(steps=10)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.allAndIntegrator,checkMarginals=True)
def test_trws_external(self):
if opengm.configuration.withTrws:
solverClass = opengm.inference.TrwsExternal
params = [None, opengm.InfParam(),
opengm.InfParam(steps=100, energyType='view'),
opengm.InfParam(steps=1, energyType='tables')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_graphcut(self):
solverClass = opengm.inference.GraphCut
params = [None, opengm.InfParam(),
opengm.InfParam(minStCut='boost-kolmogorov'),
opengm.InfParam(minStCut='push-relabel')]
if opengm.configuration.withMaxflow:
params.append(opengm.InfParam(minStCut='kolmogorov'))
genericSolverCheck(solverClass, params=params,gms=[self.gridGm, self.chainGm], semiRings=self.minSum,testPythonVisitor=False)
def test_graphcut_maxflow_ibfs(self):
if opengm.configuration.withMaxflowIbfs :
solverClass = opengm.inference.GraphCut
params=[ opengm.InfParam(minStCut='ibfs') ]
genericSolverCheck(solverClass, params=params,gms=[self.gridGm, self.chainGm], semiRings=self.minSum,testPythonVisitor=False)
def test_mqpbo(self):
if opengm.configuration.withQpbo:
solverClass = opengm.inference.Mqpbo
params = [opengm.InfParam(useKovtunsMethod=True)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm3, self.chainGm3],
semiRings=self.minSum, checkPartial = True,testPythonVisitor=False)
def test_fastPd(self):
pass
#if opengm.configuration.withFastPd:
# solverClass = opengm.inference.FastPd
# params = [ None, opengm.InfParam(steps=1000)]
# genericSolverCheck(solverClass, params=params,
# gms=[self.gridGm3],
# semiRings=self.minSum,testPythonVisitor=False)
def test_ad3(self):
if opengm.configuration.withAd3:
solverClass = opengm.inference.Ad3
params = [ None, opengm.InfParam(steps=1000,solverType='ad3_ilp')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm3],
semiRings=self.minSumMaxSum,testPythonVisitor=False)
def test_qpbo_external(self):
if opengm.configuration.withQpbo:
solverClass = opengm.inference.QpboExternal
params = [
opengm.InfParam(strongPersistency=True),
opengm.InfParam(useImproveing=True)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm],
semiRings=self.minSum, checkPartial = True,testPythonVisitor=False)
def test_alpha_beta_swap(self):
solverClass = opengm.inference.AlphaBetaSwap
params = [None, opengm.InfParam(steps=10),
opengm.InfParam(minStCut='boost-kolmogorov', steps=10),
opengm.InfParam(minStCut='push-relabel', steps=10)]
if opengm.configuration.withMaxflow:
params.append(opengm.InfParam(minStCut='kolmogorov', steps=10))
genericSolverCheck(solverClass, params=params, gms=[
self.gridGm3, self.chainGm3], semiRings=self.minSum)
def test_alpha_expansion(self):
solverClass = opengm.inference.AlphaExpansion
params = [None, opengm.InfParam(steps=10),
opengm.InfParam(minStCut='boost-kolmogorov', steps=10),
opengm.InfParam(minStCut='push-relabel', steps=10)]
if opengm.configuration.withMaxflow:
params.append(opengm.InfParam(minStCut='kolmogorov', steps=10))
genericSolverCheck(solverClass, params=params, gms=[
self.gridGm3, self.chainGm3], semiRings=self.minSum)
def test_alpha_expansion_fusion(self):
if opengm.configuration.withQpbo:
solverClass = opengm.inference.AlphaExpansionFusion
params = [None, opengm.InfParam(steps=10)]
genericSolverCheck(solverClass, params=params, gms=[
self.gridGm3, self.chainGm3], semiRings=self.minSum)
def test_partition_move(self):
solverClass = opengm.inference.PartitionMove
params = [None, opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.mcGm],
semiRings=self.minSum,testPythonVisitor=False)
def test_multicut(self):
if opengm.configuration.withCplex:
solverClass = opengm.inference.Multicut
params = [None, opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.mcGm],
semiRings=self.minSum,testPythonVisitor=False)
"""
def test_lpcplex(self):
if opengm.configuration.withCplex:
solverClass = opengm.inference.LpCplex
params = [None, opengm.InfParam(),
opengm.InfParam(integerConstraint=True),
opengm.InfParam(integerConstraint=False)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
def test_lpcplex2(self):
if False and opengm.configuration.withCplex:
solverClass = opengm.inference.LpCplex2
params = [None, opengm.InfParam(),
opengm.InfParam(integerConstraint=True),
opengm.InfParam(integerConstraint=False)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
def test_gurobi(self):
if False and opengm.configuration.withGurobi:
solverClass = opengm.inference.LpGurobi
params = [None, opengm.InfParam(),
opengm.InfParam(integerConstraint=True),
opengm.InfParam(integerConstraint=False)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
"""
def test_libdai_bp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.BeliefPropagationLibDai
params = [None, opengm.InfParam(), opengm.InfParam(
updateRule='parall'), opengm.InfParam(updateRule='seqrnd')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_fractional_bp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.FractionalBpLibDai
params = [None, opengm.InfParam(), opengm.InfParam(
updateRule='parall'), opengm.InfParam(updateRule='seqrnd')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_trw_bp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.TreeReweightedBpLibDai
params = [None, opengm.InfParam(),
opengm.InfParam(updateRule='parall'),
opengm.InfParam(updateRule='seqrnd', ntrees=2)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_gibbs(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.GibbsLibDai
params = [None, opengm.InfParam(),
opengm.InfParam(steps=100)]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_junction_tree(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.JunctionTreeLibDai
params = [None, opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_decimation(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.DecimationLibDai
params = [None, opengm.InfParam()]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_decimation_bp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.DecimationLibDai
params = [opengm.InfParam(subInference='bp')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_decimation_trwbp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.DecimationLibDai
params = [opengm.InfParam(subInference='trwBp')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_decimation_fractional_bp(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.DecimationLibDai
params = [opengm.InfParam(subInference='fractionalBp')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
def test_libdai_decimation_gibbs(self):
if opengm.configuration.withLibdai:
solverClass = opengm.inference.DecimationLibDai
params = [opengm.InfParam(subInference='gibbs')]
genericSolverCheck(solverClass, params=params,
gms=[self.gridGm, self.chainGm, self.gridGm3,
self.chainGm3],
semiRings=self.minSum,testPythonVisitor=False)
class Test_Learning:
def __init__(self):
self.__nWeights = 12
self.__shape = [10,10]
# utility functions
def __makeGt(self, shape):
gt=numpy.ones(shape,dtype='uint8')
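        # label the top half of the image 0 and the bottom half 1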
gt[0:shape[0]/2,:] = 0
return gt
def __create_dataset(self, functionType, numModels=1):
numWeights = 4
dataset = opengm.learning.createDataset(numWeights=numWeights)
weights = dataset.getWeights()
gt = self.__makeGt(self.__shape)
numVars = self.__shape[0] * self.__shape[1]
numLabels = 2
uWeightIds = numpy.array([[0, 1]], dtype='uint64')
bWeightIds = numpy.array([2, 3], dtype='uint64')
for m in range(numModels):
gm = opengm.gm(numpy.ones(numVars) * numLabels)
# create noisy data
random = (numpy.random.rand(*gt.shape)-0.5)*0.3
noisyGt = random + gt
# add unarias
for x in range(self.__shape[0]):
for y in range(self.__shape[1]):
# use noised GT input, and a constant feature
uFeat = numpy.array([[noisyGt[x,y], 1]], dtype='float64')
lu = opengm.learning.lUnaryFunction(weights=weights,numberOfLabels=numLabels,
features=uFeat, weightIds=uWeightIds)
fid = gm.addFunction(lu)
facIndex = gm.addFactor(fid, y+x*self.__shape[1])
# add pairwise
for x in range(self.__shape[0]):
for y in range(self.__shape[1]):
if x+1 < self.__shape[0]:
gradientMag = (noisyGt[x+1,y] - noisyGt[x,y])**2
bFeat = numpy.array([gradientMag, 1], dtype='float64')
pf = opengm.LPottsFunction(weights=weights,numberOfLabels=numLabels, features=bFeat, weightIds=bWeightIds)
fid= gm.addFunction(pf)
gm.addFactor(fid, [y+x*self.__shape[1], y+(x+1)*self.__shape[1]])
if y+1 < self.__shape[1]:
gradientMag = (noisyGt[x,y+1] - noisyGt[x,y])**2
bFeat = numpy.array([gradientMag, 1], dtype='float64')
pf = opengm.LPottsFunction(weights=weights,numberOfLabels=numLabels, features=bFeat, weightIds=bWeightIds)
fid= gm.addFunction(pf)
gm.addFactor(fid, [y+x*self.__shape[1], (y+1)+x*self.__shape[1]])
# store GM and its GT
dataset.pushBackInstance(gm, gt.reshape([-1]).astype(opengm.label_type))
return dataset
def __create_weights(self, numWeights):
weightVals = numpy.ones(numWeights)
weights = opengm.learning.Weights(weightVals)
return weights
# def __create_loss(self):
def __generic_learner_test(self, learner):
if opengm.configuration.withTrws:
learner.learn(infCls=opengm.inference.TrwsExternal, parameter=opengm.InfParam())
elif opengm.configuration.withCplex:
learner.learn(infCls=opengm.inference.LpCplex, parameter=opengm.InfParam())
else:
learner.learn(infCls=opengm.inference.Icm, parameter=opengm.InfParam())
# tests
def test_weights(self):
weights = self.__create_weights(self.__nWeights)
assert(len(weights) == self.__nWeights)
value = 15
weights[3] = value
assert(weights[3] == value)
def test_dataset(self):
ds = self.__create_dataset('potts', 1)
assert(ds.getNumberOfWeights() == 4)
assert(ds.getNumberOfModels() == 1)
def test_dataset_serialization(self):
import tempfile
import shutil
        ds = self.__create_dataset('potts')  # the functionType argument is currently unused
# TODO: create temp directory
temp_path = tempfile.mkdtemp()
prefix = 'test'
ds.save(temp_path, prefix)
loaded_ds = opengm.learning.DatasetWithFlexibleLoss(0)
loaded_ds.load(temp_path, prefix)
shutil.rmtree(temp_path)
assert(ds.getNumberOfWeights() == loaded_ds.getNumberOfWeights())
assert(ds.getNumberOfModels() == loaded_ds.getNumberOfModels())
assert(ds.getModel(0).numberOfVariables == loaded_ds.getModel(0).numberOfVariables)
assert(ds.getModel(0).numberOfFactors == loaded_ds.getModel(0).numberOfFactors)
if __name__ == "__main__":
t = Test_Inference()
t.test_trwbp()
| mit | 814,070,450,038,851,300 | 39.394595 | 157 | 0.531877 | false |
fengbeihong/tempest_automate_ironic | tempest/scenario/test_load_balancer_basic.py | 4 | 13056 | # Copyright 2014 Mirantis.inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import time
import urllib2
from tempest.common import commands
from tempest import config
from tempest import exceptions
from tempest.scenario import manager
from tempest.services.network import resources as net_resources
from tempest import test
config = config.CONF  # replace the module reference with the parsed CONF object
class TestLoadBalancerBasic(manager.NetworkScenarioTest):
"""
This test checks basic load balancing.
The following is the scenario outline:
1. Create an instance
2. SSH to the instance and start two servers
    3. Create a load balancer with two members using the ROUND_ROBIN algorithm
       and associate the VIP with a floating ip
4. Send NUM requests to the floating ip and check that they are shared
between the two servers.
"""
@classmethod
def skip_checks(cls):
super(TestLoadBalancerBasic, cls).skip_checks()
cfg = config.network
if not test.is_extension_enabled('lbaas', 'network'):
msg = 'LBaaS Extension is not enabled'
raise cls.skipException(msg)
if not (cfg.tenant_networks_reachable or cfg.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
@classmethod
def resource_setup(cls):
super(TestLoadBalancerBasic, cls).resource_setup()
cls.servers_keypairs = {}
cls.members = []
cls.floating_ips = {}
cls.server_ips = {}
cls.port1 = 80
cls.port2 = 88
cls.num = 50
def setUp(self):
super(TestLoadBalancerBasic, self).setUp()
self.server_ips = {}
self.server_fixed_ips = {}
self._create_security_group_for_test()
self._set_net_and_subnet()
def _set_net_and_subnet(self):
"""
Query and set appropriate network and subnet attributes to be used
for the test. Existing tenant networks are used if they are found.
        The configured private network and its associated subnet are used as a
        fallback in the absence of tenant networking.
"""
try:
tenant_net = self._list_networks(tenant_id=self.tenant_id)[0]
except IndexError:
tenant_net = None
if tenant_net:
tenant_subnet = self._list_subnets(tenant_id=self.tenant_id)[0]
self.subnet = net_resources.DeletableSubnet(
client=self.network_client,
**tenant_subnet)
self.network = tenant_net
else:
self.network = self._get_network_by_name(
config.compute.fixed_network_name)
# TODO(adam_g): We are assuming that the first subnet associated
# with the fixed network is the one we want. In the future, we
# should instead pull a subnet id from config, which is set by
# devstack/admin/etc.
subnet = self._list_subnets(network_id=self.network['id'])[0]
self.subnet = net_resources.AttributeDict(subnet)
def _create_security_group_for_test(self):
self.security_group = self._create_security_group(
tenant_id=self.tenant_id)
self._create_security_group_rules_for_port(self.port1)
self._create_security_group_rules_for_port(self.port2)
def _create_security_group_rules_for_port(self, port):
rule = {
'direction': 'ingress',
'protocol': 'tcp',
'port_range_min': port,
'port_range_max': port,
}
self._create_security_group_rule(
secgroup=self.security_group,
tenant_id=self.tenant_id,
**rule)
def _create_server(self, name):
keypair = self.create_keypair()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'networks': [
{'uuid': self.network['id']},
],
'key_name': keypair['name'],
'security_groups': security_groups,
}
net_name = self.network['name']
server = self.create_server(name=name, create_kwargs=create_kwargs)
self.servers_keypairs[server['id']] = keypair
if (config.network.public_network_id and not
config.network.tenant_networks_reachable):
public_network_id = config.network.public_network_id
floating_ip = self.create_floating_ip(
server, public_network_id)
self.floating_ips[floating_ip] = server
self.server_ips[server['id']] = floating_ip.floating_ip_address
else:
self.server_ips[server['id']] =\
server['addresses'][net_name][0]['addr']
self.server_fixed_ips[server['id']] =\
server['addresses'][net_name][0]['addr']
self.assertTrue(self.servers_keypairs)
return server
def _create_servers(self):
for count in range(2):
self._create_server(name=("server%s" % (count + 1)))
self.assertEqual(len(self.servers_keypairs), 2)
def _start_servers(self):
"""
        Start two backends:
        1. SSH to the instance
        2. Start two HTTP backends listening on ports 80 and 88 respectively
"""
for server_id, ip in self.server_ips.iteritems():
private_key = self.servers_keypairs[server_id]['private_key']
server_name = self.servers_client.get_server(server_id)['name']
username = config.scenario.ssh_user
ssh_client = self.get_remote_client(
server_or_ip=ip,
private_key=private_key)
# Write a backend's response into a file
resp = ('echo -ne "HTTP/1.1 200 OK\r\nContent-Length: 7\r\n'
'Connection: close\r\nContent-Type: text/html; '
'charset=UTF-8\r\n\r\n%s"; cat >/dev/null')
with tempfile.NamedTemporaryFile() as script:
script.write(resp % server_name)
script.flush()
with tempfile.NamedTemporaryFile() as key:
key.write(private_key)
key.flush()
commands.copy_file_to_host(script.name,
"/tmp/script1",
ip,
username, key.name)
            # Start netcat in a loop so that a fresh backend process answers every request
start_server = ('while true; do '
'sudo nc -ll -p %(port)s -e sh /tmp/%(script)s; '
'done &')
cmd = start_server % {'port': self.port1,
'script': 'script1'}
ssh_client.exec_command(cmd)
if len(self.server_ips) == 1:
with tempfile.NamedTemporaryFile() as script:
script.write(resp % 'server2')
script.flush()
with tempfile.NamedTemporaryFile() as key:
key.write(private_key)
key.flush()
commands.copy_file_to_host(script.name,
"/tmp/script2", ip,
username, key.name)
cmd = start_server % {'port': self.port2,
'script': 'script2'}
ssh_client.exec_command(cmd)
def _check_connection(self, check_ip, port=80):
def try_connect(ip, port):
try:
resp = urllib2.urlopen("http://{0}:{1}/".format(ip, port))
if resp.getcode() == 200:
return True
return False
except IOError:
return False
except urllib2.HTTPError:
return False
timeout = config.compute.ping_timeout
start = time.time()
while not try_connect(check_ip, port):
if (time.time() - start) > timeout:
message = "Timed out trying to connect to %s" % check_ip
raise exceptions.TimeoutException(message)
def _create_pool(self):
"""Create a pool with ROUND_ROBIN algorithm."""
self.pool = super(TestLoadBalancerBasic, self)._create_pool(
lb_method='ROUND_ROBIN',
protocol='HTTP',
subnet_id=self.subnet.id)
self.assertTrue(self.pool)
def _create_members(self):
"""
Create two members.
In case there is only one server, create both members with the same ip
but with different ports to listen on.
"""
for server_id, ip in self.server_fixed_ips.iteritems():
if len(self.server_fixed_ips) == 1:
member1 = self._create_member(address=ip,
protocol_port=self.port1,
pool_id=self.pool.id)
member2 = self._create_member(address=ip,
protocol_port=self.port2,
pool_id=self.pool.id)
self.members.extend([member1, member2])
else:
member = self._create_member(address=ip,
protocol_port=self.port1,
pool_id=self.pool.id)
self.members.append(member)
self.assertTrue(self.members)
def _assign_floating_ip_to_vip(self, vip):
public_network_id = config.network.public_network_id
port_id = vip.port_id
floating_ip = self.create_floating_ip(vip, public_network_id,
port_id=port_id)
self.floating_ips.setdefault(vip.id, [])
self.floating_ips[vip.id].append(floating_ip)
# Check for floating ip status before you check load-balancer
self.check_floating_ip_status(floating_ip, "ACTIVE")
def _create_load_balancer(self):
self._create_pool()
self._create_members()
self.vip = self._create_vip(protocol='HTTP',
protocol_port=80,
subnet_id=self.subnet.id,
pool_id=self.pool.id)
self.vip.wait_for_status('ACTIVE')
if (config.network.public_network_id and not
config.network.tenant_networks_reachable):
self._assign_floating_ip_to_vip(self.vip)
self.vip_ip = self.floating_ips[
self.vip.id][0]['floating_ip_address']
else:
self.vip_ip = self.vip.address
# Currently the ovs-agent is not enforcing security groups on the
# vip port - see https://bugs.launchpad.net/neutron/+bug/1163569
# However the linuxbridge-agent does, and it is necessary to add a
# security group with a rule that allows tcp port 80 to the vip port.
self.network_client.update_port(
self.vip.port_id, security_groups=[self.security_group.id])
def _check_load_balancing(self):
"""
1. Send NUM requests on the floating ip associated with the VIP
2. Check that the requests are shared between the two servers
"""
self._check_connection(self.vip_ip)
self._send_requests(self.vip_ip, ["server1", "server2"])
def _send_requests(self, vip_ip, servers):
counters = dict.fromkeys(servers, 0)
for i in range(self.num):
try:
server = urllib2.urlopen("http://{0}/".format(vip_ip)).read()
counters[server] += 1
            # An HTTP error means the server failed, so don't increase the
            # success counter and keep trying to connect
except urllib2.HTTPError:
continue
# Assert that each member of the pool gets balanced at least once
for member, counter in counters.iteritems():
self.assertGreater(counter, 0, 'Member %s never balanced' % member)
@test.idempotent_id('c0c6f1ca-603b-4509-9c0f-2c63f0d838ee')
@test.services('compute', 'network')
def test_load_balancer_basic(self):
self._create_server('server1')
self._start_servers()
self._create_load_balancer()
self._check_load_balancing()
| apache-2.0 | 7,608,615,314,487,169,000 | 39.9279 | 79 | 0.559053 | false |
prospwro/odoo | addons/hr_evaluation/__init__.py | 432 | 1084 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_evaluation
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 930,902,949,428,508,300 | 40.692308 | 78 | 0.619004 | false |
yiannist/ganeti | test/py/ganeti.locking_unittest.py | 9 | 34568 | #!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2010 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for unittesting the locking module"""
import os
import unittest
import time
import Queue
import threading
import random
import gc
import itertools
from ganeti import constants
from ganeti import locking
from ganeti import errors
from ganeti import utils
from ganeti import compat
from ganeti import objects
from ganeti import query
import testutils
# This is used to test the ssynchronized decorator.
# Since it's passed as input to a decorator it must be declared as a global.
_decoratorlock = locking.SharedLock("decorator lock")
#: List for looping tests
ITERATIONS = range(8)
def _Repeat(fn):
"""Decorator for executing a function many times"""
def wrapper(*args, **kwargs):
for i in ITERATIONS:
fn(*args, **kwargs)
return wrapper
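# Sleep for at least "duration" seconds, even if time.sleep wakes up early
# (e.g. when interrupted by a signal).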
def SafeSleep(duration):
start = time.time()
while True:
delay = start + duration - time.time()
if delay <= 0.0:
break
time.sleep(delay)
class _ThreadedTestCase(unittest.TestCase):
"""Test class that supports adding/waiting on threads"""
def setUp(self):
unittest.TestCase.setUp(self)
self.done = Queue.Queue(0)
self.threads = []
def _addThread(self, *args, **kwargs):
"""Create and remember a new thread"""
t = threading.Thread(*args, **kwargs)
self.threads.append(t)
t.start()
return t
def _waitThreads(self):
"""Wait for all our threads to finish"""
for t in self.threads:
t.join(60)
self.failIf(t.isAlive())
self.threads = []
class _ConditionTestCase(_ThreadedTestCase):
"""Common test case for conditions"""
def setUp(self, cls):
_ThreadedTestCase.setUp(self)
self.lock = threading.Lock()
self.cond = cls(self.lock)
def _testAcquireRelease(self):
self.assertFalse(self.cond._is_owned())
self.assertRaises(RuntimeError, self.cond.wait, None)
self.assertRaises(RuntimeError, self.cond.notifyAll)
self.cond.acquire()
self.assert_(self.cond._is_owned())
self.cond.notifyAll()
self.assert_(self.cond._is_owned())
self.cond.release()
self.assertFalse(self.cond._is_owned())
self.assertRaises(RuntimeError, self.cond.wait, None)
self.assertRaises(RuntimeError, self.cond.notifyAll)
def _testNotification(self):
def _NotifyAll():
self.done.put("NE")
self.cond.acquire()
self.done.put("NA")
self.cond.notifyAll()
self.done.put("NN")
self.cond.release()
self.cond.acquire()
self._addThread(target=_NotifyAll)
self.assertEqual(self.done.get(True, 1), "NE")
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.cond.wait(None)
self.assertEqual(self.done.get(True, 1), "NA")
self.assertEqual(self.done.get(True, 1), "NN")
self.assert_(self.cond._is_owned())
self.cond.release()
self.assertFalse(self.cond._is_owned())
class TestSingleNotifyPipeCondition(_ConditionTestCase):
"""SingleNotifyPipeCondition tests"""
def setUp(self):
_ConditionTestCase.setUp(self, locking.SingleNotifyPipeCondition)
def testAcquireRelease(self):
self._testAcquireRelease()
def testNotification(self):
self._testNotification()
def testWaitReuse(self):
self.cond.acquire()
self.cond.wait(0)
self.cond.wait(0.1)
self.cond.release()
def testNoNotifyReuse(self):
self.cond.acquire()
self.cond.notifyAll()
self.assertRaises(RuntimeError, self.cond.wait, None)
self.assertRaises(RuntimeError, self.cond.notifyAll)
self.cond.release()
class TestPipeCondition(_ConditionTestCase):
"""PipeCondition tests"""
def setUp(self):
_ConditionTestCase.setUp(self, locking.PipeCondition)
def testAcquireRelease(self):
self._testAcquireRelease()
def testNotification(self):
self._testNotification()
def _TestWait(self, fn):
threads = [
self._addThread(target=fn),
self._addThread(target=fn),
self._addThread(target=fn),
]
# Wait for threads to be waiting
for _ in threads:
self.assertEqual(self.done.get(True, 1), "A")
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.cond.acquire()
self.assertEqual(len(self.cond._waiters), 3)
self.assertEqual(self.cond._waiters, set(threads))
self.assertTrue(repr(self.cond).startswith("<"))
self.assertTrue("waiters=" in repr(self.cond))
# This new thread can't acquire the lock, and thus call wait, before we
# release it
self._addThread(target=fn)
self.cond.notifyAll()
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.cond.release()
# We should now get 3 W and 1 A (for the new thread) in whatever order
w = 0
a = 0
for i in range(4):
got = self.done.get(True, 1)
if got == "W":
w += 1
elif got == "A":
a += 1
else:
self.fail("Got %s on the done queue" % got)
self.assertEqual(w, 3)
self.assertEqual(a, 1)
self.cond.acquire()
self.cond.notifyAll()
self.cond.release()
self._waitThreads()
self.assertEqual(self.done.get_nowait(), "W")
self.assertRaises(Queue.Empty, self.done.get_nowait)
def testBlockingWait(self):
def _BlockingWait():
self.cond.acquire()
self.done.put("A")
self.cond.wait(None)
self.cond.release()
self.done.put("W")
self._TestWait(_BlockingWait)
def testLongTimeoutWait(self):
def _Helper():
self.cond.acquire()
self.done.put("A")
self.cond.wait(15.0)
self.cond.release()
self.done.put("W")
self._TestWait(_Helper)
def _TimeoutWait(self, timeout, check):
self.cond.acquire()
self.cond.wait(timeout)
self.cond.release()
self.done.put(check)
def testShortTimeoutWait(self):
self._addThread(target=self._TimeoutWait, args=(0.1, "T1"))
self._addThread(target=self._TimeoutWait, args=(0.1, "T1"))
self._waitThreads()
self.assertEqual(self.done.get_nowait(), "T1")
self.assertEqual(self.done.get_nowait(), "T1")
self.assertRaises(Queue.Empty, self.done.get_nowait)
def testZeroTimeoutWait(self):
self._addThread(target=self._TimeoutWait, args=(0, "T0"))
self._addThread(target=self._TimeoutWait, args=(0, "T0"))
self._addThread(target=self._TimeoutWait, args=(0, "T0"))
self._waitThreads()
self.assertEqual(self.done.get_nowait(), "T0")
self.assertEqual(self.done.get_nowait(), "T0")
self.assertEqual(self.done.get_nowait(), "T0")
self.assertRaises(Queue.Empty, self.done.get_nowait)
class TestSharedLock(_ThreadedTestCase):
"""SharedLock tests"""
def setUp(self):
_ThreadedTestCase.setUp(self)
self.sl = locking.SharedLock("TestSharedLock")
self.assertTrue(repr(self.sl).startswith("<"))
self.assertTrue("name=TestSharedLock" in repr(self.sl))
def testSequenceAndOwnership(self):
self.assertFalse(self.sl.is_owned())
self.sl.acquire(shared=1)
self.assert_(self.sl.is_owned())
self.assert_(self.sl.is_owned(shared=1))
self.assertFalse(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
self.sl.acquire()
self.assert_(self.sl.is_owned())
self.assertFalse(self.sl.is_owned(shared=1))
self.assert_(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
self.sl.acquire(shared=1)
self.assert_(self.sl.is_owned())
self.assert_(self.sl.is_owned(shared=1))
self.assertFalse(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
def testBooleanValue(self):
# semaphores are supposed to return a true value on a successful acquire
self.assert_(self.sl.acquire(shared=1))
self.sl.release()
self.assert_(self.sl.acquire())
self.sl.release()
def testDoubleLockingStoE(self):
self.sl.acquire(shared=1)
self.assertRaises(AssertionError, self.sl.acquire)
def testDoubleLockingEtoS(self):
self.sl.acquire()
self.assertRaises(AssertionError, self.sl.acquire, shared=1)
def testDoubleLockingStoS(self):
self.sl.acquire(shared=1)
self.assertRaises(AssertionError, self.sl.acquire, shared=1)
def testDoubleLockingEtoE(self):
self.sl.acquire()
self.assertRaises(AssertionError, self.sl.acquire)
# helper functions: called in a separate thread they acquire the lock, send
# their identifier on the done queue, then release it.
def _doItSharer(self):
try:
self.sl.acquire(shared=1)
self.done.put("SHR")
self.sl.release()
except errors.LockError:
self.done.put("ERR")
def _doItExclusive(self):
try:
self.sl.acquire()
self.done.put("EXC")
self.sl.release()
except errors.LockError:
self.done.put("ERR")
def _doItDelete(self):
try:
self.sl.delete()
self.done.put("DEL")
except errors.LockError:
self.done.put("ERR")
def testSharersCanCoexist(self):
self.sl.acquire(shared=1)
threading.Thread(target=self._doItSharer).start()
self.assert_(self.done.get(True, 1))
self.sl.release()
@_Repeat
def testExclusiveBlocksExclusive(self):
self.sl.acquire()
self._addThread(target=self._doItExclusive)
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "EXC")
@_Repeat
def testExclusiveBlocksDelete(self):
self.sl.acquire()
self._addThread(target=self._doItDelete)
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "DEL")
self.sl = locking.SharedLock(self.sl.name)
@_Repeat
def testExclusiveBlocksSharer(self):
self.sl.acquire()
self._addThread(target=self._doItSharer)
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "SHR")
@_Repeat
def testSharerBlocksExclusive(self):
self.sl.acquire(shared=1)
self._addThread(target=self._doItExclusive)
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "EXC")
@_Repeat
def testSharerBlocksDelete(self):
self.sl.acquire(shared=1)
self._addThread(target=self._doItDelete)
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "DEL")
self.sl = locking.SharedLock(self.sl.name)
@_Repeat
def testWaitingExclusiveBlocksSharer(self):
"""SKIPPED testWaitingExclusiveBlockSharer"""
return
self.sl.acquire(shared=1)
# the lock is acquired in shared mode...
self._addThread(target=self._doItExclusive)
# ...but now an exclusive is waiting...
self._addThread(target=self._doItSharer)
# ...so the sharer should be blocked as well
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
# The exclusive passed before
self.failUnlessEqual(self.done.get_nowait(), "EXC")
self.failUnlessEqual(self.done.get_nowait(), "SHR")
@_Repeat
def testWaitingSharerBlocksExclusive(self):
"""SKIPPED testWaitingSharerBlocksExclusive"""
return
self.sl.acquire()
# the lock is acquired in exclusive mode...
self._addThread(target=self._doItSharer)
# ...but now a sharer is waiting...
self._addThread(target=self._doItExclusive)
# ...the exclusive is waiting too...
self.assertRaises(Queue.Empty, self.done.get_nowait)
self.sl.release()
self._waitThreads()
# The sharer passed before
self.assertEqual(self.done.get_nowait(), "SHR")
self.assertEqual(self.done.get_nowait(), "EXC")
def testDelete(self):
self.sl.delete()
self.assertRaises(errors.LockError, self.sl.acquire)
self.assertRaises(errors.LockError, self.sl.acquire, shared=1)
self.assertRaises(errors.LockError, self.sl.delete)
def testDeleteTimeout(self):
self.assertTrue(self.sl.delete(timeout=60))
def testDeleteTimeoutFail(self):
ready = threading.Event()
finish = threading.Event()
def fn():
self.sl.acquire(shared=0)
ready.set()
finish.wait()
self.sl.release()
self._addThread(target=fn)
ready.wait()
    # Deleting a lock held in exclusive mode by another thread must fail
    # when a timeout is used
self.assertFalse(self.sl.delete(timeout=0.02))
finish.set()
self._waitThreads()
self.assertTrue(self.sl.delete())
self.assertRaises(errors.LockError, self.sl.acquire)
def testNoDeleteIfSharer(self):
self.sl.acquire(shared=1)
self.assertRaises(AssertionError, self.sl.delete)
@_Repeat
def testDeletePendingSharersExclusiveDelete(self):
self.sl.acquire()
self._addThread(target=self._doItSharer)
self._addThread(target=self._doItSharer)
self._addThread(target=self._doItExclusive)
self._addThread(target=self._doItDelete)
self.sl.delete()
self._waitThreads()
    # The four threads that were pending return ERR
for _ in range(4):
self.assertEqual(self.done.get_nowait(), "ERR")
self.sl = locking.SharedLock(self.sl.name)
@_Repeat
def testDeletePendingDeleteExclusiveSharers(self):
self.sl.acquire()
self._addThread(target=self._doItDelete)
self._addThread(target=self._doItExclusive)
self._addThread(target=self._doItSharer)
self._addThread(target=self._doItSharer)
self.sl.delete()
self._waitThreads()
    # The four threads that were pending all return ERR
self.assertEqual(self.done.get_nowait(), "ERR")
self.assertEqual(self.done.get_nowait(), "ERR")
self.assertEqual(self.done.get_nowait(), "ERR")
self.assertEqual(self.done.get_nowait(), "ERR")
self.sl = locking.SharedLock(self.sl.name)
@_Repeat
def testExclusiveAcquireTimeout(self):
for shared in [0, 1]:
on_queue = threading.Event()
release_exclusive = threading.Event()
def _LockExclusive():
self.sl.acquire(shared=0, test_notify=on_queue.set)
self.done.put("A: start wait")
release_exclusive.wait()
self.done.put("A: end wait")
self.sl.release()
# Start thread to hold lock in exclusive mode
self._addThread(target=_LockExclusive)
# Wait for wait to begin
self.assertEqual(self.done.get(timeout=60), "A: start wait")
# Wait up to 60s to get lock, but release exclusive lock as soon as we're
# on the queue
self.failUnless(self.sl.acquire(shared=shared, timeout=60,
test_notify=release_exclusive.set))
self.done.put("got 2nd")
self.sl.release()
self._waitThreads()
self.assertEqual(self.done.get_nowait(), "A: end wait")
self.assertEqual(self.done.get_nowait(), "got 2nd")
self.assertRaises(Queue.Empty, self.done.get_nowait)
@_Repeat
def testAcquireExpiringTimeout(self):
def _AcquireWithTimeout(shared, timeout):
if not self.sl.acquire(shared=shared, timeout=timeout):
self.done.put("timeout")
for shared in [0, 1]:
# Lock exclusively
self.sl.acquire()
# Start shared acquires with timeout between 0 and 20 ms
for i in range(11):
self._addThread(target=_AcquireWithTimeout,
args=(shared, i * 2.0 / 1000.0))
# Wait for threads to finish (makes sure the acquire timeout expires
# before releasing the lock)
self._waitThreads()
# Release lock
self.sl.release()
for _ in range(11):
self.assertEqual(self.done.get_nowait(), "timeout")
self.assertRaises(Queue.Empty, self.done.get_nowait)
@_Repeat
def testSharedSkipExclusiveAcquires(self):
# Tests whether shared acquires jump in front of exclusive acquires in the
# queue.
def _Acquire(shared, name, notify_ev, wait_ev):
if notify_ev:
notify_fn = notify_ev.set
else:
notify_fn = None
if wait_ev:
wait_ev.wait()
if not self.sl.acquire(shared=shared, test_notify=notify_fn):
return
self.done.put(name)
self.sl.release()
# Get exclusive lock while we fill the queue
self.sl.acquire()
shrcnt1 = 5
shrcnt2 = 7
shrcnt3 = 9
shrcnt4 = 2
# Add acquires using threading.Event for synchronization. They'll be
# acquired exactly in the order defined in this list.
acquires = (shrcnt1 * [(1, "shared 1")] +
3 * [(0, "exclusive 1")] +
shrcnt2 * [(1, "shared 2")] +
shrcnt3 * [(1, "shared 3")] +
shrcnt4 * [(1, "shared 4")] +
3 * [(0, "exclusive 2")])
ev_cur = None
ev_prev = None
for args in acquires:
ev_cur = threading.Event()
self._addThread(target=_Acquire, args=args + (ev_cur, ev_prev))
ev_prev = ev_cur
# Wait for last acquire to start
ev_prev.wait()
# Expect 6 pending exclusive acquires and 1 for all shared acquires
# together
self.assertEqual(self.sl._count_pending(), 7)
# Release exclusive lock and wait
self.sl.release()
self._waitThreads()
# Check sequence
for _ in range(shrcnt1 + shrcnt2 + shrcnt3 + shrcnt4):
# Shared locks aren't guaranteed to be notified in order, but they'll be
# first
tmp = self.done.get_nowait()
if tmp == "shared 1":
shrcnt1 -= 1
elif tmp == "shared 2":
shrcnt2 -= 1
elif tmp == "shared 3":
shrcnt3 -= 1
elif tmp == "shared 4":
shrcnt4 -= 1
self.assertEqual(shrcnt1, 0)
self.assertEqual(shrcnt2, 0)
self.assertEqual(shrcnt3, 0)
    self.assertEqual(shrcnt4, 0)
for _ in range(3):
self.assertEqual(self.done.get_nowait(), "exclusive 1")
for _ in range(3):
self.assertEqual(self.done.get_nowait(), "exclusive 2")
self.assertRaises(Queue.Empty, self.done.get_nowait)
def testIllegalDowngrade(self):
# Not yet acquired
self.assertRaises(AssertionError, self.sl.downgrade)
# Acquire in shared mode, downgrade should be no-op
self.assertTrue(self.sl.acquire(shared=1))
self.assertTrue(self.sl.is_owned(shared=1))
self.assertTrue(self.sl.downgrade())
self.assertTrue(self.sl.is_owned(shared=1))
self.sl.release()
def testDowngrade(self):
self.assertTrue(self.sl.acquire())
self.assertTrue(self.sl.is_owned(shared=0))
self.assertTrue(self.sl.downgrade())
self.assertTrue(self.sl.is_owned(shared=1))
self.sl.release()
@_Repeat
def testDowngradeJumpsAheadOfExclusive(self):
def _KeepExclusive(ev_got, ev_downgrade, ev_release):
self.assertTrue(self.sl.acquire())
self.assertTrue(self.sl.is_owned(shared=0))
ev_got.set()
ev_downgrade.wait()
self.assertTrue(self.sl.is_owned(shared=0))
self.assertTrue(self.sl.downgrade())
self.assertTrue(self.sl.is_owned(shared=1))
ev_release.wait()
self.assertTrue(self.sl.is_owned(shared=1))
self.sl.release()
def _KeepExclusive2(ev_started, ev_release):
self.assertTrue(self.sl.acquire(test_notify=ev_started.set))
self.assertTrue(self.sl.is_owned(shared=0))
ev_release.wait()
self.assertTrue(self.sl.is_owned(shared=0))
self.sl.release()
def _KeepShared(ev_started, ev_got, ev_release):
self.assertTrue(self.sl.acquire(shared=1, test_notify=ev_started.set))
self.assertTrue(self.sl.is_owned(shared=1))
ev_got.set()
ev_release.wait()
self.assertTrue(self.sl.is_owned(shared=1))
self.sl.release()
# Acquire lock in exclusive mode
ev_got_excl1 = threading.Event()
ev_downgrade_excl1 = threading.Event()
ev_release_excl1 = threading.Event()
th_excl1 = self._addThread(target=_KeepExclusive,
args=(ev_got_excl1, ev_downgrade_excl1,
ev_release_excl1))
ev_got_excl1.wait()
# Start a second exclusive acquire
ev_started_excl2 = threading.Event()
ev_release_excl2 = threading.Event()
th_excl2 = self._addThread(target=_KeepExclusive2,
args=(ev_started_excl2, ev_release_excl2))
ev_started_excl2.wait()
# Start shared acquires, will jump ahead of second exclusive acquire when
# first exclusive acquire downgrades
ev_shared = [(threading.Event(), threading.Event()) for _ in range(5)]
ev_release_shared = threading.Event()
th_shared = [self._addThread(target=_KeepShared,
args=(ev_started, ev_got, ev_release_shared))
for (ev_started, ev_got) in ev_shared]
# Wait for all shared acquires to start
for (ev, _) in ev_shared:
ev.wait()
# Check lock information
self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER])),
[(self.sl.name, "exclusive", [th_excl1.getName()], None)])
[(_, _, _, pending), ] = self.sl.GetLockInfo(set([query.LQ_PENDING]))
self.assertEqual([(pendmode, sorted(waiting))
for (pendmode, waiting) in pending],
[("exclusive", [th_excl2.getName()]),
("shared", sorted(th.getName() for th in th_shared))])
# Shared acquires won't start until the exclusive lock is downgraded
ev_downgrade_excl1.set()
# Wait for all shared acquires to be successful
for (_, ev) in ev_shared:
ev.wait()
# Check lock information again
self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE,
query.LQ_PENDING])),
[(self.sl.name, "shared", None,
[("exclusive", [th_excl2.getName()])])])
[(_, _, owner, _), ] = self.sl.GetLockInfo(set([query.LQ_OWNER]))
self.assertEqual(set(owner), set([th_excl1.getName()] +
[th.getName() for th in th_shared]))
ev_release_excl1.set()
ev_release_excl2.set()
ev_release_shared.set()
self._waitThreads()
self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER,
query.LQ_PENDING])),
[(self.sl.name, None, None, [])])
@_Repeat
def testMixedAcquireTimeout(self):
sync = threading.Event()
def _AcquireShared(ev):
if not self.sl.acquire(shared=1, timeout=None):
return
self.done.put("shared")
# Notify main thread
ev.set()
# Wait for notification from main thread
sync.wait()
# Release lock
self.sl.release()
acquires = []
for _ in range(3):
ev = threading.Event()
self._addThread(target=_AcquireShared, args=(ev, ))
acquires.append(ev)
# Wait for all acquires to finish
for i in acquires:
i.wait()
self.assertEqual(self.sl._count_pending(), 0)
# Try to get exclusive lock
self.failIf(self.sl.acquire(shared=0, timeout=0.02))
# Acquire exclusive without timeout
exclsync = threading.Event()
exclev = threading.Event()
def _AcquireExclusive():
if not self.sl.acquire(shared=0):
return
self.done.put("exclusive")
# Notify main thread
exclev.set()
# Wait for notification from main thread
exclsync.wait()
self.sl.release()
self._addThread(target=_AcquireExclusive)
# Try to get exclusive lock
self.failIf(self.sl.acquire(shared=0, timeout=0.02))
# Make all shared holders release their locks
sync.set()
# Wait for exclusive acquire to succeed
exclev.wait()
self.assertEqual(self.sl._count_pending(), 0)
# Try to get exclusive lock
self.failIf(self.sl.acquire(shared=0, timeout=0.02))
def _AcquireSharedSimple():
if self.sl.acquire(shared=1, timeout=None):
self.done.put("shared2")
self.sl.release()
for _ in range(10):
self._addThread(target=_AcquireSharedSimple)
# Tell exclusive lock to release
exclsync.set()
# Wait for everything to finish
self._waitThreads()
self.assertEqual(self.sl._count_pending(), 0)
# Check sequence
for _ in range(3):
self.assertEqual(self.done.get_nowait(), "shared")
self.assertEqual(self.done.get_nowait(), "exclusive")
for _ in range(10):
self.assertEqual(self.done.get_nowait(), "shared2")
self.assertRaises(Queue.Empty, self.done.get_nowait)
def testPriority(self):
# Acquire in exclusive mode
self.assert_(self.sl.acquire(shared=0))
# Queue acquires
def _Acquire(prev, next, shared, priority, result):
prev.wait()
self.sl.acquire(shared=shared, priority=priority, test_notify=next.set)
try:
self.done.put(result)
finally:
self.sl.release()
counter = itertools.count(0)
priorities = range(-20, 30)
first = threading.Event()
prev = first
# Data structure:
# {
# priority:
# [(shared/exclusive, set(acquire names), set(pending threads)),
# (shared/exclusive, ...),
# ...,
# ],
# }
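    # For example (purely illustrative; actual names depend on the seed):
    #   perprio = {
    #     -5: [(0, set(["0/shr=0/prio=-5"]), set([<thread>]))],
    #     12: [(1, set(["1/shr=1/prio=12", "2/shr=1/prio=12"]), set([...]))],
    #   }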
perprio = {}
# References shared acquire per priority in L{perprio}. Data structure:
# {
# priority: (shared=1, set(acquire names), set(pending threads)),
# }
prioshared = {}
for seed in [4979, 9523, 14902, 32440]:
# Use a deterministic random generator
rnd = random.Random(seed)
for priority in [rnd.choice(priorities) for _ in range(30)]:
modes = [0, 1]
rnd.shuffle(modes)
for shared in modes:
# Unique name
acqname = "%s/shr=%s/prio=%s" % (counter.next(), shared, priority)
ev = threading.Event()
thread = self._addThread(target=_Acquire,
args=(prev, ev, shared, priority, acqname))
prev = ev
          # Record expected acquire, see above for structure
data = (shared, set([acqname]), set([thread]))
priolist = perprio.setdefault(priority, [])
if shared:
priosh = prioshared.get(priority, None)
if priosh:
# Shared acquires are merged
for i, j in zip(priosh[1:], data[1:]):
i.update(j)
assert data[0] == priosh[0]
else:
prioshared[priority] = data
priolist.append(data)
else:
priolist.append(data)
# Start all acquires and wait for them
first.set()
prev.wait()
# Check lock information
self.assertEqual(self.sl.GetLockInfo(set()),
[(self.sl.name, None, None, None)])
self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER])),
[(self.sl.name, "exclusive",
[threading.currentThread().getName()], None)])
self._VerifyPrioPending(self.sl.GetLockInfo(set([query.LQ_PENDING])),
perprio)
# Let threads acquire the lock
self.sl.release()
# Wait for everything to finish
self._waitThreads()
self.assert_(self.sl._check_empty())
# Check acquires by priority
for acquires in [perprio[i] for i in sorted(perprio.keys())]:
for (_, names, _) in acquires:
# For shared acquires, the set will contain 1..n entries. For exclusive
# acquires only one.
while names:
names.remove(self.done.get_nowait())
self.assertFalse(compat.any(names for (_, names, _) in acquires))
self.assertRaises(Queue.Empty, self.done.get_nowait)
def _VerifyPrioPending(self, ((name, mode, owner, pending), ), perprio):
self.assertEqual(name, self.sl.name)
self.assert_(mode is None)
self.assert_(owner is None)
self.assertEqual([(pendmode, sorted(waiting))
for (pendmode, waiting) in pending],
[(["exclusive", "shared"][int(bool(shared))],
sorted(t.getName() for t in threads))
for acquires in [perprio[i]
for i in sorted(perprio.keys())]
for (shared, _, threads) in acquires])
class _FakeTimeForSpuriousNotifications:
def __init__(self, now, check_end):
self.now = now
self.check_end = check_end
# Deterministic random number generator
self.rnd = random.Random(15086)
def time(self):
# Advance time if the random number generator thinks so (this is to test
# multiple notifications without advancing the time)
if self.rnd.random() < 0.3:
self.now += self.rnd.random()
self.check_end(self.now)
return self.now
@_Repeat
def testAcquireTimeoutWithSpuriousNotifications(self):
ready = threading.Event()
locked = threading.Event()
req = Queue.Queue(0)
epoch = 4000.0
timeout = 60.0
def check_end(now):
self.assertFalse(locked.isSet())
# If we waited long enough (in virtual time), tell main thread to release
# lock, otherwise tell it to notify once more
req.put(now < (epoch + (timeout * 0.8)))
time_fn = self._FakeTimeForSpuriousNotifications(epoch, check_end).time
sl = locking.SharedLock("test", _time_fn=time_fn)
# Acquire in exclusive mode
sl.acquire(shared=0)
def fn():
self.assertTrue(sl.acquire(shared=0, timeout=timeout,
test_notify=ready.set))
locked.set()
sl.release()
self.done.put("success")
# Start acquire with timeout and wait for it to be ready
self._addThread(target=fn)
ready.wait()
# The separate thread is now waiting to acquire the lock, so start sending
# spurious notifications.
# Wait for separate thread to ask for another notification
count = 0
while req.get():
# After sending the notification, the lock will take a short amount of
# time to notice and to retrieve the current time
sl._notify_topmost()
count += 1
self.assertTrue(count > 100, "Not enough notifications were sent")
self.assertFalse(locked.isSet())
# Some notifications have been sent, now actually release the lock
sl.release()
# Wait for lock to be acquired
locked.wait()
self._waitThreads()
self.assertEqual(self.done.get_nowait(), "success")
self.assertRaises(Queue.Empty, self.done.get_nowait)
class TestSharedLockInCondition(_ThreadedTestCase):
"""SharedLock as a condition lock tests"""
def setUp(self):
_ThreadedTestCase.setUp(self)
self.sl = locking.SharedLock("TestSharedLockInCondition")
self.setCondition()
def setCondition(self):
self.cond = threading.Condition(self.sl)
def testKeepMode(self):
self.cond.acquire(shared=1)
self.assert_(self.sl.is_owned(shared=1))
self.cond.wait(0)
self.assert_(self.sl.is_owned(shared=1))
self.cond.release()
self.cond.acquire(shared=0)
self.assert_(self.sl.is_owned(shared=0))
self.cond.wait(0)
self.assert_(self.sl.is_owned(shared=0))
self.cond.release()
class TestSharedLockInPipeCondition(TestSharedLockInCondition):
"""SharedLock as a pipe condition lock tests"""
def setCondition(self):
self.cond = locking.PipeCondition(self.sl)
class TestSSynchronizedDecorator(_ThreadedTestCase):
"""Shared Lock Synchronized decorator test"""
def setUp(self):
_ThreadedTestCase.setUp(self)
@locking.ssynchronized(_decoratorlock)
def _doItExclusive(self):
self.assert_(_decoratorlock.is_owned())
self.done.put("EXC")
@locking.ssynchronized(_decoratorlock, shared=1)
def _doItSharer(self):
self.assert_(_decoratorlock.is_owned(shared=1))
self.done.put("SHR")
def testDecoratedFunctions(self):
self._doItExclusive()
self.assertFalse(_decoratorlock.is_owned())
self._doItSharer()
self.assertFalse(_decoratorlock.is_owned())
def testSharersCanCoexist(self):
_decoratorlock.acquire(shared=1)
threading.Thread(target=self._doItSharer).start()
self.assert_(self.done.get(True, 1))
_decoratorlock.release()
@_Repeat
def testExclusiveBlocksExclusive(self):
_decoratorlock.acquire()
self._addThread(target=self._doItExclusive)
# give it a bit of time to check that it's not actually doing anything
self.assertRaises(Queue.Empty, self.done.get_nowait)
_decoratorlock.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "EXC")
@_Repeat
def testExclusiveBlocksSharer(self):
_decoratorlock.acquire()
self._addThread(target=self._doItSharer)
self.assertRaises(Queue.Empty, self.done.get_nowait)
_decoratorlock.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "SHR")
@_Repeat
def testSharerBlocksExclusive(self):
_decoratorlock.acquire(shared=1)
self._addThread(target=self._doItExclusive)
self.assertRaises(Queue.Empty, self.done.get_nowait)
_decoratorlock.release()
self._waitThreads()
self.failUnlessEqual(self.done.get_nowait(), "EXC")
if __name__ == "__main__":
testutils.GanetiTestProgram()
| bsd-2-clause | -6,438,217,864,445,574,000 | 29.322807 | 79 | 0.650486 | false |
hamiltont/CouchPotatoServer | couchpotato/core/_base/_core/__init__.py | 16 | 3338 | from .main import Core
from uuid import uuid4
def start():
return Core()
config = [{
'name': 'core',
'order': 1,
'groups': [
{
'tab': 'general',
'name': 'basics',
'description': 'Needs restart before changes take effect.',
'wizard': True,
'options': [
{
'name': 'username',
'default': '',
},
{
'name': 'password',
'default': '',
'type': 'password',
},
{
'name': 'port',
'default': 5050,
'type': 'int',
'description': 'The port I should listen to.',
},
{
'name': 'ssl_cert',
'description': 'Path to SSL server.crt',
'advanced': True,
},
{
'name': 'ssl_key',
'description': 'Path to SSL server.key',
'advanced': True,
},
{
'name': 'launch_browser',
'default': True,
'type': 'bool',
'description': 'Launch the browser when I start.',
'wizard': True,
},
],
},
{
'tab': 'general',
'name': 'advanced',
'description': "For those who know what they're doing",
'advanced': True,
'options': [
{
'name': 'api_key',
'default': uuid4().hex,
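                    # Note: uuid4().hex is evaluated once at import time, so
                    # this default is generated per process, not per request.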
'readonly': 1,
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
},
{
'name': 'debug',
'default': 0,
'type': 'bool',
'description': 'Enable debugging.',
},
{
'name': 'development',
'default': 0,
'type': 'bool',
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
},
{
'name': 'data_dir',
'type': 'directory',
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
},
{
'name': 'url_base',
'default': '',
'description': 'When using mod_proxy use this to append the url with this.',
},
{
'name': 'permission_folder',
'default': '0755',
'label': 'Folder CHMOD',
'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
},
{
'name': 'permission_file',
'default': '0755',
'label': 'File CHMOD',
'description': 'Same as Folder CHMOD but for files',
},
],
},
],
}]
| gpl-3.0 | 1,166,122,914,705,334,300 | 32.38 | 111 | 0.334332 | false |
GREO/gnuradio-git | gnuradio-examples/python/usrp2/qt_wfm_interface.py | 10 | 6160 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'qt_wfm_interface.ui'
#
# Created: Thu Jun 18 23:41:03 2009
# by: PyQt4 UI code generator 4.4.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_InterfaceWindow(object):
def setupUi(self, InterfaceWindow):
InterfaceWindow.setObjectName("InterfaceWindow")
InterfaceWindow.resize(909, 711)
self.centralwidget = QtGui.QWidget(InterfaceWindow)
self.centralwidget.setObjectName("centralwidget")
self.closeButton = QtGui.QPushButton(self.centralwidget)
self.closeButton.setGeometry(QtCore.QRect(790, 580, 101, 31))
self.closeButton.setObjectName("closeButton")
self.sinkFrame = QtGui.QFrame(self.centralwidget)
self.sinkFrame.setGeometry(QtCore.QRect(10, 10, 891, 501))
self.sinkFrame.setFrameShape(QtGui.QFrame.StyledPanel)
self.sinkFrame.setFrameShadow(QtGui.QFrame.Raised)
self.sinkFrame.setObjectName("sinkFrame")
self.horizontalLayoutWidget = QtGui.QWidget(self.sinkFrame)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 871, 481))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.sinkLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.sinkLayout.setObjectName("sinkLayout")
self.channelModeBox = QtGui.QGroupBox(self.centralwidget)
self.channelModeBox.setGeometry(QtCore.QRect(10, 520, 261, 131))
self.channelModeBox.setObjectName("channelModeBox")
        self.bandwidthLabel = QtGui.QLabel(self.channelModeBox)
        self.bandwidthLabel.setGeometry(QtCore.QRect(10, 90, 101, 17))
        self.bandwidthLabel.setObjectName("bandwidthLabel")
self.bandwidthEdit = QtGui.QLineEdit(self.channelModeBox)
self.bandwidthEdit.setGeometry(QtCore.QRect(130, 90, 113, 23))
self.bandwidthEdit.setObjectName("bandwidthEdit")
self.gainEdit = QtGui.QLineEdit(self.channelModeBox)
self.gainEdit.setGeometry(QtCore.QRect(130, 60, 113, 23))
self.gainEdit.setObjectName("gainEdit")
self.gainLabel = QtGui.QLabel(self.channelModeBox)
self.gainLabel.setGeometry(QtCore.QRect(10, 60, 111, 20))
self.gainLabel.setObjectName("gainLabel")
self.freqEdit = QtGui.QLineEdit(self.channelModeBox)
self.freqEdit.setGeometry(QtCore.QRect(130, 30, 113, 23))
self.freqEdit.setObjectName("freqEdit")
self.freqLabel = QtGui.QLabel(self.channelModeBox)
self.freqLabel.setGeometry(QtCore.QRect(10, 30, 111, 17))
self.freqLabel.setObjectName("freqLabel")
self.pauseButton = QtGui.QPushButton(self.centralwidget)
self.pauseButton.setGeometry(QtCore.QRect(790, 520, 101, 31))
self.pauseButton.setObjectName("pauseButton")
self.fmBox = QtGui.QGroupBox(self.centralwidget)
self.fmBox.setGeometry(QtCore.QRect(290, 520, 251, 131))
self.fmBox.setObjectName("fmBox")
self.volumeEdit = QtGui.QLineEdit(self.fmBox)
self.volumeEdit.setGeometry(QtCore.QRect(130, 20, 113, 23))
self.volumeEdit.setObjectName("volumeEdit")
self.volumeLabel = QtGui.QLabel(self.fmBox)
self.volumeLabel.setGeometry(QtCore.QRect(10, 20, 111, 17))
self.volumeLabel.setObjectName("volumeLabel")
InterfaceWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(InterfaceWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 909, 24))
self.menubar.setObjectName("menubar")
self.menuFile = QtGui.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
InterfaceWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(InterfaceWindow)
self.statusbar.setObjectName("statusbar")
InterfaceWindow.setStatusBar(self.statusbar)
self.actionExit = QtGui.QAction(InterfaceWindow)
self.actionExit.setObjectName("actionExit")
self.menuFile.addAction(self.actionExit)
self.menubar.addAction(self.menuFile.menuAction())
self.retranslateUi(InterfaceWindow)
QtCore.QObject.connect(self.closeButton, QtCore.SIGNAL("clicked()"), InterfaceWindow.close)
QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL("triggered()"), InterfaceWindow.close)
QtCore.QMetaObject.connectSlotsByName(InterfaceWindow)
InterfaceWindow.setTabOrder(self.closeButton, self.gainEdit)
InterfaceWindow.setTabOrder(self.gainEdit, self.freqEdit)
InterfaceWindow.setTabOrder(self.freqEdit, self.bandwidthEdit)
def retranslateUi(self, InterfaceWindow):
InterfaceWindow.setWindowTitle(QtGui.QApplication.translate("InterfaceWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
self.closeButton.setText(QtGui.QApplication.translate("InterfaceWindow", "Close", None, QtGui.QApplication.UnicodeUTF8))
self.channelModeBox.setTitle(QtGui.QApplication.translate("InterfaceWindow", "USRP Parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.bandwidthLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Bandwidth (Hz)", None, QtGui.QApplication.UnicodeUTF8))
self.gainLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Gain (dB)", None, QtGui.QApplication.UnicodeUTF8))
self.freqLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Frequency", None, QtGui.QApplication.UnicodeUTF8))
self.pauseButton.setText(QtGui.QApplication.translate("InterfaceWindow", "Pause", None, QtGui.QApplication.UnicodeUTF8))
self.fmBox.setTitle(QtGui.QApplication.translate("InterfaceWindow", "FM Tuner Parameters", None, QtGui.QApplication.UnicodeUTF8))
self.volumeLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Volume", None, QtGui.QApplication.UnicodeUTF8))
self.menuFile.setTitle(QtGui.QApplication.translate("InterfaceWindow", "&File", None, QtGui.QApplication.UnicodeUTF8))
self.actionExit.setText(QtGui.QApplication.translate("InterfaceWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
| gpl-3.0 | 8,151,632,256,941,568,000 | 61.222222 | 142 | 0.730195 | false |
mtr574/Midburn-ThemeCamps | midburn/models.py | 3 | 4096 | from django.db import models
from django.contrib.auth.models import User
CAMPSTATUS = (
(0, 'Deleted'),
(1, 'Accepting new members'),
(2, 'Closed to new members'),
(3, 'Camp will not come to Midburn 2016'),
)
# Create your models here.
class Camp(models.Model):
users = models.ManyToManyField(User)
camp_name_he = models.CharField(max_length=50, unique=True)
camp_name_en = models.CharField(max_length=50, unique=True)
camp_desc_he = models.TextField()
camp_desc_en = models.TextField()
camp_status = models.IntegerField(choices=CAMPSTATUS, default=1)
is_published = models.BooleanField(default=False)
contact_email = models.CharField(max_length=254, blank=True)
contact_facebook = models.CharField(max_length=254, blank=True)
contact_name_he = models.CharField(max_length=50, blank=True)
contact_name_en = models.CharField(max_length=50, blank=True)
contact_phone = models.CharField(max_length=50, blank=True)
accepting_families = models.BooleanField(default=True)
def get_status(self):
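        # Relies on each CAMPSTATUS value matching its position in the tuple;
        # dict(CAMPSTATUS)[self.camp_status] would not depend on ordering.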
self.status = CAMPSTATUS[self.camp_status][1]
return self
def __str__(self):
return self.camp_name_en
CAMPTYPES = (
(1, 'food'),
(2, 'drinking/bar'),
(3, 'music'),
(4, 'workshops'),
(5, 'art-supporting'),
(6, 'other'),
)
CAMPTIMES = (
(1, 'morning'),
(2, 'noon'),
(3, 'evening'),
(4, 'night'),
)
NOISE_LEVELS = (
(1, 'quiet'),
(2, 'medium'),
(3, 'noisy'),
(4, 'very noisy'),
)
class CampLocation(models.Model):  # could be part of Camp, but kept separate for modularity
camp = models.OneToOneField(Camp)
camp_type = models.IntegerField(choices=CAMPTYPES)
camp_type_other = models.TextField()
camp_activity_time = models.CommaSeparatedIntegerField(choices=CAMPTIMES, max_length=64)
child_friendly = models.BooleanField()
noise_level = models.IntegerField(choices=NOISE_LEVELS)
public_activity_area_sqm = models.IntegerField()
public_activity_area_desc = models.TextField()
support_art = models.BooleanField()
location_comments = models.TextField()
# These 3 will be set by mikumation
camp_location_street = models.CharField(max_length=100)
camp_location_street_time = models.CharField(max_length=100)
camp_location_area = models.IntegerField()
# Arrival
arriving_at = models.DateTimeField()
# Arrival Checklist
has_construction_team = models.BooleanField()
has_deconst_team = models.BooleanField()
has_gifting = models.BooleanField()
has_leds = models.BooleanField()
# Neighbour Camps
requested_nearby_camps = models.ManyToManyField(Camp, related_name="requested_nearby_camps")
def __str__(self):
return self.camp.camp_name_en
CAMP_MEMBERSHIP_STATUS = (
(1, 'not a member'),
(2, 'awaiting approval'),
(3, 'approved'),
)
class CampMember(models.Model):
camp = models.ForeignKey(Camp)
status = models.IntegerField(choices=CAMP_MEMBERSHIP_STATUS)
has_ticket = models.BooleanField()
early_arrival = models.BooleanField()
is_editor = models.BooleanField()
class CampSafety(models.Model):
camp = models.OneToOneField(Camp)
have_art = models.BooleanField()
installation_over_2m = models.BooleanField()
# Safety checklist:
is_gas_2m_from_stove = models.BooleanField()
is_electricity_not_near_water = models.BooleanField()
ACTIVITY_TYPES = (
(1, 'workshop'),
(2, 'party'),
(3, 'lecture'),
(4, 'show'),
(5, 'parade/hike'),
(6, 'game'),
(7, 'movie'),
(8, 'other'),
)
class Workshop(models.Model):
owner = models.ForeignKey(Camp)
activity_name_he = models.CharField(max_length=50)
activity_name_en = models.CharField(max_length=50)
activity_desc_he = models.TextField()
activity_desc_en = models.TextField()
activity_datetime = models.DateTimeField()
activity_type = models.IntegerField(choices=ACTIVITY_TYPES)
activity_type_other = models.TextField()
adult_only = models.BooleanField()
child_friendly = models.BooleanField()
| apache-2.0 | 4,628,022,054,827,346,000 | 30.751938 | 96 | 0.671387 | false |
gartung/dxr | dxr/plugins/rust/__init__.py | 1 | 40098 | """DXR plugin for Rust. Relies on output from running rustc with -Zsave-analysis
It is somewhat painful dealing with the untyped-ness of the CSV input. We want
to treat all ids as ints rather than strings; getting this wrong causes annoying
bugs because Python will not check the type of things, but it does distinguish
between `n: int` and `'n': string`, and thus dictionary lookups will mysteriously fail.
* All input is strings
* Anything placed into the hashtable data must have had all ids converted to ints
- that is mostly (but not all) done by convert_ids/find_id (since we convert
Rust NodeIds/DefIds to internal ids)
* Helper methods might take args which may or may not have been int-ified :-(
This will all go away when we convert to using JSON instead of CSV for the data
interchange format.
Line and column numbers are stored as strings though.
"""
import csv
import os
from itertools import chain
from dxr import indexers
from dxr.plugins import Plugin, filters_from_namespace, refs_from_namespace
from dxr.filters import LINE
from dxr.indexers import Extent, Position, iterable_per_line, with_start_and_end, split_into_lines, QUALIFIED_LINE_NEEDLE
from dxr.plugins.rust import filters
from dxr.plugins.rust import refs
RUST_DXR_FLAG = " -Zsave-analysis"
# We know these crates come from the rust distribution (probably, the user could
# override that, but lets assume for now...).
std_libs = ['alloc', 'arena', 'backtrace', 'collections', 'core', 'coretest',
'flate','fmt_macros', 'getopts', 'graphviz', 'libc', 'log', 'rand',
'rbml', 'regex', 'rustc', 'rustc_bitflags', 'rustc_back', 'rustc_borrowck',
'rustc_driver', 'rustc_llvm', 'rustc_privacy', 'rustc_resolve', 'rustc_trans',
'rustc_typeck', 'rustdoc', 'serialize', 'std', 'syntax', 'term',
'test', 'unicode']
id = 0
def next_id():
global id
id += 1
return id
class FileToIndex(indexers.FileToIndex):
def __init__(self, path, contents, plugin_name, tti):
super(FileToIndex, self).__init__(path, contents, plugin_name, tti.tree)
self.tree_index = tti
def needles_by_line(self):
#iterable of key/value mapping; one iterable per line
return self.all_needles()
def refs(self):
classes_and_tables = [(refs.FunctionRef, 'functions'),
(refs.FunctionRefRef, 'function_refs'),
(refs.VariableRef, 'variables'),
(refs.VariableRefRef, 'variable_refs'),
(refs.TypeRef, 'types'),
(refs.TypeRefRef, 'type_refs'),
(refs.ModuleRef, 'modules'),
(refs.ModuleRefRef, 'module_refs'),
(refs.ModuleAliasRef, 'module_aliases'),
(refs.UnknownRef, 'unknown_refs')]
# Note there is no ref for impls since both the trait and struct parts
# are covered as refs already. If you add this, then you will get overlapping
# extents, which is bad. We have impl_defs in the db because we do want
# to jump _to_ them.
for make_ref, table_name in classes_and_tables:
for datum in self.tree_index.by_file(table_name, self.path):
ref = make_ref(self.tree, datum, tree_index=self.tree_index)
if ref and 'extent_start' in datum:
yield (int(datum['extent_start']),
int(datum['extent_end']),
ref)
def annotations_by_line(self):
# FIXME(#4) links in the lefthand margin (warnings, etc.)
return []
def links(self):
# FIXME(#16) RHS links
#return (sort order, heading, [(icon, title, href), ...])
return []
def all_needles(self):
return iterable_per_line(with_start_and_end(split_into_lines(chain(
self.file_needles('function', 'functions'),
self.file_needles('function_ref', 'function_refs'),
self.file_needles('var', 'variables'),
self.file_needles('var_ref', 'variable_refs'),
self.file_needles('type', 'types'),
self.file_needles('type_ref', 'type_refs'),
self.file_needles('module', 'modules'),
self.file_needles('module_ref', 'module_refs'),
self.file_needles('module_alias_ref', 'module_aliases'),
self.alias_needles(),
self.module_use_needles(),
self.file_needles('extern_ref', 'unknown_refs'),
self.impl_needles(),
self.fn_impls_needles(),
self.inherit_needles(self.tree_index.super_traits, 'derived'),
self.inherit_needles(self.tree_index.sub_traits, 'bases'),
self.call_needles(self.tree_index.callers, 'called_by'),
self.call_needles(self.tree_index.callees, 'callers'),
))))
def file_needles(self, filter_name, table_name, keys=('name', 'qualname')):
data = self.tree_index.by_file(table_name, self.path)
return self.needles_for_table(filter_name, data)
def needles_for_table(self, filter_name, data):
# Each needle is a (needle name, needle value dict, Extent) triple.
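        # e.g. ('rust_function',
        #       {'name': 'foo', 'qualname': 'mycrate::foo', ...},
        #       <Extent for the definition span>)
        # (the names here are purely illustrative)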
result = (('rust_{0}'.format(filter_name),
datum,
self.make_span(datum))
for datum in data if 'extent_start' in datum)
return result
def alias_needles(self):
# When we have a reference to an alias, it is useful to have a needle for
# both the alias and the aliased 'module'.
refs = self.tree_index.by_file('module_refs', self.path)
aliases = self.tree_index.data.module_aliases
mods = self.tree_index.data.modules
for datum in refs:
if datum['aliasid'] in aliases:
a_ref = aliases[datum['aliasid']]
alias = {
'qualname': a_ref['qualname'],
'name': a_ref['name']
}
yield ('rust_module_alias_ref', alias, self.make_span(datum))
if a_ref['refid'] in mods:
mod = {
'qualname': mods[a_ref['refid']]['qualname'],
'name': mods[a_ref['refid']]['name']
}
yield ('rust_module_ref', mod, self.make_span(datum))
def module_use_needles(self):
aliases = self.tree_index.by_file('module_aliases', self.path)
modules = self.tree_index.data.modules
for datum in aliases:
if datum['refid'] in modules:
alias = {
'qualname': modules[datum['refid']]['qualname'],
'name': modules[datum['refid']]['name']
}
yield ('rust_module_use', alias, self.make_span(datum))
def impl_needles(self):
impls = self.tree_index.by_file('impl_defs', self.path)
types = self.tree_index.data.types
for datum in impls:
if datum['refid'] in types:
impl = {
'qualname': types[datum['refid']]['qualname'],
'name': types[datum['refid']]['name']
}
yield ('rust_impl', impl, self.make_span(datum))
if datum['traitid'] in types:
impl = {
'qualname': types[datum['traitid']]['qualname'],
'name': types[datum['traitid']]['name']
}
yield ('rust_impl', impl, self.make_span(datum))
def fn_impls_needles(self):
fns = self.tree_index.by_file('functions', self.path)
all_fns = self.tree_index.data.functions
for datum in fns:
if 'declid' in datum and datum['declid'] in all_fns:
fn = {
'qualname': all_fns[datum['declid']]['qualname'],
'name': all_fns[datum['declid']]['name']
}
yield ('rust_fn_impls', fn, self.make_span(datum))
def inherit_needles(self, inheritance, filter_name):
types = self.tree_index.by_file('types', self.path)
all_types = self.tree_index.data.types
for datum in types:
if datum['id'] not in inheritance:
continue
for s in inheritance[datum['id']]:
t = {
'qualname': all_types[s]['qualname'],
'name': all_types[s]['name']
}
yield ('rust_{0}'.format(filter_name), t, self.make_span(datum))
def call_needles(self, calls, filter_name):
fns = self.tree_index.by_file('functions', self.path)
all_fns = self.tree_index.data.functions
for datum in fns:
if datum['id'] not in calls:
continue
for s in calls[datum['id']]:
fn = {
'qualname': all_fns[s]['qualname'],
'name': all_fns[s]['name']
}
yield ('rust_{0}'.format(filter_name), fn, self.make_span(datum))
# Takes a row of data and returns an Extent.
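    # e.g. a row with file_line '3', file_col '4', file_line_end '3',
    # file_col_end '9' (the CSV stores them as strings) becomes
    # Extent(Position(3, 4), Position(3, 9)).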
def make_span(self, row):
return Extent(Position(int(row['file_line']), int(row['file_col'])),
Position(int(row['file_line_end']), int(row['file_col_end'])))
class RustLine:
def __init__(self):
self.defs = []
class RustFile:
def __init__(self):
self.lines = {}
def get_line(self, line):
if line not in self.lines:
self.lines[line] = RustLine()
return self.lines[line]
# Data for the tree, mappings for each of the various kinds of language item to
# the place it occurs and info about it.
class TreeData:
def __init__(self):
# non-refs are id->args, refs are lists
self.unknowns = {}
self.unknown_refs = []
self.modules = {}
# A module for each crate linked using extern crate, indexed by the module id for the crate
self.extern_crate_mods = {}
self.module_refs = []
self.module_aliases = {}
self.variables = {}
self.variable_refs = []
self.functions = {}
self.function_refs = []
self.types = {}
self.type_refs = []
self.impl_defs = {}
self.indices = {}
# Create an index for a dict
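    # e.g. index('functions', 'file_name')['src/main.rs'] would yield the list
    # of function rows recorded for that (hypothetical) file path.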
def index(self, table_name, field_name):
if (table_name, field_name) in self.indices:
return self.indices[(table_name, field_name)]
table = getattr(self, table_name)
index = {}
values = None
if table_name.endswith('_refs'):
values = table
else:
values = table.values()
for v in values:
if field_name in v and v[field_name]:
if v[field_name] in index:
index[v[field_name]].append(v)
else:
index[v[field_name]] = [v]
self.indices[(table_name, field_name)] = index
return index
def delete_indices(self):
self.indices = {}
class TreeToIndex(indexers.TreeToIndex):
def __init__(self, plugin_name, tree, vcs_cache):
super(TreeToIndex, self).__init__(plugin_name, tree, vcs_cache)
self.tree = tree
src_folder = self.tree.source_folder
if not src_folder.endswith('/'):
src_folder += '/'
self.src_folder = src_folder
self.crate_map = {}
self.crates_by_name = {}
self.id_map = {}
self.local_libs = []
self.files = {} # map from file name to RustFile, which in turn stores all data
# mapping location -> info.
self.data = TreeData()
# Map from the id of a scope to the id of its parent (or 0), if there is no parent.
# Note that these are Rust ids, not DXR ids
self.mod_parents = {}
# map from ctor_id to def_id for structs
# The domains should be disjoint
self.ctor_ids = {}
# list of (base, derived) trait ids
self.inheritance = []
# convenience lookups for self.inheritance
self.sub_traits = {}
self.super_traits = {}
# maps from a fn to its callers or callees (by id)
self.callers = {}
self.callees = {}
# map from inner to outer scopes
self.scope_inheritance = {}
# URLs for std libs
self.locations = {}
# The name of the crate being processed
self.crate_name = None
self._temp_folder = os.path.join(self.tree.temp_folder, 'plugins', plugin_name)
# return data by file, indexed by the file's path
def by_file(self, table_name, file_path):
table = self.data.index(table_name, 'file_name')
if file_path not in table:
return []
return table[file_path]
def environment(self, env):
print("rust-dxr environment")
# Setup environment variables for using the rust-dxr tool
# We'll store all the havested metadata in the plugins temporary folder.
env['RUSTC'] = env.get('RUSTC', 'rustc') + RUST_DXR_FLAG
if 'RUSTFLAGS_STAGE2' in env:
env['RUSTFLAGS_STAGE2'] += RUST_DXR_FLAG
else:
env['RUSTFLAGS_STAGE2'] = RUST_DXR_FLAG
env['DXR_RUST_OBJECT_FOLDER'] = self.tree.object_folder
env['DXR_RUST_TEMP_FOLDER'] = self._temp_folder
return env
def post_build(self):
print "rust-dxr post_build"
for root, dirs, files in os.walk(self._temp_folder):
print " - Processing files - first pass"
for f in [f for f in files if f.endswith('.csv')]:
self.process_csv_first_pass(os.path.join(root, f))
self.crate_name = None
print " - Processing files - second pass"
for f in [f for f in files if f.endswith('.csv')]:
self.process_csv_second_pass(os.path.join(root, f))
self.crate_name = None
# don't need to look in sub-directories
break
print " - Updating references"
self.fixup_struct_ids()
self.fixup_sub_mods()
print " - Generating inheritance graph"
self.generate_inheritance()
self.generate_callgraph()
print " - Generating crate info"
self.generate_locations()
print " - Generating qualnames"
self.generate_qualnames()
def file_to_index(self, path, contents):
return FileToIndex(path, contents, self.plugin_name, self)
# Just record the crates we index (process_crate).
def process_csv_first_pass(self, path):
self.process_csv(path, True)
# All the proper indexing.
def process_csv_second_pass(self, path):
self.process_csv(path, False)
# We need to do this once per crate whilst the current crate is still current
self.generate_scopes()
self.std_hack()
def process_csv(self, file_name, header_only):
try:
f = open(file_name, 'rb')
print 'processing ' + file_name
parsed_iter = csv.reader(f)
try:
# the first item on a line is the kind of entity we are dealing with and so
# we can use that to dispatch to the appropriate process_... function
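                # e.g. a row whose first field is "crate" is dispatched to
                # process_crate(args, self), defined at module level below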
for line in parsed_iter:
# convert key:value pairs to a map
args = {}
for i in range(1, len(line), 2):
args[line[i]] = line[i + 1]
func = None
try:
func = globals()['process_' + line[0]]
except KeyError:
print " - 'process_" + line[0] + "' not implemented!"
continue
# The Rust compiler can output noncanonical paths, which
# don't match the ones DXR comes up with. Canonicalize
# them. We don't use relpath() because, in cases where the
# path doesn't end up starting with source_folder, we
# don't want to end up with an absolute path, since that
# certainly won't match what DXR constructs.
if 'file_name' in args:
file_name_value = os.path.normpath(args['file_name'])
if file_name_value.startswith(self.tree.source_folder):
file_name_value = file_name_value[len(self.tree.source_folder)+1:]
args['file_name'] = file_name_value
stop = func(args, self)
if stop and header_only:
break
except Exception:
print "error in", file_name, line
raise
except Exception:
print "error in", file_name
raise
finally:
f.close()
def fixup_struct_ids(self):
""" Sadness. Structs have an id for their definition and an id for their ctor.
Sometimes, we get one, sometimes the other. This method fixes up any refs
to the latter into refs to the former."""
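        # Illustration (ids invented): with self.ctor_ids == {12: 7}, a
        # type_ref carrying refid 12 (the ctor) is rewritten to refid 7 (the def).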
type_refs_by_ref = self.data.index('type_refs', 'refid')
for ctor in self.ctor_ids.keys():
if ctor in type_refs_by_ref:
for ref in type_refs_by_ref[ctor]:
ref['refid'] = self.ctor_ids[ctor]
# Indices are now out of date, need to delete them
self.data.delete_indices()
def fixup_sub_mods(self):
""" When we have a path like a::b::c, we want to have info for a and a::b.
Unfortunately Rust does not give us much info, so we have to
construct it ourselves from the module info we have.
We have the qualname for the module (e.g, a or a::b) but we do not have
the refid. """
self.fixup_sub_mods_impl('modules', 'module_refs')
# paths leading up to a static method have a module path, then a type at the end,
# so we have to fixup the type in the same way as we do modules.
self.fixup_sub_mods_impl('types', 'type_refs')
# Some module_refs are refs to types, e.g., enums in paths
self.fixup_sub_mods_impl('types', 'module_refs')
# FIXME - does not seem to work for external crates - refid = 0, crateid = 0
# they must be in the same module crate as their parent though, and we can cache
# module name and scope -> crate and always get a hit, so maybe we can win.
def fixup_sub_mods_impl(self, table_name, table_ref_name):
""" NOTE table_name and table_ref_name should not come from user input, otherwise
there is potential for SQL injection attacks. """
# First create refids for module refs whose qualnames match the qualname of
# the module (i.e., no aliases).
table_refs = getattr(self.data, table_ref_name)
table_by_name = self.data.index(table_name, 'qualname')
for v in table_refs:
if v['refid'] > 0:
continue
if v['qualname'] and v['qualname'] in table_by_name:
v['refid'] = table_by_name[v['qualname']][0]['id']
        # We do our own scoping of aliases and it is kinda nasty. We keep a record
        # of a reflexive, transitive 'inside' relation for scopes in
        # self.scope_inheritance. So we check that the alias is outside the
        # reference to the alias.
        # XXX This does not take into account overriding/shadowing, so if there is
        # an alias in a smaller scope which hides an outer alias, it is down to
        # chance which one you will get.
if table_name == 'modules':
# Next account for where the path is an aliased modules e.g., alias::c,
# where c is already accounted for.
module_aliases_by_scope = self.data.index('module_aliases', 'scopeid')
module_refs_0 = [item for item in self.data.module_refs if item['refid'] == -1]
for mod_ref in module_refs_0:
if mod_ref['scopeid'] not in self.scope_inheritance:
continue
parent_ids = self.scope_inheritance[mod_ref['scopeid']]
for parent_id in parent_ids:
if parent_id in module_aliases_by_scope:
for alias in module_aliases_by_scope[parent_id]:
if alias['name'] == mod_ref['qualname']:
                                qualname = str(parent_id) + "$" + alias['name']
mod_ref['qualname'] = qualname
mod = None
id = alias['refid']
if id in self.data.modules:
mod = self.data.modules[id]
elif id in self.data.extern_crate_mods:
mod = self.data.extern_crate_mods[id]
if mod:
mod_ref['refid'] = mod['id']
mod_ref['aliasid'] = alias['id']
def generate_inheritance(self):
direct = [(base, derived) for (base, derived) in self.inheritance]
transitive = [(base, derived) for (base, derived) in self.closure(self.inheritance) if (base, derived) not in self.inheritance]
self.inheritance = direct + transitive
for (b, d) in self.inheritance:
self.sub_traits.setdefault(b, []).append(d)
self.super_traits.setdefault(d, []).append(b)
def generate_callgraph(self):
# staticaly dispatched call
static_calls = [(value['refid'], value['scopeid']) for value in self.data.function_refs if value['refid'] and value['refid'] in self.data.functions and value['scopeid'] in self.data.functions]
# dynamically dispatched call
fns_by_declid = self.data.index('functions', 'declid')
dynamic_calls = [(fns_by_declid[value['declid']][0]['id'], value['scopeid'])
for value in self.data.function_refs
if ('refid' not in value or not value['refid']) and 'declid' in value and value['declid'] in fns_by_declid and fns_by_declid[value['declid']][0]['id'] in self.data.functions and value['scopeid'] in self.data.functions]
for (er, ee) in static_calls + dynamic_calls:
self.callers.setdefault(er, []).append(ee)
self.callees.setdefault(ee, []).append(er)
def generate_locations(self):
docurl = "http://static.rust-lang.org/doc/master/%s/index.html"
srcurl = "https://github.com/rust-lang/rust/tree/master/src/lib%s"
dxrurl = "http://dxr.mozilla.org/rust/source/lib%s/lib.rs.html"
for l in std_libs:
# If we are indexing the standard libs for some reason, then don't give
# them special treatment.
if l not in self.local_libs:
self.locations[l] = (docurl%l, srcurl%l, dxrurl%l)
def generate_qualnames(self):
def generate_qualname_for_table(ref_table, table):
for datum in ref_table:
if 'qualname' not in datum or not datum['qualname']:
if datum['refid'] and datum['refid'] in table:
datum['qualname'] = table[datum['refid']]['qualname']
datum['name'] = table[datum['refid']]['name']
generate_qualname_for_table(self.data.type_refs, self.data.types)
generate_qualname_for_table(self.data.module_refs, self.data.types)
generate_qualname_for_table(self.data.variable_refs, self.data.variables)
# function refs
for f in self.data.function_refs:
if 'qualname' not in f or not f['qualname']:
if 'refid' in f and f['refid'] and f['refid'] in self.data.functions:
fn_def = self.data.functions[f['refid']]
f['qualname'] = fn_def['qualname']
f['name'] = fn_def['name']
elif 'refid' in f and f['refid'] and f['refid'] in self.data.types:
fn_def = self.data.types[f['refid']]
f['qualname'] = fn_def['qualname']
f['name'] = fn_def['name']
elif 'declid' in f and f['declid'] and f['declid'] in self.data.functions:
fn_decl = self.data.functions[f['declid']]
f['qualname'] = fn_decl['qualname']
f['name'] = fn_decl['name']
# unknown refs
for datum in self.data.unknown_refs:
if 'qualname' not in datum or not datum['qualname']:
if datum['refid']:
datum['qualname'] = datum['refid']
datum['name'] = datum['refid']
# module aliases
for datum in self.data.module_refs:
if 'qualname' not in datum or not datum['qualname']:
if datum['aliasid'] and datum['aliasid'] in self.data.module_aliases:
alias = self.data.module_aliases[datum['aliasid']]
datum['qualname'] = alias['qualname']
datum['name'] = alias['name']
def generate_scopes(self):
self.scope_inheritance[self.find_id_cur(0)] = [self.find_id_cur(0)]
for (child, parent) in self.mod_parents.items():
self.scope_inheritance.setdefault(child, []).append(parent)
# reflexivity
self.scope_inheritance.setdefault(child, []).append(child)
# transitivity
for (child, parent) in self.closure(self.mod_parents.items()):
if (child, parent) not in self.mod_parents.items():
self.scope_inheritance.setdefault(child, []).append(parent)
self.mod_parents = {}
def std_hack(self):
# This is nasty - Rust implicitly includes the standard library,
# crate `std`, but without generating an `extern crate` item, so we need
# to do that. However, it is possible the project includes some other crate
# called `std` (by building without the standard lib, we can't tell from
# the indexing data which is the case), so we need to check in case there
# is one already.
# We probably wouldn't need this if we dealt with generated code properly
# in the compiler indexing.
if 'std' not in self.data.index('module_aliases', 'name').keys():
id = next_id()
scopeid = self.find_id_cur('0')
args = {
'name': 'std',
'location': 'std',
'id': id,
'scopeid': scopeid,
# Jesus, this is fragile
'crate': '1',
'qualname': str(scopeid) + '$std',
'refid': self.crate_map[1][1]['id']
}
self.data.module_aliases[id] = args
def closure(self, input):
""" Compute the (non-refexive) transitive closure of a list."""
closure = set(input)
while True:
next_set = set([(b,dd) for (b,d) in closure for (bb,dd) in closure if d == bb])
next_set |= closure
if next_set == closure:
return closure
closure = next_set
def find_id(self, crate, node):
""" Maps a crate name and a node number to a globally unique id. """
        if node is None:
return None
if node < 0:
return node
node = int(node)
if (crate, node) not in self.id_map:
result = next_id()
self.id_map[(crate, node)] = (result, 0)
return result
return self.id_map[(crate, node)][0]
def add_external_item(self, args):
""" Returns True if the refid in the args points to an item in an external crate. """
node, crate = args['refid'], args['refidcrate']
if not node:
return False
crate = self.crate_map[int(crate)][0]
if crate in self.local_libs:
return False
id = self.find_id(crate, node)
if id not in self.data.unknowns:
self.data.unknowns[id] = {'id': id, 'crate': crate }
args = self.convert_ids(args)
self.data.unknown_refs.append(args)
self.add_to_lines(args, ('unknowns', args))
return True
def add_external_decl(self, args):
decl_node, decl_crate = args['declid'], args['declidcrate']
if not decl_node:
return False
decl_crate = self.crate_map[int(decl_crate)][0]
if decl_crate in self.local_libs:
return False
id = self.find_id(decl_crate, decl_node)
if id not in self.data.unknowns:
self.data.unknowns[id] = {'id': id, 'crate': decl_crate }
new_args = self.convert_ids(args)
new_args['refid'] = new_args['declid']
self.add_to_lines(new_args, ('unknowns', new_args))
args['refid'] = new_args['declid']
return True
def add_to_lines(self, args, data):
r_file = self.get_file(args['file_name'])
start_line = args['file_line']
end_line = args['file_line_end']
for i in range(int(start_line), int(end_line) + 1):
r_line = r_file.get_line(i)
r_line.defs.append(data)
def get_file(self, file_name):
if file_name.startswith(self.src_folder):
file_name = file_name[len(self.src_folder):]
if file_name in self.files:
return self.files[file_name]
r_file = RustFile()
self.files[file_name] = r_file
return r_file
# XXX this feels a little bit fragile...
def convert_ids(self, args):
def convert(k, v):
if k.endswith('crate'):
return -1
elif k == 'ctor_id' or k == 'aliasid':
return int(v)
elif k == 'refid' and (not v or int(v) <= 0):
return -1
elif k == 'id' or k == 'scopeid':
return self.find_id_cur(v)
elif v == '' and (k.endswith('id') or k == 'base' or k == 'derived'):
return None
elif k.endswith('id') or k == 'base' or k == 'derived':
return self.find_id(self.crate_map[int(args[k + 'crate'])][0], v)
else:
return v
new_args = {k: convert(k, v) for k, v in args.items() if not k.endswith('crate')}
return new_args
def find_id_cur(self, node):
""" Shorthand for nodes in the current crate. """
return self.find_id(self.crate_map[0][0], node)
def fixup_qualname(self, datum):
# FIXME(#19) we should not do this here, we should do it in the compiler
if 'qualname' in datum and datum['qualname'] and datum['qualname'][:2] == '::':
datum['qualname'] = self.crate_name + datum['qualname']
# FIXME(#15) all these process_* methods would be better off in TreeToIndex
def process_crate(args, tree):
""" There should only be one of these per crate and it gives info about the current
crate.
Note that this gets called twice for the same crate line - once per pass. """
if args['name'] not in tree.local_libs:
tree.local_libs.append(args['name'])
args = tree.convert_ids(args)
args['id'] = next_id()
tree.crate_map[0] = (args['name'], args)
tree.crates_by_name[args['name']] = args
tree.crate_name = args['name']
def process_external_crate(args, tree):
""" These have to happen before anything else in the csv and have to be concluded
by 'end_external_crate'. """
mod_id = next_id()
name = args['name']
id = int(args['crate'])
args = {'id': mod_id,
'name': name,
'qualname': "0$" + name,
'def_file': args['file_name'],
'kind': 'extern',
'scopeid': 0,
'extent_start': -1,
'extent_end': -1}
# don't need to convert_args because the args are all post-transform
tree.data.extern_crate_mods[mod_id] = args
tree.crate_map[id] = (name, args)
def process_type_ref(args, tree):
if tree.add_external_item(args):
return
args = tree.convert_ids(args)
tree.fixup_qualname(args)
if 'qualname' not in args:
args['qualname'] = ''
tree.data.type_refs.append(args)
tree.add_to_lines(args, ('type_refs', args))
def process_variable(args, tree):
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.variables[args['id']] = args
tree.add_to_lines(args, ('variables', args))
def process_function_impl(args, tree):
args['name'] = args['qualname'].split('::')[-1]
args['args'] = ''
args['type'] = ''
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.mod_parents[int(args['id'])] = int(args['scopeid'])
tree.data.functions[args['id']] = args
tree.add_to_lines(args, ('functions', args))
def process_function(args, tree):
process_function_impl(args, tree)
def process_method_decl(args, tree):
process_function_impl(args, tree)
def process_enum(args, tree):
args['kind'] = 'enum'
args['name'] = args['qualname'].split('::')[-1]
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.types[args['id']] = args
tree.add_to_lines(args, ('types', args))
def process_struct(args, tree, kind='struct'):
# Used for fixing up the refid in fixup_struct_ids
if args['ctor_id'] != '-1':
tree.ctor_ids[tree.find_id_cur(args['ctor_id'])] = tree.find_id_cur(args['id'])
args['name'] = args['qualname'].split('::')[-1]
tree.fixup_qualname(args)
args['kind'] = kind
scope_args = tree.convert_ids({'id': args['id'],
'name' : args['name']})
args = tree.convert_ids(args)
tree.mod_parents[int(args['id'])] = int(args['scopeid'])
tree.data.types[args['id']] = args
tree.add_to_lines(args, ('types', args))
def process_impl(args, tree):
scope_args = tree.convert_ids({'id': args['id'],
'name' : 'impl'})
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.mod_parents[int(args['id'])] = int(args['scopeid'])
tree.data.impl_defs[args['id']] = args
tree.add_to_lines(args, ('impl_defs', args))
def process_fn_call(args, tree):
if tree.add_external_item(args):
return
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.function_refs.append(args)
tree.add_to_lines(args, ('function_refs', args))
def process_var_ref(args, tree):
if tree.add_external_item(args):
return
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.variable_refs.append(args)
tree.add_to_lines(args, ('variable_refs', args))
def process_struct_ref(args, tree):
if 'qualname' not in args:
args['qualname'] = ''
process_type_ref(args, tree)
def process_method_call(args, tree):
if args['refid'] == '0':
args['refid'] = None
ex_def = tree.add_external_item(args)
ex_decl = tree.add_external_decl(args)
if ex_def and ex_decl:
return
if (ex_def and not args['declid']) or (ex_decl and not args['refid']):
# FIXME, I think this is meant to be an assertion, but not sure
print "Unexpected(?) missing id in method call"
return
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.function_refs.append(args)
tree.add_to_lines(args, ('function_refs', args))
def process_mod_ref(args, tree):
args['name'] = args['qualname'].split('::')[-1]
if tree.add_external_item(args):
return
args['aliasid'] = 0
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.module_refs.append(args)
tree.add_to_lines(args, ('module_refs', args))
def process_use_alias(args, tree):
# module_aliases includes aliases to things other than modules
args = tree.convert_ids(args)
args['qualname'] = str(args['scopeid']) + "$" + args['name']
tree.data.module_aliases[args['id']] = args
tree.add_to_lines(args, ('module_aliases', args))
def process_typedef(args, tree):
args['name'] = args['qualname'].split('::')[-1]
args['kind'] = 'typedef'
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.types[args['id']] = args
tree.add_to_lines(args, ('types', args))
def process_variant(args, tree):
process_variable(args, tree)
def process_variant_struct(args, tree):
process_struct(args, tree, 'variant_struct')
def process_trait(args, tree):
args['name'] = args['qualname'].split('::')[-1]
args['kind'] = 'trait'
scope_args = tree.convert_ids({'id': args['id'],
'name' : 'name'})
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.mod_parents[int(args['id'])] = int(args['scopeid'])
tree.data.types[args['id']] = args
tree.add_to_lines(args, ('types', args))
def process_module(args, tree):
args['name'] = args['qualname'].split('::')[-1]
# Need the file name for the menu, at least
# args['def_file'] = tree.get_file(args['def_file'])
args['kind'] = 'mod'
scope_args = tree.convert_ids({'id': args['id'],
'name' : 'name'})
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.mod_parents[int(args['id'])] = int(args['scopeid'])
tree.data.modules[args['id']] = args
tree.add_to_lines(args, ('modules', args))
# FIXME: hmm, I'm not exactly clear on the difference between a fn call and fn ref, some of the former
# are logically the latter and this is stupid code dup...
def process_fn_ref(args, tree):
if tree.add_external_item(args):
return
args = tree.convert_ids(args)
tree.fixup_qualname(args)
tree.data.function_refs.append(args)
tree.add_to_lines(args, ('function_refs', args))
def process_extern_crate(args, tree):
crate = int(args['crate'])
args['refid'] = '0'
args['refidcrate'] = '0'
args = tree.convert_ids(args)
args['qualname'] = str(args['scopeid']) + "$" + args['name']
args['refid'] = tree.crate_map[crate][1]['id']
tree.data.module_aliases[args['id']] = args
tree.add_to_lines(args, ('module_aliases', args))
def process_inheritance(args, tree):
args = tree.convert_ids(args)
tree.inheritance.append((args['base'], args['derived']))
def process_use_glob(args, tree):
# FIXME(#9)
pass
def process_end_external_crates(args, tree):
# We've got all the info we're going to get about external crates now.
return True
mappings = {
LINE: {
'properties': {
'rust_function': QUALIFIED_LINE_NEEDLE,
'rust_function_ref': QUALIFIED_LINE_NEEDLE,
'rust_var': QUALIFIED_LINE_NEEDLE,
'rust_var_ref': QUALIFIED_LINE_NEEDLE,
'rust_type': QUALIFIED_LINE_NEEDLE,
'rust_type_ref': QUALIFIED_LINE_NEEDLE,
'rust_module': QUALIFIED_LINE_NEEDLE,
'rust_module_ref': QUALIFIED_LINE_NEEDLE,
'rust_module_alias_ref': QUALIFIED_LINE_NEEDLE,
'rust_extern_ref': QUALIFIED_LINE_NEEDLE,
'rust_module_use': QUALIFIED_LINE_NEEDLE,
'rust_impl': QUALIFIED_LINE_NEEDLE,
'rust_fn_impls': QUALIFIED_LINE_NEEDLE,
'rust_bases': QUALIFIED_LINE_NEEDLE,
'rust_derived': QUALIFIED_LINE_NEEDLE,
'rust_callers': QUALIFIED_LINE_NEEDLE,
'rust_called_by': QUALIFIED_LINE_NEEDLE,
}
}
}
plugin = Plugin(filters=filters_from_namespace(filters.__dict__),
tree_to_index=TreeToIndex,
mappings=mappings,
refs=refs_from_namespace(refs.__dict__))
| mit | -8,749,179,638,295,343,000 | 36.721543 | 246 | 0.561699 | false |
olafhauk/mne-python | mne/io/bti/read.py | 14 | 2981 | # Authors: Denis A. Engemann <[email protected]>
# simplified BSD-3 license
import numpy as np
from ..utils import read_str
def _unpack_matrix(fid, rows, cols, dtype, out_dtype):
"""Unpack matrix."""
dtype = np.dtype(dtype)
string = fid.read(int(dtype.itemsize * rows * cols))
out = np.frombuffer(string, dtype=dtype).reshape(
rows, cols).astype(out_dtype)
return out
def _unpack_simple(fid, dtype, out_dtype):
"""Unpack a NumPy type."""
dtype = np.dtype(dtype)
string = fid.read(dtype.itemsize)
out = np.frombuffer(string, dtype=dtype).astype(out_dtype)
if len(out) > 0:
out = out[0]
return out
def read_char(fid, count=1):
"""Read character from bti file."""
return _unpack_simple(fid, '>S%s' % count, 'S')
def read_bool(fid):
"""Read bool value from bti file."""
return _unpack_simple(fid, '>?', bool)
def read_uint8(fid):
"""Read unsigned 8bit integer from bti file."""
return _unpack_simple(fid, '>u1', np.uint8)
def read_int8(fid):
"""Read 8bit integer from bti file."""
return _unpack_simple(fid, '>i1', np.int8)
def read_uint16(fid):
"""Read unsigned 16bit integer from bti file."""
return _unpack_simple(fid, '>u2', np.uint16)
def read_int16(fid):
"""Read 16bit integer from bti file."""
return _unpack_simple(fid, '>i2', np.int16)
def read_uint32(fid):
"""Read unsigned 32bit integer from bti file."""
return _unpack_simple(fid, '>u4', np.uint32)
def read_int32(fid):
"""Read 32bit integer from bti file."""
return _unpack_simple(fid, '>i4', np.int32)
def read_uint64(fid):
"""Read unsigned 64bit integer from bti file."""
return _unpack_simple(fid, '>u8', np.uint64)
def read_int64(fid):
"""Read 64bit integer from bti file."""
return _unpack_simple(fid, '>i8', np.int64)
def read_float(fid):
"""Read 32bit float from bti file."""
return _unpack_simple(fid, '>f4', np.float32)
def read_double(fid):
"""Read 64bit float from bti file."""
return _unpack_simple(fid, '>f8', np.float64)
def read_int16_matrix(fid, rows, cols):
"""Read 16bit integer matrix from bti file."""
return _unpack_matrix(fid, rows, cols, dtype='>i2',
out_dtype=np.int16)
def read_float_matrix(fid, rows, cols):
"""Read 32bit float matrix from bti file."""
return _unpack_matrix(fid, rows, cols, dtype='>f4',
out_dtype=np.float32)
def read_double_matrix(fid, rows, cols):
"""Read 64bit float matrix from bti file."""
return _unpack_matrix(fid, rows, cols, dtype='>f8',
out_dtype=np.float64)
def read_transform(fid):
"""Read 64bit float matrix transform from bti file."""
return read_double_matrix(fid, rows=4, cols=4)
def read_dev_header(x):
"""Create a dev header."""
return dict(size=read_int32(x), checksum=read_int32(x),
reserved=read_str(x, 32))
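# Hedged usage sketch (added for illustration; 'my_bti_file' is a placeholder
# path, not part of this module). All readers share the same pattern: hand
# them an open binary file object positioned at the field to decode, e.g.:
# with open('my_bti_file', 'rb') as fid:
#     n_chan = read_int16(fid)
#     dev_head_t = read_transform(fid)  # 4x4 float64, stored big-endian
#     assert dev_head_t.shape == (4, 4)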
| bsd-3-clause | -2,974,149,963,007,538,700 | 24.698276 | 62 | 0.621268 | false |
briancoutinho0905/2dsampling | ext/ply/test/yacc_error3.py | 174 | 1485 | # -----------------------------------------------------------------------------
# yacc_error3.py
#
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
precedence = (
('left','PLUS','MINUS'),
('left','TIMES','DIVIDE'),
('right','UMINUS'),
)
# dictionary of names
names = { }
def p_statement_assign(t):
'statement : NAME EQUALS expression'
names[t[1]] = t[3]
def p_statement_expr(t):
'statement : expression'
print(t[1])
def p_expression_binop(t):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
t[0] = -t[2]
def p_expression_group(t):
'expression : LPAREN expression RPAREN'
t[0] = t[2]
def p_expression_number(t):
'expression : NUMBER'
t[0] = t[1]
def p_expression_name(t):
'expression : NAME'
try:
t[0] = names[t[1]]
except LookupError:
print("Undefined name '%s'" % t[1])
t[0] = 0
p_error = "blah"
yacc.yacc()
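# Added note: because p_error above is a string rather than a callable, the
# yacc.yacc() call is expected to fail PLY's grammar validation -- provoking
# that error is the point of this test fixture.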
| bsd-3-clause | 2,357,410,049,017,217,000 | 21.164179 | 79 | 0.491582 | false |
ahmadRagheb/goldenHR | erpnext/config/hr.py | 19 | 6497 | from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Employee and Attendance"),
"items": [
{
"type": "doctype",
"name": "Employee",
"description": _("Employee records."),
},
{
"type": "doctype",
"name": "Employee Attendance Tool",
"label": _("Employee Attendance Tool"),
"description":_("Mark Attendance for multiple employees"),
"hide_count": True
},
{
"type": "doctype",
"name": "Attendance",
"description": _("Attendance record."),
},
{
"type": "doctype",
"name": "Upload Attendance",
"description":_("Upload attendance from a .csv file"),
"hide_count": True
},
]
},
{
"label": _("Recruitment"),
"items": [
{
"type": "doctype",
"name": "Job Applicant",
"description": _("Applicant for a Job."),
},
{
"type": "doctype",
"name": "Job Opening",
"description": _("Opening for a Job."),
},
{
"type": "doctype",
"name": "Offer Letter",
"description": _("Offer candidate a Job."),
},
]
},
{
"label": _("Leaves and Holiday"),
"items": [
{
"type": "doctype",
"name": "Leave Application",
"description": _("Applications for leave."),
},
{
"type": "doctype",
"name":"Leave Type",
"description": _("Type of leaves like casual, sick etc."),
},
{
"type": "doctype",
"name": "Holiday List",
"description": _("Holiday master.")
},
{
"type": "doctype",
"name": "Leave Allocation",
"description": _("Allocate leaves for a period.")
},
{
"type": "doctype",
"name": "Leave Control Panel",
"label": _("Leave Allocation Tool"),
"description":_("Allocate leaves for the year."),
"hide_count": True
},
{
"type": "doctype",
"name": "Leave Block List",
"description": _("Block leave applications by department.")
},
]
},
{
"label": _("Payroll"),
"items": [
{
"type": "doctype",
"name": "Salary Slip",
"description": _("Monthly salary statement."),
},
{
"type": "doctype",
"name": "Process Payroll",
"label": _("Process Payroll"),
"description":_("Generate Salary Slips"),
"hide_count": True
},
{
"type": "doctype",
"name": "Salary Structure",
"description": _("Salary template master.")
},
{
"type": "doctype",
"name": "Salary Component",
"label": _("Salary Components"),
"description": _("Earnings, Deductions and other Salary components")
},
]
},
{
"label": _("Expense Claims"),
"items": [
{
"type": "doctype",
"name": "Expense Claim",
"description": _("Claims for company expense."),
},
{
"type": "doctype",
"name": "Expense Claim Type",
"description": _("Types of Expense Claim.")
},
]
},
{
"label": _("Appraisals"),
"items": [
{
"type": "doctype",
"name": "Appraisal",
"description": _("Performance appraisal."),
},
{
"type": "doctype",
"name": "Appraisal Template",
"description": _("Template for performance appraisals.")
},
{
"type": "page",
"name": "team-updates",
"label": _("Team Updates")
},
]
},
{
"label": _("Employee Loan Management"),
"icon": "icon-list",
"items": [
{
"type": "doctype",
"name": "Loan Type",
"description": _("Define various loan types")
},
{
"type": "doctype",
"name": "Employee Loan Application",
"description": _("Employee Loan Application")
},
{
"type": "doctype",
"name": "Employee Loan"
},
]
},
{
"label": _("Training"),
"items": [
{
"type": "doctype",
"name": "Training Event"
},
{
"type": "doctype",
"name": "Training Result"
},
{
"type": "doctype",
"name": "Training Feedback"
},
]
},
{
"label": _("Fleet Management"),
"items": [
{
"type": "doctype",
"name": "Vehicle"
},
{
"type": "doctype",
"name": "Vehicle Log"
},
]
},
{
"label": _("Setup"),
"icon": "fa fa-cog",
"items": [
{
"type": "doctype",
"name": "HR Settings",
"description": _("Settings for HR Module")
},
{
"type": "doctype",
"name": "Employment Type",
"description": _("Types of employment (permanent, contract, intern etc.).")
},
{
"type": "doctype",
"name": "Branch",
"description": _("Organization branch master.")
},
{
"type": "doctype",
"name": "Department",
"description": _("Organization unit (department) master.")
},
{
"type": "doctype",
"name": "Designation",
"description": _("Employee designation (e.g. CEO, Director etc.).")
},
{
"type": "doctype",
"name": "Daily Work Summary Settings"
},
]
},
{
"label": _("Reports"),
"icon": "fa fa-list",
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Employee Leave Balance",
"doctype": "Leave Application"
},
{
"type": "report",
"is_query_report": True,
"name": "Employee Birthday",
"doctype": "Employee"
},
{
"type": "report",
"is_query_report": True,
"name": "Employees working on a holiday",
"doctype": "Employee"
},
{
"type": "report",
"name": "Employee Information",
"doctype": "Employee"
},
{
"type": "report",
"is_query_report": True,
"name": "Salary Register",
"doctype": "Salary Slip"
},
{
"type": "report",
"is_query_report": True,
"name": "Monthly Attendance Sheet",
"doctype": "Attendance"
},
{
"type": "report",
"is_query_report": True,
"name": "Vehicle Expenses",
"doctype": "Vehicle"
},
]
},
{
"label": _("Help"),
"icon": "fa fa-facetime-video",
"items": [
{
"type": "help",
"label": _("Setting up Employees"),
"youtube_id": "USfIUdZlUhw"
},
{
"type": "help",
"label": _("Leave Management"),
"youtube_id": "fc0p_AXebc8"
},
{
"type": "help",
"label": _("Expense Claims"),
"youtube_id": "5SZHJF--ZFY"
},
{
"type": "help",
"label": _("Processing Payroll"),
"youtube_id": "apgE-f25Rm0"
},
]
}
]
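# Hedged usage sketch (added; assumes an initialized Frappe site context so
# that the `_` translation helper works):
# for section in get_data():
#     print(section["label"], [i.get("name") for i in section["items"]])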
| gpl-3.0 | 3,082,866,873,085,429,000 | 19.560127 | 80 | 0.487764 | false |
fhaoquan/kbengine | kbe/src/lib/python/Lib/tkinter/_fix.py | 76 | 2897 | import sys, os
# Delay import _tkinter until we have set TCL_LIBRARY,
# so that Tcl_FindExecutable has a chance to locate its
# encoding directory.
# Unfortunately, we cannot know the TCL_LIBRARY directory
# if we don't know the tcl version, which we cannot find out
# without importing Tcl. Fortunately, Tcl will itself look in
# <TCL_LIBRARY>\..\tcl<TCL_VERSION>, so anything close to
# the real Tcl library will do.
# Expand symbolic links on Vista
try:
import ctypes
ctypes.windll.kernel32.GetFinalPathNameByHandleW
except (ImportError, AttributeError):
def convert_path(s):
return s
else:
def convert_path(s):
if isinstance(s, bytes):
s = s.decode("mbcs")
hdir = ctypes.windll.kernel32.\
CreateFileW(s, 0x80, # FILE_READ_ATTRIBUTES
1, # FILE_SHARE_READ
None, 3, # OPEN_EXISTING
0x02000000, # FILE_FLAG_BACKUP_SEMANTICS
None)
if hdir == -1:
# Cannot open directory, give up
return s
buf = ctypes.create_unicode_buffer("", 32768)
res = ctypes.windll.kernel32.\
GetFinalPathNameByHandleW(hdir, buf, len(buf),
0) # VOLUME_NAME_DOS
ctypes.windll.kernel32.CloseHandle(hdir)
if res == 0:
# Conversion failed (e.g. network location)
return s
s = buf[:res]
# Ignore leading \\?\
if s.startswith("\\\\?\\"):
s = s[4:]
if s.startswith("UNC"):
s = "\\" + s[3:]
return s
prefix = os.path.join(sys.base_prefix,"tcl")
if not os.path.exists(prefix):
# devdir/../tcltk/lib
prefix = os.path.join(sys.base_prefix, os.path.pardir, "tcltk", "lib")
prefix = os.path.abspath(prefix)
# if this does not exist, no further search is needed
if os.path.exists(prefix):
prefix = convert_path(prefix)
if "TCL_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tcl"):
tcldir = os.path.join(prefix,name)
if os.path.isdir(tcldir):
os.environ["TCL_LIBRARY"] = tcldir
# Compute TK_LIBRARY, knowing that it has the same version
# as Tcl
import _tkinter
ver = str(_tkinter.TCL_VERSION)
if "TK_LIBRARY" not in os.environ:
v = os.path.join(prefix, 'tk'+ver)
if os.path.exists(os.path.join(v, "tclIndex")):
os.environ['TK_LIBRARY'] = v
# We don't know the Tix version, so we must search the entire
# directory
if "TIX_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tix"):
tixdir = os.path.join(prefix,name)
if os.path.isdir(tixdir):
os.environ["TIX_LIBRARY"] = tixdir
| lgpl-3.0 | 3,164,252,328,874,640,000 | 36.141026 | 74 | 0.574732 | false |
CG-F16-4-Rutgers/steersuite-rutgers | steerstats/steersuitedb/Test.py | 8 | 7607 | import psycopg2
import psycopg2.extras
from steersuitedb.Util import getTime
from Sequence import TestSequence
# this is not completely encapsulated by another transaction so it should
# be used by the client when inserting data
class Test(object):
"""A simple example class"""
__id_name = "test_id"
__table_name = "test"
__insert_order = "(test_id, algorithm_data_id, test_timestamp, test_comments, benchmark_type, test_case, test_status, scenario_group, num_agents, num_obstacles)"
#test
#(
# test_id integer NOT NULL primary key,
# algorithm_data_id int NOT NULL references algorithm_data(algorithm_data_id),
# test_timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
# test_comments text NOT NULL,
# benchmark_type int NOT NULL references benchmark(benchmark_id),
# test_case text NOT NULL,
# test_status int NOT NULL,
# scenario_group int Not NULL references scenario(scenario_group)
#)
def __init__(self, algorithm_data_id=0, test_comments="", benchmark_type=0,
test_case="", test_status=0, scenario_group=0, num_agents=0, num_obstacles=0):
self._algorithm_data_id = algorithm_data_id
self._test_comments = test_comments
self._benchmark_type = benchmark_type
self._test_case = test_case
self._test_status = test_status
self._scenario_group = scenario_group
self._num_agents = num_agents
self._num_obstacles = num_obstacles
# Returns a Test object
def getTestData(self, cur, n):
cur.execute("SELECT * FROM " + self.__table_name + " where " + self.__id_name + " = %s", (n,))
row = cur.fetchone()
testy = Test(row['algorithm_data_id'], row['test_comments'], row['benchmark_type'], row['test_case'], row['test_status'], row['scenario_group'], row['num_agents'], row['num_obstacles'])
return testy
def getTestXML(self):
return self._test_case
def setBenchmarkValuesFromDict(self, valuesDict):
self._algorithm_data_id=valuesDict['algorithm_data_id']
self._test_comments=valuesDict['test_comments']
self._benchmark_type=valuesDict['benchmark_type']
self._test_case=valuesDict['test_case']
self._test_status=valuesDict['test_status']
self._scenario_group=valuesDict['scenario_group']
self._num_agents=valuesDict['num_agents']
self._num_obstacles=valuesDict['num_obstacles']
# Returns the new test id, or None if the insert did not work
def insertTest(self, cur, algorithm_data_id, test_comments, benchmark_type, test_case, test_status, scenario_group, num_agents, num_obstacles):
try:
testSeq = TestSequence()
next_id = testSeq.getNextVal(cur)
# print "obstacles: " + str(num_obstacles)
# print "agents: " + str(num_agents)
# get the current timestamp
timestamp = getTime(cur)
cur.execute("INSERT INTO " + self.__table_name + " " +
self.__insert_order + " " +
"VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" ,
(
str(next_id) ,
str(algorithm_data_id) ,
str(timestamp) ,
test_comments ,
str(benchmark_type),
test_case ,
str(test_status) ,
str(scenario_group),
num_agents,
num_obstacles
)
)
return next_id
except psycopg2.DatabaseError, e:
print 'Error in Test.py: %s' % e
def insertTest2(self, cur):
return self.insertTest(cur, self._algorithm_data_id, self._test_comments, self._benchmark_type, self._test_case, self._test_status, self._scenario_group,
self._num_agents, self._num_obstacles)
def get_id_name(self):
return self._id_name
def get_table_name(self):
return self._table_name
def get_insert_order(self):
return self._insert_order
def get_algorithm_data_id(self):
return self._algorithm_data_id
def get_test_comments(self):
return self._test_comments
def get_benchmark_type(self):
return self._benchmark_type
def get_test_case(self):
return self._test_case
def get_test_status(self):
return self._test_status
def get_scenario_group(self):
return self._scenario_group
def get_num_agents(self):
return self._num_agents
def get_num_obstacles(self):
return self._num_obstacles
def set_id_name(self, value):
self._id_name = value
def set_table_name(self, value):
self._table_name = value
def set_insert_order(self, value):
self._insert_order = value
def set_algorithm_data_id(self, value):
self._algorithm_data_id = value
def set_test_comments(self, value):
self._test_comments = value
def set_benchmark_type(self, value):
self._benchmark_type = value
def set_test_case(self, value):
self._test_case = value
def set_test_status(self, value):
self._test_status = value
def set_scenario_group(self, value):
self._scenario_group = value
def set_num_agents(self, value):
self._num_agents = value
def set_num_obstacles(self, value):
self._num_obstacles = value
def del_id_name(self):
del self._id_name
def del_table_name(self):
del self._table_name
def del_insert_order(self):
del self._insert_order
def del_algorithm_data_id(self):
del self._algorithm_data_id
def del_test_comments(self):
del self._test_comments
def del_benchmark_type(self):
del self._benchmark_type
def del_test_case(self):
del self._test_case
def del_test_status(self):
del self._test_status
def del_scenario_group(self):
del self._scenario_group
def del_num_agents(self):
del self._num_agents
def del_num_obstacles(self):
del self._num_obstacles
id_name = property(get_id_name, set_id_name, del_id_name, "id_name's docstring")
table_name = property(get_table_name, set_table_name, del_table_name, "table_name's docstring")
insert_order = property(get_insert_order, set_insert_order, del_insert_order, "insert_order's docstring")
algorithm_data_id = property(get_algorithm_data_id, set_algorithm_data_id, del_algorithm_data_id, "algorithm_data_id's docstring")
test_comments = property(get_test_comments, set_test_comments, del_test_comments, "test_comments's docstring")
benchmark_type = property(get_benchmark_type, set_benchmark_type, del_benchmark_type, "benchmark_type's docstring")
test_case = property(get_test_case, set_test_case, del_test_case, "test_case's docstring")
test_status = property(get_test_status, set_test_status, del_test_status, "test_status's docstring")
scenario_group = property(get_scenario_group, set_scenario_group, del_scenario_group, "scenario_group's docstring")
num_agents = property(get_num_agents, set_num_agents, del_num_agents, "num_agents's docstring")
num_obstacles = property(get_num_obstacles, set_num_obstacles, del_num_obstacles, "num_obstacles's docstring")
# sys.exit(1)
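# Hedged usage sketch (added; connection parameters and field values are
# hypothetical -- a dict-style cursor is assumed because getTestData reads
# rows by column name):
# conn = psycopg2.connect("dbname=steersuite")
# cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
# t = Test(algorithm_data_id=1, test_comments="smoke test", benchmark_type=1,
#          test_case="<testcase/>", test_status=0, scenario_group=1,
#          num_agents=10, num_obstacles=4)
# new_id = t.insertTest2(cur)
# conn.commit()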
| gpl-3.0 | -3,820,726,596,370,665,500 | 30.695833 | 193 | 0.604838 | false |
valkjsaaa/sl4a | python/gdata/tests/gdata_tests/youtube/service_test.py | 89 | 23513 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Jochen Hartmann)'
import getpass
import time
import StringIO
import random
import unittest
import atom
import gdata.youtube
import gdata.youtube.service
YOUTUBE_TEST_CLIENT_ID = 'ytapi-pythonclientlibrary_servicetest'
class YouTubeServiceTest(unittest.TestCase):
def setUp(self):
self.client = gdata.youtube.service.YouTubeService()
self.client.email = username
self.client.password = password
self.client.source = YOUTUBE_TEST_CLIENT_ID
self.client.developer_key = developer_key
self.client.client_id = YOUTUBE_TEST_CLIENT_ID
self.client.ProgrammaticLogin()
def testRetrieveVideoFeed(self):
feed = self.client.GetYouTubeVideoFeed(
'http://gdata.youtube.com/feeds/api/standardfeeds/recently_featured');
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
for entry in feed.entry:
self.assert_(entry.title.text != '')
def testRetrieveTopRatedVideoFeed(self):
feed = self.client.GetTopRatedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostViewedVideoFeed(self):
feed = self.client.GetMostViewedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveRecentlyFeaturedVideoFeed(self):
feed = self.client.GetRecentlyFeaturedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveWatchOnMobileVideoFeed(self):
feed = self.client.GetWatchOnMobileVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveTopFavoritesVideoFeed(self):
feed = self.client.GetTopFavoritesVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRecentVideoFeed(self):
feed = self.client.GetMostRecentVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostDiscussedVideoFeed(self):
feed = self.client.GetMostDiscussedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostLinkedVideoFeed(self):
feed = self.client.GetMostLinkedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRespondedVideoFeed(self):
feed = self.client.GetMostRespondedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveVideoEntryByUri(self):
entry = self.client.GetYouTubeVideoEntry(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveVideoEntryByVideoId(self):
entry = self.client.GetYouTubeVideoEntry(video_id='Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveUserVideosbyUri(self):
feed = self.client.GetYouTubeUserFeed(
'http://gdata.youtube.com/feeds/users/gdpython/uploads')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveUserVideosbyUsername(self):
feed = self.client.GetYouTubeUserFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testSearchWithVideoQuery(self):
query = gdata.youtube.service.YouTubeVideoQuery()
query.vq = 'google'
query.max_results = 8
feed = self.client.YouTubeQuery(query)
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assertEquals(len(feed.entry), 8)
def testDirectVideoUploadStatusUpdateAndDeletion(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
# Set Geo location to 37,-122 lat, long
where = gdata.geo.Where()
where.set_location((37.0,-122.0))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group,
geo=where)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
# check upload status also
upload_status = self.client.CheckUploadStatus(new_entry)
self.assert_(upload_status[0] != '')
# test updating entry meta-data
new_video_description = 'description ' + str(random.randint(1000,5000))
new_entry.media.description.text = new_video_description
updated_entry = self.client.UpdateVideoEntry(new_entry)
self.assert_(isinstance(updated_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(updated_entry.media.description.text,
new_video_description)
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
self.assert_(value == True)
def testDirectVideoUploadWithDeveloperTags(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
test_developer_tag_01 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_02 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_03 = 'tag' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = [gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos')],
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
original_developer_tags = [test_developer_tag_01, test_developer_tag_02,
test_developer_tag_03]
dev_tags = video_entry.AddDeveloperTags(original_developer_tags)
for dev_tag in dev_tags:
self.assert_(dev_tag.text in original_developer_tags)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
developer_tags_from_new_entry = new_entry.GetDeveloperTags()
for dev_tag in developer_tags_from_new_entry:
self.assert_(dev_tag.text in original_developer_tags)
self.assertEquals(len(developer_tags_from_new_entry),
len(original_developer_tags))
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
self.assert_(value == True)
def testBrowserBasedVideoUpload(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
response = self.client.GetFormUploadToken(video_entry)
self.assert_(response[0].startswith(
'http://uploads.gdata.youtube.com/action/FormDataUpload/'))
self.assert_(len(response[0]) > 55)
self.assert_(len(response[1]) > 100)
def testRetrieveRelatedVideoFeedByUri(self):
feed = self.client.GetYouTubeRelatedVideoFeed(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k/related')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveRelatedVideoFeedById(self):
feed = self.client.GetYouTubeRelatedVideoFeed(video_id = 'Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedByUri(self):
feed = self.client.GetYouTubeVideoResponseFeed(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k/responses')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedById(self):
feed = self.client.GetYouTubeVideoResponseFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByUri(self):
feed = self.client.GetYouTubeVideoCommentFeed(
'http://gdata.youtube.com/feeds/api/videos/Ncakifd_16k/comments')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByVideoId(self):
feed = self.client.GetYouTubeVideoCommentFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testAddComment(self):
video_id = '9g6buYJTt_g'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
random_comment_text = 'test_comment_' + str(random.randint(1000,50000))
self.client.AddComment(comment_text=random_comment_text,
video_entry=video_entry)
comment_feed = self.client.GetYouTubeVideoCommentFeed(video_id=video_id)
comment_found = False
for item in comment_feed.entry:
if (item.content.text == random_comment_text):
comment_found = True
self.assertEquals(comment_found, True)
def testAddRating(self):
video_id_to_rate = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id_to_rate)
response = self.client.AddRating(3, video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
def testRetrievePlaylistFeedByUri(self):
feed = self.client.GetYouTubePlaylistFeed(
'http://gdata.youtube.com/feeds/users/gdpython/playlists')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistListFeedByUsername(self):
feed = self.client.GetYouTubePlaylistFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistVideoFeed(self):
feed = self.client.GetYouTubePlaylistVideoFeed(
'http://gdata.youtube.com/feeds/api/playlists/BCB3BB96DF51B505')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistVideoFeed))
self.assert_(len(feed.entry) > 0)
self.assert_(isinstance(feed.entry[0],
gdata.youtube.YouTubePlaylistVideoEntry))
def testAddUpdateAndDeletePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
break
self.assertEquals(update_successful, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddUpdateAndDeletePrivatePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description,
playlist_private=True)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description,
playlist_private=True)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
playlist_still_private = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
if playlist_entry.private is not None:
playlist_still_private = True
self.assertEquals(update_successful, True)
self.assertEquals(playlist_still_private, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddEditAndDeleteVideoFromPlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
custom_video_title = 'my test video on my test playlist'
custom_video_description = 'this is a test video on my test playlist'
video_id = 'Ncakifd_16k'
playlist_uri = response.feed_link[0].href
time.sleep(10)
response = self.client.AddPlaylistVideoEntryToPlaylist(
playlist_uri, video_id, custom_video_title, custom_video_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
playlist_entry_id = response.id.text.split('/')[-1]
playlist_uri = response.id.text.split(playlist_entry_id)[0][:-1]
new_video_title = 'video number ' + str(random.randint(1000,3000))
new_video_description = 'test video'
time.sleep(10)
response = self.client.UpdatePlaylistVideoEntryMetaData(
playlist_uri,
playlist_entry_id,
new_video_title,
new_video_description,
1)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
time.sleep(10)
playlist_entry_id = response.id.text.split('/')[-1]
# remove video from playlist
response = self.client.DeletePlaylistVideoEntry(playlist_uri,
playlist_entry_id)
self.assertEquals(response, True)
time.sleep(10)
# delete the playlist
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testRetrieveSubscriptionFeedByUri(self):
feed = self.client.GetYouTubeSubscriptionFeed(
'http://gdata.youtube.com/feeds/users/gdpython/subscriptions')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveSubscriptionFeedByUsername(self):
feed = self.client.GetYouTubeSubscriptionFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveUserProfileByUri(self):
user = self.client.GetYouTubeUserEntry(
'http://gdata.youtube.com/feeds/users/gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserProfileByUsername(self):
user = self.client.GetYouTubeUserEntry(username='gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveDefaultUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testAddAndDeleteVideoFromFavorites(self):
video_id = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
response = self.client.AddVideoEntryToFavorites(video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
time.sleep(10)
response = self.client.DeleteVideoEntryFromFavorites(video_id)
self.assertEquals(response, True)
def testRetrieveContactFeedByUri(self):
feed = self.client.GetYouTubeContactFeed(
'http://gdata.youtube.com/feeds/users/gdpython/contacts')
self.assert_(isinstance(feed, gdata.youtube.YouTubeContactFeed))
self.assertEquals(len(feed.entry), 1)
def testRetrieveContactFeedByUsername(self):
feed = self.client.GetYouTubeContactFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeContactFeed))
self.assertEquals(len(feed.entry), 1)
if __name__ == '__main__':
print ('NOTE: Please run these tests only with a test account. '
'The tests may delete or update your data.')
username = raw_input('Please enter your username: ')
password = getpass.getpass()
developer_key = raw_input('Please enter your developer key: ')
video_file_location = raw_input(
'Please enter the absolute path to a video file: ')
unittest.main()
| apache-2.0 | 9,138,632,734,908,374,000 | 39.821181 | 80 | 0.702505 | false |
yanheven/console | openstack_dashboard/dashboards/project/network_topology/routers/tables.py | 8 | 1108 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 NTT Innovation Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project.routers import\
tables as r_tables
class DeleteRouter(r_tables.DeleteRouter):
redirect_url = "horizon:project:network_topology:router"
class RoutersTable(r_tables.RoutersTable):
class Meta:
name = "Routers"
verbose_name = _("Routers")
status_columns = ["status"]
row_actions = (DeleteRouter,)
| apache-2.0 | 45,672,959,536,783,200 | 34.741935 | 78 | 0.720217 | false |
bartvm/pylearn2 | pylearn2/datasets/tests/test_dense_design_matrix.py | 12 | 3329 | import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrixPyTables
from pylearn2.datasets.dense_design_matrix import DefaultViewConverter
from pylearn2.datasets.dense_design_matrix import from_dataset
from pylearn2.utils import serial
def test_init_with_X_or_topo():
# tests that constructing with topo_view works
# tests that construction with design matrix works
# tests that conversion from topo_view to design matrix and back works
# tests that conversion the other way works too
rng = np.random.RandomState([1, 2, 3])
topo_view = rng.randn(5, 2, 2, 3)
d1 = DenseDesignMatrix(topo_view=topo_view)
X = d1.get_design_matrix()
d2 = DenseDesignMatrix(X=X, view_converter=d1.view_converter)
topo_view_2 = d2.get_topological_view()
assert np.allclose(topo_view, topo_view_2)
X = rng.randn(*X.shape)
topo_view_3 = d2.get_topological_view(X)
X2 = d2.get_design_matrix(topo_view_3)
assert np.allclose(X, X2)
def test_convert_to_one_hot():
rng = np.random.RandomState([2013, 11, 14])
m = 11
d = DenseDesignMatrix(
X=rng.randn(m, 4),
y=rng.randint(low=0, high=10, size=(m,)))
d.convert_to_one_hot()
def test_init_with_vc():
rng = np.random.RandomState([4, 5, 6])
d = DenseDesignMatrix(
X=rng.randn(12, 5),
view_converter=DefaultViewConverter([1, 2, 3]))
def get_rnd_design_matrix():
rng = np.random.RandomState([1, 2, 3])
topo_view = rng.randn(10, 2, 2, 3)
d1 = DenseDesignMatrix(topo_view=topo_view)
return d1
def test_split_datasets():
# Test the split dataset function.
ddm = get_rnd_design_matrix()
(train, valid) = ddm.split_dataset_holdout(train_prop=0.5)
assert valid.shape[0] == np.ceil(ddm.get_num_examples() * 0.5)
assert train.shape[0] == (ddm.get_num_examples() - valid.shape[0])
def test_split_nfold_datasets():
# Load and create ddm from cifar100
ddm = get_rnd_design_matrix()
folds = ddm.split_dataset_nfolds(10)
assert folds[0].shape[0] == np.ceil(ddm.get_num_examples() / 10)
def test_pytables():
"""
tests wether DenseDesignMatrixPyTables can be loaded and
initialize iterator
"""
# TODO more through test
x = np.ones((2, 3))
y = np.ones(2)
ds = DenseDesignMatrixPyTables(X=x, y=y)
it = ds.iterator(mode='sequential', batch_size=1)
it.next()
def test_from_dataset():
"""
Tests whether it supports integer labels.
"""
rng = np.random.RandomState([1, 2, 3])
topo_view = rng.randn(12, 2, 3, 3)
y = rng.randint(0, 5, (12, 1))
# without y:
d1 = DenseDesignMatrix(topo_view=topo_view)
slice_d = from_dataset(d1, 5)
assert slice_d.X.shape[1] == d1.X.shape[1]
assert slice_d.X.shape[0] == 5
# with y:
d2 = DenseDesignMatrix(topo_view=topo_view, y=y)
slice_d = from_dataset(d2, 5)
assert slice_d.X.shape[1] == d2.X.shape[1]
assert slice_d.X.shape[0] == 5
assert slice_d.y.shape[0] == 5
# without topo_view:
x = topo_view.reshape(12, 18)
d3 = DenseDesignMatrix(X=x, y=y)
slice_d = from_dataset(d3, 5)
assert slice_d.X.shape[1] == d3.X.shape[1]
assert slice_d.X.shape[0] == 5
assert slice_d.y.shape[0] == 5
| bsd-3-clause | -4,092,660,507,010,322,000 | 29.824074 | 75 | 0.651247 | false |
bopo/tablib | tablib/packages/xlwt3/ExcelFormulaParser.py | 46 | 22812 | ### $ANTLR 2.7.7 (20060930): "xlwt/excel-formula.g" -> "ExcelFormulaParser.py"$
### import antlr and other modules ..
import sys
from . import antlr
### header action >>>
import struct
from . import Utils
from .UnicodeUtils import upack1
from .ExcelMagic import *
_RVAdelta = {"R": 0, "V": 0x20, "A": 0x40}
_RVAdeltaRef = {"R": 0, "V": 0x20, "A": 0x40, "D": 0x20}
_RVAdeltaArea = {"R": 0, "V": 0x20, "A": 0x40, "D": 0}
class FormulaParseException(Exception):
"""
An exception indicating that a Formula could not be successfully parsed.
"""
### header action <<<
### preamble action>>>
### preamble action <<<
### import antlr.Token
from .antlr import Token
### >>>The Known Token Types <<<
SKIP = antlr.SKIP
INVALID_TYPE = antlr.INVALID_TYPE
EOF_TYPE = antlr.EOF_TYPE
EOF = antlr.EOF
NULL_TREE_LOOKAHEAD = antlr.NULL_TREE_LOOKAHEAD
MIN_USER_TYPE = antlr.MIN_USER_TYPE
TRUE_CONST = 4
FALSE_CONST = 5
STR_CONST = 6
NUM_CONST = 7
INT_CONST = 8
FUNC_IF = 9
FUNC_CHOOSE = 10
NAME = 11
QUOTENAME = 12
EQ = 13
NE = 14
GT = 15
LT = 16
GE = 17
LE = 18
ADD = 19
SUB = 20
MUL = 21
DIV = 22
POWER = 23
PERCENT = 24
LP = 25
RP = 26
LB = 27
RB = 28
COLON = 29
COMMA = 30
SEMICOLON = 31
REF2D = 32
REF2D_R1C1 = 33
BANG = 34
CONCAT = 35
class Parser(antlr.LLkParser):
### user action >>>
### user action <<<
def __init__(self, *args, **kwargs):
antlr.LLkParser.__init__(self, *args, **kwargs)
self.tokenNames = _tokenNames
### __init__ header action >>>
self.rpn = b""
self.sheet_references = []
self.xcall_references = []
### __init__ header action <<<
def formula(self):
self.expr("V")
def expr(self, arg_type):
self.prec0_expr(arg_type)
while True:
if ((self.LA(1) >= EQ and self.LA(1) <= LE)):
pass
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [EQ]:
pass
self.match(EQ)
op = struct.pack('B', ptgEQ)
elif la1 and la1 in [NE]:
pass
self.match(NE)
op = struct.pack('B', ptgNE)
elif la1 and la1 in [GT]:
pass
self.match(GT)
op = struct.pack('B', ptgGT)
elif la1 and la1 in [LT]:
pass
self.match(LT)
op = struct.pack('B', ptgLT)
elif la1 and la1 in [GE]:
pass
self.match(GE)
op = struct.pack('B', ptgGE)
elif la1 and la1 in [LE]:
pass
self.match(LE)
op = struct.pack('B', ptgLE)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.prec0_expr(arg_type)
self.rpn += op
else:
break
def prec0_expr(self,
arg_type
):
pass
self.prec1_expr(arg_type)
while True:
if (self.LA(1)==CONCAT):
pass
pass
self.match(CONCAT)
op = struct.pack('B', ptgConcat)
self.prec1_expr(arg_type)
self.rpn += op
else:
break
def prec1_expr(self,
arg_type
):
pass
self.prec2_expr(arg_type)
while True:
if (self.LA(1)==ADD or self.LA(1)==SUB):
pass
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [ADD]:
pass
self.match(ADD)
op = struct.pack('B', ptgAdd)
elif la1 and la1 in [SUB]:
pass
self.match(SUB)
op = struct.pack('B', ptgSub)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.prec2_expr(arg_type)
self.rpn += op
# print "**prec1_expr4 %s" % arg_type
else:
break
def prec2_expr(self,
arg_type
):
pass
self.prec3_expr(arg_type)
while True:
if (self.LA(1)==MUL or self.LA(1)==DIV):
pass
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [MUL]:
pass
self.match(MUL)
op = struct.pack('B', ptgMul)
elif la1 and la1 in [DIV]:
pass
self.match(DIV)
op = struct.pack('B', ptgDiv)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.prec3_expr(arg_type)
self.rpn += op
else:
break
def prec3_expr(self,
arg_type
):
pass
self.prec4_expr(arg_type)
while True:
if (self.LA(1)==POWER):
pass
pass
self.match(POWER)
op = struct.pack('B', ptgPower)
self.prec4_expr(arg_type)
self.rpn += op
else:
break
def prec4_expr(self,
arg_type
):
pass
self.prec5_expr(arg_type)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [PERCENT]:
pass
self.match(PERCENT)
self.rpn += struct.pack('B', ptgPercent)
elif la1 and la1 in [EOF,EQ,NE,GT,LT,GE,LE,ADD,SUB,MUL,DIV,POWER,RP,COMMA,SEMICOLON,CONCAT]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
def prec5_expr(self,
arg_type
):
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,LP,REF2D]:
pass
self.primary(arg_type)
elif la1 and la1 in [SUB]:
pass
self.match(SUB)
self.primary(arg_type)
self.rpn += struct.pack('B', ptgUminus)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
def primary(self,
arg_type
):
str_tok = None
int_tok = None
num_tok = None
ref2d_tok = None
ref2d1_tok = None
ref2d2_tok = None
ref3d_ref2d = None
ref3d_ref2d2 = None
name_tok = None
func_tok = None
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST]:
pass
self.match(TRUE_CONST)
self.rpn += struct.pack("2B", ptgBool, 1)
elif la1 and la1 in [FALSE_CONST]:
pass
self.match(FALSE_CONST)
self.rpn += struct.pack("2B", ptgBool, 0)
elif la1 and la1 in [STR_CONST]:
pass
str_tok = self.LT(1)
self.match(STR_CONST)
self.rpn += struct.pack("B", ptgStr) + upack1(str_tok.text[1:-1].replace("\"\"", "\""))
elif la1 and la1 in [NUM_CONST]:
pass
num_tok = self.LT(1)
self.match(NUM_CONST)
self.rpn += struct.pack("<Bd", ptgNum, float(num_tok.text))
elif la1 and la1 in [FUNC_IF]:
pass
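            # IF(cond, t, f) is compiled as: cond, tAttrIf(jump -> f),
            # t, tAttrSkip(jump -> end), f, tAttrSkip(3), tFuncVar(IF, 3).
            # The two jump distances are unknown until each branch has
            # been emitted, so they are back-patched at pos0/pos1 below.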
self.match(FUNC_IF)
self.match(LP)
self.expr("V")
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<BBH", ptgAttr, 0x02, 0) # tAttrIf
pos0 = len(self.rpn) - 2
self.expr(arg_type)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<BBH", ptgAttr, 0x08, 0) # tAttrSkip
pos1 = len(self.rpn) - 2
self.rpn = self.rpn[:pos0] + struct.pack("<H", pos1-pos0) + self.rpn[pos0+2:]
self.expr(arg_type)
self.match(RP)
self.rpn += struct.pack("<BBH", ptgAttr, 0x08, 3) # tAttrSkip
self.rpn += struct.pack("<BBH", ptgFuncVarR, 3, 1) # 3 = nargs, 1 = IF func
pos2 = len(self.rpn)
self.rpn = self.rpn[:pos1] + struct.pack("<H", pos2-(pos1+2)-1) + self.rpn[pos1+2:]
elif la1 and la1 in [FUNC_CHOOSE]:
pass
self.match(FUNC_CHOOSE)
arg_type = b"R"
rpn_chunks = []
self.match(LP)
self.expr("V")
rpn_start = len(self.rpn)
ref_markers = [len(self.sheet_references)]
while True:
if (self.LA(1)==COMMA or self.LA(1)==SEMICOLON):
pass
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
mark = len(self.rpn)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
pass
self.expr(arg_type)
elif la1 and la1 in [RP,COMMA,SEMICOLON]:
pass
self.rpn += struct.pack("B", ptgMissArg)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
rpn_chunks.append(self.rpn[mark:])
ref_markers.append(len(self.sheet_references))
else:
break
self.match(RP)
self.rpn = self.rpn[:rpn_start]
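            # CHOOSE(n, v1, v2, ...) is rebuilt here as a tAttrChoose
            # construct: a (nc+1)-entry word jump table, then each value
            # chunk followed by a tAttrSkip to the end, and finally
            # tFuncVar(CHOOSE). Sheet references recorded while parsing
            # the chunks are shifted to account for the inserted bytes.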
nc = len(rpn_chunks)
chunklens = [len(chunk) for chunk in rpn_chunks]
skiplens = [0] * nc
skiplens[-1] = 3
for ic in range(nc-1, 0, -1):
skiplens[ic-1] = skiplens[ic] + chunklens[ic] + 4
jump_pos = [2 * nc + 2]
for ic in range(nc):
jump_pos.append(jump_pos[-1] + chunklens[ic] + 4)
chunk_shift = 2 * nc + 6 # size of tAttrChoose
for ic in range(nc):
for refx in range(ref_markers[ic], ref_markers[ic+1]):
ref = self.sheet_references[refx]
self.sheet_references[refx] = (ref[0], ref[1], ref[2] + chunk_shift)
chunk_shift += 4 # size of tAttrSkip
choose_rpn = []
choose_rpn.append(struct.pack("<BBH", ptgAttr, 0x04, nc)) # 0x04 is tAttrChoose
choose_rpn.append(struct.pack("<%dH" % (nc+1), *jump_pos))
for ic in range(nc):
choose_rpn.append(rpn_chunks[ic])
choose_rpn.append(struct.pack("<BBH", ptgAttr, 0x08, skiplens[ic])) # 0x08 is tAttrSkip
choose_rpn.append(struct.pack("<BBH", ptgFuncVarV, nc+1, 100)) # 100 is CHOOSE fn
self.rpn += b"".join(choose_rpn)
elif la1 and la1 in [LP]:
pass
self.match(LP)
self.expr(arg_type)
self.match(RP)
self.rpn += struct.pack("B", ptgParen)
else:
if (self.LA(1)==INT_CONST) and (_tokenSet_0.member(self.LA(2))):
pass
int_tok = self.LT(1)
self.match(INT_CONST)
# print "**int_const", int_tok.text
int_value = int(int_tok.text)
if int_value <= 65535:
self.rpn += struct.pack("<BH", ptgInt, int_value)
else:
self.rpn += struct.pack("<Bd", ptgNum, float(int_value))
elif (self.LA(1)==REF2D) and (_tokenSet_0.member(self.LA(2))):
pass
ref2d_tok = self.LT(1)
self.match(REF2D)
# print "**ref2d %s %s" % (ref2d_tok.text, arg_type)
r, c = Utils.cell_to_packed_rowcol(ref2d_tok.text)
ptg = ptgRefR + _RVAdeltaRef[arg_type]
self.rpn += struct.pack("<B2H", ptg, r, c)
elif (self.LA(1)==REF2D) and (self.LA(2)==COLON):
pass
ref2d1_tok = self.LT(1)
self.match(REF2D)
self.match(COLON)
ref2d2_tok = self.LT(1)
self.match(REF2D)
r1, c1 = Utils.cell_to_packed_rowcol(ref2d1_tok.text)
r2, c2 = Utils.cell_to_packed_rowcol(ref2d2_tok.text)
ptg = ptgAreaR + _RVAdeltaArea[arg_type]
self.rpn += struct.pack("<B4H", ptg, r1, r2, c1, c2)
elif (self.LA(1)==INT_CONST or self.LA(1)==NAME or self.LA(1)==QUOTENAME) and (self.LA(2)==COLON or self.LA(2)==BANG):
pass
sheet1=self.sheet()
sheet2 = sheet1
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [COLON]:
pass
self.match(COLON)
sheet2=self.sheet()
elif la1 and la1 in [BANG]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.match(BANG)
ref3d_ref2d = self.LT(1)
self.match(REF2D)
ptg = ptgRef3dR + _RVAdeltaRef[arg_type]
rpn_ref2d = b""
r1, c1 = Utils.cell_to_packed_rowcol(ref3d_ref2d.text)
rpn_ref2d = struct.pack("<3H", 0x0000, r1, c1)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [COLON]:
pass
self.match(COLON)
ref3d_ref2d2 = self.LT(1)
self.match(REF2D)
ptg = ptgArea3dR + _RVAdeltaArea[arg_type]
r2, c2 = Utils.cell_to_packed_rowcol(ref3d_ref2d2.text)
rpn_ref2d = struct.pack("<5H", 0x0000, r1, r2, c1, c2)
elif la1 and la1 in [EOF,EQ,NE,GT,LT,GE,LE,ADD,SUB,MUL,DIV,POWER,PERCENT,RP,COMMA,SEMICOLON,CONCAT]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<B", ptg)
self.sheet_references.append((sheet1, sheet2, len(self.rpn)))
self.rpn += rpn_ref2d
elif (self.LA(1)==NAME) and (_tokenSet_0.member(self.LA(2))):
name_tok = self.LT(1)
self.match(NAME)
raise Exception("[formula] found unexpected NAME token (%r)" % name_tok.txt)
# #### TODO: handle references to defined names here
elif (self.LA(1)==NAME) and (self.LA(2)==LP):
func_tok = self.LT(1)
self.match(NAME)
func_toku = func_tok.text.upper()
if func_toku in all_funcs_by_name:
(opcode,
min_argc,
max_argc,
func_type,
arg_type_str) = all_funcs_by_name[func_toku]
arg_type_list = list(arg_type_str)
else:
raise Exception("[formula] unknown function (%s)" % func_tok.text)
# print "**func_tok1 %s %s" % (func_toku, func_type)
xcall = opcode < 0
if xcall:
# The name of the add-in function is passed as the 1st arg
# of the hidden XCALL function
self.xcall_references.append((func_toku, len(self.rpn) + 1))
self.rpn += struct.pack("<BHHH",
ptgNameXR,
0xadde, # ##PATCHME## index to REF entry in EXTERNSHEET record
0xefbe, # ##PATCHME## one-based index to EXTERNNAME record
0x0000) # unused
self.match(LP)
arg_count=self.expr_list(arg_type_list, min_argc, max_argc)
self.match(RP)
if arg_count > max_argc or arg_count < min_argc:
raise Exception("%d parameters for function: %s" % (arg_count, func_tok.text))
if xcall:
func_ptg = ptgFuncVarR + _RVAdelta[func_type]
self.rpn += struct.pack("<2BH", func_ptg, arg_count + 1, 255) # 255 is magic XCALL function
elif min_argc == max_argc:
func_ptg = ptgFuncR + _RVAdelta[func_type]
self.rpn += struct.pack("<BH", func_ptg, opcode)
elif arg_count == 1 and func_tok.text.upper() == "SUM":
self.rpn += struct.pack("<BBH", ptgAttr, 0x10, 0) # tAttrSum
else:
func_ptg = ptgFuncVarR + _RVAdelta[func_type]
self.rpn += struct.pack("<2BH", func_ptg, arg_count, opcode)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
def sheet(self):
ref = None
sheet_ref_name = None
sheet_ref_int = None
sheet_ref_quote = None
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [NAME]:
sheet_ref_name = self.LT(1)
self.match(NAME)
ref = sheet_ref_name.text
elif la1 and la1 in [INT_CONST]:
sheet_ref_int = self.LT(1)
self.match(INT_CONST)
ref = sheet_ref_int.text
elif la1 and la1 in [QUOTENAME]:
sheet_ref_quote = self.LT(1)
self.match(QUOTENAME)
ref = sheet_ref_quote.text[1:-1].replace("''", "'")
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
return ref
def expr_list(self,
arg_type_list, min_argc, max_argc
):
arg_cnt = None
arg_cnt = 0
arg_type = arg_type_list[arg_cnt]
# print "**expr_list1[%d] req=%s" % (arg_cnt, arg_type)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
pass
self.expr(arg_type)
arg_cnt += 1
while True:
if (self.LA(1)==COMMA or self.LA(1)==SEMICOLON):
pass
if arg_cnt < len(arg_type_list):
arg_type = arg_type_list[arg_cnt]
else:
arg_type = arg_type_list[-1]
if arg_type == "+":
arg_type = arg_type_list[-2]
# print "**expr_list2[%d] req=%s" % (arg_cnt, arg_type)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
pass
self.expr(arg_type)
elif la1 and la1 in [RP,COMMA,SEMICOLON]:
pass
self.rpn += struct.pack("B", ptgMissArg)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
arg_cnt += 1
else:
break
elif la1 and la1 in [RP]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
return arg_cnt
_tokenNames = [
"<0>",
"EOF",
"<2>",
"NULL_TREE_LOOKAHEAD",
"TRUE_CONST",
"FALSE_CONST",
"STR_CONST",
"NUM_CONST",
"INT_CONST",
"FUNC_IF",
"FUNC_CHOOSE",
"NAME",
"QUOTENAME",
"EQ",
"NE",
"GT",
"LT",
"GE",
"LE",
"ADD",
"SUB",
"MUL",
"DIV",
"POWER",
"PERCENT",
"LP",
"RP",
"LB",
"RB",
"COLON",
"COMMA",
"SEMICOLON",
"REF2D",
"REF2D_R1C1",
"BANG",
"CONCAT"
]
### generate bit set
def mk_tokenSet_0():
### var1
data = [ 37681618946, 0]
return data
_tokenSet_0 = antlr.BitSet(mk_tokenSet_0())
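# The magic number above is a 64-bit membership mask over the token-type
# numbers: decoding 37681618946 sets bits {1, 13..24, 26, 30, 31, 35},
# i.e. EOF, the comparison and arithmetic operators, PERCENT, RP, COMMA,
# SEMICOLON and CONCAT -- exactly the tokens that may follow a completed
# operand in a formula.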
| mit | -3,495,928,913,831,055,400 | 32.616085 | 143 | 0.441785 | false |
geometalab/osmaxx | tests/conversion/converters/inside_worker_test/nonop_way_test.py | 2 | 7148 | from contextlib import closing
import pytest
import sqlalchemy
from sqlalchemy.sql.schema import Table as DbTable
from osmaxx.utils.frozendict import frozendict
from tests.conftest import TagCombination
from tests.conversion.converters.inside_worker_test.conftest import slow
from tests.conversion.converters.inside_worker_test.declarative_schema import osm_models
MAJOR_KEYS = frozenset({'highway', 'railway'})
DEFAULT_EXPECTED_FALLBACK_SUBTYPE_FOR_MAJOR_KEY = frozendict(
highway='road',
railway='railway'
)
CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS = frozendict(
{
TagCombination(highway='track'): 'track',
TagCombination(highway='track', tracktype='grade3'): 'grade3',
TagCombination(highway='footway'): 'footway',
TagCombination(highway='secondary', junction='roundabout'): 'secondary',
TagCombination(highway='some bogus type of road', junction='roundabout'): 'roundabout',
TagCombination(railway='rail'): 'rail',
TagCombination(railway='platform'): 'railway',
},
)
CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES = frozendict(
proposed='P',
planned='P',
construction='C',
disused='D',
abandoned='A',
)
@slow
def test_osm_object_without_status_does_not_end_up_in_nonop(non_lifecycle_data_import, nonop_l, road_l, railway_l):
engine = non_lifecycle_data_import
with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
assert road_result.rowcount + railway_result.rowcount == 1
with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as nonop_result:
assert nonop_result.rowcount == 0
@slow
def test_osm_object_with_status_ends_up_in_nonop_with_correct_attribute_values(
lifecycle_data_import,
nonop_l, road_l, railway_l,
expected_osmaxx_status, osm_status, non_lifecycle_osm_tags, major_tag_key, expected_nonop_subtype,
):
engine = lifecycle_data_import
with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
assert road_result.rowcount == 0
with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
assert railway_result.rowcount == 0
with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as result:
assert result.rowcount == 1
row = result.fetchone()
assert row['status'] == expected_osmaxx_status
assert row['tags'] == '"{key}"=>"{value}"'.format(key=osm_status, value=non_lifecycle_osm_tags[major_tag_key])
assert row['sub_type'] == expected_nonop_subtype
@slow
def test_osm_object_with_status_without_details_ends_up_in_nonop_with_correct_status(
incomplete_lifecycle_data_import, nonop_l, road_l, railway_l, expected_osmaxx_status,
expected_fallback_subtype):
engine = incomplete_lifecycle_data_import
with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
assert road_result.rowcount == 0
with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
assert railway_result.rowcount == 0
with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as result:
assert result.rowcount == 1
row = result.fetchone()
assert row['status'] == expected_osmaxx_status
assert row['tags'] is None
assert row['sub_type'] == expected_fallback_subtype
@pytest.fixture
def nonop_l():
return DbTable('nonop_l', osm_models.metadata, schema='view_osmaxx')
@pytest.fixture
def road_l():
return DbTable('road_l', osm_models.metadata, schema='view_osmaxx')
@pytest.fixture
def railway_l():
return DbTable('railway_l', osm_models.metadata, schema='view_osmaxx')
@pytest.fixture
def expected_fallback_subtype(major_tag_key, incomplete_lifecycle_osm_tags):
if major_tag_key == 'highway' and incomplete_lifecycle_osm_tags.pop('junction', None) == 'roundabout':
return 'roundabout'
return DEFAULT_EXPECTED_FALLBACK_SUBTYPE_FOR_MAJOR_KEY[major_tag_key]
@pytest.yield_fixture
def lifecycle_data_import(lifecycle_data, data_import):
with data_import(lifecycle_data) as engine:
yield engine
@pytest.yield_fixture
def incomplete_lifecycle_data_import(incomplete_lifecycle_data, data_import):
with data_import(incomplete_lifecycle_data) as engine:
yield engine
@pytest.yield_fixture
def non_lifecycle_data_import(non_lifecycle_data, data_import):
with data_import(non_lifecycle_data) as engine:
yield engine
@pytest.fixture
def lifecycle_data(lifecycle_osm_tags):
return {osm_models.t_osm_line: lifecycle_osm_tags}
@pytest.fixture
def incomplete_lifecycle_data(incomplete_lifecycle_osm_tags):
return {osm_models.t_osm_line: incomplete_lifecycle_osm_tags}
@pytest.fixture
def non_lifecycle_data(non_lifecycle_osm_tags):
return {osm_models.t_osm_line: non_lifecycle_osm_tags}
@pytest.fixture
def lifecycle_osm_tags(non_lifecycle_osm_tags, osm_status, major_tag_key):
osm_tags = dict(non_lifecycle_osm_tags)
major_tag_value = osm_tags.pop(major_tag_key)
osm_tags.update({major_tag_key: osm_status, 'tags': {osm_status: major_tag_value}})
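    # e.g. {'highway': 'track'} with osm_status 'construction' becomes
    # {'highway': 'construction', 'tags': {'construction': 'track'}}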
assert len(osm_tags) == len(non_lifecycle_osm_tags) + 1
return osm_tags
@pytest.fixture
def incomplete_lifecycle_osm_tags(non_lifecycle_osm_tags, osm_status, major_tag_key):
osm_tags = dict(non_lifecycle_osm_tags)
osm_tags.update({major_tag_key: osm_status})
assert len(osm_tags) == len(non_lifecycle_osm_tags)
return osm_tags
@pytest.fixture
def non_lifecycle_osm_tags(non_lifecycle_osm_tags_and_expected_nonop_subtype):
osm_tags, _ = non_lifecycle_osm_tags_and_expected_nonop_subtype
return osm_tags
@pytest.fixture
def major_tag_key(non_lifecycle_osm_tags):
major_keys = MAJOR_KEYS.intersection(non_lifecycle_osm_tags)
assert len(major_keys) == 1
return next(iter(major_keys))
@pytest.fixture
def expected_nonop_subtype(non_lifecycle_osm_tags_and_expected_nonop_subtype):
_, subtype = non_lifecycle_osm_tags_and_expected_nonop_subtype
return subtype
@pytest.fixture
def osm_status(osm_status_and_expected_osmaxx_status):
status, _ = osm_status_and_expected_osmaxx_status
return status
@pytest.fixture
def expected_osmaxx_status(osm_status_and_expected_osmaxx_status):
_, osmaxx_status = osm_status_and_expected_osmaxx_status
return osmaxx_status
@pytest.fixture(
params=CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS.items(),
ids=[str(tag_combination) for tag_combination in CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS.keys()],
)
def non_lifecycle_osm_tags_and_expected_nonop_subtype(request):
return request.param
@pytest.fixture(
params=CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES.items(),
ids=list(CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES.keys()),
)
def osm_status_and_expected_osmaxx_status(request):
return request.param
| mit | 591,218,811,448,632,800 | 34.386139 | 118 | 0.718243 | false |
guewen/connector | connector/tests/test_mapper.py | 1 | 20300 | # -*- coding: utf-8 -*-
import unittest2
import mock
import openerp.tests.common as common
from openerp.addons.connector.unit.mapper import (
Mapper,
ImportMapper,
ImportMapChild,
MappingDefinition,
changed_by,
only_create,
convert,
m2o_to_backend,
backend_to_m2o,
none,
MapOptions,
mapping)
from openerp.addons.connector.exception import NoConnectorUnitError
from openerp.addons.connector.backend import Backend
from openerp.addons.connector.connector import Environment
from openerp.addons.connector.session import ConnectorSession
class test_mapper(unittest2.TestCase):
""" Test Mapper """
def test_mapping_decorator(self):
class KifKrokerMapper(Mapper):
_model_name = 'res.users'
@changed_by('name', 'city')
@mapping
@only_create
def name(self):
pass
@changed_by('email')
@mapping
def email(self):
pass
@changed_by('street')
@mapping
def street(self):
pass
self.maxDiff = None
name_def = MappingDefinition(changed_by=set(('name', 'city')),
only_create=True)
email_def = MappingDefinition(changed_by=set(('email',)),
only_create=False)
street_def = MappingDefinition(changed_by=set(('street',)),
only_create=False)
self.assertEqual(KifKrokerMapper._map_methods,
{'name': name_def,
'email': email_def,
'street': street_def,
})
def test_mapping_decorator_cross_classes(self):
""" Mappings should not propagate to other classes"""
class MomMapper(Mapper):
_model_name = 'res.users'
@changed_by('name', 'city')
@mapping
def name(self):
pass
class ZappMapper(Mapper):
_model_name = 'res.users'
@changed_by('email')
@only_create
@mapping
def email(self):
pass
mom_def = MappingDefinition(changed_by=set(('name', 'city')),
only_create=False)
zapp_def = MappingDefinition(changed_by=set(('email',)),
only_create=True)
self.assertEqual(MomMapper._map_methods,
{'name': mom_def})
self.assertEqual(ZappMapper._map_methods,
{'email': zapp_def})
def test_mapping_decorator_cumul(self):
""" Mappings should cumulate the ``super`` mappings
and the local mappings."""
class FryMapper(Mapper):
_model_name = 'res.users'
@changed_by('name', 'city')
@mapping
def name(self):
pass
class FarnsworthMapper(FryMapper):
_model_name = 'res.users'
@changed_by('email')
@mapping
def email(self):
pass
name_def = MappingDefinition(changed_by=set(('name', 'city')),
only_create=False)
email_def = MappingDefinition(changed_by=set(('email',)),
only_create=False)
self.assertEqual(FarnsworthMapper._map_methods,
{'name': name_def,
'email': email_def})
def test_mapping_decorator_cumul_changed_by(self):
""" Mappings should cumulate the changed_by fields of the
``super`` mappings and the local mappings """
class FryMapper(Mapper):
_model_name = 'res.users'
@changed_by('name', 'city')
@mapping
def name(self):
pass
class FarnsworthMapper(FryMapper):
_model_name = 'res.users'
@changed_by('email')
@mapping
def name(self):
pass
name_def = MappingDefinition(changed_by=set(('name', 'city', 'email')),
only_create=False)
self.assertEqual(FarnsworthMapper._map_methods,
{'name': name_def})
def test_mapping_record(self):
""" Map a record and check the result """
class MyMapper(ImportMapper):
direct = [('name', 'out_name')]
@mapping
def street(self, record):
return {'out_street': record['street'].upper()}
env = mock.MagicMock()
record = {'name': 'Guewen',
'street': 'street'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 'Guewen',
'out_street': 'STREET'}
self.assertEqual(map_record.values(), expected)
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_record_on_create(self):
""" Map a record and check the result for creation of record """
class MyMapper(ImportMapper):
direct = [('name', 'out_name')]
@mapping
def street(self, record):
return {'out_street': record['street'].upper()}
@only_create
@mapping
def city(self, record):
return {'out_city': 'city'}
env = mock.MagicMock()
record = {'name': 'Guewen',
'street': 'street'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 'Guewen',
'out_street': 'STREET'}
self.assertEqual(map_record.values(), expected)
expected = {'out_name': 'Guewen',
'out_street': 'STREET',
'out_city': 'city'}
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_update(self):
""" Force values on a map record """
class MyMapper(ImportMapper):
direct = [('name', 'out_name')]
@mapping
def street(self, record):
return {'out_street': record['street'].upper()}
@only_create
@mapping
def city(self, record):
return {'out_city': 'city'}
env = mock.MagicMock()
record = {'name': 'Guewen',
'street': 'street'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
map_record.update({'test': 1}, out_city='forced')
expected = {'out_name': 'Guewen',
'out_street': 'STREET',
'out_city': 'forced',
'test': 1}
self.assertEqual(map_record.values(), expected)
expected = {'out_name': 'Guewen',
'out_street': 'STREET',
'out_city': 'forced',
'test': 1}
self.assertEqual(map_record.values(for_create=True), expected)
def test_finalize(self):
""" Inherit finalize to modify values """
class MyMapper(ImportMapper):
direct = [('name', 'out_name')]
def finalize(self, record, values):
result = super(MyMapper, self).finalize(record, values)
result['test'] = 'abc'
return result
env = mock.MagicMock()
record = {'name': 'Guewen',
'street': 'street'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 'Guewen',
'test': 'abc'}
self.assertEqual(map_record.values(), expected)
expected = {'out_name': 'Guewen',
'test': 'abc'}
self.assertEqual(map_record.values(for_create=True), expected)
def test_some_fields(self):
""" Map only a selection of fields """
class MyMapper(ImportMapper):
direct = [('name', 'out_name'),
('street', 'out_street'),
]
@changed_by('country')
@mapping
def country(self, record):
return {'country': 'country'}
env = mock.MagicMock()
record = {'name': 'Guewen',
'street': 'street',
'country': 'country'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 'Guewen',
'country': 'country'}
self.assertEqual(map_record.values(fields=['name', 'country']),
expected)
expected = {'out_name': 'Guewen',
'country': 'country'}
self.assertEqual(map_record.values(for_create=True,
fields=['name', 'country']),
expected)
def test_mapping_modifier(self):
""" Map a direct record with a modifier function """
def do_nothing(field):
def transform(self, record, to_attr):
return record[field]
return transform
class MyMapper(ImportMapper):
direct = [(do_nothing('name'), 'out_name')]
env = mock.MagicMock()
record = {'name': 'Guewen'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 'Guewen'}
self.assertEqual(map_record.values(), expected)
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_convert(self):
""" Map a direct record with the convert modifier function """
class MyMapper(ImportMapper):
direct = [(convert('name', int), 'out_name')]
env = mock.MagicMock()
record = {'name': '300'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 300}
self.assertEqual(map_record.values(), expected)
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_modifier_none(self):
""" Pipeline of modifiers """
class MyMapper(ImportMapper):
direct = [(none('in_f'), 'out_f'),
(none('in_t'), 'out_t')]
env = mock.MagicMock()
record = {'in_f': False, 'in_t': True}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_f': None, 'out_t': True}
self.assertEqual(map_record.values(), expected)
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_modifier_pipeline(self):
""" Pipeline of modifiers """
class MyMapper(ImportMapper):
direct = [(none(convert('in_f', bool)), 'out_f'),
(none(convert('in_t', bool)), 'out_t')]
env = mock.MagicMock()
record = {'in_f': 0, 'in_t': 1}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_f': None, 'out_t': True}
self.assertEqual(map_record.values(), expected)
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_custom_option(self):
""" Usage of custom options in mappings """
class MyMapper(ImportMapper):
@mapping
def any(self, record):
if self.options.custom:
res = True
else:
res = False
return {'res': res}
env = mock.MagicMock()
record = {}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'res': True}
self.assertEqual(map_record.values(custom=True), expected)
def test_mapping_custom_option_not_defined(self):
""" Usage of custom options not defined raise AttributeError """
class MyMapper(ImportMapper):
@mapping
def any(self, record):
if self.options.custom is None:
res = True
else:
res = False
return {'res': res}
env = mock.MagicMock()
record = {}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'res': True}
self.assertEqual(map_record.values(), expected)
def test_map_options(self):
""" Test MapOptions """
options = MapOptions({'xyz': 'abc'}, k=1)
options.l = 2
self.assertEqual(options['xyz'], 'abc')
self.assertEqual(options['k'], 1)
self.assertEqual(options['l'], 2)
self.assertEqual(options.xyz, 'abc')
self.assertEqual(options.k, 1)
self.assertEqual(options.l, 2)
self.assertEqual(options['undefined'], None)
self.assertEqual(options.undefined, None)
class test_mapper_binding(common.TransactionCase):
""" Test Mapper with Bindings"""
def setUp(self):
super(test_mapper_binding, self).setUp()
self.session = ConnectorSession(self.cr, self.uid)
self.Partner = self.registry('res.partner')
self.backend = mock.Mock(wraps=Backend('x', version='y'),
name='backend')
backend_record = mock.Mock()
backend_record.get_backend.return_value = self.backend
self.env = Environment(backend_record, self.session, 'res.partner')
self.country_binder = mock.Mock(name='country_binder')
self.country_binder.return_value = self.country_binder
self.backend.get_class.return_value = self.country_binder
def test_mapping_m2o_to_backend(self):
""" Map a direct record with the m2o_to_backend modifier function """
class MyMapper(ImportMapper):
_model_name = 'res.partner'
direct = [(m2o_to_backend('country_id'), 'country')]
partner_id = self.ref('base.main_partner')
self.Partner.write(self.cr, self.uid, partner_id,
{'country_id': self.ref('base.ch')})
partner = self.Partner.browse(self.cr, self.uid, partner_id)
self.country_binder.to_backend.return_value = 10
mapper = MyMapper(self.env)
map_record = mapper.map_record(partner)
self.assertEqual(map_record.values(), {'country': 10})
self.country_binder.to_backend.assert_called_once_with(
partner.country_id.id, wrap=False)
def test_mapping_backend_to_m2o(self):
""" Map a direct record with the backend_to_m2o modifier function """
class MyMapper(ImportMapper):
_model_name = 'res.partner'
direct = [(backend_to_m2o('country'), 'country_id')]
record = {'country': 10}
self.country_binder.to_openerp.return_value = 44
mapper = MyMapper(self.env)
map_record = mapper.map_record(record)
self.assertEqual(map_record.values(), {'country_id': 44})
self.country_binder.to_openerp.assert_called_once_with(
10, unwrap=False)
def test_mapping_record_children_no_map_child(self):
""" Map a record with children, using default MapChild """
backend = Backend('backend', '42')
@backend
class LineMapper(ImportMapper):
_model_name = 'res.currency.rate'
direct = [('name', 'name')]
@mapping
def price(self, record):
return {'rate': record['rate'] * 2}
@only_create
@mapping
def discount(self, record):
return {'test': .5}
@backend
class ObjectMapper(ImportMapper):
_model_name = 'res.currency'
direct = [('name', 'name')]
children = [('lines', 'line_ids', 'res.currency.rate')]
backend_record = mock.Mock()
backend_record.get_backend.side_effect = lambda *a: backend
env = Environment(backend_record, self.session, 'res.currency')
record = {'name': 'SO1',
'lines': [{'name': '2013-11-07',
'rate': 10},
{'name': '2013-11-08',
'rate': 20}]}
mapper = ObjectMapper(env)
map_record = mapper.map_record(record)
expected = {'name': 'SO1',
'line_ids': [(0, 0, {'name': '2013-11-07',
'rate': 20}),
(0, 0, {'name': '2013-11-08',
'rate': 40})]
}
self.assertEqual(map_record.values(), expected)
expected = {'name': 'SO1',
'line_ids': [(0, 0, {'name': '2013-11-07',
'rate': 20,
'test': .5}),
(0, 0, {'name': '2013-11-08',
'rate': 40,
'test': .5})]
}
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_record_children(self):
""" Map a record with children, using defined MapChild """
backend = Backend('backend', '42')
@backend
class LineMapper(ImportMapper):
_model_name = 'res.currency.rate'
direct = [('name', 'name')]
@mapping
def price(self, record):
return {'rate': record['rate'] * 2}
@only_create
@mapping
def discount(self, record):
return {'test': .5}
@backend
class SaleLineImportMapChild(ImportMapChild):
_model_name = 'res.currency.rate'
def format_items(self, items_values):
return [('ABC', values) for values in items_values]
@backend
class ObjectMapper(ImportMapper):
_model_name = 'res.currency'
direct = [('name', 'name')]
children = [('lines', 'line_ids', 'res.currency.rate')]
backend_record = mock.Mock()
backend_record.get_backend.side_effect = lambda *a: backend
env = Environment(backend_record, self.session, 'res.currency')
record = {'name': 'SO1',
'lines': [{'name': '2013-11-07',
'rate': 10},
{'name': '2013-11-08',
'rate': 20}]}
mapper = ObjectMapper(env)
map_record = mapper.map_record(record)
expected = {'name': 'SO1',
'line_ids': [('ABC', {'name': '2013-11-07',
'rate': 20}),
('ABC', {'name': '2013-11-08',
'rate': 40})]
}
self.assertEqual(map_record.values(), expected)
expected = {'name': 'SO1',
'line_ids': [('ABC', {'name': '2013-11-07',
'rate': 20,
'test': .5}),
('ABC', {'name': '2013-11-08',
'rate': 40,
'test': .5})]
}
self.assertEqual(map_record.values(for_create=True), expected)
def test_modifier_filter_field(self):
""" A direct mapping with a modifier must still be considered from the list of fields """
class MyMapper(ImportMapper):
direct = [('field', 'field2'),
('no_field', 'no_field2'),
(convert('name', int), 'out_name')]
env = mock.MagicMock()
record = {'name': '300', 'field': 'value', 'no_field': 'no_value'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 300, 'field2': 'value'}
self.assertEqual(map_record.values(fields=['field', 'name']), expected)
self.assertEqual(map_record.values(for_create=True,
fields=['field', 'name']), expected)
| agpl-3.0 | -408,886,055,127,792,100 | 34.243056 | 97 | 0.503793 | false |
austinhyde/ansible-modules-core | cloud/openstack/os_nova_flavor.py | 93 | 6844 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_nova_flavor
short_description: Manage OpenStack compute flavors
extends_documentation_fragment: openstack
version_added: "2.0"
author: "David Shrewsbury (@Shrews)"
description:
- Add or remove flavors from OpenStack.
options:
state:
description:
- Indicate desired state of the resource. When I(state) is 'present',
then I(ram), I(vcpus), and I(disk) are all required. There are no
default values for those parameters.
choices: ['present', 'absent']
required: false
default: present
name:
description:
- Flavor name.
required: true
ram:
description:
- Amount of memory, in MB.
required: false
default: null
vcpus:
description:
- Number of virtual CPUs.
required: false
default: null
disk:
description:
- Size of local disk, in GB.
required: false
default: null
ephemeral:
description:
- Ephemeral space size, in GB.
required: false
default: 0
swap:
description:
- Swap space size, in MB.
required: false
default: 0
rxtx_factor:
description:
- RX/TX factor.
required: false
default: 1.0
is_public:
description:
- Make flavor accessible to the public.
required: false
default: true
flavorid:
description:
- ID for the flavor. This is optional as a unique UUID will be
assigned if a value is not specified.
required: false
default: "auto"
requirements: ["shade"]
'''
EXAMPLES = '''
# Create a 'tiny' flavor with 1024MB of RAM, 1 virtual CPU, 10GB of
# local disk, and 10GB of ephemeral storage.
- os_nova_flavor:
cloud=mycloud
state=present
name=tiny
ram=1024
vcpus=1
disk=10
ephemeral=10
# Delete 'tiny' flavor
- os_nova_flavor:
cloud=mycloud
state=absent
name=tiny
'''
RETURN = '''
flavor:
description: Dictionary describing the flavor.
returned: On success when I(state) is 'present'
type: dictionary
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
disk:
description: Size of local disk, in GB.
returned: success
type: int
sample: 10
ephemeral:
description: Ephemeral space size, in GB.
returned: success
type: int
sample: 10
ram:
description: Amount of memory, in MB.
returned: success
type: int
sample: 1024
swap:
description: Swap space size, in MB.
returned: success
type: int
sample: 100
vcpus:
description: Number of virtual CPUs.
returned: success
type: int
sample: 2
is_public:
description: Make flavor accessible to the public.
returned: success
type: bool
sample: true
'''
def _system_state_change(module, flavor):
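    # Predicts whether a real run would change anything, so that Ansible's
    # check mode can report accurately without touching the cloud.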
state = module.params['state']
if state == 'present' and not flavor:
return True
if state == 'absent' and flavor:
return True
return False
def main():
argument_spec = openstack_full_argument_spec(
state = dict(required=False, default='present',
choices=['absent', 'present']),
name = dict(required=False),
# required when state is 'present'
ram = dict(required=False, type='int'),
vcpus = dict(required=False, type='int'),
disk = dict(required=False, type='int'),
ephemeral = dict(required=False, default=0, type='int'),
swap = dict(required=False, default=0, type='int'),
rxtx_factor = dict(required=False, default=1.0, type='float'),
is_public = dict(required=False, default=True, type='bool'),
flavorid = dict(required=False, default="auto"),
)
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
required_if=[
('state', 'present', ['ram', 'vcpus', 'disk'])
],
**module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
state = module.params['state']
name = module.params['name']
try:
cloud = shade.operator_cloud(**module.params)
flavor = cloud.get_flavor(name)
if module.check_mode:
module.exit_json(changed=_system_state_change(module, flavor))
if state == 'present':
if not flavor:
flavor = cloud.create_flavor(
name=name,
ram=module.params['ram'],
vcpus=module.params['vcpus'],
disk=module.params['disk'],
flavorid=module.params['flavorid'],
ephemeral=module.params['ephemeral'],
swap=module.params['swap'],
rxtx_factor=module.params['rxtx_factor'],
is_public=module.params['is_public']
)
module.exit_json(changed=True, flavor=flavor)
module.exit_json(changed=False, flavor=flavor)
elif state == 'absent':
if flavor:
cloud.delete_flavor(name)
module.exit_json(changed=True)
module.exit_json(changed=False)
except shade.OpenStackCloudException as e:
module.fail_json(msg=e.message)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 | -3,063,250,834,862,408,700 | 27.877637 | 77 | 0.585184 | false |
vivianli32/TravelConnect | flask/lib/python3.4/site-packages/whoosh/matching/binary.py | 94 | 24452 | # Copyright 2010 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from whoosh.matching import mcore
class BiMatcher(mcore.Matcher):
"""Base class for matchers that combine the results of two sub-matchers in
some way.
"""
def __init__(self, a, b):
super(BiMatcher, self).__init__()
self.a = a
self.b = b
def reset(self):
self.a.reset()
self.b.reset()
def __repr__(self):
return "%s(%r, %r)" % (self.__class__.__name__, self.a, self.b)
def children(self):
return [self.a, self.b]
def copy(self):
return self.__class__(self.a.copy(), self.b.copy())
def depth(self):
return 1 + max(self.a.depth(), self.b.depth())
def skip_to(self, id):
if not self.is_active():
raise mcore.ReadTooFar
ra = self.a.skip_to(id)
rb = self.b.skip_to(id)
return ra or rb
def supports_block_quality(self):
return (self.a.supports_block_quality()
and self.b.supports_block_quality())
def supports(self, astype):
return self.a.supports(astype) and self.b.supports(astype)
class AdditiveBiMatcher(BiMatcher):
"""Base class for binary matchers where the scores of the sub-matchers are
added together.
"""
def max_quality(self):
q = 0.0
if self.a.is_active():
q += self.a.max_quality()
if self.b.is_active():
q += self.b.max_quality()
return q
def block_quality(self):
bq = 0.0
if self.a.is_active():
bq += self.a.block_quality()
if self.b.is_active():
bq += self.b.block_quality()
return bq
def weight(self):
return (self.a.weight() + self.b.weight())
def score(self):
return (self.a.score() + self.b.score())
def __eq__(self, other):
return self.__class__ is type(other)
def __lt__(self, other):
return type(other) is self.__class__
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
return self.__eq__(other) or self.__gt__(other)
class UnionMatcher(AdditiveBiMatcher):
"""Matches the union (OR) of the postings in the two sub-matchers.
"""
_id = None
def replace(self, minquality=0):
a = self.a
b = self.b
a_active = a.is_active()
b_active = b.is_active()
# If neither sub-matcher on its own has a high enough max quality to
# contribute, convert to an intersection matcher
if minquality and a_active and b_active:
a_max = a.max_quality()
b_max = b.max_quality()
if a_max < minquality and b_max < minquality:
return IntersectionMatcher(a, b).replace(minquality)
elif a_max < minquality:
return AndMaybeMatcher(b, a)
elif b_max < minquality:
return AndMaybeMatcher(a, b)
# If one or both of the sub-matchers are inactive, convert
if not (a_active or b_active):
return mcore.NullMatcher()
elif not a_active:
return b.replace(minquality)
elif not b_active:
return a.replace(minquality)
a = a.replace(minquality - b.max_quality() if minquality else 0)
b = b.replace(minquality - a.max_quality() if minquality else 0)
# If one of the sub-matchers changed, return a new union
if a is not self.a or b is not self.b:
return self.__class__(a, b)
else:
self._id = None
return self
def is_active(self):
return self.a.is_active() or self.b.is_active()
def skip_to(self, id):
self._id = None
ra = rb = False
if self.a.is_active():
ra = self.a.skip_to(id)
if self.b.is_active():
rb = self.b.skip_to(id)
return ra or rb
def id(self):
_id = self._id
if _id is not None:
return _id
a = self.a
b = self.b
if not a.is_active():
_id = b.id()
elif not b.is_active():
_id = a.id()
else:
_id = min(a.id(), b.id())
self._id = _id
return _id
# Using sets is faster in most cases, but could potentially use a lot of
# memory. Comment out this method override to not use sets.
#def all_ids(self):
# return iter(sorted(set(self.a.all_ids()) | set(self.b.all_ids())))
def next(self):
self._id = None
a = self.a
b = self.b
a_active = a.is_active()
b_active = b.is_active()
# Shortcut when one matcher is inactive
if not (a_active or b_active):
raise mcore.ReadTooFar
elif not a_active:
return b.next()
elif not b_active:
return a.next()
a_id = a.id()
b_id = b.id()
ar = br = None
# After all that, here's the actual implementation
if a_id <= b_id:
ar = a.next()
if b_id <= a_id:
br = b.next()
return ar or br
def spans(self):
if not self.a.is_active():
return self.b.spans()
if not self.b.is_active():
return self.a.spans()
id_a = self.a.id()
id_b = self.b.id()
if id_a < id_b:
return self.a.spans()
elif id_b < id_a:
return self.b.spans()
else:
return sorted(set(self.a.spans()) | set(self.b.spans()))
def weight(self):
a = self.a
b = self.b
if not a.is_active():
return b.weight()
if not b.is_active():
return a.weight()
id_a = a.id()
id_b = b.id()
if id_a < id_b:
return a.weight()
elif id_b < id_a:
return b.weight()
else:
return (a.weight() + b.weight())
def score(self):
a = self.a
b = self.b
if not a.is_active():
return b.score()
if not b.is_active():
return a.score()
id_a = a.id()
id_b = b.id()
if id_a < id_b:
return a.score()
elif id_b < id_a:
return b.score()
else:
return (a.score() + b.score())
def skip_to_quality(self, minquality):
self._id = None
a = self.a
b = self.b
if not (a.is_active() or b.is_active()):
raise mcore.ReadTooFar
# Short circuit if one matcher is inactive
if not a.is_active():
return b.skip_to_quality(minquality)
elif not b.is_active():
return a.skip_to_quality(minquality)
skipped = 0
aq = a.block_quality()
bq = b.block_quality()
while a.is_active() and b.is_active() and aq + bq <= minquality:
if aq < bq:
skipped += a.skip_to_quality(minquality - bq)
aq = a.block_quality()
else:
skipped += b.skip_to_quality(minquality - aq)
bq = b.block_quality()
return skipped
class DisjunctionMaxMatcher(UnionMatcher):
"""Matches the union (OR) of two sub-matchers. Where both sub-matchers
match the same posting, returns the weight/score of the higher-scoring
posting.
"""
# TODO: this class inherits from AdditiveBiMatcher (through UnionMatcher)
# but it does not add the scores of the sub-matchers together (it
# overrides all methods that perform addition). Need to clean up the
# inheritance.
def __init__(self, a, b, tiebreak=0.0):
super(DisjunctionMaxMatcher, self).__init__(a, b)
self.tiebreak = tiebreak
def copy(self):
return self.__class__(self.a.copy(), self.b.copy(),
tiebreak=self.tiebreak)
def replace(self, minquality=0):
a = self.a
b = self.b
a_active = a.is_active()
b_active = b.is_active()
# DisMax takes the max of the sub-matcher qualities instead of adding
# them, so we need special logic here
if minquality and a_active and b_active:
a_max = a.max_quality()
b_max = b.max_quality()
if a_max < minquality and b_max < minquality:
# If neither sub-matcher has a high enough max quality to
# contribute, return an inactive matcher
return mcore.NullMatcher()
elif b_max < minquality:
# If the b matcher can't contribute, return a
return a.replace(minquality)
elif a_max < minquality:
# If the a matcher can't contribute, return b
return b.replace(minquality)
if not (a_active or b_active):
return mcore.NullMatcher()
elif not a_active:
return b.replace(minquality)
elif not b_active:
return a.replace(minquality)
# We CAN pass the minquality down here, since we don't add the two
# scores together
a = a.replace(minquality)
b = b.replace(minquality)
a_active = a.is_active()
b_active = b.is_active()
# It's kind of tedious to check for inactive sub-matchers all over
# again here after we replace them, but it's probably better than
# returning a replacement with an inactive sub-matcher
if not (a_active and b_active):
return mcore.NullMatcher()
elif not a_active:
return b
elif not b_active:
return a
elif a is not self.a or b is not self.b:
# If one of the sub-matchers changed, return a new DisMax
return self.__class__(a, b)
else:
return self
def score(self):
if not self.a.is_active():
return self.b.score()
elif not self.b.is_active():
return self.a.score()
else:
return max(self.a.score(), self.b.score())
def max_quality(self):
return max(self.a.max_quality(), self.b.max_quality())
def block_quality(self):
return max(self.a.block_quality(), self.b.block_quality())
def skip_to_quality(self, minquality):
a = self.a
b = self.b
# Short circuit if one matcher is inactive
if not a.is_active():
sk = b.skip_to_quality(minquality)
return sk
elif not b.is_active():
return a.skip_to_quality(minquality)
skipped = 0
aq = a.block_quality()
bq = b.block_quality()
while a.is_active() and b.is_active() and max(aq, bq) <= minquality:
if aq <= minquality:
skipped += a.skip_to_quality(minquality)
aq = a.block_quality()
if bq <= minquality:
skipped += b.skip_to_quality(minquality)
bq = b.block_quality()
return skipped
class IntersectionMatcher(AdditiveBiMatcher):
"""Matches the intersection (AND) of the postings in the two sub-matchers.
"""
def __init__(self, a, b):
super(IntersectionMatcher, self).__init__(a, b)
self._find_first()
def reset(self):
self.a.reset()
self.b.reset()
self._find_first()
def _find_first(self):
if (self.a.is_active()
and self.b.is_active()
and self.a.id() != self.b.id()):
self._find_next()
def replace(self, minquality=0):
a = self.a
b = self.b
a_active = a.is_active()
b_active = b.is_active()
if not (a_active and b_active):
# Intersection matcher requires that both sub-matchers be active
return mcore.NullMatcher()
if minquality:
a_max = a.max_quality()
b_max = b.max_quality()
if a_max + b_max < minquality:
# If the combined quality of the sub-matchers can't contribute,
# return an inactive matcher
return mcore.NullMatcher()
# Require that the replacements be able to contribute results
# higher than the minquality
a_min = minquality - b_max
b_min = minquality - a_max
else:
a_min = b_min = 0
a = a.replace(a_min)
b = b.replace(b_min)
a_active = a.is_active()
b_active = b.is_active()
if not (a_active or b_active):
return mcore.NullMatcher()
elif not a_active:
return b
elif not b_active:
return a
elif a is not self.a or b is not self.b:
return self.__class__(a, b)
else:
return self
def is_active(self):
return self.a.is_active() and self.b.is_active()
def _find_next(self):
a = self.a
b = self.b
a_id = a.id()
b_id = b.id()
assert a_id != b_id
r = False
while a.is_active() and b.is_active() and a_id != b_id:
if a_id < b_id:
ra = a.skip_to(b_id)
if not a.is_active():
return
r = r or ra
a_id = a.id()
else:
rb = b.skip_to(a_id)
if not b.is_active():
return
r = r or rb
b_id = b.id()
return r
def id(self):
return self.a.id()
# Using sets is faster in some cases, but could potentially use a lot of
# memory
def all_ids(self):
return iter(sorted(set(self.a.all_ids()) & set(self.b.all_ids())))
def skip_to(self, id):
if not self.is_active():
raise mcore.ReadTooFar
ra = self.a.skip_to(id)
rb = self.b.skip_to(id)
if self.is_active():
rn = False
if self.a.id() != self.b.id():
rn = self._find_next()
return ra or rb or rn
def skip_to_quality(self, minquality):
a = self.a
b = self.b
skipped = 0
aq = a.block_quality()
bq = b.block_quality()
while a.is_active() and b.is_active() and aq + bq <= minquality:
if aq < bq:
# If the block quality of A is less than B, skip A ahead until
# it can contribute at least the balance of the required min
# quality when added to B
sk = a.skip_to_quality(minquality - bq)
skipped += sk
if not sk and a.is_active():
# The matcher couldn't skip ahead for some reason, so just
# advance and try again
a.next()
else:
# And vice-versa
sk = b.skip_to_quality(minquality - aq)
skipped += sk
if not sk and b.is_active():
b.next()
if not a.is_active() or not b.is_active():
# One of the matchers is exhausted
break
if a.id() != b.id():
# We want to always leave in a state where the matchers are at
# the same document, so call _find_next() to sync them
self._find_next()
# Get the block qualities at the new matcher positions
aq = a.block_quality()
bq = b.block_quality()
return skipped
def next(self):
if not self.is_active():
raise mcore.ReadTooFar
# We must assume that the ids are equal whenever next() is called (they
# should have been made equal by _find_next), so advance them both
ar = self.a.next()
if self.is_active():
nr = self._find_next()
return ar or nr
def spans(self):
return sorted(set(self.a.spans()) | set(self.b.spans()))
class AndNotMatcher(BiMatcher):
"""Matches the postings in the first sub-matcher that are NOT present in
the second sub-matcher.
"""
def __init__(self, a, b):
super(AndNotMatcher, self).__init__(a, b)
self._find_first()
def reset(self):
self.a.reset()
self.b.reset()
self._find_first()
def _find_first(self):
if (self.a.is_active()
and self.b.is_active()
and self.a.id() == self.b.id()):
self._find_next()
def is_active(self):
return self.a.is_active()
def _find_next(self):
pos = self.a
neg = self.b
if not neg.is_active():
return
pos_id = pos.id()
r = False
if neg.id() < pos_id:
neg.skip_to(pos_id)
while pos.is_active() and neg.is_active() and pos_id == neg.id():
nr = pos.next()
if not pos.is_active():
break
r = r or nr
pos_id = pos.id()
neg.skip_to(pos_id)
return r
def supports_block_quality(self):
return self.a.supports_block_quality()
def replace(self, minquality=0):
if not self.a.is_active():
# The a matcher is required, so if it's inactive, return an
# inactive matcher
return mcore.NullMatcher()
elif (minquality
and self.a.max_quality() < minquality):
# If the quality of the required matcher isn't high enough to
# contribute, return an inactive matcher
return mcore.NullMatcher()
elif not self.b.is_active():
# If the prohibited matcher is inactive, convert to just the
# required matcher
return self.a.replace(minquality)
a = self.a.replace(minquality)
b = self.b.replace()
if a is not self.a or b is not self.b:
# If one of the sub-matchers was replaced, return a new AndNot
return self.__class__(a, b)
else:
return self
def max_quality(self):
return self.a.max_quality()
def block_quality(self):
return self.a.block_quality()
def skip_to_quality(self, minquality):
skipped = self.a.skip_to_quality(minquality)
self._find_next()
return skipped
def id(self):
return self.a.id()
def next(self):
if not self.a.is_active():
raise mcore.ReadTooFar
ar = self.a.next()
nr = False
if self.a.is_active() and self.b.is_active():
nr = self._find_next()
return ar or nr
def skip_to(self, id):
if not self.a.is_active():
raise mcore.ReadTooFar
if id < self.a.id():
return
self.a.skip_to(id)
if self.b.is_active():
self.b.skip_to(id)
self._find_next()
def weight(self):
return self.a.weight()
def score(self):
return self.a.score()
def supports(self, astype):
return self.a.supports(astype)
def value(self):
return self.a.value()
def value_as(self, astype):
return self.a.value_as(astype)
class AndMaybeMatcher(AdditiveBiMatcher):
"""Matches postings in the first sub-matcher, and if the same posting is
in the second sub-matcher, adds their scores.
"""
def __init__(self, a, b):
AdditiveBiMatcher.__init__(self, a, b)
self._first_b()
def reset(self):
self.a.reset()
self.b.reset()
self._first_b()
def _first_b(self):
a = self.a
b = self.b
if a.is_active() and b.is_active() and a.id() != b.id():
b.skip_to(a.id())
def is_active(self):
return self.a.is_active()
def id(self):
return self.a.id()
def next(self):
if not self.a.is_active():
raise mcore.ReadTooFar
ar = self.a.next()
br = False
if self.a.is_active() and self.b.is_active():
br = self.b.skip_to(self.a.id())
return ar or br
def skip_to(self, id):
if not self.a.is_active():
raise mcore.ReadTooFar
ra = self.a.skip_to(id)
rb = False
if self.a.is_active() and self.b.is_active():
rb = self.b.skip_to(id)
return ra or rb
def replace(self, minquality=0):
a = self.a
b = self.b
a_active = a.is_active()
b_active = b.is_active()
if not a_active:
return mcore.NullMatcher()
elif minquality and b_active:
if a.max_quality() + b.max_quality() < minquality:
# If the combined max quality of the sub-matchers isn't high
# enough to possibly contribute, return an inactive matcher
return mcore.NullMatcher()
elif a.max_quality() < minquality:
# If the max quality of the main sub-matcher isn't high enough
                # to ever contribute without the optional sub-matcher, change
# into an IntersectionMatcher
return IntersectionMatcher(self.a, self.b)
elif not b_active:
return a.replace(minquality)
new_a = a.replace(minquality - b.max_quality())
new_b = b.replace(minquality - a.max_quality())
if new_a is not a or new_b is not b:
# If one of the sub-matchers changed, return a new AndMaybe
return self.__class__(new_a, new_b)
else:
return self
def skip_to_quality(self, minquality):
a = self.a
b = self.b
if not a.is_active():
raise mcore.ReadTooFar
if not b.is_active():
return a.skip_to_quality(minquality)
skipped = 0
aq = a.block_quality()
bq = b.block_quality()
while a.is_active() and b.is_active() and aq + bq <= minquality:
if aq < bq:
skipped += a.skip_to_quality(minquality - bq)
aq = a.block_quality()
else:
skipped += b.skip_to_quality(minquality - aq)
bq = b.block_quality()
return skipped
def weight(self):
if self.a.id() == self.b.id():
return self.a.weight() + self.b.weight()
else:
return self.a.weight()
def score(self):
if self.b.is_active() and self.a.id() == self.b.id():
return self.a.score() + self.b.score()
else:
return self.a.score()
def supports(self, astype):
return self.a.supports(astype)
def value(self):
return self.a.value()
def value_as(self, astype):
return self.a.value_as(astype)
| mit | 1,762,680,918,025,518,800 | 29.450809 | 79 | 0.537625 | false |
geekaia/edx-platform | cms/envs/aws_migrate.py | 87 | 1229 | """
A Django settings file for use on AWS while running
database migrations, since we don't want to normally run the
LMS with enough privileges to modify the database schema.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
# Import everything from .aws so that our settings are based on those.
from .aws import *
import os
from django.core.exceptions import ImproperlyConfigured
DB_OVERRIDES = dict(
PASSWORD=os.environ.get('DB_MIGRATION_PASS', None),
ENGINE=os.environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']),
USER=os.environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']),
NAME=os.environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
HOST=os.environ.get('DB_MIGRATION_HOST', DATABASES['default']['HOST']),
PORT=os.environ.get('DB_MIGRATION_PORT', DATABASES['default']['PORT']),
)
if DB_OVERRIDES['PASSWORD'] is None:
raise ImproperlyConfigured("No database password was provided for running "
"migrations. This is fatal.")
for override, value in DB_OVERRIDES.iteritems():
DATABASES['default'][override] = value
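# Example invocation (illustrative only; the exact manage.py arguments depend
# on the deployment scripts in use):
#
#   DB_MIGRATION_USER=migrator DB_MIGRATION_PASS=secret \
#       python manage.py cms migrate --settings=aws_migrate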
| agpl-3.0 | -6,698,457,249,484,909,000 | 39.966667 | 81 | 0.715216 | false |
evilgroot/ethercut | ethercut/master.py | 1 | 6570 | # coding: utf-8
# ETHERCUT SUITE
# Author: Ivan 'evilgroot' Luengo
# Email: [email protected]
# This project is released under a GPLv3 license
"""
Master: Handles the program loop
"""
import pcap, Queue, logging
import contextlib
import ethercut.ui as ui
import ethercut.log as log
import ethercut.sniff as sniff
import ethercut.utils as utils
import ethercut.net.link as link
import ethercut.discovery as discovery
import ethercut.exceptions as exceptions
import ethercut.net.target as target
import ethercut.net.inject as inject
import ethercut.net.network as network
import ethercut.decodermanager as decmanager
import ethercut.spoofermanager as spfmanager
import ethercut.platform as platform
import ethercut.koalafilter as koala
import ethercut.shell as shell
from ethercut.options import *
from ethercut.config import ethconf
from ethercut.context import ctx
from ethercut import NAME, PROGRAM, CONFILE, COPYRIGHT, AUTHOR
from ethercut.types.colorstr import CStr
class Master(object):
name = NAME
program = PROGRAM
def __init__(self):
# Load configuration file
ethconf.load(CONFILE)
# Register the decoders
self.decoders = decmanager.DecoderManager()
self.decoders.register()
# Register the spoofers
self.spoofers = spfmanager.SpooferManager()
self.spoofers.register()
# Add all options
self.opt = Options()
self.target1 = None
self.target2 = None
self.targetlist = ctx.targetlist = target.TargetList()
self.iface = None
self.original_mac = None
self.network = None
self.gateway = None
self.injector = inject.Injector()
self.discovery = discovery.Discovery()
self.sniffer = sniff.Sniffer()
self.filter = koala.KoalaFilter(self.decoders)
# Initialize the user interface
self.ui = ui.TextUI(self)
def start(self):
"""
Starts the whole thing
"""
# Load spoofers and decoders
if not self.opt.sniff.read:
self.spoofers.load()
self.decoders.load()
# Starts the user interface
self.ui.start()
def show_summary(self):
"""
Show a summary of the program status:
-Spoofers and decoders successfuly loaded
-Modules enabled (discovery, sniffer...)
"""
spoof = CStr(len(self.spoofers)).green if len(self.spoofers) > 0 else CStr(0).red
decode = CStr(len(self.decoders)).green if len(self.decoders) > 0 else CStr(0).red
disc = CStr("ON").green if self.discovery.active else CStr("OFF").red
sniff = CStr("ON").green if self.sniffer.active else CStr("OFF").red
summary = "[%s: %s - %s: %s - %s: %s - %s: %s]\n"%(CStr("spoofers").yellow,
spoof,
CStr("decoders").yellow,
decode,
CStr("discovery").yellow,
disc,
CStr("sniffer").yellow,
sniff)
self.ui.user_msg(summary)
def update_network(self):
"""
Update the network details
"""
if self.opt.core.use_mac:
cfg = utils.get_iface(self.opt.core.iface)
if cfg["inet"] is None:
raise exceptions.EthercutException("Couldn't determine %s IP address, make sure it "+
"is connected and propertly configured")
# Save the original mac to restore it later
self.original_mac = cfg["hw"]
self.ui.msg("Changing MAC address to: %s" %CStr(self.opt.core.use_mac).yellow)
shell.Shell().change_mac(self.opt.core.iface, self.opt.core.use_mac)
self.iface = link.Link(self.opt.core.iface)
# Network
self.network = network.Network(self.iface.ip, self.iface.netmask)
# Try to find the network gateway
gwip = self.opt.core.gateway or self.network.gateway
gwhw = utils.arp_read(gwip)
if gwip is None or gwhw is None:
raise exceptions.EthercutException("Ethercut wasn't able to find the network gateway, "+
"please check your network configuration")
self.gateway = target.Target(gwip, gwhw)
self.ui.msg("[%s] %s"%(CStr("IFACE").cyan, self.iface))
self.ui.msg("[%s] %s" %(CStr("GATEWAY").cyan, repr(self.gateway)))
# Update the context
ctx.iface = self.iface
ctx.network = self.network
ctx.gateway = self.gateway
def update_targets(self):
"""
Compile the target specifications and build the target list
"""
self.targetlist.clear()
self.target1 = self.opt.attack.target1
self.target2 = self.opt.attack.target2
# Add targets and bindings specified by the user with -T
for t in self.opt.attack.targets:
ip, mac, port = t
if port:
if mac: # Bind ports to MAC by default
if mac in self.target1:
self.target1.specific[mac] = port
if mac in self.target2:
self.target2.specific[mac] = port
else: # Bind it to the ip
if ip in self.target1:
self.target1.specific[ip] = port
if ip in self.target2:
self.target2.specific[ip] = port
if not self.opt.sniff.read:
# Only add the target if it has mac and ip
if (ip and mac and ip != self.gateway.ip and mac != self.gateway.mac and
ip != self.iface.ip and mac != self.iface.mac):
self.targetlist.append(target.Target(ip, mac, perm=True))
if len(self.targetlist) > 0:
self.ui.msg("Permanent targets:")
for t in self.targetlist:
self.ui.msg("\t%s"%repr(t))
else:
self.ui.msg("No permanent targets were added to the target list")
ctx.targetlist = self.targetlist
ctx.target1 = self.target1
ctx.target2 = self.target2
def shutdown(self):
"""
Shuts the program down, terminate all daemons
"""
self.ui.clean_exit()
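# A minimal, hypothetical entry point (not part of this module; the real
# launcher lives elsewhere in the package):
#
#   if __name__ == "__main__":
#       Master().start()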
| gpl-3.0 | -3,700,081,429,847,109,000 | 33.578947 | 101 | 0.568341 | false |
hetajen/vnpy161 | vn.api/vn.ctp/py3/pyscript/ctp_struct.py | 40 | 315084 | # encoding: UTF-8
structDict = {}
#//////////////////////////////////////////////////////////////////////
#@system Next-generation exchange trading system
#@company Shanghai Futures Information Technology Co., Ltd.
#@file ThostFtdcUserApiStruct.h
#@brief Defines the business data structures used by the client API
#@history
#//////////////////////////////////////////////////////////////////////
#Dissemination
CThostFtdcDisseminationField = {}
#Sequence series number
CThostFtdcDisseminationField["SequenceSeries"] = "int"
#Sequence number
CThostFtdcDisseminationField["SequenceNo"] = "int"
structDict['CThostFtdcDisseminationField'] = CThostFtdcDisseminationField
#User login request
CThostFtdcReqUserLoginField = {}
#Trading day
CThostFtdcReqUserLoginField["TradingDay"] = "string"
#Broker ID
CThostFtdcReqUserLoginField["BrokerID"] = "string"
#User ID
CThostFtdcReqUserLoginField["UserID"] = "string"
#Password
CThostFtdcReqUserLoginField["Password"] = "string"
#User-side product info
CThostFtdcReqUserLoginField["UserProductInfo"] = "string"
#Interface-side product info
CThostFtdcReqUserLoginField["InterfaceProductInfo"] = "string"
#Protocol info
CThostFtdcReqUserLoginField["ProtocolInfo"] = "string"
#MAC address
CThostFtdcReqUserLoginField["MacAddress"] = "string"
#One-time (dynamic) password
CThostFtdcReqUserLoginField["OneTimePassword"] = "string"
#Client IP address
CThostFtdcReqUserLoginField["ClientIPAddress"] = "string"
structDict['CThostFtdcReqUserLoginField'] = CThostFtdcReqUserLoginField
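#Illustrative sketch (not part of the generated file): each entry above maps a
#field name to a type tag, so an empty request dict can be derived generically.
#The helper name and the default values below are assumptions.
#
#   def empty_struct(name):
#       defaults = {'string': '', 'char': '', 'int': 0, 'float': 0.0}
#       return dict((k, defaults[t]) for k, t in structDict[name].items())
#
#   req = empty_struct('CThostFtdcReqUserLoginField')
#   req['BrokerID'], req['UserID'], req['Password'] = '9999', '000001', '***'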
#User login response
CThostFtdcRspUserLoginField = {}
#Trading day
CThostFtdcRspUserLoginField["TradingDay"] = "string"
#Login success time
CThostFtdcRspUserLoginField["LoginTime"] = "string"
#Broker ID
CThostFtdcRspUserLoginField["BrokerID"] = "string"
#User ID
CThostFtdcRspUserLoginField["UserID"] = "string"
#Trading system name
CThostFtdcRspUserLoginField["SystemName"] = "string"
#Front ID
CThostFtdcRspUserLoginField["FrontID"] = "int"
#Session ID
CThostFtdcRspUserLoginField["SessionID"] = "int"
#Maximum order reference
CThostFtdcRspUserLoginField["MaxOrderRef"] = "string"
#SHFE time
CThostFtdcRspUserLoginField["SHFETime"] = "string"
#DCE time
CThostFtdcRspUserLoginField["DCETime"] = "string"
#CZCE time
CThostFtdcRspUserLoginField["CZCETime"] = "string"
#CFFEX time
CThostFtdcRspUserLoginField["FFEXTime"] = "string"
#INE time
CThostFtdcRspUserLoginField["INETime"] = "string"
structDict['CThostFtdcRspUserLoginField'] = CThostFtdcRspUserLoginField
#User logout request
CThostFtdcUserLogoutField = {}
#Broker ID
CThostFtdcUserLogoutField["BrokerID"] = "string"
#User ID
CThostFtdcUserLogoutField["UserID"] = "string"
structDict['CThostFtdcUserLogoutField'] = CThostFtdcUserLogoutField
#Force a trader to log out
CThostFtdcForceUserLogoutField = {}
#Broker ID
CThostFtdcForceUserLogoutField["BrokerID"] = "string"
#User ID
CThostFtdcForceUserLogoutField["UserID"] = "string"
structDict['CThostFtdcForceUserLogoutField'] = CThostFtdcForceUserLogoutField
#Client authentication request
CThostFtdcReqAuthenticateField = {}
#Broker ID
CThostFtdcReqAuthenticateField["BrokerID"] = "string"
#User ID
CThostFtdcReqAuthenticateField["UserID"] = "string"
#User-side product info
CThostFtdcReqAuthenticateField["UserProductInfo"] = "string"
#Authentication code
CThostFtdcReqAuthenticateField["AuthCode"] = "string"
structDict['CThostFtdcReqAuthenticateField'] = CThostFtdcReqAuthenticateField
#Client authentication response
CThostFtdcRspAuthenticateField = {}
#Broker ID
CThostFtdcRspAuthenticateField["BrokerID"] = "string"
#User ID
CThostFtdcRspAuthenticateField["UserID"] = "string"
#User-side product info
CThostFtdcRspAuthenticateField["UserProductInfo"] = "string"
structDict['CThostFtdcRspAuthenticateField'] = CThostFtdcRspAuthenticateField
#Client authentication info
CThostFtdcAuthenticationInfoField = {}
#Broker ID
CThostFtdcAuthenticationInfoField["BrokerID"] = "string"
#User ID
CThostFtdcAuthenticationInfoField["UserID"] = "string"
#User-side product info
CThostFtdcAuthenticationInfoField["UserProductInfo"] = "string"
#Authentication info
CThostFtdcAuthenticationInfoField["AuthInfo"] = "string"
#Whether this is the authentication result
CThostFtdcAuthenticationInfoField["IsResult"] = "int"
structDict['CThostFtdcAuthenticationInfoField'] = CThostFtdcAuthenticationInfoField
#Bank-futures transfer message header
CThostFtdcTransferHeaderField = {}
#Version number, constant: 1.0
CThostFtdcTransferHeaderField["Version"] = "string"
#Trade code, required
CThostFtdcTransferHeaderField["TradeCode"] = "string"
#Trade date, required, format: yyyymmdd
CThostFtdcTransferHeaderField["TradeDate"] = "string"
#Trade time, required, format: hhmmss
CThostFtdcTransferHeaderField["TradeTime"] = "string"
#Originator serial number, N/A
CThostFtdcTransferHeaderField["TradeSerial"] = "string"
#Futures company code, required
CThostFtdcTransferHeaderField["FutureID"] = "string"
#Bank code, obtained from the bank query, required
CThostFtdcTransferHeaderField["BankID"] = "string"
#Bank branch code, obtained from the bank query, required
CThostFtdcTransferHeaderField["BankBrchID"] = "string"
#Operator, N/A
CThostFtdcTransferHeaderField["OperNo"] = "string"
#Trading device type, N/A
CThostFtdcTransferHeaderField["DeviceID"] = "string"
#Record count, N/A
CThostFtdcTransferHeaderField["RecordNum"] = "string"
#Session ID, N/A
CThostFtdcTransferHeaderField["SessionID"] = "int"
#Request ID, N/A
CThostFtdcTransferHeaderField["RequestID"] = "int"
structDict['CThostFtdcTransferHeaderField'] = CThostFtdcTransferHeaderField
#Bank-to-futures fund transfer request, TradeCode=202001
CThostFtdcTransferBankToFutureReqField = {}
#Futures fund account
CThostFtdcTransferBankToFutureReqField["FutureAccount"] = "string"
#Password flag
CThostFtdcTransferBankToFutureReqField["FuturePwdFlag"] = "char"
#Password
CThostFtdcTransferBankToFutureReqField["FutureAccPwd"] = "string"
#Transfer amount
CThostFtdcTransferBankToFutureReqField["TradeAmt"] = "float"
#Customer fee
CThostFtdcTransferBankToFutureReqField["CustFee"] = "float"
#Currency: RMB-renminbi USD-US dollar HKD-Hong Kong dollar
CThostFtdcTransferBankToFutureReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferBankToFutureReqField'] = CThostFtdcTransferBankToFutureReqField
#Bank-to-futures fund transfer response
CThostFtdcTransferBankToFutureRspField = {}
#Response code
CThostFtdcTransferBankToFutureRspField["RetCode"] = "string"
#Response info
CThostFtdcTransferBankToFutureRspField["RetInfo"] = "string"
#Fund account
CThostFtdcTransferBankToFutureRspField["FutureAccount"] = "string"
#Transfer amount
CThostFtdcTransferBankToFutureRspField["TradeAmt"] = "float"
#Customer fee receivable
CThostFtdcTransferBankToFutureRspField["CustFee"] = "float"
#Currency
CThostFtdcTransferBankToFutureRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferBankToFutureRspField'] = CThostFtdcTransferBankToFutureRspField
#Futures-to-bank fund transfer request, TradeCode=202002
CThostFtdcTransferFutureToBankReqField = {}
#Futures fund account
CThostFtdcTransferFutureToBankReqField["FutureAccount"] = "string"
#Password flag
CThostFtdcTransferFutureToBankReqField["FuturePwdFlag"] = "char"
#Password
CThostFtdcTransferFutureToBankReqField["FutureAccPwd"] = "string"
#Transfer amount
CThostFtdcTransferFutureToBankReqField["TradeAmt"] = "float"
#Customer fee
CThostFtdcTransferFutureToBankReqField["CustFee"] = "float"
#Currency: RMB-renminbi USD-US dollar HKD-Hong Kong dollar
CThostFtdcTransferFutureToBankReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferFutureToBankReqField'] = CThostFtdcTransferFutureToBankReqField
#Futures-to-bank fund transfer response
CThostFtdcTransferFutureToBankRspField = {}
#Response code
CThostFtdcTransferFutureToBankRspField["RetCode"] = "string"
#Response info
CThostFtdcTransferFutureToBankRspField["RetInfo"] = "string"
#Fund account
CThostFtdcTransferFutureToBankRspField["FutureAccount"] = "string"
#Transfer amount
CThostFtdcTransferFutureToBankRspField["TradeAmt"] = "float"
#Customer fee receivable
CThostFtdcTransferFutureToBankRspField["CustFee"] = "float"
#Currency
CThostFtdcTransferFutureToBankRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferFutureToBankRspField'] = CThostFtdcTransferFutureToBankRspField
#Bank balance query request, TradeCode=204002
CThostFtdcTransferQryBankReqField = {}
#Futures fund account
CThostFtdcTransferQryBankReqField["FutureAccount"] = "string"
#Password flag
CThostFtdcTransferQryBankReqField["FuturePwdFlag"] = "char"
#Password
CThostFtdcTransferQryBankReqField["FutureAccPwd"] = "string"
#Currency: RMB-renminbi USD-US dollar HKD-Hong Kong dollar
CThostFtdcTransferQryBankReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferQryBankReqField'] = CThostFtdcTransferQryBankReqField
#Bank balance query response
CThostFtdcTransferQryBankRspField = {}
#Response code
CThostFtdcTransferQryBankRspField["RetCode"] = "string"
#Response info
CThostFtdcTransferQryBankRspField["RetInfo"] = "string"
#Fund account
CThostFtdcTransferQryBankRspField["FutureAccount"] = "string"
#Bank balance
CThostFtdcTransferQryBankRspField["TradeAmt"] = "float"
#Bank available balance
CThostFtdcTransferQryBankRspField["UseAmt"] = "float"
#Bank withdrawable balance
CThostFtdcTransferQryBankRspField["FetchAmt"] = "float"
#Currency
CThostFtdcTransferQryBankRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferQryBankRspField'] = CThostFtdcTransferQryBankRspField
#Bank transaction detail query request, TradeCode=204999
CThostFtdcTransferQryDetailReqField = {}
#Futures fund account
CThostFtdcTransferQryDetailReqField["FutureAccount"] = "string"
structDict['CThostFtdcTransferQryDetailReqField'] = CThostFtdcTransferQryDetailReqField
#Bank transaction detail query response
CThostFtdcTransferQryDetailRspField = {}
#Trade date
CThostFtdcTransferQryDetailRspField["TradeDate"] = "string"
#Trade time
CThostFtdcTransferQryDetailRspField["TradeTime"] = "string"
#Trade code
CThostFtdcTransferQryDetailRspField["TradeCode"] = "string"
#Futures serial number
CThostFtdcTransferQryDetailRspField["FutureSerial"] = "int"
#Futures company code
CThostFtdcTransferQryDetailRspField["FutureID"] = "string"
#Fund account
CThostFtdcTransferQryDetailRspField["FutureAccount"] = "string"
#Bank serial number
CThostFtdcTransferQryDetailRspField["BankSerial"] = "int"
#Bank code
CThostFtdcTransferQryDetailRspField["BankID"] = "string"
#Bank branch code
CThostFtdcTransferQryDetailRspField["BankBrchID"] = "string"
#Bank account
CThostFtdcTransferQryDetailRspField["BankAccount"] = "string"
#ID document number
CThostFtdcTransferQryDetailRspField["CertCode"] = "string"
#Currency code
CThostFtdcTransferQryDetailRspField["CurrencyCode"] = "string"
#Transaction amount
CThostFtdcTransferQryDetailRspField["TxAmount"] = "float"
#Valid flag
CThostFtdcTransferQryDetailRspField["Flag"] = "char"
structDict['CThostFtdcTransferQryDetailRspField'] = CThostFtdcTransferQryDetailRspField
#Response info
CThostFtdcRspInfoField = {}
#Error code
CThostFtdcRspInfoField["ErrorID"] = "int"
#Error message
CThostFtdcRspInfoField["ErrorMsg"] = "string"
structDict['CThostFtdcRspInfoField'] = CThostFtdcRspInfoField
#Exchange
CThostFtdcExchangeField = {}
#Exchange ID
CThostFtdcExchangeField["ExchangeID"] = "string"
#Exchange name
CThostFtdcExchangeField["ExchangeName"] = "string"
#Exchange property
CThostFtdcExchangeField["ExchangeProperty"] = "char"
structDict['CThostFtdcExchangeField'] = CThostFtdcExchangeField
#Product
CThostFtdcProductField = {}
#Product ID
CThostFtdcProductField["ProductID"] = "string"
#Product name
CThostFtdcProductField["ProductName"] = "string"
#Exchange ID
CThostFtdcProductField["ExchangeID"] = "string"
#Product class
CThostFtdcProductField["ProductClass"] = "char"
#Contract volume multiple
CThostFtdcProductField["VolumeMultiple"] = "int"
#Minimum price tick
CThostFtdcProductField["PriceTick"] = "float"
#Maximum market-order volume
CThostFtdcProductField["MaxMarketOrderVolume"] = "int"
#Minimum market-order volume
CThostFtdcProductField["MinMarketOrderVolume"] = "int"
#Maximum limit-order volume
CThostFtdcProductField["MaxLimitOrderVolume"] = "int"
#Minimum limit-order volume
CThostFtdcProductField["MinLimitOrderVolume"] = "int"
#Position type
CThostFtdcProductField["PositionType"] = "char"
#Position date type
CThostFtdcProductField["PositionDateType"] = "char"
#Close deal type
CThostFtdcProductField["CloseDealType"] = "char"
#Trading currency type
CThostFtdcProductField["TradeCurrencyID"] = "string"
#Mortgage fund use range
CThostFtdcProductField["MortgageFundUseRange"] = "char"
#Exchange product ID
CThostFtdcProductField["ExchangeProductID"] = "string"
#Underlying multiple
CThostFtdcProductField["UnderlyingMultiple"] = "float"
structDict['CThostFtdcProductField'] = CThostFtdcProductField
#Instrument
CThostFtdcInstrumentField = {}
#Instrument ID
CThostFtdcInstrumentField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcInstrumentField["ExchangeID"] = "string"
#Instrument name
CThostFtdcInstrumentField["InstrumentName"] = "string"
#Instrument ID on the exchange
CThostFtdcInstrumentField["ExchangeInstID"] = "string"
#Product ID
CThostFtdcInstrumentField["ProductID"] = "string"
#Product class
CThostFtdcInstrumentField["ProductClass"] = "char"
#Delivery year
CThostFtdcInstrumentField["DeliveryYear"] = "int"
#Delivery month
CThostFtdcInstrumentField["DeliveryMonth"] = "int"
#Maximum market-order volume
CThostFtdcInstrumentField["MaxMarketOrderVolume"] = "int"
#Minimum market-order volume
CThostFtdcInstrumentField["MinMarketOrderVolume"] = "int"
#Maximum limit-order volume
CThostFtdcInstrumentField["MaxLimitOrderVolume"] = "int"
#Minimum limit-order volume
CThostFtdcInstrumentField["MinLimitOrderVolume"] = "int"
#Contract volume multiple
CThostFtdcInstrumentField["VolumeMultiple"] = "int"
#Minimum price tick
CThostFtdcInstrumentField["PriceTick"] = "float"
#Creation date
CThostFtdcInstrumentField["CreateDate"] = "string"
#Listing date
CThostFtdcInstrumentField["OpenDate"] = "string"
#Expiry date
CThostFtdcInstrumentField["ExpireDate"] = "string"
#Delivery start date
CThostFtdcInstrumentField["StartDelivDate"] = "string"
#Delivery end date
CThostFtdcInstrumentField["EndDelivDate"] = "string"
#Instrument life-cycle phase
CThostFtdcInstrumentField["InstLifePhase"] = "char"
#Whether currently trading
CThostFtdcInstrumentField["IsTrading"] = "int"
#Position type
CThostFtdcInstrumentField["PositionType"] = "char"
#Position date type
CThostFtdcInstrumentField["PositionDateType"] = "char"
#Long margin ratio
CThostFtdcInstrumentField["LongMarginRatio"] = "float"
#Short margin ratio
CThostFtdcInstrumentField["ShortMarginRatio"] = "float"
#Whether the large single-sided margin algorithm is used
CThostFtdcInstrumentField["MaxMarginSideAlgorithm"] = "char"
#Underlying instrument ID
CThostFtdcInstrumentField["UnderlyingInstrID"] = "string"
#Strike price
CThostFtdcInstrumentField["StrikePrice"] = "float"
#Options type
CThostFtdcInstrumentField["OptionsType"] = "char"
#Underlying multiple
CThostFtdcInstrumentField["UnderlyingMultiple"] = "float"
#Combination type
CThostFtdcInstrumentField["CombinationType"] = "char"
#Minimum buy order unit
CThostFtdcInstrumentField["MinBuyVolume"] = "int"
#Minimum sell order unit
CThostFtdcInstrumentField["MinSellVolume"] = "int"
#Instrument identification code
CThostFtdcInstrumentField["InstrumentCode"] = "string"
structDict['CThostFtdcInstrumentField'] = CThostFtdcInstrumentField
#Broker
CThostFtdcBrokerField = {}
#Broker ID
CThostFtdcBrokerField["BrokerID"] = "string"
#Broker abbreviation
CThostFtdcBrokerField["BrokerAbbr"] = "string"
#Broker name
CThostFtdcBrokerField["BrokerName"] = "string"
#Is active
CThostFtdcBrokerField["IsActive"] = "int"
structDict['CThostFtdcBrokerField'] = CThostFtdcBrokerField
#Exchange trader
CThostFtdcTraderField = {}
#Exchange ID
CThostFtdcTraderField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcTraderField["TraderID"] = "string"
#Participant ID
CThostFtdcTraderField["ParticipantID"] = "string"
#Password
CThostFtdcTraderField["Password"] = "string"
#Install count
CThostFtdcTraderField["InstallCount"] = "int"
#Broker ID
CThostFtdcTraderField["BrokerID"] = "string"
structDict['CThostFtdcTraderField'] = CThostFtdcTraderField
#Investor
CThostFtdcInvestorField = {}
#Investor ID
CThostFtdcInvestorField["InvestorID"] = "string"
#Broker ID
CThostFtdcInvestorField["BrokerID"] = "string"
#Investor group ID
CThostFtdcInvestorField["InvestorGroupID"] = "string"
#Investor name
CThostFtdcInvestorField["InvestorName"] = "string"
#ID document type
CThostFtdcInvestorField["IdentifiedCardType"] = "char"
#ID document number
CThostFtdcInvestorField["IdentifiedCardNo"] = "string"
#Is active
CThostFtdcInvestorField["IsActive"] = "int"
#Telephone
CThostFtdcInvestorField["Telephone"] = "string"
#Mailing address
CThostFtdcInvestorField["Address"] = "string"
#Account opening date
CThostFtdcInvestorField["OpenDate"] = "string"
#Mobile
CThostFtdcInvestorField["Mobile"] = "string"
#Commission rate template ID
CThostFtdcInvestorField["CommModelID"] = "string"
#Margin rate template ID
CThostFtdcInvestorField["MarginModelID"] = "string"
structDict['CThostFtdcInvestorField'] = CThostFtdcInvestorField
#Trading code
CThostFtdcTradingCodeField = {}
#Investor ID
CThostFtdcTradingCodeField["InvestorID"] = "string"
#Broker ID
CThostFtdcTradingCodeField["BrokerID"] = "string"
#Exchange ID
CThostFtdcTradingCodeField["ExchangeID"] = "string"
#Client ID
CThostFtdcTradingCodeField["ClientID"] = "string"
#Is active
CThostFtdcTradingCodeField["IsActive"] = "int"
#Trading code type
CThostFtdcTradingCodeField["ClientIDType"] = "char"
#Branch ID
CThostFtdcTradingCodeField["BranchID"] = "string"
#Business type
CThostFtdcTradingCodeField["BizType"] = "char"
structDict['CThostFtdcTradingCodeField'] = CThostFtdcTradingCodeField
#Participant code to broker code mapping table
CThostFtdcPartBrokerField = {}
#Broker ID
CThostFtdcPartBrokerField["BrokerID"] = "string"
#Exchange ID
CThostFtdcPartBrokerField["ExchangeID"] = "string"
#Participant ID
CThostFtdcPartBrokerField["ParticipantID"] = "string"
#Is active
CThostFtdcPartBrokerField["IsActive"] = "int"
structDict['CThostFtdcPartBrokerField'] = CThostFtdcPartBrokerField
#Administrative user
CThostFtdcSuperUserField = {}
#User ID
CThostFtdcSuperUserField["UserID"] = "string"
#User name
CThostFtdcSuperUserField["UserName"] = "string"
#Password
CThostFtdcSuperUserField["Password"] = "string"
#Is active
CThostFtdcSuperUserField["IsActive"] = "int"
structDict['CThostFtdcSuperUserField'] = CThostFtdcSuperUserField
#Administrative user function rights
CThostFtdcSuperUserFunctionField = {}
#User ID
CThostFtdcSuperUserFunctionField["UserID"] = "string"
#Function code
CThostFtdcSuperUserFunctionField["FunctionCode"] = "char"
structDict['CThostFtdcSuperUserFunctionField'] = CThostFtdcSuperUserFunctionField
#Investor group
CThostFtdcInvestorGroupField = {}
#Broker ID
CThostFtdcInvestorGroupField["BrokerID"] = "string"
#Investor group ID
CThostFtdcInvestorGroupField["InvestorGroupID"] = "string"
#Investor group name
CThostFtdcInvestorGroupField["InvestorGroupName"] = "string"
structDict['CThostFtdcInvestorGroupField'] = CThostFtdcInvestorGroupField
#Fund account
CThostFtdcTradingAccountField = {}
#Broker ID
CThostFtdcTradingAccountField["BrokerID"] = "string"
#Investor account ID
CThostFtdcTradingAccountField["AccountID"] = "string"
#Previous mortgage amount
CThostFtdcTradingAccountField["PreMortgage"] = "float"
#Previous credit
CThostFtdcTradingAccountField["PreCredit"] = "float"
#Previous deposit
CThostFtdcTradingAccountField["PreDeposit"] = "float"
#Previous settlement balance
CThostFtdcTradingAccountField["PreBalance"] = "float"
#Previous margin used
CThostFtdcTradingAccountField["PreMargin"] = "float"
#Interest base
CThostFtdcTradingAccountField["InterestBase"] = "float"
#Interest income
CThostFtdcTradingAccountField["Interest"] = "float"
#Deposit amount
CThostFtdcTradingAccountField["Deposit"] = "float"
#Withdrawal amount
CThostFtdcTradingAccountField["Withdraw"] = "float"
#Frozen margin
CThostFtdcTradingAccountField["FrozenMargin"] = "float"
#Frozen cash
CThostFtdcTradingAccountField["FrozenCash"] = "float"
#Frozen commission
CThostFtdcTradingAccountField["FrozenCommission"] = "float"
#Current total margin
CThostFtdcTradingAccountField["CurrMargin"] = "float"
#Cash difference
CThostFtdcTradingAccountField["CashIn"] = "float"
#Commission
CThostFtdcTradingAccountField["Commission"] = "float"
#Close profit
CThostFtdcTradingAccountField["CloseProfit"] = "float"
#Position profit
CThostFtdcTradingAccountField["PositionProfit"] = "float"
#Futures settlement balance
CThostFtdcTradingAccountField["Balance"] = "float"
#Available funds
CThostFtdcTradingAccountField["Available"] = "float"
#Withdrawable funds
CThostFtdcTradingAccountField["WithdrawQuota"] = "float"
#Basic reserve
CThostFtdcTradingAccountField["Reserve"] = "float"
#Trading day
CThostFtdcTradingAccountField["TradingDay"] = "string"
#Settlement ID
CThostFtdcTradingAccountField["SettlementID"] = "int"
#Credit
CThostFtdcTradingAccountField["Credit"] = "float"
#Mortgage amount
CThostFtdcTradingAccountField["Mortgage"] = "float"
#Exchange margin
CThostFtdcTradingAccountField["ExchangeMargin"] = "float"
#Investor delivery margin
CThostFtdcTradingAccountField["DeliveryMargin"] = "float"
#Exchange delivery margin
CThostFtdcTradingAccountField["ExchangeDeliveryMargin"] = "float"
#Guaranteed futures settlement balance
CThostFtdcTradingAccountField["ReserveBalance"] = "float"
#Currency ID
CThostFtdcTradingAccountField["CurrencyID"] = "string"
#Previous fund mortgage-in amount
CThostFtdcTradingAccountField["PreFundMortgageIn"] = "float"
#Previous fund mortgage-out amount
CThostFtdcTradingAccountField["PreFundMortgageOut"] = "float"
#Fund mortgage-in amount
CThostFtdcTradingAccountField["FundMortgageIn"] = "float"
#Fund mortgage-out amount
CThostFtdcTradingAccountField["FundMortgageOut"] = "float"
#Fund mortgage available balance
CThostFtdcTradingAccountField["FundMortgageAvailable"] = "float"
#Mortgageable fund amount
CThostFtdcTradingAccountField["MortgageableFund"] = "float"
#Special product margin used
CThostFtdcTradingAccountField["SpecProductMargin"] = "float"
#Special product frozen margin
CThostFtdcTradingAccountField["SpecProductFrozenMargin"] = "float"
#Special product commission
CThostFtdcTradingAccountField["SpecProductCommission"] = "float"
#Special product frozen commission
CThostFtdcTradingAccountField["SpecProductFrozenCommission"] = "float"
#Special product position profit
CThostFtdcTradingAccountField["SpecProductPositionProfit"] = "float"
#Special product close profit
CThostFtdcTradingAccountField["SpecProductCloseProfit"] = "float"
#Special product position profit computed by the position-profit algorithm
CThostFtdcTradingAccountField["SpecProductPositionProfitByAlg"] = "float"
#Special product exchange margin
CThostFtdcTradingAccountField["SpecProductExchangeMargin"] = "float"
#Business type
CThostFtdcTradingAccountField["BizType"] = "char"
structDict['CThostFtdcTradingAccountField'] = CThostFtdcTradingAccountField
#Investor position
CThostFtdcInvestorPositionField = {}
#Instrument ID
CThostFtdcInvestorPositionField["InstrumentID"] = "string"
#Broker ID
CThostFtdcInvestorPositionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorPositionField["InvestorID"] = "string"
#Position direction (long/short)
CThostFtdcInvestorPositionField["PosiDirection"] = "char"
#Hedge flag
CThostFtdcInvestorPositionField["HedgeFlag"] = "char"
#Position date
CThostFtdcInvestorPositionField["PositionDate"] = "char"
#Yesterday's position
CThostFtdcInvestorPositionField["YdPosition"] = "int"
#Today's position
CThostFtdcInvestorPositionField["Position"] = "int"
#Long frozen
CThostFtdcInvestorPositionField["LongFrozen"] = "int"
#Short frozen
CThostFtdcInvestorPositionField["ShortFrozen"] = "int"
#Long open-frozen amount
CThostFtdcInvestorPositionField["LongFrozenAmount"] = "float"
#Short open-frozen amount
CThostFtdcInvestorPositionField["ShortFrozenAmount"] = "float"
#Open volume
CThostFtdcInvestorPositionField["OpenVolume"] = "int"
#Close volume
CThostFtdcInvestorPositionField["CloseVolume"] = "int"
#Open amount
CThostFtdcInvestorPositionField["OpenAmount"] = "float"
#Close amount
CThostFtdcInvestorPositionField["CloseAmount"] = "float"
#Position cost
CThostFtdcInvestorPositionField["PositionCost"] = "float"
#Previous margin used
CThostFtdcInvestorPositionField["PreMargin"] = "float"
#Margin used
CThostFtdcInvestorPositionField["UseMargin"] = "float"
#Frozen margin
CThostFtdcInvestorPositionField["FrozenMargin"] = "float"
#Frozen cash
CThostFtdcInvestorPositionField["FrozenCash"] = "float"
#Frozen commission
CThostFtdcInvestorPositionField["FrozenCommission"] = "float"
#Cash difference
CThostFtdcInvestorPositionField["CashIn"] = "float"
#Commission
CThostFtdcInvestorPositionField["Commission"] = "float"
#Close profit
CThostFtdcInvestorPositionField["CloseProfit"] = "float"
#Position profit
CThostFtdcInvestorPositionField["PositionProfit"] = "float"
#Previous settlement price
CThostFtdcInvestorPositionField["PreSettlementPrice"] = "float"
#Settlement price of this session
CThostFtdcInvestorPositionField["SettlementPrice"] = "float"
#Trading day
CThostFtdcInvestorPositionField["TradingDay"] = "string"
#Settlement ID
CThostFtdcInvestorPositionField["SettlementID"] = "int"
#Open cost
CThostFtdcInvestorPositionField["OpenCost"] = "float"
#Exchange margin
CThostFtdcInvestorPositionField["ExchangeMargin"] = "float"
#Position formed by combination trades
CThostFtdcInvestorPositionField["CombPosition"] = "int"
#Combination long frozen
CThostFtdcInvestorPositionField["CombLongFrozen"] = "int"
#Combination short frozen
CThostFtdcInvestorPositionField["CombShortFrozen"] = "int"
#Mark-to-market close profit by date
CThostFtdcInvestorPositionField["CloseProfitByDate"] = "float"
#Trade-by-trade offset close profit
CThostFtdcInvestorPositionField["CloseProfitByTrade"] = "float"
#Today's position
CThostFtdcInvestorPositionField["TodayPosition"] = "int"
#Margin rate (by money)
CThostFtdcInvestorPositionField["MarginRateByMoney"] = "float"
#Margin rate (by volume)
CThostFtdcInvestorPositionField["MarginRateByVolume"] = "float"
#Strike frozen
CThostFtdcInvestorPositionField["StrikeFrozen"] = "int"
#Strike frozen amount
CThostFtdcInvestorPositionField["StrikeFrozenAmount"] = "float"
#Abandoned-strike frozen
CThostFtdcInvestorPositionField["AbandonFrozen"] = "int"
#Exchange ID
CThostFtdcInvestorPositionField["ExchangeID"] = "string"
#Yesterday's strike frozen
CThostFtdcInvestorPositionField["YdStrikeFrozen"] = "int"
structDict['CThostFtdcInvestorPositionField'] = CThostFtdcInvestorPositionField
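#Illustrative only (not in the original file): for a position dict shaped like
#CThostFtdcInvestorPositionField, the average open price can be recovered from
#OpenCost once the contract multiplier is known; 'multiplier' is an assumed
#external input, not a field of this struct.
#
#   def avg_open_price(pos, multiplier):
#       if pos['Position'] == 0:
#           return 0.0
#       return pos['OpenCost'] / (pos['Position'] * multiplier)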
#Instrument margin rate
CThostFtdcInstrumentMarginRateField = {}
#Instrument ID
CThostFtdcInstrumentMarginRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcInstrumentMarginRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcInstrumentMarginRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcInstrumentMarginRateField["InvestorID"] = "string"
#Hedge flag
CThostFtdcInstrumentMarginRateField["HedgeFlag"] = "char"
#Long margin ratio (by money)
CThostFtdcInstrumentMarginRateField["LongMarginRatioByMoney"] = "float"
#Long margin fee (by volume)
CThostFtdcInstrumentMarginRateField["LongMarginRatioByVolume"] = "float"
#Short margin ratio (by money)
CThostFtdcInstrumentMarginRateField["ShortMarginRatioByMoney"] = "float"
#Short margin fee (by volume)
CThostFtdcInstrumentMarginRateField["ShortMarginRatioByVolume"] = "float"
#Whether charged relative to the exchange rate
CThostFtdcInstrumentMarginRateField["IsRelative"] = "int"
structDict['CThostFtdcInstrumentMarginRateField'] = CThostFtdcInstrumentMarginRateField
#Instrument commission rate
CThostFtdcInstrumentCommissionRateField = {}
#Instrument ID
CThostFtdcInstrumentCommissionRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcInstrumentCommissionRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcInstrumentCommissionRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcInstrumentCommissionRateField["InvestorID"] = "string"
#Open commission rate (by money)
CThostFtdcInstrumentCommissionRateField["OpenRatioByMoney"] = "float"
#Open commission (by volume)
CThostFtdcInstrumentCommissionRateField["OpenRatioByVolume"] = "float"
#Close commission rate (by money)
CThostFtdcInstrumentCommissionRateField["CloseRatioByMoney"] = "float"
#Close commission (by volume)
CThostFtdcInstrumentCommissionRateField["CloseRatioByVolume"] = "float"
#Close-today commission rate (by money)
CThostFtdcInstrumentCommissionRateField["CloseTodayRatioByMoney"] = "float"
#Close-today commission (by volume)
CThostFtdcInstrumentCommissionRateField["CloseTodayRatioByVolume"] = "float"
#Exchange ID
CThostFtdcInstrumentCommissionRateField["ExchangeID"] = "string"
#Business type
CThostFtdcInstrumentCommissionRateField["BizType"] = "char"
structDict['CThostFtdcInstrumentCommissionRateField'] = CThostFtdcInstrumentCommissionRateField
#Depth market data
CThostFtdcDepthMarketDataField = {}
#Trading day
CThostFtdcDepthMarketDataField["TradingDay"] = "string"
#Instrument ID
CThostFtdcDepthMarketDataField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcDepthMarketDataField["ExchangeID"] = "string"
#Instrument ID on the exchange
CThostFtdcDepthMarketDataField["ExchangeInstID"] = "string"
#Last price
CThostFtdcDepthMarketDataField["LastPrice"] = "float"
#Previous settlement price
CThostFtdcDepthMarketDataField["PreSettlementPrice"] = "float"
#Previous close price
CThostFtdcDepthMarketDataField["PreClosePrice"] = "float"
#Previous open interest
CThostFtdcDepthMarketDataField["PreOpenInterest"] = "float"
#Open price
CThostFtdcDepthMarketDataField["OpenPrice"] = "float"
#Highest price
CThostFtdcDepthMarketDataField["HighestPrice"] = "float"
#Lowest price
CThostFtdcDepthMarketDataField["LowestPrice"] = "float"
#Volume
CThostFtdcDepthMarketDataField["Volume"] = "int"
#Turnover
CThostFtdcDepthMarketDataField["Turnover"] = "float"
#Open interest
CThostFtdcDepthMarketDataField["OpenInterest"] = "float"
#Close price
CThostFtdcDepthMarketDataField["ClosePrice"] = "float"
#Settlement price of this session
CThostFtdcDepthMarketDataField["SettlementPrice"] = "float"
#Upper limit price
CThostFtdcDepthMarketDataField["UpperLimitPrice"] = "float"
#Lower limit price
CThostFtdcDepthMarketDataField["LowerLimitPrice"] = "float"
#Previous delta
CThostFtdcDepthMarketDataField["PreDelta"] = "float"
#Current delta
CThostFtdcDepthMarketDataField["CurrDelta"] = "float"
#Last update time
CThostFtdcDepthMarketDataField["UpdateTime"] = "string"
#Last update millisecond
CThostFtdcDepthMarketDataField["UpdateMillisec"] = "int"
#Bid price 1
CThostFtdcDepthMarketDataField["BidPrice1"] = "float"
#Bid volume 1
CThostFtdcDepthMarketDataField["BidVolume1"] = "int"
#Ask price 1
CThostFtdcDepthMarketDataField["AskPrice1"] = "float"
#Ask volume 1
CThostFtdcDepthMarketDataField["AskVolume1"] = "int"
#Bid price 2
CThostFtdcDepthMarketDataField["BidPrice2"] = "float"
#Bid volume 2
CThostFtdcDepthMarketDataField["BidVolume2"] = "int"
#Ask price 2
CThostFtdcDepthMarketDataField["AskPrice2"] = "float"
#Ask volume 2
CThostFtdcDepthMarketDataField["AskVolume2"] = "int"
#Bid price 3
CThostFtdcDepthMarketDataField["BidPrice3"] = "float"
#Bid volume 3
CThostFtdcDepthMarketDataField["BidVolume3"] = "int"
#Ask price 3
CThostFtdcDepthMarketDataField["AskPrice3"] = "float"
#Ask volume 3
CThostFtdcDepthMarketDataField["AskVolume3"] = "int"
#Bid price 4
CThostFtdcDepthMarketDataField["BidPrice4"] = "float"
#Bid volume 4
CThostFtdcDepthMarketDataField["BidVolume4"] = "int"
#Ask price 4
CThostFtdcDepthMarketDataField["AskPrice4"] = "float"
#Ask volume 4
CThostFtdcDepthMarketDataField["AskVolume4"] = "int"
#Bid price 5
CThostFtdcDepthMarketDataField["BidPrice5"] = "float"
#Bid volume 5
CThostFtdcDepthMarketDataField["BidVolume5"] = "int"
#Ask price 5
CThostFtdcDepthMarketDataField["AskPrice5"] = "float"
#Ask volume 5
CThostFtdcDepthMarketDataField["AskVolume5"] = "int"
#Average price of the day
CThostFtdcDepthMarketDataField["AveragePrice"] = "float"
#Action day
CThostFtdcDepthMarketDataField["ActionDay"] = "string"
structDict['CThostFtdcDepthMarketDataField'] = CThostFtdcDepthMarketDataField
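#Illustrative only (not in the original file): a tick shaped like
#CThostFtdcDepthMarketDataField can be post-processed directly, e.g. to get
#the top-of-book mid price (the helper name is hypothetical):
#
#   def mid_price(tick):
#       return (tick['BidPrice1'] + tick['AskPrice1']) / 2.0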
#Investor instrument trading right
CThostFtdcInstrumentTradingRightField = {}
#Instrument ID
CThostFtdcInstrumentTradingRightField["InstrumentID"] = "string"
#Investor range
CThostFtdcInstrumentTradingRightField["InvestorRange"] = "char"
#Broker ID
CThostFtdcInstrumentTradingRightField["BrokerID"] = "string"
#Investor ID
CThostFtdcInstrumentTradingRightField["InvestorID"] = "string"
#Trading right
CThostFtdcInstrumentTradingRightField["TradingRight"] = "char"
#Exchange ID
CThostFtdcInstrumentTradingRightField["ExchangeID"] = "string"
#Business type
CThostFtdcInstrumentTradingRightField["BizType"] = "char"
structDict['CThostFtdcInstrumentTradingRightField'] = CThostFtdcInstrumentTradingRightField
#Broker user
CThostFtdcBrokerUserField = {}
#Broker ID
CThostFtdcBrokerUserField["BrokerID"] = "string"
#User ID
CThostFtdcBrokerUserField["UserID"] = "string"
#User name
CThostFtdcBrokerUserField["UserName"] = "string"
#User type
CThostFtdcBrokerUserField["UserType"] = "char"
#Is active
CThostFtdcBrokerUserField["IsActive"] = "int"
#Whether an OTP token is used
CThostFtdcBrokerUserField["IsUsingOTP"] = "int"
structDict['CThostFtdcBrokerUserField'] = CThostFtdcBrokerUserField
#Broker user password
CThostFtdcBrokerUserPasswordField = {}
#Broker ID
CThostFtdcBrokerUserPasswordField["BrokerID"] = "string"
#User ID
CThostFtdcBrokerUserPasswordField["UserID"] = "string"
#Password
CThostFtdcBrokerUserPasswordField["Password"] = "string"
structDict['CThostFtdcBrokerUserPasswordField'] = CThostFtdcBrokerUserPasswordField
#Broker user function rights
CThostFtdcBrokerUserFunctionField = {}
#Broker ID
CThostFtdcBrokerUserFunctionField["BrokerID"] = "string"
#User ID
CThostFtdcBrokerUserFunctionField["UserID"] = "string"
#Broker function code
CThostFtdcBrokerUserFunctionField["BrokerFunctionCode"] = "char"
structDict['CThostFtdcBrokerUserFunctionField'] = CThostFtdcBrokerUserFunctionField
#Exchange trader offer (order gateway) source
CThostFtdcTraderOfferField = {}
#Exchange ID
CThostFtdcTraderOfferField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcTraderOfferField["TraderID"] = "string"
#Participant ID
CThostFtdcTraderOfferField["ParticipantID"] = "string"
#Password
CThostFtdcTraderOfferField["Password"] = "string"
#Install ID
CThostFtdcTraderOfferField["InstallID"] = "int"
#Local order ID
CThostFtdcTraderOfferField["OrderLocalID"] = "string"
#Exchange trader connection status
CThostFtdcTraderOfferField["TraderConnectStatus"] = "char"
#Date the connect request was issued
CThostFtdcTraderOfferField["ConnectRequestDate"] = "string"
#Time the connect request was issued
CThostFtdcTraderOfferField["ConnectRequestTime"] = "string"
#Last report date
CThostFtdcTraderOfferField["LastReportDate"] = "string"
#Last report time
CThostFtdcTraderOfferField["LastReportTime"] = "string"
#Connection completion date
CThostFtdcTraderOfferField["ConnectDate"] = "string"
#Connection completion time
CThostFtdcTraderOfferField["ConnectTime"] = "string"
#Start date
CThostFtdcTraderOfferField["StartDate"] = "string"
#Start time
CThostFtdcTraderOfferField["StartTime"] = "string"
#Trading day
CThostFtdcTraderOfferField["TradingDay"] = "string"
#Broker ID
CThostFtdcTraderOfferField["BrokerID"] = "string"
#Maximum trade ID on this seat
CThostFtdcTraderOfferField["MaxTradeID"] = "string"
#Maximum order message reference on this seat
CThostFtdcTraderOfferField["MaxOrderMessageReference"] = "string"
#Business type
CThostFtdcTraderOfferField["BizType"] = "char"
structDict['CThostFtdcTraderOfferField'] = CThostFtdcTraderOfferField
#Investor settlement result
CThostFtdcSettlementInfoField = {}
#Trading day
CThostFtdcSettlementInfoField["TradingDay"] = "string"
#Settlement ID
CThostFtdcSettlementInfoField["SettlementID"] = "int"
#Broker ID
CThostFtdcSettlementInfoField["BrokerID"] = "string"
#Investor ID
CThostFtdcSettlementInfoField["InvestorID"] = "string"
#Sequence number
CThostFtdcSettlementInfoField["SequenceNo"] = "int"
#Message body
CThostFtdcSettlementInfoField["Content"] = "string"
structDict['CThostFtdcSettlementInfoField'] = CThostFtdcSettlementInfoField
#Instrument margin rate adjustment
CThostFtdcInstrumentMarginRateAdjustField = {}
#Instrument ID
CThostFtdcInstrumentMarginRateAdjustField["InstrumentID"] = "string"
#Investor range
CThostFtdcInstrumentMarginRateAdjustField["InvestorRange"] = "char"
#Broker ID
CThostFtdcInstrumentMarginRateAdjustField["BrokerID"] = "string"
#Investor ID
CThostFtdcInstrumentMarginRateAdjustField["InvestorID"] = "string"
#Hedge flag
CThostFtdcInstrumentMarginRateAdjustField["HedgeFlag"] = "char"
#Long margin ratio (by money)
CThostFtdcInstrumentMarginRateAdjustField["LongMarginRatioByMoney"] = "float"
#Long margin fee (by volume)
CThostFtdcInstrumentMarginRateAdjustField["LongMarginRatioByVolume"] = "float"
#Short margin ratio (by money)
CThostFtdcInstrumentMarginRateAdjustField["ShortMarginRatioByMoney"] = "float"
#Short margin fee (by volume)
CThostFtdcInstrumentMarginRateAdjustField["ShortMarginRatioByVolume"] = "float"
#Whether charged relative to the exchange rate
CThostFtdcInstrumentMarginRateAdjustField["IsRelative"] = "int"
structDict['CThostFtdcInstrumentMarginRateAdjustField'] = CThostFtdcInstrumentMarginRateAdjustField
#Exchange margin rate
CThostFtdcExchangeMarginRateField = {}
#Broker ID
CThostFtdcExchangeMarginRateField["BrokerID"] = "string"
#Instrument ID
CThostFtdcExchangeMarginRateField["InstrumentID"] = "string"
#Hedge flag
CThostFtdcExchangeMarginRateField["HedgeFlag"] = "char"
#Long margin ratio (by money)
CThostFtdcExchangeMarginRateField["LongMarginRatioByMoney"] = "float"
#Long margin fee (by volume)
CThostFtdcExchangeMarginRateField["LongMarginRatioByVolume"] = "float"
#Short margin ratio (by money)
CThostFtdcExchangeMarginRateField["ShortMarginRatioByMoney"] = "float"
#Short margin fee (by volume)
CThostFtdcExchangeMarginRateField["ShortMarginRatioByVolume"] = "float"
structDict['CThostFtdcExchangeMarginRateField'] = CThostFtdcExchangeMarginRateField
#Exchange margin rate adjustment
CThostFtdcExchangeMarginRateAdjustField = {}
#Broker ID
CThostFtdcExchangeMarginRateAdjustField["BrokerID"] = "string"
#Instrument ID
CThostFtdcExchangeMarginRateAdjustField["InstrumentID"] = "string"
#Hedge flag
CThostFtdcExchangeMarginRateAdjustField["HedgeFlag"] = "char"
#Exchange-following investor long margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["LongMarginRatioByMoney"] = "float"
#Exchange-following investor long margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["LongMarginRatioByVolume"] = "float"
#Exchange-following investor short margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["ShortMarginRatioByMoney"] = "float"
#Exchange-following investor short margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["ShortMarginRatioByVolume"] = "float"
#Exchange long margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["ExchLongMarginRatioByMoney"] = "float"
#Exchange long margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["ExchLongMarginRatioByVolume"] = "float"
#Exchange short margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["ExchShortMarginRatioByMoney"] = "float"
#Exchange short margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["ExchShortMarginRatioByVolume"] = "float"
#Non-exchange-following investor long margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["NoLongMarginRatioByMoney"] = "float"
#Non-exchange-following investor long margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["NoLongMarginRatioByVolume"] = "float"
#Non-exchange-following investor short margin ratio (by money)
CThostFtdcExchangeMarginRateAdjustField["NoShortMarginRatioByMoney"] = "float"
#Non-exchange-following investor short margin fee (by volume)
CThostFtdcExchangeMarginRateAdjustField["NoShortMarginRatioByVolume"] = "float"
structDict['CThostFtdcExchangeMarginRateAdjustField'] = CThostFtdcExchangeMarginRateAdjustField
#Exchange rate
CThostFtdcExchangeRateField = {}
#Broker ID
CThostFtdcExchangeRateField["BrokerID"] = "string"
#From currency
CThostFtdcExchangeRateField["FromCurrencyID"] = "string"
#From currency unit amount
CThostFtdcExchangeRateField["FromCurrencyUnit"] = "float"
#To currency
CThostFtdcExchangeRateField["ToCurrencyID"] = "string"
#Exchange rate
CThostFtdcExchangeRateField["ExchangeRate"] = "float"
structDict['CThostFtdcExchangeRateField'] = CThostFtdcExchangeRateField
#Settlement reference
CThostFtdcSettlementRefField = {}
#Trading day
CThostFtdcSettlementRefField["TradingDay"] = "string"
#Settlement ID
CThostFtdcSettlementRefField["SettlementID"] = "int"
structDict['CThostFtdcSettlementRefField'] = CThostFtdcSettlementRefField
#Current time
CThostFtdcCurrentTimeField = {}
#Current date
CThostFtdcCurrentTimeField["CurrDate"] = "string"
#Current time
CThostFtdcCurrentTimeField["CurrTime"] = "string"
#Current time (milliseconds)
CThostFtdcCurrentTimeField["CurrMillisec"] = "int"
#Action day
CThostFtdcCurrentTimeField["ActionDay"] = "string"
structDict['CThostFtdcCurrentTimeField'] = CThostFtdcCurrentTimeField
#Communication phase
CThostFtdcCommPhaseField = {}
#Trading day
CThostFtdcCommPhaseField["TradingDay"] = "string"
#Communication phase number
CThostFtdcCommPhaseField["CommPhaseNo"] = "int"
#System ID
CThostFtdcCommPhaseField["SystemID"] = "string"
structDict['CThostFtdcCommPhaseField'] = CThostFtdcCommPhaseField
#Login info
CThostFtdcLoginInfoField = {}
#Front ID
CThostFtdcLoginInfoField["FrontID"] = "int"
#Session ID
CThostFtdcLoginInfoField["SessionID"] = "int"
#Broker ID
CThostFtdcLoginInfoField["BrokerID"] = "string"
#User ID
CThostFtdcLoginInfoField["UserID"] = "string"
#Login date
CThostFtdcLoginInfoField["LoginDate"] = "string"
#Login time
CThostFtdcLoginInfoField["LoginTime"] = "string"
#IP address
CThostFtdcLoginInfoField["IPAddress"] = "string"
#User-side product info
CThostFtdcLoginInfoField["UserProductInfo"] = "string"
#Interface-side product info
CThostFtdcLoginInfoField["InterfaceProductInfo"] = "string"
#Protocol info
CThostFtdcLoginInfoField["ProtocolInfo"] = "string"
#System name
CThostFtdcLoginInfoField["SystemName"] = "string"
#Password
CThostFtdcLoginInfoField["Password"] = "string"
#Maximum order reference
CThostFtdcLoginInfoField["MaxOrderRef"] = "string"
#SHFE time
CThostFtdcLoginInfoField["SHFETime"] = "string"
#DCE time
CThostFtdcLoginInfoField["DCETime"] = "string"
#CZCE time
CThostFtdcLoginInfoField["CZCETime"] = "string"
#CFFEX time
CThostFtdcLoginInfoField["FFEXTime"] = "string"
#MAC address
CThostFtdcLoginInfoField["MacAddress"] = "string"
#One-time (dynamic) password
CThostFtdcLoginInfoField["OneTimePassword"] = "string"
#INE time
CThostFtdcLoginInfoField["INETime"] = "string"
structDict['CThostFtdcLoginInfoField'] = CThostFtdcLoginInfoField
#Logout info
CThostFtdcLogoutAllField = {}
#Front ID
CThostFtdcLogoutAllField["FrontID"] = "int"
#Session ID
CThostFtdcLogoutAllField["SessionID"] = "int"
#System name
CThostFtdcLogoutAllField["SystemName"] = "string"
structDict['CThostFtdcLogoutAllField'] = CThostFtdcLogoutAllField
#Front status
CThostFtdcFrontStatusField = {}
#Front ID
CThostFtdcFrontStatusField["FrontID"] = "int"
#Last report date
CThostFtdcFrontStatusField["LastReportDate"] = "string"
#Last report time
CThostFtdcFrontStatusField["LastReportTime"] = "string"
#Is active
CThostFtdcFrontStatusField["IsActive"] = "int"
structDict['CThostFtdcFrontStatusField'] = CThostFtdcFrontStatusField
#User password update
CThostFtdcUserPasswordUpdateField = {}
#Broker ID
CThostFtdcUserPasswordUpdateField["BrokerID"] = "string"
#User ID
CThostFtdcUserPasswordUpdateField["UserID"] = "string"
#Old password
CThostFtdcUserPasswordUpdateField["OldPassword"] = "string"
#New password
CThostFtdcUserPasswordUpdateField["NewPassword"] = "string"
structDict['CThostFtdcUserPasswordUpdateField'] = CThostFtdcUserPasswordUpdateField
#Input order
CThostFtdcInputOrderField = {}
#Broker ID
CThostFtdcInputOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputOrderField["InstrumentID"] = "string"
#Order reference
CThostFtdcInputOrderField["OrderRef"] = "string"
#User ID
CThostFtdcInputOrderField["UserID"] = "string"
#Order price type
CThostFtdcInputOrderField["OrderPriceType"] = "char"
#Direction (buy/sell)
CThostFtdcInputOrderField["Direction"] = "char"
#Combined offset (open/close) flag
CThostFtdcInputOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcInputOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcInputOrderField["LimitPrice"] = "float"
#Volume
CThostFtdcInputOrderField["VolumeTotalOriginal"] = "int"
#Time condition (validity period)
CThostFtdcInputOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcInputOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcInputOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcInputOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcInputOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcInputOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcInputOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcInputOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcInputOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcInputOrderField["RequestID"] = "int"
#User force close flag
CThostFtdcInputOrderField["UserForceClose"] = "int"
#Swap order flag
CThostFtdcInputOrderField["IsSwapOrder"] = "int"
#Exchange ID
CThostFtdcInputOrderField["ExchangeID"] = "string"
structDict['CThostFtdcInputOrderField'] = CThostFtdcInputOrderField
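#Illustrative sketch (not part of the generated file): an order request can be
#assembled as a plain dict following the layout above. The enum values for
#Direction and CombOffsetFlag are assumptions here and must be taken from the
#accompanying constant definitions in practice.
#
#   order = dict((k, '') for k in CThostFtdcInputOrderField)
#   order['InstrumentID'] = 'rb1705'
#   order['Direction'] = '0'            # assumed: buy
#   order['CombOffsetFlag'] = '0'       # assumed: open
#   order['LimitPrice'] = 3000.0
#   order['VolumeTotalOriginal'] = 1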
#Order
CThostFtdcOrderField = {}
#Broker ID
CThostFtdcOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcOrderField["InstrumentID"] = "string"
#Order reference
CThostFtdcOrderField["OrderRef"] = "string"
#User ID
CThostFtdcOrderField["UserID"] = "string"
#Order price type
CThostFtdcOrderField["OrderPriceType"] = "char"
#Direction (buy/sell)
CThostFtdcOrderField["Direction"] = "char"
#Combined offset (open/close) flag
CThostFtdcOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcOrderField["LimitPrice"] = "float"
#Volume
CThostFtdcOrderField["VolumeTotalOriginal"] = "int"
#Time condition (validity period)
CThostFtdcOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcOrderField["RequestID"] = "int"
#Local order ID
CThostFtdcOrderField["OrderLocalID"] = "string"
#Exchange ID
CThostFtdcOrderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcOrderField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcOrderField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcOrderField["TraderID"] = "string"
#Install ID
CThostFtdcOrderField["InstallID"] = "int"
#Order submit status
CThostFtdcOrderField["OrderSubmitStatus"] = "char"
#Order notify sequence
CThostFtdcOrderField["NotifySequence"] = "int"
#Trading day
CThostFtdcOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcOrderField["SettlementID"] = "int"
#Order system ID
CThostFtdcOrderField["OrderSysID"] = "string"
#Order source
CThostFtdcOrderField["OrderSource"] = "char"
#Order status
CThostFtdcOrderField["OrderStatus"] = "char"
#Order type
CThostFtdcOrderField["OrderType"] = "char"
#Volume traded today
CThostFtdcOrderField["VolumeTraded"] = "int"
#Volume remaining
CThostFtdcOrderField["VolumeTotal"] = "int"
#Order insert date
CThostFtdcOrderField["InsertDate"] = "string"
#Order insert time
CThostFtdcOrderField["InsertTime"] = "string"
#Activation time
CThostFtdcOrderField["ActiveTime"] = "string"
#Suspend time
CThostFtdcOrderField["SuspendTime"] = "string"
#Last update time
CThostFtdcOrderField["UpdateTime"] = "string"
#Cancellation time
CThostFtdcOrderField["CancelTime"] = "string"
#Exchange trader ID of the last modification
CThostFtdcOrderField["ActiveTraderID"] = "string"
#Clearing participant ID
CThostFtdcOrderField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcOrderField["SequenceNo"] = "int"
#Front ID
CThostFtdcOrderField["FrontID"] = "int"
#Session ID
CThostFtdcOrderField["SessionID"] = "int"
#User-side product info
CThostFtdcOrderField["UserProductInfo"] = "string"
#Status message
CThostFtdcOrderField["StatusMsg"] = "string"
#User force close flag
CThostFtdcOrderField["UserForceClose"] = "int"
#Operating user ID
CThostFtdcOrderField["ActiveUserID"] = "string"
#Broker order sequence number
CThostFtdcOrderField["BrokerOrderSeq"] = "int"
#Related order
CThostFtdcOrderField["RelativeOrderSysID"] = "string"
#CZCE total traded volume
CThostFtdcOrderField["ZCETotalTradedVolume"] = "int"
#Swap order flag
CThostFtdcOrderField["IsSwapOrder"] = "int"
#Branch ID
CThostFtdcOrderField["BranchID"] = "string"
structDict['CThostFtdcOrderField'] = CThostFtdcOrderField
#Exchange order
CThostFtdcExchangeOrderField = {}
#Order price type
CThostFtdcExchangeOrderField["OrderPriceType"] = "char"
#Direction (buy/sell)
CThostFtdcExchangeOrderField["Direction"] = "char"
#Combined offset (open/close) flag
CThostFtdcExchangeOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcExchangeOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcExchangeOrderField["LimitPrice"] = "float"
#Volume
CThostFtdcExchangeOrderField["VolumeTotalOriginal"] = "int"
#Time condition (validity period)
CThostFtdcExchangeOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcExchangeOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcExchangeOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcExchangeOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcExchangeOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcExchangeOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcExchangeOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcExchangeOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcExchangeOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcExchangeOrderField["RequestID"] = "int"
#Local order ID
CThostFtdcExchangeOrderField["OrderLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeOrderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeOrderField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeOrderField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcExchangeOrderField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeOrderField["InstallID"] = "int"
#Order submit status
CThostFtdcExchangeOrderField["OrderSubmitStatus"] = "char"
#Order notify sequence
CThostFtdcExchangeOrderField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeOrderField["SettlementID"] = "int"
#Order system ID
CThostFtdcExchangeOrderField["OrderSysID"] = "string"
#Order source
CThostFtdcExchangeOrderField["OrderSource"] = "char"
#Order status
CThostFtdcExchangeOrderField["OrderStatus"] = "char"
#Order type
CThostFtdcExchangeOrderField["OrderType"] = "char"
#Volume traded today
CThostFtdcExchangeOrderField["VolumeTraded"] = "int"
#Volume remaining
CThostFtdcExchangeOrderField["VolumeTotal"] = "int"
#Order insert date
CThostFtdcExchangeOrderField["InsertDate"] = "string"
#Order insert time
CThostFtdcExchangeOrderField["InsertTime"] = "string"
#Activation time
CThostFtdcExchangeOrderField["ActiveTime"] = "string"
#Suspend time
CThostFtdcExchangeOrderField["SuspendTime"] = "string"
#Last update time
CThostFtdcExchangeOrderField["UpdateTime"] = "string"
#Cancellation time
CThostFtdcExchangeOrderField["CancelTime"] = "string"
#Exchange trader ID of the last modification
CThostFtdcExchangeOrderField["ActiveTraderID"] = "string"
#Clearing participant ID
CThostFtdcExchangeOrderField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcExchangeOrderField["SequenceNo"] = "int"
#Branch ID
CThostFtdcExchangeOrderField["BranchID"] = "string"
structDict['CThostFtdcExchangeOrderField'] = CThostFtdcExchangeOrderField
#Exchange order insert failure
CThostFtdcExchangeOrderInsertErrorField = {}
#Exchange ID
CThostFtdcExchangeOrderInsertErrorField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeOrderInsertErrorField["ParticipantID"] = "string"
#Exchange trader ID
CThostFtdcExchangeOrderInsertErrorField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeOrderInsertErrorField["InstallID"] = "int"
#Local order ID
CThostFtdcExchangeOrderInsertErrorField["OrderLocalID"] = "string"
#Error code
CThostFtdcExchangeOrderInsertErrorField["ErrorID"] = "int"
#Error message
CThostFtdcExchangeOrderInsertErrorField["ErrorMsg"] = "string"
structDict['CThostFtdcExchangeOrderInsertErrorField'] = CThostFtdcExchangeOrderInsertErrorField
#Input order action
CThostFtdcInputOrderActionField = {}
#Broker ID
CThostFtdcInputOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcInputOrderActionField["OrderActionRef"] = "int"
#Order reference
CThostFtdcInputOrderActionField["OrderRef"] = "string"
#Request ID
CThostFtdcInputOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcInputOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcInputOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcInputOrderActionField["ExchangeID"] = "string"
#Order system ID
CThostFtdcInputOrderActionField["OrderSysID"] = "string"
#Action flag
CThostFtdcInputOrderActionField["ActionFlag"] = "char"
#Price
CThostFtdcInputOrderActionField["LimitPrice"] = "float"
#Volume change
CThostFtdcInputOrderActionField["VolumeChange"] = "int"
#User ID
CThostFtdcInputOrderActionField["UserID"] = "string"
#Instrument ID
CThostFtdcInputOrderActionField["InstrumentID"] = "string"
structDict['CThostFtdcInputOrderActionField'] = CThostFtdcInputOrderActionField
#Order action
CThostFtdcOrderActionField = {}
#Broker ID
CThostFtdcOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcOrderActionField["OrderActionRef"] = "int"
#Order reference
CThostFtdcOrderActionField["OrderRef"] = "string"
#Request ID
CThostFtdcOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcOrderActionField["ExchangeID"] = "string"
#Order system ID
CThostFtdcOrderActionField["OrderSysID"] = "string"
#Action flag
CThostFtdcOrderActionField["ActionFlag"] = "char"
#Price
CThostFtdcOrderActionField["LimitPrice"] = "float"
#Volume change
CThostFtdcOrderActionField["VolumeChange"] = "int"
#Action date
CThostFtdcOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcOrderActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcOrderActionField["TraderID"] = "string"
#Install ID
CThostFtdcOrderActionField["InstallID"] = "int"
#Local order ID
CThostFtdcOrderActionField["OrderLocalID"] = "string"
#Action local ID
CThostFtdcOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcOrderActionField["UserID"] = "string"
#Status message
CThostFtdcOrderActionField["StatusMsg"] = "string"
#Instrument ID
CThostFtdcOrderActionField["InstrumentID"] = "string"
#Branch ID
CThostFtdcOrderActionField["BranchID"] = "string"
structDict['CThostFtdcOrderActionField'] = CThostFtdcOrderActionField
#Exchange order action
CThostFtdcExchangeOrderActionField = {}
#Exchange ID
CThostFtdcExchangeOrderActionField["ExchangeID"] = "string"
#Order system ID
CThostFtdcExchangeOrderActionField["OrderSysID"] = "string"
#Action flag
CThostFtdcExchangeOrderActionField["ActionFlag"] = "char"
#Price
CThostFtdcExchangeOrderActionField["LimitPrice"] = "float"
#Volume change
CThostFtdcExchangeOrderActionField["VolumeChange"] = "int"
#Action date
CThostFtdcExchangeOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcExchangeOrderActionField["ActionTime"] = "string"
#Trader ID at the exchange
CThostFtdcExchangeOrderActionField["TraderID"] = "string"
#Installation ID
CThostFtdcExchangeOrderActionField["InstallID"] = "int"
#Local order ID
CThostFtdcExchangeOrderActionField["OrderLocalID"] = "string"
#Local action ID
CThostFtdcExchangeOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExchangeOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcExchangeOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExchangeOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExchangeOrderActionField["UserID"] = "string"
#Branch ID
CThostFtdcExchangeOrderActionField["BranchID"] = "string"
structDict['CThostFtdcExchangeOrderActionField'] = CThostFtdcExchangeOrderActionField
#Exchange order action failure
CThostFtdcExchangeOrderActionErrorField = {}
#Exchange ID
CThostFtdcExchangeOrderActionErrorField["ExchangeID"] = "string"
#Order system ID
CThostFtdcExchangeOrderActionErrorField["OrderSysID"] = "string"
#Trader ID at the exchange
CThostFtdcExchangeOrderActionErrorField["TraderID"] = "string"
#Installation ID
CThostFtdcExchangeOrderActionErrorField["InstallID"] = "int"
#Local order ID
CThostFtdcExchangeOrderActionErrorField["OrderLocalID"] = "string"
#Local action ID
CThostFtdcExchangeOrderActionErrorField["ActionLocalID"] = "string"
#Error ID
CThostFtdcExchangeOrderActionErrorField["ErrorID"] = "int"
#Error message
CThostFtdcExchangeOrderActionErrorField["ErrorMsg"] = "string"
#Broker ID
CThostFtdcExchangeOrderActionErrorField["BrokerID"] = "string"
structDict['CThostFtdcExchangeOrderActionErrorField'] = CThostFtdcExchangeOrderActionErrorField
#Exchange trade
CThostFtdcExchangeTradeField = {}
#Exchange ID
CThostFtdcExchangeTradeField["ExchangeID"] = "string"
#Trade ID
CThostFtdcExchangeTradeField["TradeID"] = "string"
#Direction (buy/sell)
CThostFtdcExchangeTradeField["Direction"] = "char"
#Order system ID
CThostFtdcExchangeTradeField["OrderSysID"] = "string"
#Participant ID
CThostFtdcExchangeTradeField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeTradeField["ClientID"] = "string"
#Trading role
CThostFtdcExchangeTradeField["TradingRole"] = "char"
#Instrument ID at the exchange
CThostFtdcExchangeTradeField["ExchangeInstID"] = "string"
#Offset flag (open/close)
CThostFtdcExchangeTradeField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcExchangeTradeField["HedgeFlag"] = "char"
#Price
CThostFtdcExchangeTradeField["Price"] = "float"
#Volume
CThostFtdcExchangeTradeField["Volume"] = "int"
#Trade date
CThostFtdcExchangeTradeField["TradeDate"] = "string"
#Trade time
CThostFtdcExchangeTradeField["TradeTime"] = "string"
#Trade type
CThostFtdcExchangeTradeField["TradeType"] = "char"
#Trade price source
CThostFtdcExchangeTradeField["PriceSource"] = "char"
#Trader ID at the exchange
CThostFtdcExchangeTradeField["TraderID"] = "string"
#Local order ID
CThostFtdcExchangeTradeField["OrderLocalID"] = "string"
#Clearing participant ID
CThostFtdcExchangeTradeField["ClearingPartID"] = "string"
#Business unit
CThostFtdcExchangeTradeField["BusinessUnit"] = "string"
#Sequence number
CThostFtdcExchangeTradeField["SequenceNo"] = "int"
#Trade source
CThostFtdcExchangeTradeField["TradeSource"] = "char"
structDict['CThostFtdcExchangeTradeField'] = CThostFtdcExchangeTradeField
#Trade
CThostFtdcTradeField = {}
#Broker ID
CThostFtdcTradeField["BrokerID"] = "string"
#Investor ID
CThostFtdcTradeField["InvestorID"] = "string"
#Instrument ID
CThostFtdcTradeField["InstrumentID"] = "string"
#Order reference
CThostFtdcTradeField["OrderRef"] = "string"
#User ID
CThostFtdcTradeField["UserID"] = "string"
#Exchange ID
CThostFtdcTradeField["ExchangeID"] = "string"
#Trade ID
CThostFtdcTradeField["TradeID"] = "string"
#Direction (buy/sell)
CThostFtdcTradeField["Direction"] = "char"
#Order system ID
CThostFtdcTradeField["OrderSysID"] = "string"
#Participant ID
CThostFtdcTradeField["ParticipantID"] = "string"
#Client ID
CThostFtdcTradeField["ClientID"] = "string"
#Trading role
CThostFtdcTradeField["TradingRole"] = "char"
#Instrument ID at the exchange
CThostFtdcTradeField["ExchangeInstID"] = "string"
#Offset flag (open/close)
CThostFtdcTradeField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcTradeField["HedgeFlag"] = "char"
#Price
CThostFtdcTradeField["Price"] = "float"
#Volume
CThostFtdcTradeField["Volume"] = "int"
#Trade date
CThostFtdcTradeField["TradeDate"] = "string"
#Trade time
CThostFtdcTradeField["TradeTime"] = "string"
#Trade type
CThostFtdcTradeField["TradeType"] = "char"
#Trade price source
CThostFtdcTradeField["PriceSource"] = "char"
#Trader ID at the exchange
CThostFtdcTradeField["TraderID"] = "string"
#Local order ID
CThostFtdcTradeField["OrderLocalID"] = "string"
#Clearing participant ID
CThostFtdcTradeField["ClearingPartID"] = "string"
#Business unit
CThostFtdcTradeField["BusinessUnit"] = "string"
#Sequence number
CThostFtdcTradeField["SequenceNo"] = "int"
#Trading day
CThostFtdcTradeField["TradingDay"] = "string"
#Settlement ID
CThostFtdcTradeField["SettlementID"] = "int"
#Broker order sequence number
CThostFtdcTradeField["BrokerOrderSeq"] = "int"
#Trade source
CThostFtdcTradeField["TradeSource"] = "char"
structDict['CThostFtdcTradeField'] = CThostFtdcTradeField
#User session
CThostFtdcUserSessionField = {}
#Front ID
CThostFtdcUserSessionField["FrontID"] = "int"
#Session ID
CThostFtdcUserSessionField["SessionID"] = "int"
#Broker ID
CThostFtdcUserSessionField["BrokerID"] = "string"
#User ID
CThostFtdcUserSessionField["UserID"] = "string"
#Login date
CThostFtdcUserSessionField["LoginDate"] = "string"
#Login time
CThostFtdcUserSessionField["LoginTime"] = "string"
#IP address
CThostFtdcUserSessionField["IPAddress"] = "string"
#User product info
CThostFtdcUserSessionField["UserProductInfo"] = "string"
#Interface product info
CThostFtdcUserSessionField["InterfaceProductInfo"] = "string"
#Protocol info
CThostFtdcUserSessionField["ProtocolInfo"] = "string"
#MAC address
CThostFtdcUserSessionField["MacAddress"] = "string"
structDict['CThostFtdcUserSessionField'] = CThostFtdcUserSessionField
#Query maximum order volume
CThostFtdcQueryMaxOrderVolumeField = {}
#Broker ID
CThostFtdcQueryMaxOrderVolumeField["BrokerID"] = "string"
#Investor ID
CThostFtdcQueryMaxOrderVolumeField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQueryMaxOrderVolumeField["InstrumentID"] = "string"
#Direction (buy/sell)
CThostFtdcQueryMaxOrderVolumeField["Direction"] = "char"
#Offset flag (open/close)
CThostFtdcQueryMaxOrderVolumeField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcQueryMaxOrderVolumeField["HedgeFlag"] = "char"
#Maximum allowed order volume
CThostFtdcQueryMaxOrderVolumeField["MaxVolume"] = "int"
#Exchange ID
CThostFtdcQueryMaxOrderVolumeField["ExchangeID"] = "string"
structDict['CThostFtdcQueryMaxOrderVolumeField'] = CThostFtdcQueryMaxOrderVolumeField
#Investor settlement confirmation
CThostFtdcSettlementInfoConfirmField = {}
#Broker ID
CThostFtdcSettlementInfoConfirmField["BrokerID"] = "string"
#Investor ID
CThostFtdcSettlementInfoConfirmField["InvestorID"] = "string"
#Confirm date
CThostFtdcSettlementInfoConfirmField["ConfirmDate"] = "string"
#Confirm time
CThostFtdcSettlementInfoConfirmField["ConfirmTime"] = "string"
structDict['CThostFtdcSettlementInfoConfirmField'] = CThostFtdcSettlementInfoConfirmField
#Deposit/withdrawal synchronization
CThostFtdcSyncDepositField = {}
#Deposit/withdrawal flow number
CThostFtdcSyncDepositField["DepositSeqNo"] = "string"
#Broker ID
CThostFtdcSyncDepositField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncDepositField["InvestorID"] = "string"
#Deposit amount
CThostFtdcSyncDepositField["Deposit"] = "float"
#Whether execution is forced
CThostFtdcSyncDepositField["IsForce"] = "int"
#Currency ID
CThostFtdcSyncDepositField["CurrencyID"] = "string"
structDict['CThostFtdcSyncDepositField'] = CThostFtdcSyncDepositField
#Currency mortgage synchronization
CThostFtdcSyncFundMortgageField = {}
#Currency mortgage flow number
CThostFtdcSyncFundMortgageField["MortgageSeqNo"] = "string"
#Broker ID
CThostFtdcSyncFundMortgageField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncFundMortgageField["InvestorID"] = "string"
#Source currency
CThostFtdcSyncFundMortgageField["FromCurrencyID"] = "string"
#Mortgage amount
CThostFtdcSyncFundMortgageField["MortgageAmount"] = "float"
#Target currency
CThostFtdcSyncFundMortgageField["ToCurrencyID"] = "string"
structDict['CThostFtdcSyncFundMortgageField'] = CThostFtdcSyncFundMortgageField
#Broker synchronization
CThostFtdcBrokerSyncField = {}
#Broker ID
CThostFtdcBrokerSyncField["BrokerID"] = "string"
structDict['CThostFtdcBrokerSyncField'] = CThostFtdcBrokerSyncField
#Investor being synchronized
CThostFtdcSyncingInvestorField = {}
#Investor ID
CThostFtdcSyncingInvestorField["InvestorID"] = "string"
#Broker ID
CThostFtdcSyncingInvestorField["BrokerID"] = "string"
#Investor group ID
CThostFtdcSyncingInvestorField["InvestorGroupID"] = "string"
#Investor name
CThostFtdcSyncingInvestorField["InvestorName"] = "string"
#ID document type
CThostFtdcSyncingInvestorField["IdentifiedCardType"] = "char"
#ID document number
CThostFtdcSyncingInvestorField["IdentifiedCardNo"] = "string"
#Is active
CThostFtdcSyncingInvestorField["IsActive"] = "int"
#Contact phone
CThostFtdcSyncingInvestorField["Telephone"] = "string"
#Mailing address
CThostFtdcSyncingInvestorField["Address"] = "string"
#Account opening date
CThostFtdcSyncingInvestorField["OpenDate"] = "string"
#Mobile phone
CThostFtdcSyncingInvestorField["Mobile"] = "string"
#Commission rate template ID
CThostFtdcSyncingInvestorField["CommModelID"] = "string"
#Margin rate template ID
CThostFtdcSyncingInvestorField["MarginModelID"] = "string"
structDict['CThostFtdcSyncingInvestorField'] = CThostFtdcSyncingInvestorField
#Trading code being synchronized
CThostFtdcSyncingTradingCodeField = {}
#Investor ID
CThostFtdcSyncingTradingCodeField["InvestorID"] = "string"
#Broker ID
CThostFtdcSyncingTradingCodeField["BrokerID"] = "string"
#Exchange ID
CThostFtdcSyncingTradingCodeField["ExchangeID"] = "string"
#Client ID
CThostFtdcSyncingTradingCodeField["ClientID"] = "string"
#Is active
CThostFtdcSyncingTradingCodeField["IsActive"] = "int"
#Trading code type
CThostFtdcSyncingTradingCodeField["ClientIDType"] = "char"
#Branch ID
CThostFtdcSyncingTradingCodeField["BranchID"] = "string"
structDict['CThostFtdcSyncingTradingCodeField'] = CThostFtdcSyncingTradingCodeField
#Investor group being synchronized
CThostFtdcSyncingInvestorGroupField = {}
#Broker ID
CThostFtdcSyncingInvestorGroupField["BrokerID"] = "string"
#Investor group ID
CThostFtdcSyncingInvestorGroupField["InvestorGroupID"] = "string"
#Investor group name
CThostFtdcSyncingInvestorGroupField["InvestorGroupName"] = "string"
structDict['CThostFtdcSyncingInvestorGroupField'] = CThostFtdcSyncingInvestorGroupField
#Trading account being synchronized
CThostFtdcSyncingTradingAccountField = {}
#Broker ID
CThostFtdcSyncingTradingAccountField["BrokerID"] = "string"
#Investor account ID
CThostFtdcSyncingTradingAccountField["AccountID"] = "string"
#Previous mortgage amount
CThostFtdcSyncingTradingAccountField["PreMortgage"] = "float"
#Previous credit limit
CThostFtdcSyncingTradingAccountField["PreCredit"] = "float"
#Previous deposit
CThostFtdcSyncingTradingAccountField["PreDeposit"] = "float"
#Previous settlement balance
CThostFtdcSyncingTradingAccountField["PreBalance"] = "float"
#Previous occupied margin
CThostFtdcSyncingTradingAccountField["PreMargin"] = "float"
#Interest base
CThostFtdcSyncingTradingAccountField["InterestBase"] = "float"
#Interest income
CThostFtdcSyncingTradingAccountField["Interest"] = "float"
#Deposit amount
CThostFtdcSyncingTradingAccountField["Deposit"] = "float"
#Withdrawal amount
CThostFtdcSyncingTradingAccountField["Withdraw"] = "float"
#Frozen margin
CThostFtdcSyncingTradingAccountField["FrozenMargin"] = "float"
#Frozen cash
CThostFtdcSyncingTradingAccountField["FrozenCash"] = "float"
#Frozen commission
CThostFtdcSyncingTradingAccountField["FrozenCommission"] = "float"
#Current total margin
CThostFtdcSyncingTradingAccountField["CurrMargin"] = "float"
#Cash difference
CThostFtdcSyncingTradingAccountField["CashIn"] = "float"
#Commission
CThostFtdcSyncingTradingAccountField["Commission"] = "float"
#Close profit
CThostFtdcSyncingTradingAccountField["CloseProfit"] = "float"
#Position profit
CThostFtdcSyncingTradingAccountField["PositionProfit"] = "float"
#Futures settlement balance
CThostFtdcSyncingTradingAccountField["Balance"] = "float"
#Available funds
CThostFtdcSyncingTradingAccountField["Available"] = "float"
#Withdrawable funds
CThostFtdcSyncingTradingAccountField["WithdrawQuota"] = "float"
#Basic reserve
CThostFtdcSyncingTradingAccountField["Reserve"] = "float"
#Trading day
CThostFtdcSyncingTradingAccountField["TradingDay"] = "string"
#Settlement ID
CThostFtdcSyncingTradingAccountField["SettlementID"] = "int"
#Credit limit
CThostFtdcSyncingTradingAccountField["Credit"] = "float"
#Mortgage amount
CThostFtdcSyncingTradingAccountField["Mortgage"] = "float"
#Exchange margin
CThostFtdcSyncingTradingAccountField["ExchangeMargin"] = "float"
#Investor delivery margin
CThostFtdcSyncingTradingAccountField["DeliveryMargin"] = "float"
#Exchange delivery margin
CThostFtdcSyncingTradingAccountField["ExchangeDeliveryMargin"] = "float"
#Reserved futures settlement balance
CThostFtdcSyncingTradingAccountField["ReserveBalance"] = "float"
#Currency ID
CThostFtdcSyncingTradingAccountField["CurrencyID"] = "string"
#Previous currency mortgage-in amount
CThostFtdcSyncingTradingAccountField["PreFundMortgageIn"] = "float"
#Previous currency mortgage-out amount
CThostFtdcSyncingTradingAccountField["PreFundMortgageOut"] = "float"
#Currency mortgage-in amount
CThostFtdcSyncingTradingAccountField["FundMortgageIn"] = "float"
#Currency mortgage-out amount
CThostFtdcSyncingTradingAccountField["FundMortgageOut"] = "float"
#Available currency mortgage balance
CThostFtdcSyncingTradingAccountField["FundMortgageAvailable"] = "float"
#Mortgageable funds
CThostFtdcSyncingTradingAccountField["MortgageableFund"] = "float"
#Special product occupied margin
CThostFtdcSyncingTradingAccountField["SpecProductMargin"] = "float"
#Special product frozen margin
CThostFtdcSyncingTradingAccountField["SpecProductFrozenMargin"] = "float"
#Special product commission
CThostFtdcSyncingTradingAccountField["SpecProductCommission"] = "float"
#Special product frozen commission
CThostFtdcSyncingTradingAccountField["SpecProductFrozenCommission"] = "float"
#Special product position profit
CThostFtdcSyncingTradingAccountField["SpecProductPositionProfit"] = "float"
#Special product close profit
CThostFtdcSyncingTradingAccountField["SpecProductCloseProfit"] = "float"
#Special product position profit computed by the position-profit algorithm
CThostFtdcSyncingTradingAccountField["SpecProductPositionProfitByAlg"] = "float"
#Special product exchange margin
CThostFtdcSyncingTradingAccountField["SpecProductExchangeMargin"] = "float"
structDict['CThostFtdcSyncingTradingAccountField'] = CThostFtdcSyncingTradingAccountField
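#A small sketch of how the declared type strings could be used to sanity-check
#a record received from the API before further processing; validateStruct is a
#hypothetical helper and the Python type mapping here is an assumption, not
#part of the CTP API.
def validateStruct(structName, record):
    """Return the field names in record whose Python type does not match the declaration."""
    pythonTypes = {'string': str, 'char': str, 'int': int, 'float': float}
    fieldMap = structDict[structName]
    return [field for field, value in record.items()
            if field in fieldMap and not isinstance(value, pythonTypes[fieldMap[field]])]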
#Investor position being synchronized
CThostFtdcSyncingInvestorPositionField = {}
#Instrument ID
CThostFtdcSyncingInvestorPositionField["InstrumentID"] = "string"
#Broker ID
CThostFtdcSyncingInvestorPositionField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncingInvestorPositionField["InvestorID"] = "string"
#Position direction (long/short)
CThostFtdcSyncingInvestorPositionField["PosiDirection"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcSyncingInvestorPositionField["HedgeFlag"] = "char"
#Position date
CThostFtdcSyncingInvestorPositionField["PositionDate"] = "char"
#Yesterday's position
CThostFtdcSyncingInvestorPositionField["YdPosition"] = "int"
#Today's position
CThostFtdcSyncingInvestorPositionField["Position"] = "int"
#Frozen long
CThostFtdcSyncingInvestorPositionField["LongFrozen"] = "int"
#Frozen short
CThostFtdcSyncingInvestorPositionField["ShortFrozen"] = "int"
#Frozen amount for long opening
CThostFtdcSyncingInvestorPositionField["LongFrozenAmount"] = "float"
#Frozen amount for short opening
CThostFtdcSyncingInvestorPositionField["ShortFrozenAmount"] = "float"
#Open volume
CThostFtdcSyncingInvestorPositionField["OpenVolume"] = "int"
#Close volume
CThostFtdcSyncingInvestorPositionField["CloseVolume"] = "int"
#Open amount
CThostFtdcSyncingInvestorPositionField["OpenAmount"] = "float"
#Close amount
CThostFtdcSyncingInvestorPositionField["CloseAmount"] = "float"
#Position cost
CThostFtdcSyncingInvestorPositionField["PositionCost"] = "float"
#Previous occupied margin
CThostFtdcSyncingInvestorPositionField["PreMargin"] = "float"
#Margin used
CThostFtdcSyncingInvestorPositionField["UseMargin"] = "float"
#Frozen margin
CThostFtdcSyncingInvestorPositionField["FrozenMargin"] = "float"
#Frozen cash
CThostFtdcSyncingInvestorPositionField["FrozenCash"] = "float"
#Frozen commission
CThostFtdcSyncingInvestorPositionField["FrozenCommission"] = "float"
#Cash difference
CThostFtdcSyncingInvestorPositionField["CashIn"] = "float"
#Commission
CThostFtdcSyncingInvestorPositionField["Commission"] = "float"
#Close profit
CThostFtdcSyncingInvestorPositionField["CloseProfit"] = "float"
#Position profit
CThostFtdcSyncingInvestorPositionField["PositionProfit"] = "float"
#Previous settlement price
CThostFtdcSyncingInvestorPositionField["PreSettlementPrice"] = "float"
#Current settlement price
CThostFtdcSyncingInvestorPositionField["SettlementPrice"] = "float"
#Trading day
CThostFtdcSyncingInvestorPositionField["TradingDay"] = "string"
#Settlement ID
CThostFtdcSyncingInvestorPositionField["SettlementID"] = "int"
#Open cost
CThostFtdcSyncingInvestorPositionField["OpenCost"] = "float"
#Exchange margin
CThostFtdcSyncingInvestorPositionField["ExchangeMargin"] = "float"
#Position formed by combination trades
CThostFtdcSyncingInvestorPositionField["CombPosition"] = "int"
#Combined frozen long
CThostFtdcSyncingInvestorPositionField["CombLongFrozen"] = "int"
#Combined frozen short
CThostFtdcSyncingInvestorPositionField["CombShortFrozen"] = "int"
#Close profit marked by date
CThostFtdcSyncingInvestorPositionField["CloseProfitByDate"] = "float"
#Close profit matched by trade
CThostFtdcSyncingInvestorPositionField["CloseProfitByTrade"] = "float"
#Today's position
CThostFtdcSyncingInvestorPositionField["TodayPosition"] = "int"
#Margin rate by money
CThostFtdcSyncingInvestorPositionField["MarginRateByMoney"] = "float"
#Margin rate by volume
CThostFtdcSyncingInvestorPositionField["MarginRateByVolume"] = "float"
#Strike frozen
CThostFtdcSyncingInvestorPositionField["StrikeFrozen"] = "int"
#Strike frozen amount
CThostFtdcSyncingInvestorPositionField["StrikeFrozenAmount"] = "float"
#Abandon-execution frozen
CThostFtdcSyncingInvestorPositionField["AbandonFrozen"] = "int"
#Exchange ID
CThostFtdcSyncingInvestorPositionField["ExchangeID"] = "string"
#Yesterday's strike frozen
CThostFtdcSyncingInvestorPositionField["YdStrikeFrozen"] = "int"
structDict['CThostFtdcSyncingInvestorPositionField'] = CThostFtdcSyncingInvestorPositionField
#Instrument margin rate being synchronized
CThostFtdcSyncingInstrumentMarginRateField = {}
#Instrument ID
CThostFtdcSyncingInstrumentMarginRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcSyncingInstrumentMarginRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcSyncingInstrumentMarginRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncingInstrumentMarginRateField["InvestorID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcSyncingInstrumentMarginRateField["HedgeFlag"] = "char"
#Long margin ratio by money
CThostFtdcSyncingInstrumentMarginRateField["LongMarginRatioByMoney"] = "float"
#Long margin fee by volume
CThostFtdcSyncingInstrumentMarginRateField["LongMarginRatioByVolume"] = "float"
#Short margin ratio by money
CThostFtdcSyncingInstrumentMarginRateField["ShortMarginRatioByMoney"] = "float"
#Short margin fee by volume
CThostFtdcSyncingInstrumentMarginRateField["ShortMarginRatioByVolume"] = "float"
#Whether relative to the exchange rate
CThostFtdcSyncingInstrumentMarginRateField["IsRelative"] = "int"
structDict['CThostFtdcSyncingInstrumentMarginRateField'] = CThostFtdcSyncingInstrumentMarginRateField
#Instrument commission rate being synchronized
CThostFtdcSyncingInstrumentCommissionRateField = {}
#Instrument ID
CThostFtdcSyncingInstrumentCommissionRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcSyncingInstrumentCommissionRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcSyncingInstrumentCommissionRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncingInstrumentCommissionRateField["InvestorID"] = "string"
#Open commission ratio by money
CThostFtdcSyncingInstrumentCommissionRateField["OpenRatioByMoney"] = "float"
#Open commission by volume
CThostFtdcSyncingInstrumentCommissionRateField["OpenRatioByVolume"] = "float"
#Close commission ratio by money
CThostFtdcSyncingInstrumentCommissionRateField["CloseRatioByMoney"] = "float"
#Close commission by volume
CThostFtdcSyncingInstrumentCommissionRateField["CloseRatioByVolume"] = "float"
#Close-today commission ratio by money
CThostFtdcSyncingInstrumentCommissionRateField["CloseTodayRatioByMoney"] = "float"
#Close-today commission by volume
CThostFtdcSyncingInstrumentCommissionRateField["CloseTodayRatioByVolume"] = "float"
#Exchange ID
CThostFtdcSyncingInstrumentCommissionRateField["ExchangeID"] = "string"
structDict['CThostFtdcSyncingInstrumentCommissionRateField'] = CThostFtdcSyncingInstrumentCommissionRateField
#Instrument trading right being synchronized
CThostFtdcSyncingInstrumentTradingRightField = {}
#Instrument ID
CThostFtdcSyncingInstrumentTradingRightField["InstrumentID"] = "string"
#Investor range
CThostFtdcSyncingInstrumentTradingRightField["InvestorRange"] = "char"
#Broker ID
CThostFtdcSyncingInstrumentTradingRightField["BrokerID"] = "string"
#Investor ID
CThostFtdcSyncingInstrumentTradingRightField["InvestorID"] = "string"
#Trading right
CThostFtdcSyncingInstrumentTradingRightField["TradingRight"] = "char"
#Exchange ID
CThostFtdcSyncingInstrumentTradingRightField["ExchangeID"] = "string"
structDict['CThostFtdcSyncingInstrumentTradingRightField'] = CThostFtdcSyncingInstrumentTradingRightField
#Order query
CThostFtdcQryOrderField = {}
#Broker ID
CThostFtdcQryOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryOrderField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryOrderField["ExchangeID"] = "string"
#Order system ID
CThostFtdcQryOrderField["OrderSysID"] = "string"
#Start time
CThostFtdcQryOrderField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryOrderField["InsertTimeEnd"] = "string"
structDict['CThostFtdcQryOrderField'] = CThostFtdcQryOrderField
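#Example of filling an order-query request from the field map above; the broker,
#investor and instrument values are placeholders for illustration only, and
#createEmptyStruct is the hypothetical helper sketched earlier in this file.
qryOrder = createEmptyStruct('CThostFtdcQryOrderField')
qryOrder['BrokerID'] = '9999'        #placeholder broker ID
qryOrder['InvestorID'] = '000001'    #placeholder investor ID
qryOrder['InstrumentID'] = 'IF1506'  #placeholder instrument ID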
#Trade query
CThostFtdcQryTradeField = {}
#Broker ID
CThostFtdcQryTradeField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryTradeField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryTradeField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryTradeField["ExchangeID"] = "string"
#Trade ID
CThostFtdcQryTradeField["TradeID"] = "string"
#Start time
CThostFtdcQryTradeField["TradeTimeStart"] = "string"
#End time
CThostFtdcQryTradeField["TradeTimeEnd"] = "string"
structDict['CThostFtdcQryTradeField'] = CThostFtdcQryTradeField
#Investor position query
CThostFtdcQryInvestorPositionField = {}
#Broker ID
CThostFtdcQryInvestorPositionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorPositionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryInvestorPositionField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryInvestorPositionField["ExchangeID"] = "string"
structDict['CThostFtdcQryInvestorPositionField'] = CThostFtdcQryInvestorPositionField
#Trading account query
CThostFtdcQryTradingAccountField = {}
#Broker ID
CThostFtdcQryTradingAccountField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryTradingAccountField["InvestorID"] = "string"
#Currency ID
CThostFtdcQryTradingAccountField["CurrencyID"] = "string"
#Business type
CThostFtdcQryTradingAccountField["BizType"] = "char"
structDict['CThostFtdcQryTradingAccountField'] = CThostFtdcQryTradingAccountField
#Investor query
CThostFtdcQryInvestorField = {}
#Broker ID
CThostFtdcQryInvestorField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorField["InvestorID"] = "string"
structDict['CThostFtdcQryInvestorField'] = CThostFtdcQryInvestorField
#Trading code query
CThostFtdcQryTradingCodeField = {}
#Broker ID
CThostFtdcQryTradingCodeField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryTradingCodeField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryTradingCodeField["ExchangeID"] = "string"
#Client ID
CThostFtdcQryTradingCodeField["ClientID"] = "string"
#Trading code type
CThostFtdcQryTradingCodeField["ClientIDType"] = "char"
structDict['CThostFtdcQryTradingCodeField'] = CThostFtdcQryTradingCodeField
#Investor group query
CThostFtdcQryInvestorGroupField = {}
#Broker ID
CThostFtdcQryInvestorGroupField["BrokerID"] = "string"
structDict['CThostFtdcQryInvestorGroupField'] = CThostFtdcQryInvestorGroupField
#Instrument margin rate query
CThostFtdcQryInstrumentMarginRateField = {}
#Broker ID
CThostFtdcQryInstrumentMarginRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInstrumentMarginRateField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryInstrumentMarginRateField["InstrumentID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcQryInstrumentMarginRateField["HedgeFlag"] = "char"
structDict['CThostFtdcQryInstrumentMarginRateField'] = CThostFtdcQryInstrumentMarginRateField
#Commission rate query
CThostFtdcQryInstrumentCommissionRateField = {}
#Broker ID
CThostFtdcQryInstrumentCommissionRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInstrumentCommissionRateField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryInstrumentCommissionRateField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryInstrumentCommissionRateField["ExchangeID"] = "string"
structDict['CThostFtdcQryInstrumentCommissionRateField'] = CThostFtdcQryInstrumentCommissionRateField
#Instrument trading right query
CThostFtdcQryInstrumentTradingRightField = {}
#Broker ID
CThostFtdcQryInstrumentTradingRightField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInstrumentTradingRightField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryInstrumentTradingRightField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryInstrumentTradingRightField["ExchangeID"] = "string"
structDict['CThostFtdcQryInstrumentTradingRightField'] = CThostFtdcQryInstrumentTradingRightField
#Broker query
CThostFtdcQryBrokerField = {}
#Broker ID
CThostFtdcQryBrokerField["BrokerID"] = "string"
structDict['CThostFtdcQryBrokerField'] = CThostFtdcQryBrokerField
#Trader query
CThostFtdcQryTraderField = {}
#Exchange ID
CThostFtdcQryTraderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcQryTraderField["ParticipantID"] = "string"
#Trader ID at the exchange
CThostFtdcQryTraderField["TraderID"] = "string"
structDict['CThostFtdcQryTraderField'] = CThostFtdcQryTraderField
#Super-user function rights query
CThostFtdcQrySuperUserFunctionField = {}
#User ID
CThostFtdcQrySuperUserFunctionField["UserID"] = "string"
structDict['CThostFtdcQrySuperUserFunctionField'] = CThostFtdcQrySuperUserFunctionField
#User session query
CThostFtdcQryUserSessionField = {}
#Front ID
CThostFtdcQryUserSessionField["FrontID"] = "int"
#Session ID
CThostFtdcQryUserSessionField["SessionID"] = "int"
#Broker ID
CThostFtdcQryUserSessionField["BrokerID"] = "string"
#User ID
CThostFtdcQryUserSessionField["UserID"] = "string"
structDict['CThostFtdcQryUserSessionField'] = CThostFtdcQryUserSessionField
#Broker participant ID query
CThostFtdcQryPartBrokerField = {}
#Exchange ID
CThostFtdcQryPartBrokerField["ExchangeID"] = "string"
#Broker ID
CThostFtdcQryPartBrokerField["BrokerID"] = "string"
#Participant ID
CThostFtdcQryPartBrokerField["ParticipantID"] = "string"
structDict['CThostFtdcQryPartBrokerField'] = CThostFtdcQryPartBrokerField
#Front status query
CThostFtdcQryFrontStatusField = {}
#Front ID
CThostFtdcQryFrontStatusField["FrontID"] = "int"
structDict['CThostFtdcQryFrontStatusField'] = CThostFtdcQryFrontStatusField
#Exchange order query
CThostFtdcQryExchangeOrderField = {}
#Participant ID
CThostFtdcQryExchangeOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeOrderField["ClientID"] = "string"
#Instrument ID at the exchange
CThostFtdcQryExchangeOrderField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeOrderField["ExchangeID"] = "string"
#Trader ID at the exchange
CThostFtdcQryExchangeOrderField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeOrderField'] = CThostFtdcQryExchangeOrderField
#Order action query
CThostFtdcQryOrderActionField = {}
#Broker ID
CThostFtdcQryOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryOrderActionField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryOrderActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryOrderActionField'] = CThostFtdcQryOrderActionField
#Exchange order action query
CThostFtdcQryExchangeOrderActionField = {}
#Participant ID
CThostFtdcQryExchangeOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeOrderActionField["ClientID"] = "string"
#Exchange ID
CThostFtdcQryExchangeOrderActionField["ExchangeID"] = "string"
#Trader ID at the exchange
CThostFtdcQryExchangeOrderActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeOrderActionField'] = CThostFtdcQryExchangeOrderActionField
#Super-user query
CThostFtdcQrySuperUserField = {}
#User ID
CThostFtdcQrySuperUserField["UserID"] = "string"
structDict['CThostFtdcQrySuperUserField'] = CThostFtdcQrySuperUserField
#Exchange query
CThostFtdcQryExchangeField = {}
#Exchange ID
CThostFtdcQryExchangeField["ExchangeID"] = "string"
structDict['CThostFtdcQryExchangeField'] = CThostFtdcQryExchangeField
#Product query
CThostFtdcQryProductField = {}
#Product ID
CThostFtdcQryProductField["ProductID"] = "string"
#Product class
CThostFtdcQryProductField["ProductClass"] = "char"
#Exchange ID
CThostFtdcQryProductField["ExchangeID"] = "string"
structDict['CThostFtdcQryProductField'] = CThostFtdcQryProductField
#Instrument query
CThostFtdcQryInstrumentField = {}
#Instrument ID
CThostFtdcQryInstrumentField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryInstrumentField["ExchangeID"] = "string"
#Instrument ID at the exchange
CThostFtdcQryInstrumentField["ExchangeInstID"] = "string"
#Product ID
CThostFtdcQryInstrumentField["ProductID"] = "string"
structDict['CThostFtdcQryInstrumentField'] = CThostFtdcQryInstrumentField
#Market data query
CThostFtdcQryDepthMarketDataField = {}
#Instrument ID
CThostFtdcQryDepthMarketDataField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryDepthMarketDataField["ExchangeID"] = "string"
structDict['CThostFtdcQryDepthMarketDataField'] = CThostFtdcQryDepthMarketDataField
#Broker user query
CThostFtdcQryBrokerUserField = {}
#Broker ID
CThostFtdcQryBrokerUserField["BrokerID"] = "string"
#User ID
CThostFtdcQryBrokerUserField["UserID"] = "string"
structDict['CThostFtdcQryBrokerUserField'] = CThostFtdcQryBrokerUserField
#Broker user function rights query
CThostFtdcQryBrokerUserFunctionField = {}
#Broker ID
CThostFtdcQryBrokerUserFunctionField["BrokerID"] = "string"
#User ID
CThostFtdcQryBrokerUserFunctionField["UserID"] = "string"
structDict['CThostFtdcQryBrokerUserFunctionField'] = CThostFtdcQryBrokerUserFunctionField
#Trader offer query
CThostFtdcQryTraderOfferField = {}
#Exchange ID
CThostFtdcQryTraderOfferField["ExchangeID"] = "string"
#Participant ID
CThostFtdcQryTraderOfferField["ParticipantID"] = "string"
#Trader ID at the exchange
CThostFtdcQryTraderOfferField["TraderID"] = "string"
structDict['CThostFtdcQryTraderOfferField'] = CThostFtdcQryTraderOfferField
#Deposit/withdrawal flow query
CThostFtdcQrySyncDepositField = {}
#Broker ID
CThostFtdcQrySyncDepositField["BrokerID"] = "string"
#Deposit/withdrawal flow number
CThostFtdcQrySyncDepositField["DepositSeqNo"] = "string"
structDict['CThostFtdcQrySyncDepositField'] = CThostFtdcQrySyncDepositField
#Investor settlement info query
CThostFtdcQrySettlementInfoField = {}
#Broker ID
CThostFtdcQrySettlementInfoField["BrokerID"] = "string"
#Investor ID
CThostFtdcQrySettlementInfoField["InvestorID"] = "string"
#Trading day
CThostFtdcQrySettlementInfoField["TradingDay"] = "string"
structDict['CThostFtdcQrySettlementInfoField'] = CThostFtdcQrySettlementInfoField
#Exchange margin rate query
CThostFtdcQryExchangeMarginRateField = {}
#Broker ID
CThostFtdcQryExchangeMarginRateField["BrokerID"] = "string"
#Instrument ID
CThostFtdcQryExchangeMarginRateField["InstrumentID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcQryExchangeMarginRateField["HedgeFlag"] = "char"
structDict['CThostFtdcQryExchangeMarginRateField'] = CThostFtdcQryExchangeMarginRateField
#Exchange margin rate adjustment query
CThostFtdcQryExchangeMarginRateAdjustField = {}
#Broker ID
CThostFtdcQryExchangeMarginRateAdjustField["BrokerID"] = "string"
#Instrument ID
CThostFtdcQryExchangeMarginRateAdjustField["InstrumentID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcQryExchangeMarginRateAdjustField["HedgeFlag"] = "char"
structDict['CThostFtdcQryExchangeMarginRateAdjustField'] = CThostFtdcQryExchangeMarginRateAdjustField
#Exchange rate query
CThostFtdcQryExchangeRateField = {}
#Broker ID
CThostFtdcQryExchangeRateField["BrokerID"] = "string"
#Source currency
CThostFtdcQryExchangeRateField["FromCurrencyID"] = "string"
#Target currency
CThostFtdcQryExchangeRateField["ToCurrencyID"] = "string"
structDict['CThostFtdcQryExchangeRateField'] = CThostFtdcQryExchangeRateField
#Currency mortgage flow query
CThostFtdcQrySyncFundMortgageField = {}
#Broker ID
CThostFtdcQrySyncFundMortgageField["BrokerID"] = "string"
#Currency mortgage flow number
CThostFtdcQrySyncFundMortgageField["MortgageSeqNo"] = "string"
structDict['CThostFtdcQrySyncFundMortgageField'] = CThostFtdcQrySyncFundMortgageField
#Historical order query
CThostFtdcQryHisOrderField = {}
#Broker ID
CThostFtdcQryHisOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryHisOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryHisOrderField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryHisOrderField["ExchangeID"] = "string"
#Order system ID
CThostFtdcQryHisOrderField["OrderSysID"] = "string"
#Start time
CThostFtdcQryHisOrderField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryHisOrderField["InsertTimeEnd"] = "string"
#Trading day
CThostFtdcQryHisOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcQryHisOrderField["SettlementID"] = "int"
structDict['CThostFtdcQryHisOrderField'] = CThostFtdcQryHisOrderField
#Current option instrument minimum margin
CThostFtdcOptionInstrMiniMarginField = {}
#Instrument ID
CThostFtdcOptionInstrMiniMarginField["InstrumentID"] = "string"
#Investor range
CThostFtdcOptionInstrMiniMarginField["InvestorRange"] = "char"
#Broker ID
CThostFtdcOptionInstrMiniMarginField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrMiniMarginField["InvestorID"] = "string"
#Minimum margin per unit (lot) of the option instrument
CThostFtdcOptionInstrMiniMarginField["MinMargin"] = "float"
#Valuation method
CThostFtdcOptionInstrMiniMarginField["ValueMethod"] = "char"
#Whether it follows the exchange rate
CThostFtdcOptionInstrMiniMarginField["IsRelative"] = "int"
#Exchange ID
CThostFtdcOptionInstrMiniMarginField["ExchangeID"] = "string"
structDict['CThostFtdcOptionInstrMiniMarginField'] = CThostFtdcOptionInstrMiniMarginField
#Current option instrument margin adjustment coefficients
CThostFtdcOptionInstrMarginAdjustField = {}
#Instrument ID
CThostFtdcOptionInstrMarginAdjustField["InstrumentID"] = "string"
#Investor range
CThostFtdcOptionInstrMarginAdjustField["InvestorRange"] = "char"
#Broker ID
CThostFtdcOptionInstrMarginAdjustField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrMarginAdjustField["InvestorID"] = "string"
#Speculative short margin adjustment coefficient by money
CThostFtdcOptionInstrMarginAdjustField["SShortMarginRatioByMoney"] = "float"
#Speculative short margin adjustment coefficient by volume
CThostFtdcOptionInstrMarginAdjustField["SShortMarginRatioByVolume"] = "float"
#Hedging short margin adjustment coefficient by money
CThostFtdcOptionInstrMarginAdjustField["HShortMarginRatioByMoney"] = "float"
#Hedging short margin adjustment coefficient by volume
CThostFtdcOptionInstrMarginAdjustField["HShortMarginRatioByVolume"] = "float"
#Arbitrage short margin adjustment coefficient by money
CThostFtdcOptionInstrMarginAdjustField["AShortMarginRatioByMoney"] = "float"
#Arbitrage short margin adjustment coefficient by volume
CThostFtdcOptionInstrMarginAdjustField["AShortMarginRatioByVolume"] = "float"
#Whether it follows the exchange rate
CThostFtdcOptionInstrMarginAdjustField["IsRelative"] = "int"
#Exchange ID
CThostFtdcOptionInstrMarginAdjustField["ExchangeID"] = "string"
structDict['CThostFtdcOptionInstrMarginAdjustField'] = CThostFtdcOptionInstrMarginAdjustField
#Current option instrument commission rate details
CThostFtdcOptionInstrCommRateField = {}
#Instrument ID
CThostFtdcOptionInstrCommRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcOptionInstrCommRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcOptionInstrCommRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrCommRateField["InvestorID"] = "string"
#Open commission ratio by money
CThostFtdcOptionInstrCommRateField["OpenRatioByMoney"] = "float"
#Open commission by volume
CThostFtdcOptionInstrCommRateField["OpenRatioByVolume"] = "float"
#Close commission ratio by money
CThostFtdcOptionInstrCommRateField["CloseRatioByMoney"] = "float"
#Close commission by volume
CThostFtdcOptionInstrCommRateField["CloseRatioByVolume"] = "float"
#Close-today commission ratio by money
CThostFtdcOptionInstrCommRateField["CloseTodayRatioByMoney"] = "float"
#Close-today commission by volume
CThostFtdcOptionInstrCommRateField["CloseTodayRatioByVolume"] = "float"
#Strike commission ratio by money
CThostFtdcOptionInstrCommRateField["StrikeRatioByMoney"] = "float"
#Strike commission by volume
CThostFtdcOptionInstrCommRateField["StrikeRatioByVolume"] = "float"
#Exchange ID
CThostFtdcOptionInstrCommRateField["ExchangeID"] = "string"
structDict['CThostFtdcOptionInstrCommRateField'] = CThostFtdcOptionInstrCommRateField
#Option trading cost
CThostFtdcOptionInstrTradeCostField = {}
#Broker ID
CThostFtdcOptionInstrTradeCostField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrTradeCostField["InvestorID"] = "string"
#Instrument ID
CThostFtdcOptionInstrTradeCostField["InstrumentID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcOptionInstrTradeCostField["HedgeFlag"] = "char"
#Fixed portion of the option instrument margin
CThostFtdcOptionInstrTradeCostField["FixedMargin"] = "float"
#Option instrument minimum margin
CThostFtdcOptionInstrTradeCostField["MiniMargin"] = "float"
#Option instrument premium
CThostFtdcOptionInstrTradeCostField["Royalty"] = "float"
#Exchange fixed portion of the option instrument margin
CThostFtdcOptionInstrTradeCostField["ExchFixedMargin"] = "float"
#Exchange option instrument minimum margin
CThostFtdcOptionInstrTradeCostField["ExchMiniMargin"] = "float"
#Exchange ID
CThostFtdcOptionInstrTradeCostField["ExchangeID"] = "string"
structDict['CThostFtdcOptionInstrTradeCostField'] = CThostFtdcOptionInstrTradeCostField
#Option trading cost query
CThostFtdcQryOptionInstrTradeCostField = {}
#Broker ID
CThostFtdcQryOptionInstrTradeCostField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryOptionInstrTradeCostField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryOptionInstrTradeCostField["InstrumentID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcQryOptionInstrTradeCostField["HedgeFlag"] = "char"
#Option instrument quoted price
CThostFtdcQryOptionInstrTradeCostField["InputPrice"] = "float"
#Underlying price; if 0, yesterday's settlement price is used
CThostFtdcQryOptionInstrTradeCostField["UnderlyingPrice"] = "float"
#Exchange ID
CThostFtdcQryOptionInstrTradeCostField["ExchangeID"] = "string"
structDict['CThostFtdcQryOptionInstrTradeCostField'] = CThostFtdcQryOptionInstrTradeCostField
#Option commission rate query
CThostFtdcQryOptionInstrCommRateField = {}
#Broker ID
CThostFtdcQryOptionInstrCommRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryOptionInstrCommRateField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryOptionInstrCommRateField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryOptionInstrCommRateField["ExchangeID"] = "string"
structDict['CThostFtdcQryOptionInstrCommRateField'] = CThostFtdcQryOptionInstrCommRateField
#Spot index price
CThostFtdcIndexPriceField = {}
#Broker ID
CThostFtdcIndexPriceField["BrokerID"] = "string"
#Instrument ID
CThostFtdcIndexPriceField["InstrumentID"] = "string"
#Index spot closing price
CThostFtdcIndexPriceField["ClosePrice"] = "float"
#Exchange ID
CThostFtdcIndexPriceField["ExchangeID"] = "string"
structDict['CThostFtdcIndexPriceField'] = CThostFtdcIndexPriceField
#Input exec order (option exercise declaration)
CThostFtdcInputExecOrderField = {}
#Broker ID
CThostFtdcInputExecOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputExecOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputExecOrderField["InstrumentID"] = "string"
#Exec order reference
CThostFtdcInputExecOrderField["ExecOrderRef"] = "string"
#User ID
CThostFtdcInputExecOrderField["UserID"] = "string"
#Volume
CThostFtdcInputExecOrderField["Volume"] = "int"
#Request ID
CThostFtdcInputExecOrderField["RequestID"] = "int"
#Business unit
CThostFtdcInputExecOrderField["BusinessUnit"] = "string"
#Offset flag (open/close)
CThostFtdcInputExecOrderField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcInputExecOrderField["HedgeFlag"] = "char"
#Execution type
CThostFtdcInputExecOrderField["ActionType"] = "char"
#Position direction for the reserve-position request
CThostFtdcInputExecOrderField["PosiDirection"] = "char"
#Flag for whether the futures position is kept after option exercise
CThostFtdcInputExecOrderField["ReservePositionFlag"] = "char"
#Whether positions generated by option exercise are automatically closed
CThostFtdcInputExecOrderField["CloseFlag"] = "char"
#Exchange ID
CThostFtdcInputExecOrderField["ExchangeID"] = "string"
structDict['CThostFtdcInputExecOrderField'] = CThostFtdcInputExecOrderField
#Input exec order action
CThostFtdcInputExecOrderActionField = {}
#Broker ID
CThostFtdcInputExecOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputExecOrderActionField["InvestorID"] = "string"
#Exec order action reference
CThostFtdcInputExecOrderActionField["ExecOrderActionRef"] = "int"
#Exec order reference
CThostFtdcInputExecOrderActionField["ExecOrderRef"] = "string"
#Request ID
CThostFtdcInputExecOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcInputExecOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcInputExecOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcInputExecOrderActionField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcInputExecOrderActionField["ExecOrderSysID"] = "string"
#Action flag
CThostFtdcInputExecOrderActionField["ActionFlag"] = "char"
#User ID
CThostFtdcInputExecOrderActionField["UserID"] = "string"
#Instrument ID
CThostFtdcInputExecOrderActionField["InstrumentID"] = "string"
structDict['CThostFtdcInputExecOrderActionField'] = CThostFtdcInputExecOrderActionField
#Exec order
CThostFtdcExecOrderField = {}
#Broker ID
CThostFtdcExecOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcExecOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcExecOrderField["InstrumentID"] = "string"
#Exec order reference
CThostFtdcExecOrderField["ExecOrderRef"] = "string"
#User ID
CThostFtdcExecOrderField["UserID"] = "string"
#Volume
CThostFtdcExecOrderField["Volume"] = "int"
#Request ID
CThostFtdcExecOrderField["RequestID"] = "int"
#Business unit
CThostFtdcExecOrderField["BusinessUnit"] = "string"
#Offset flag (open/close)
CThostFtdcExecOrderField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcExecOrderField["HedgeFlag"] = "char"
#Execution type
CThostFtdcExecOrderField["ActionType"] = "char"
#Position direction for the reserve-position request
CThostFtdcExecOrderField["PosiDirection"] = "char"
#Flag for whether the futures position is kept after option exercise
CThostFtdcExecOrderField["ReservePositionFlag"] = "char"
#Whether positions generated by option exercise are automatically closed
CThostFtdcExecOrderField["CloseFlag"] = "char"
#Local exec order ID
CThostFtdcExecOrderField["ExecOrderLocalID"] = "string"
#Exchange ID
CThostFtdcExecOrderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExecOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcExecOrderField["ClientID"] = "string"
#Instrument ID at the exchange
CThostFtdcExecOrderField["ExchangeInstID"] = "string"
#Trader ID at the exchange
CThostFtdcExecOrderField["TraderID"] = "string"
#Installation ID
CThostFtdcExecOrderField["InstallID"] = "int"
#Exec order submit status
CThostFtdcExecOrderField["OrderSubmitStatus"] = "char"
#Notification sequence number
CThostFtdcExecOrderField["NotifySequence"] = "int"
#Trading day
CThostFtdcExecOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExecOrderField["SettlementID"] = "int"
#Exec order system ID
CThostFtdcExecOrderField["ExecOrderSysID"] = "string"
#Order insert date
CThostFtdcExecOrderField["InsertDate"] = "string"
#Insert time
CThostFtdcExecOrderField["InsertTime"] = "string"
#Cancellation time
CThostFtdcExecOrderField["CancelTime"] = "string"
#Execution result
CThostFtdcExecOrderField["ExecResult"] = "char"
#Clearing participant ID
CThostFtdcExecOrderField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcExecOrderField["SequenceNo"] = "int"
#Front ID
CThostFtdcExecOrderField["FrontID"] = "int"
#Session ID
CThostFtdcExecOrderField["SessionID"] = "int"
#User product info
CThostFtdcExecOrderField["UserProductInfo"] = "string"
#Status message
CThostFtdcExecOrderField["StatusMsg"] = "string"
#Active user ID
CThostFtdcExecOrderField["ActiveUserID"] = "string"
#Broker exec order sequence number
CThostFtdcExecOrderField["BrokerExecOrderSeq"] = "int"
#Branch ID
CThostFtdcExecOrderField["BranchID"] = "string"
structDict['CThostFtdcExecOrderField'] = CThostFtdcExecOrderField
#Exec order action
CThostFtdcExecOrderActionField = {}
#Broker ID
CThostFtdcExecOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcExecOrderActionField["InvestorID"] = "string"
#Exec order action reference
CThostFtdcExecOrderActionField["ExecOrderActionRef"] = "int"
#Exec order reference
CThostFtdcExecOrderActionField["ExecOrderRef"] = "string"
#Request ID
CThostFtdcExecOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcExecOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcExecOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcExecOrderActionField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcExecOrderActionField["ExecOrderSysID"] = "string"
#Action flag
CThostFtdcExecOrderActionField["ActionFlag"] = "char"
#Action date
CThostFtdcExecOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcExecOrderActionField["ActionTime"] = "string"
#Trader ID at the exchange
CThostFtdcExecOrderActionField["TraderID"] = "string"
#Installation ID
CThostFtdcExecOrderActionField["InstallID"] = "int"
#Local exec order ID
CThostFtdcExecOrderActionField["ExecOrderLocalID"] = "string"
#Local action ID
CThostFtdcExecOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExecOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExecOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcExecOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExecOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExecOrderActionField["UserID"] = "string"
#Execution type
CThostFtdcExecOrderActionField["ActionType"] = "char"
#Status message
CThostFtdcExecOrderActionField["StatusMsg"] = "string"
#Instrument ID
CThostFtdcExecOrderActionField["InstrumentID"] = "string"
#Branch ID
CThostFtdcExecOrderActionField["BranchID"] = "string"
structDict['CThostFtdcExecOrderActionField'] = CThostFtdcExecOrderActionField
#Exec order query
CThostFtdcQryExecOrderField = {}
#Broker ID
CThostFtdcQryExecOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryExecOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryExecOrderField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryExecOrderField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcQryExecOrderField["ExecOrderSysID"] = "string"
#Start time
CThostFtdcQryExecOrderField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryExecOrderField["InsertTimeEnd"] = "string"
structDict['CThostFtdcQryExecOrderField'] = CThostFtdcQryExecOrderField
#Exchange exec order info
CThostFtdcExchangeExecOrderField = {}
#Volume
CThostFtdcExchangeExecOrderField["Volume"] = "int"
#Request ID
CThostFtdcExchangeExecOrderField["RequestID"] = "int"
#Business unit
CThostFtdcExchangeExecOrderField["BusinessUnit"] = "string"
#Offset flag (open/close)
CThostFtdcExchangeExecOrderField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcExchangeExecOrderField["HedgeFlag"] = "char"
#Execution type
CThostFtdcExchangeExecOrderField["ActionType"] = "char"
#Position direction for the reserve-position request
CThostFtdcExchangeExecOrderField["PosiDirection"] = "char"
#Flag for whether the futures position is kept after option exercise
CThostFtdcExchangeExecOrderField["ReservePositionFlag"] = "char"
#Whether positions generated by option exercise are automatically closed
CThostFtdcExchangeExecOrderField["CloseFlag"] = "char"
#Local exec order ID
CThostFtdcExchangeExecOrderField["ExecOrderLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeExecOrderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeExecOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeExecOrderField["ClientID"] = "string"
#Instrument ID at the exchange
CThostFtdcExchangeExecOrderField["ExchangeInstID"] = "string"
#Trader ID at the exchange
CThostFtdcExchangeExecOrderField["TraderID"] = "string"
#Installation ID
CThostFtdcExchangeExecOrderField["InstallID"] = "int"
#Exec order submit status
CThostFtdcExchangeExecOrderField["OrderSubmitStatus"] = "char"
#Notification sequence number
CThostFtdcExchangeExecOrderField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeExecOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeExecOrderField["SettlementID"] = "int"
#Exec order system ID
CThostFtdcExchangeExecOrderField["ExecOrderSysID"] = "string"
#Order insert date
CThostFtdcExchangeExecOrderField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeExecOrderField["InsertTime"] = "string"
#Cancellation time
CThostFtdcExchangeExecOrderField["CancelTime"] = "string"
#Execution result
CThostFtdcExchangeExecOrderField["ExecResult"] = "char"
#Clearing participant ID
CThostFtdcExchangeExecOrderField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcExchangeExecOrderField["SequenceNo"] = "int"
#Branch ID
CThostFtdcExchangeExecOrderField["BranchID"] = "string"
structDict['CThostFtdcExchangeExecOrderField'] = CThostFtdcExchangeExecOrderField
#Exchange exec order query
CThostFtdcQryExchangeExecOrderField = {}
#Participant ID
CThostFtdcQryExchangeExecOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeExecOrderField["ClientID"] = "string"
#Instrument ID at the exchange
CThostFtdcQryExchangeExecOrderField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeExecOrderField["ExchangeID"] = "string"
#Trader ID at the exchange
CThostFtdcQryExchangeExecOrderField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeExecOrderField'] = CThostFtdcQryExchangeExecOrderField
#Exec order action query
CThostFtdcQryExecOrderActionField = {}
#Broker ID
CThostFtdcQryExecOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryExecOrderActionField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryExecOrderActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryExecOrderActionField'] = CThostFtdcQryExecOrderActionField
#Exchange exec order action
CThostFtdcExchangeExecOrderActionField = {}
#Exchange ID
CThostFtdcExchangeExecOrderActionField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcExchangeExecOrderActionField["ExecOrderSysID"] = "string"
#Action flag
CThostFtdcExchangeExecOrderActionField["ActionFlag"] = "char"
#Action date
CThostFtdcExchangeExecOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcExchangeExecOrderActionField["ActionTime"] = "string"
#Trader ID at the exchange
CThostFtdcExchangeExecOrderActionField["TraderID"] = "string"
#Installation ID
CThostFtdcExchangeExecOrderActionField["InstallID"] = "int"
#Local exec order ID
CThostFtdcExchangeExecOrderActionField["ExecOrderLocalID"] = "string"
#Local action ID
CThostFtdcExchangeExecOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExchangeExecOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeExecOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcExchangeExecOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExchangeExecOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExchangeExecOrderActionField["UserID"] = "string"
#Execution type
CThostFtdcExchangeExecOrderActionField["ActionType"] = "char"
#Branch ID
CThostFtdcExchangeExecOrderActionField["BranchID"] = "string"
structDict['CThostFtdcExchangeExecOrderActionField'] = CThostFtdcExchangeExecOrderActionField
#Exchange exec order action query
CThostFtdcQryExchangeExecOrderActionField = {}
#Participant ID
CThostFtdcQryExchangeExecOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeExecOrderActionField["ClientID"] = "string"
#Exchange ID
CThostFtdcQryExchangeExecOrderActionField["ExchangeID"] = "string"
#Trader ID at the exchange
CThostFtdcQryExchangeExecOrderActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeExecOrderActionField'] = CThostFtdcQryExchangeExecOrderActionField
#Erroneous exec order
CThostFtdcErrExecOrderField = {}
#Broker ID
CThostFtdcErrExecOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrExecOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcErrExecOrderField["InstrumentID"] = "string"
#Exec order reference
CThostFtdcErrExecOrderField["ExecOrderRef"] = "string"
#User ID
CThostFtdcErrExecOrderField["UserID"] = "string"
#Volume
CThostFtdcErrExecOrderField["Volume"] = "int"
#Request ID
CThostFtdcErrExecOrderField["RequestID"] = "int"
#Business unit
CThostFtdcErrExecOrderField["BusinessUnit"] = "string"
#Offset flag (open/close)
CThostFtdcErrExecOrderField["OffsetFlag"] = "char"
#Hedge flag (speculation/hedging)
CThostFtdcErrExecOrderField["HedgeFlag"] = "char"
#Execution type
CThostFtdcErrExecOrderField["ActionType"] = "char"
#Position direction for the reserve-position request
CThostFtdcErrExecOrderField["PosiDirection"] = "char"
#Flag for whether the futures position is kept after option exercise
CThostFtdcErrExecOrderField["ReservePositionFlag"] = "char"
#Whether positions generated by option exercise are automatically closed
CThostFtdcErrExecOrderField["CloseFlag"] = "char"
#Exchange ID
CThostFtdcErrExecOrderField["ExchangeID"] = "string"
#Error ID
CThostFtdcErrExecOrderField["ErrorID"] = "int"
#Error message
CThostFtdcErrExecOrderField["ErrorMsg"] = "string"
structDict['CThostFtdcErrExecOrderField'] = CThostFtdcErrExecOrderField
#Erroneous exec order query
CThostFtdcQryErrExecOrderField = {}
#Broker ID
CThostFtdcQryErrExecOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryErrExecOrderField["InvestorID"] = "string"
structDict['CThostFtdcQryErrExecOrderField'] = CThostFtdcQryErrExecOrderField
#Erroneous exec order action
CThostFtdcErrExecOrderActionField = {}
#Broker ID
CThostFtdcErrExecOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrExecOrderActionField["InvestorID"] = "string"
#Exec order action reference
CThostFtdcErrExecOrderActionField["ExecOrderActionRef"] = "int"
#Exec order reference
CThostFtdcErrExecOrderActionField["ExecOrderRef"] = "string"
#Request ID
CThostFtdcErrExecOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcErrExecOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcErrExecOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcErrExecOrderActionField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcErrExecOrderActionField["ExecOrderSysID"] = "string"
#Action flag
CThostFtdcErrExecOrderActionField["ActionFlag"] = "char"
#User ID
CThostFtdcErrExecOrderActionField["UserID"] = "string"
#Instrument ID
CThostFtdcErrExecOrderActionField["InstrumentID"] = "string"
#Error ID
CThostFtdcErrExecOrderActionField["ErrorID"] = "int"
#Error message
CThostFtdcErrExecOrderActionField["ErrorMsg"] = "string"
structDict['CThostFtdcErrExecOrderActionField'] = CThostFtdcErrExecOrderActionField
#Erroneous exec order action query
CThostFtdcQryErrExecOrderActionField = {}
#Broker ID
CThostFtdcQryErrExecOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryErrExecOrderActionField["InvestorID"] = "string"
structDict['CThostFtdcQryErrExecOrderActionField'] = CThostFtdcQryErrExecOrderActionField
#Investor option instrument trading right
CThostFtdcOptionInstrTradingRightField = {}
#Instrument ID
CThostFtdcOptionInstrTradingRightField["InstrumentID"] = "string"
#Investor range
CThostFtdcOptionInstrTradingRightField["InvestorRange"] = "char"
#Broker ID
CThostFtdcOptionInstrTradingRightField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrTradingRightField["InvestorID"] = "string"
#Direction (buy/sell)
CThostFtdcOptionInstrTradingRightField["Direction"] = "char"
#Trading right
CThostFtdcOptionInstrTradingRightField["TradingRight"] = "char"
#Exchange ID
CThostFtdcOptionInstrTradingRightField["ExchangeID"] = "string"
#Hedge flag (speculation/hedging)
CThostFtdcOptionInstrTradingRightField["HedgeFlag"] = "char"
structDict['CThostFtdcOptionInstrTradingRightField'] = CThostFtdcOptionInstrTradingRightField
#Option instrument trading right query
CThostFtdcQryOptionInstrTradingRightField = {}
#Broker ID
CThostFtdcQryOptionInstrTradingRightField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryOptionInstrTradingRightField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryOptionInstrTradingRightField["InstrumentID"] = "string"
#Direction (buy/sell)
CThostFtdcQryOptionInstrTradingRightField["Direction"] = "char"
#Exchange ID
CThostFtdcQryOptionInstrTradingRightField["ExchangeID"] = "string"
structDict['CThostFtdcQryOptionInstrTradingRightField'] = CThostFtdcQryOptionInstrTradingRightField
#Input quote request
CThostFtdcInputForQuoteField = {}
#Broker ID
CThostFtdcInputForQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputForQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputForQuoteField["InstrumentID"] = "string"
#Quote request reference
CThostFtdcInputForQuoteField["ForQuoteRef"] = "string"
#User ID
CThostFtdcInputForQuoteField["UserID"] = "string"
#Exchange ID
CThostFtdcInputForQuoteField["ExchangeID"] = "string"
structDict['CThostFtdcInputForQuoteField'] = CThostFtdcInputForQuoteField
#Quote request
CThostFtdcForQuoteField = {}
#Broker ID
CThostFtdcForQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcForQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcForQuoteField["InstrumentID"] = "string"
#Quote request reference
CThostFtdcForQuoteField["ForQuoteRef"] = "string"
#User ID
CThostFtdcForQuoteField["UserID"] = "string"
#Local quote request ID
CThostFtdcForQuoteField["ForQuoteLocalID"] = "string"
#Exchange ID
CThostFtdcForQuoteField["ExchangeID"] = "string"
#Participant ID
CThostFtdcForQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcForQuoteField["ClientID"] = "string"
#Instrument ID at the exchange
CThostFtdcForQuoteField["ExchangeInstID"] = "string"
#Trader ID at the exchange
CThostFtdcForQuoteField["TraderID"] = "string"
#Installation ID
CThostFtdcForQuoteField["InstallID"] = "int"
#Order insert date
CThostFtdcForQuoteField["InsertDate"] = "string"
#Insert time
CThostFtdcForQuoteField["InsertTime"] = "string"
#Quote request status
CThostFtdcForQuoteField["ForQuoteStatus"] = "char"
#Front ID
CThostFtdcForQuoteField["FrontID"] = "int"
#Session ID
CThostFtdcForQuoteField["SessionID"] = "int"
#Status message
CThostFtdcForQuoteField["StatusMsg"] = "string"
#Active user ID
CThostFtdcForQuoteField["ActiveUserID"] = "string"
#Broker quote request sequence number
CThostFtdcForQuoteField["BrokerForQutoSeq"] = "int"
structDict['CThostFtdcForQuoteField'] = CThostFtdcForQuoteField
#Quote request query
CThostFtdcQryForQuoteField = {}
#Broker ID
CThostFtdcQryForQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryForQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryForQuoteField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryForQuoteField["ExchangeID"] = "string"
#Start time
CThostFtdcQryForQuoteField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryForQuoteField["InsertTimeEnd"] = "string"
structDict['CThostFtdcQryForQuoteField'] = CThostFtdcQryForQuoteField
#Exchange RFQ info
CThostFtdcExchangeForQuoteField = {}
#Local RFQ ID
CThostFtdcExchangeForQuoteField["ForQuoteLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeForQuoteField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeForQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeForQuoteField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeForQuoteField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcExchangeForQuoteField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeForQuoteField["InstallID"] = "int"
#Insert date
CThostFtdcExchangeForQuoteField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeForQuoteField["InsertTime"] = "string"
#RFQ status
CThostFtdcExchangeForQuoteField["ForQuoteStatus"] = "char"
structDict['CThostFtdcExchangeForQuoteField'] = CThostFtdcExchangeForQuoteField
#Exchange RFQ query
CThostFtdcQryExchangeForQuoteField = {}
#Participant ID
CThostFtdcQryExchangeForQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeForQuoteField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcQryExchangeForQuoteField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeForQuoteField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeForQuoteField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeForQuoteField'] = CThostFtdcQryExchangeForQuoteField
#Input quote
CThostFtdcInputQuoteField = {}
#Broker ID
CThostFtdcInputQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputQuoteField["InstrumentID"] = "string"
#Quote reference
CThostFtdcInputQuoteField["QuoteRef"] = "string"
#User ID
CThostFtdcInputQuoteField["UserID"] = "string"
#Ask price
CThostFtdcInputQuoteField["AskPrice"] = "float"
#Bid price
CThostFtdcInputQuoteField["BidPrice"] = "float"
#Ask volume
CThostFtdcInputQuoteField["AskVolume"] = "int"
#Bid volume
CThostFtdcInputQuoteField["BidVolume"] = "int"
#Request ID
CThostFtdcInputQuoteField["RequestID"] = "int"
#Business unit
CThostFtdcInputQuoteField["BusinessUnit"] = "string"
#Ask offset flag
CThostFtdcInputQuoteField["AskOffsetFlag"] = "char"
#Bid offset flag
CThostFtdcInputQuoteField["BidOffsetFlag"] = "char"
#Ask hedge flag
CThostFtdcInputQuoteField["AskHedgeFlag"] = "char"
#Bid hedge flag
CThostFtdcInputQuoteField["BidHedgeFlag"] = "char"
#Derived ask order reference
CThostFtdcInputQuoteField["AskOrderRef"] = "string"
#Derived bid order reference
CThostFtdcInputQuoteField["BidOrderRef"] = "string"
#RFQ system ID being answered
CThostFtdcInputQuoteField["ForQuoteSysID"] = "string"
#Exchange ID
CThostFtdcInputQuoteField["ExchangeID"] = "string"
structDict['CThostFtdcInputQuoteField'] = CThostFtdcInputQuoteField
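#The type tags used throughout this module ("string", "int", "float",
#"char") describe the CTP struct layout. Below is a minimal illustrative
#sketch (an assumption, not part of the generated API) of how a request
#dict could be checked against one of these specs before being sent:
_TYPE_MAP = {"string": str, "char": str, "int": int, "float": float}
def _validateAgainstSpec(data, spec):
    #Reject unknown fields and values whose Python type does not match
    #the declared type tag; returns None on success.
    for name, value in data.items():
        if name not in spec:
            raise KeyError("unknown field: %s" % name)
        if not isinstance(value, _TYPE_MAP[spec[name]]):
            raise TypeError("%s expects %s" % (name, spec[name]))
#Example (values are assumptions):
#_validateAgainstSpec({"BrokerID": "9999", "AskVolume": 1},
#                     structDict['CThostFtdcInputQuoteField'])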
#Input quote action
CThostFtdcInputQuoteActionField = {}
#Broker ID
CThostFtdcInputQuoteActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputQuoteActionField["InvestorID"] = "string"
#Quote action reference
CThostFtdcInputQuoteActionField["QuoteActionRef"] = "int"
#Quote reference
CThostFtdcInputQuoteActionField["QuoteRef"] = "string"
#Request ID
CThostFtdcInputQuoteActionField["RequestID"] = "int"
#Front ID
CThostFtdcInputQuoteActionField["FrontID"] = "int"
#Session ID
CThostFtdcInputQuoteActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcInputQuoteActionField["ExchangeID"] = "string"
#Quote system ID
CThostFtdcInputQuoteActionField["QuoteSysID"] = "string"
#Action flag
CThostFtdcInputQuoteActionField["ActionFlag"] = "char"
#User ID
CThostFtdcInputQuoteActionField["UserID"] = "string"
#Instrument ID
CThostFtdcInputQuoteActionField["InstrumentID"] = "string"
structDict['CThostFtdcInputQuoteActionField'] = CThostFtdcInputQuoteActionField
#Quote
CThostFtdcQuoteField = {}
#Broker ID
CThostFtdcQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQuoteField["InstrumentID"] = "string"
#Quote reference
CThostFtdcQuoteField["QuoteRef"] = "string"
#User ID
CThostFtdcQuoteField["UserID"] = "string"
#Ask price
CThostFtdcQuoteField["AskPrice"] = "float"
#Bid price
CThostFtdcQuoteField["BidPrice"] = "float"
#Ask volume
CThostFtdcQuoteField["AskVolume"] = "int"
#Bid volume
CThostFtdcQuoteField["BidVolume"] = "int"
#Request ID
CThostFtdcQuoteField["RequestID"] = "int"
#Business unit
CThostFtdcQuoteField["BusinessUnit"] = "string"
#Ask offset flag
CThostFtdcQuoteField["AskOffsetFlag"] = "char"
#Bid offset flag
CThostFtdcQuoteField["BidOffsetFlag"] = "char"
#Ask hedge flag
CThostFtdcQuoteField["AskHedgeFlag"] = "char"
#Bid hedge flag
CThostFtdcQuoteField["BidHedgeFlag"] = "char"
#Local quote ID
CThostFtdcQuoteField["QuoteLocalID"] = "string"
#Exchange ID
CThostFtdcQuoteField["ExchangeID"] = "string"
#Participant ID
CThostFtdcQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcQuoteField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcQuoteField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcQuoteField["TraderID"] = "string"
#Install ID
CThostFtdcQuoteField["InstallID"] = "int"
#Quote notify sequence
CThostFtdcQuoteField["NotifySequence"] = "int"
#Quote submit status
CThostFtdcQuoteField["OrderSubmitStatus"] = "char"
#Trading day
CThostFtdcQuoteField["TradingDay"] = "string"
#Settlement ID
CThostFtdcQuoteField["SettlementID"] = "int"
#Quote system ID
CThostFtdcQuoteField["QuoteSysID"] = "string"
#Insert date
CThostFtdcQuoteField["InsertDate"] = "string"
#Insert time
CThostFtdcQuoteField["InsertTime"] = "string"
#Cancel time
CThostFtdcQuoteField["CancelTime"] = "string"
#Quote status
CThostFtdcQuoteField["QuoteStatus"] = "char"
#Clearing participant ID
CThostFtdcQuoteField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcQuoteField["SequenceNo"] = "int"
#Ask order system ID
CThostFtdcQuoteField["AskOrderSysID"] = "string"
#Bid order system ID
CThostFtdcQuoteField["BidOrderSysID"] = "string"
#Front ID
CThostFtdcQuoteField["FrontID"] = "int"
#Session ID
CThostFtdcQuoteField["SessionID"] = "int"
#User product info
CThostFtdcQuoteField["UserProductInfo"] = "string"
#Status message
CThostFtdcQuoteField["StatusMsg"] = "string"
#Active user ID
CThostFtdcQuoteField["ActiveUserID"] = "string"
#Broker quote sequence number
CThostFtdcQuoteField["BrokerQuoteSeq"] = "int"
#Derived ask order reference
CThostFtdcQuoteField["AskOrderRef"] = "string"
#Derived bid order reference
CThostFtdcQuoteField["BidOrderRef"] = "string"
#RFQ system ID being answered
CThostFtdcQuoteField["ForQuoteSysID"] = "string"
#Branch ID
CThostFtdcQuoteField["BranchID"] = "string"
structDict['CThostFtdcQuoteField'] = CThostFtdcQuoteField
#Quote action
CThostFtdcQuoteActionField = {}
#Broker ID
CThostFtdcQuoteActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQuoteActionField["InvestorID"] = "string"
#Quote action reference
CThostFtdcQuoteActionField["QuoteActionRef"] = "int"
#Quote reference
CThostFtdcQuoteActionField["QuoteRef"] = "string"
#Request ID
CThostFtdcQuoteActionField["RequestID"] = "int"
#Front ID
CThostFtdcQuoteActionField["FrontID"] = "int"
#Session ID
CThostFtdcQuoteActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcQuoteActionField["ExchangeID"] = "string"
#Quote system ID
CThostFtdcQuoteActionField["QuoteSysID"] = "string"
#Action flag
CThostFtdcQuoteActionField["ActionFlag"] = "char"
#Action date
CThostFtdcQuoteActionField["ActionDate"] = "string"
#Action time
CThostFtdcQuoteActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcQuoteActionField["TraderID"] = "string"
#Install ID
CThostFtdcQuoteActionField["InstallID"] = "int"
#Local quote ID
CThostFtdcQuoteActionField["QuoteLocalID"] = "string"
#Local action ID
CThostFtdcQuoteActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcQuoteActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQuoteActionField["ClientID"] = "string"
#Business unit
CThostFtdcQuoteActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcQuoteActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcQuoteActionField["UserID"] = "string"
#Status message
CThostFtdcQuoteActionField["StatusMsg"] = "string"
#Instrument ID
CThostFtdcQuoteActionField["InstrumentID"] = "string"
#Branch ID
CThostFtdcQuoteActionField["BranchID"] = "string"
structDict['CThostFtdcQuoteActionField'] = CThostFtdcQuoteActionField
#Quote query
CThostFtdcQryQuoteField = {}
#Broker ID
CThostFtdcQryQuoteField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryQuoteField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryQuoteField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryQuoteField["ExchangeID"] = "string"
#Quote system ID
CThostFtdcQryQuoteField["QuoteSysID"] = "string"
#Start time
CThostFtdcQryQuoteField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryQuoteField["InsertTimeEnd"] = "string"
structDict['CThostFtdcQryQuoteField'] = CThostFtdcQryQuoteField
#Exchange quote info
CThostFtdcExchangeQuoteField = {}
#Ask price
CThostFtdcExchangeQuoteField["AskPrice"] = "float"
#Bid price
CThostFtdcExchangeQuoteField["BidPrice"] = "float"
#Ask volume
CThostFtdcExchangeQuoteField["AskVolume"] = "int"
#Bid volume
CThostFtdcExchangeQuoteField["BidVolume"] = "int"
#Request ID
CThostFtdcExchangeQuoteField["RequestID"] = "int"
#Business unit
CThostFtdcExchangeQuoteField["BusinessUnit"] = "string"
#Ask offset flag
CThostFtdcExchangeQuoteField["AskOffsetFlag"] = "char"
#Bid offset flag
CThostFtdcExchangeQuoteField["BidOffsetFlag"] = "char"
#Ask hedge flag
CThostFtdcExchangeQuoteField["AskHedgeFlag"] = "char"
#Bid hedge flag
CThostFtdcExchangeQuoteField["BidHedgeFlag"] = "char"
#Local quote ID
CThostFtdcExchangeQuoteField["QuoteLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeQuoteField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeQuoteField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeQuoteField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcExchangeQuoteField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeQuoteField["InstallID"] = "int"
#Quote notify sequence
CThostFtdcExchangeQuoteField["NotifySequence"] = "int"
#Quote submit status
CThostFtdcExchangeQuoteField["OrderSubmitStatus"] = "char"
#Trading day
CThostFtdcExchangeQuoteField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeQuoteField["SettlementID"] = "int"
#Quote system ID
CThostFtdcExchangeQuoteField["QuoteSysID"] = "string"
#Insert date
CThostFtdcExchangeQuoteField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeQuoteField["InsertTime"] = "string"
#Cancel time
CThostFtdcExchangeQuoteField["CancelTime"] = "string"
#Quote status
CThostFtdcExchangeQuoteField["QuoteStatus"] = "char"
#Clearing participant ID
CThostFtdcExchangeQuoteField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcExchangeQuoteField["SequenceNo"] = "int"
#Ask order system ID
CThostFtdcExchangeQuoteField["AskOrderSysID"] = "string"
#Bid order system ID
CThostFtdcExchangeQuoteField["BidOrderSysID"] = "string"
#RFQ system ID being answered
CThostFtdcExchangeQuoteField["ForQuoteSysID"] = "string"
#Branch ID
CThostFtdcExchangeQuoteField["BranchID"] = "string"
structDict['CThostFtdcExchangeQuoteField'] = CThostFtdcExchangeQuoteField
#Exchange quote query
CThostFtdcQryExchangeQuoteField = {}
#Participant ID
CThostFtdcQryExchangeQuoteField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeQuoteField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcQryExchangeQuoteField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeQuoteField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeQuoteField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeQuoteField'] = CThostFtdcQryExchangeQuoteField
#Quote action query
CThostFtdcQryQuoteActionField = {}
#Broker ID
CThostFtdcQryQuoteActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryQuoteActionField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryQuoteActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryQuoteActionField'] = CThostFtdcQryQuoteActionField
#Exchange quote action
CThostFtdcExchangeQuoteActionField = {}
#Exchange ID
CThostFtdcExchangeQuoteActionField["ExchangeID"] = "string"
#Quote system ID
CThostFtdcExchangeQuoteActionField["QuoteSysID"] = "string"
#Action flag
CThostFtdcExchangeQuoteActionField["ActionFlag"] = "char"
#Action date
CThostFtdcExchangeQuoteActionField["ActionDate"] = "string"
#Action time
CThostFtdcExchangeQuoteActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcExchangeQuoteActionField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeQuoteActionField["InstallID"] = "int"
#Local quote ID
CThostFtdcExchangeQuoteActionField["QuoteLocalID"] = "string"
#Local action ID
CThostFtdcExchangeQuoteActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExchangeQuoteActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeQuoteActionField["ClientID"] = "string"
#Business unit
CThostFtdcExchangeQuoteActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExchangeQuoteActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExchangeQuoteActionField["UserID"] = "string"
structDict['CThostFtdcExchangeQuoteActionField'] = CThostFtdcExchangeQuoteActionField
#Exchange quote action query
CThostFtdcQryExchangeQuoteActionField = {}
#Participant ID
CThostFtdcQryExchangeQuoteActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeQuoteActionField["ClientID"] = "string"
#Exchange ID
CThostFtdcQryExchangeQuoteActionField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeQuoteActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeQuoteActionField'] = CThostFtdcQryExchangeQuoteActionField
#Option instrument delta value
CThostFtdcOptionInstrDeltaField = {}
#Instrument ID
CThostFtdcOptionInstrDeltaField["InstrumentID"] = "string"
#Investor range
CThostFtdcOptionInstrDeltaField["InvestorRange"] = "char"
#Broker ID
CThostFtdcOptionInstrDeltaField["BrokerID"] = "string"
#Investor ID
CThostFtdcOptionInstrDeltaField["InvestorID"] = "string"
#Delta value
CThostFtdcOptionInstrDeltaField["Delta"] = "float"
#Exchange ID
CThostFtdcOptionInstrDeltaField["ExchangeID"] = "string"
structDict['CThostFtdcOptionInstrDeltaField'] = CThostFtdcOptionInstrDeltaField
#RFQ notice forwarded to market makers
CThostFtdcForQuoteRspField = {}
#Trading day
CThostFtdcForQuoteRspField["TradingDay"] = "string"
#Instrument ID
CThostFtdcForQuoteRspField["InstrumentID"] = "string"
#RFQ system ID
CThostFtdcForQuoteRspField["ForQuoteSysID"] = "string"
#RFQ time
CThostFtdcForQuoteRspField["ForQuoteTime"] = "string"
#Business date
CThostFtdcForQuoteRspField["ActionDay"] = "string"
#Exchange ID
CThostFtdcForQuoteRspField["ExchangeID"] = "string"
structDict['CThostFtdcForQuoteRspField'] = CThostFtdcForQuoteRspField
#Details of the current option instrument strike offset
CThostFtdcStrikeOffsetField = {}
#Instrument ID
CThostFtdcStrikeOffsetField["InstrumentID"] = "string"
#Investor range
CThostFtdcStrikeOffsetField["InvestorRange"] = "char"
#Broker ID
CThostFtdcStrikeOffsetField["BrokerID"] = "string"
#Investor ID
CThostFtdcStrikeOffsetField["InvestorID"] = "string"
#Strike offset value
CThostFtdcStrikeOffsetField["Offset"] = "float"
#Exchange ID
CThostFtdcStrikeOffsetField["ExchangeID"] = "string"
structDict['CThostFtdcStrikeOffsetField'] = CThostFtdcStrikeOffsetField
#Option strike offset query
CThostFtdcQryStrikeOffsetField = {}
#Broker ID
CThostFtdcQryStrikeOffsetField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryStrikeOffsetField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryStrikeOffsetField["InstrumentID"] = "string"
structDict['CThostFtdcQryStrikeOffsetField'] = CThostFtdcQryStrikeOffsetField
#Input lock
CThostFtdcInputLockField = {}
#Broker ID
CThostFtdcInputLockField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputLockField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputLockField["InstrumentID"] = "string"
#Lock reference
CThostFtdcInputLockField["LockRef"] = "string"
#User ID
CThostFtdcInputLockField["UserID"] = "string"
#Volume
CThostFtdcInputLockField["Volume"] = "int"
#Request ID
CThostFtdcInputLockField["RequestID"] = "int"
#Business unit
CThostFtdcInputLockField["BusinessUnit"] = "string"
#Lock type
CThostFtdcInputLockField["LockType"] = "char"
#Exchange ID
CThostFtdcInputLockField["ExchangeID"] = "string"
structDict['CThostFtdcInputLockField'] = CThostFtdcInputLockField
#Lock
CThostFtdcLockField = {}
#Broker ID
CThostFtdcLockField["BrokerID"] = "string"
#Investor ID
CThostFtdcLockField["InvestorID"] = "string"
#Instrument ID
CThostFtdcLockField["InstrumentID"] = "string"
#Lock reference
CThostFtdcLockField["LockRef"] = "string"
#User ID
CThostFtdcLockField["UserID"] = "string"
#Volume
CThostFtdcLockField["Volume"] = "int"
#Request ID
CThostFtdcLockField["RequestID"] = "int"
#Business unit
CThostFtdcLockField["BusinessUnit"] = "string"
#Lock type
CThostFtdcLockField["LockType"] = "char"
#Local lock ID
CThostFtdcLockField["LockLocalID"] = "string"
#Exchange ID
CThostFtdcLockField["ExchangeID"] = "string"
#Participant ID
CThostFtdcLockField["ParticipantID"] = "string"
#Client ID
CThostFtdcLockField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcLockField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcLockField["TraderID"] = "string"
#Install ID
CThostFtdcLockField["InstallID"] = "int"
#Order submit status
CThostFtdcLockField["OrderSubmitStatus"] = "char"
#Order notify sequence
CThostFtdcLockField["NotifySequence"] = "int"
#Trading day
CThostFtdcLockField["TradingDay"] = "string"
#Settlement ID
CThostFtdcLockField["SettlementID"] = "int"
#Lock system ID
CThostFtdcLockField["LockSysID"] = "string"
#Insert date
CThostFtdcLockField["InsertDate"] = "string"
#Insert time
CThostFtdcLockField["InsertTime"] = "string"
#Cancel time
CThostFtdcLockField["CancelTime"] = "string"
#Lock status
CThostFtdcLockField["LockStatus"] = "char"
#Clearing participant ID
CThostFtdcLockField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcLockField["SequenceNo"] = "int"
#Front ID
CThostFtdcLockField["FrontID"] = "int"
#Session ID
CThostFtdcLockField["SessionID"] = "int"
#User product info
CThostFtdcLockField["UserProductInfo"] = "string"
#Status message
CThostFtdcLockField["StatusMsg"] = "string"
#Active user ID
CThostFtdcLockField["ActiveUserID"] = "string"
#Broker lock sequence number
CThostFtdcLockField["BrokerLockSeq"] = "int"
#Branch ID
CThostFtdcLockField["BranchID"] = "string"
structDict['CThostFtdcLockField'] = CThostFtdcLockField
#Query lock
CThostFtdcQryLockField = {}
#Broker ID
CThostFtdcQryLockField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryLockField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryLockField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryLockField["ExchangeID"] = "string"
#Lock system ID
CThostFtdcQryLockField["LockSysID"] = "string"
#Start time
CThostFtdcQryLockField["InsertTimeStart"] = "string"
#End time
CThostFtdcQryLockField["InsertTimeEnd"] = "string"
structDict['CThostFtdcQryLockField'] = CThostFtdcQryLockField
#Locked securities position
CThostFtdcLockPositionField = {}
#Broker ID
CThostFtdcLockPositionField["BrokerID"] = "string"
#Investor ID
CThostFtdcLockPositionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcLockPositionField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcLockPositionField["ExchangeID"] = "string"
#Volume
CThostFtdcLockPositionField["Volume"] = "int"
#Frozen volume
CThostFtdcLockPositionField["FrozenVolume"] = "int"
structDict['CThostFtdcLockPositionField'] = CThostFtdcLockPositionField
#Query of locked securities positions
CThostFtdcQryLockPositionField = {}
#Broker ID
CThostFtdcQryLockPositionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryLockPositionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryLockPositionField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryLockPositionField["ExchangeID"] = "string"
structDict['CThostFtdcQryLockPositionField'] = CThostFtdcQryLockPositionField
#Details of the current ETF option instrument commission rates
CThostFtdcETFOptionInstrCommRateField = {}
#Instrument ID
CThostFtdcETFOptionInstrCommRateField["InstrumentID"] = "string"
#Investor range
CThostFtdcETFOptionInstrCommRateField["InvestorRange"] = "char"
#Broker ID
CThostFtdcETFOptionInstrCommRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcETFOptionInstrCommRateField["InvestorID"] = "string"
#Open commission rate (by money)
CThostFtdcETFOptionInstrCommRateField["OpenRatioByMoney"] = "float"
#Open commission (by volume)
CThostFtdcETFOptionInstrCommRateField["OpenRatioByVolume"] = "float"
#Close commission rate (by money)
CThostFtdcETFOptionInstrCommRateField["CloseRatioByMoney"] = "float"
#Close commission (by volume)
CThostFtdcETFOptionInstrCommRateField["CloseRatioByVolume"] = "float"
#Close-today commission rate (by money)
CThostFtdcETFOptionInstrCommRateField["CloseTodayRatioByMoney"] = "float"
#Close-today commission (by volume)
CThostFtdcETFOptionInstrCommRateField["CloseTodayRatioByVolume"] = "float"
#Exercise commission rate (by money)
CThostFtdcETFOptionInstrCommRateField["StrikeRatioByMoney"] = "float"
#Exercise commission (by volume)
CThostFtdcETFOptionInstrCommRateField["StrikeRatioByVolume"] = "float"
#Exchange ID
CThostFtdcETFOptionInstrCommRateField["ExchangeID"] = "string"
#Hedge flag
CThostFtdcETFOptionInstrCommRateField["HedgeFlag"] = "char"
#Position direction
CThostFtdcETFOptionInstrCommRateField["PosiDirection"] = "char"
structDict['CThostFtdcETFOptionInstrCommRateField'] = CThostFtdcETFOptionInstrCommRateField
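#The ByMoney/ByVolume pairs above follow the usual CTP convention of a
#two-part fee. A hedged sketch of combining them (the formula is an
#assumption about typical usage; "rates" must be a record with numeric
#values filled in from an actual query response, not the type spec above):
def _openCommission(turnover, volume, rates):
    #Fee = turnover-proportional part + per-lot part.
    return (turnover * rates["OpenRatioByMoney"]
            + volume * rates["OpenRatioByVolume"])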
#ETF option commission rate query
CThostFtdcQryETFOptionInstrCommRateField = {}
#Broker ID
CThostFtdcQryETFOptionInstrCommRateField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryETFOptionInstrCommRateField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryETFOptionInstrCommRateField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryETFOptionInstrCommRateField["ExchangeID"] = "string"
structDict['CThostFtdcQryETFOptionInstrCommRateField'] = CThostFtdcQryETFOptionInstrCommRateField
#Input position freeze
CThostFtdcPosiFreezeField = {}
#Broker ID
CThostFtdcPosiFreezeField["BrokerID"] = "string"
#Investor ID
CThostFtdcPosiFreezeField["InvestorID"] = "string"
#Instrument ID
CThostFtdcPosiFreezeField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcPosiFreezeField["ExchangeID"] = "string"
#Local order ID
CThostFtdcPosiFreezeField["OrderLocalID"] = "string"
#Exchange trader ID
CThostFtdcPosiFreezeField["TraderID"] = "string"
#Participant ID
CThostFtdcPosiFreezeField["ParticipantID"] = "string"
#Install ID
CThostFtdcPosiFreezeField["InstallID"] = "int"
#Volume
CThostFtdcPosiFreezeField["Volume"] = "int"
#Freeze reason type
CThostFtdcPosiFreezeField["FreezeReasonType"] = "char"
#Freeze type
CThostFtdcPosiFreezeField["FreezeType"] = "char"
structDict['CThostFtdcPosiFreezeField'] = CThostFtdcPosiFreezeField
#Exchange lock query
CThostFtdcQryExchangeLockField = {}
#Participant ID
CThostFtdcQryExchangeLockField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeLockField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcQryExchangeLockField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeLockField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeLockField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeLockField'] = CThostFtdcQryExchangeLockField
#Exchange lock
CThostFtdcExchangeLockField = {}
#Volume
CThostFtdcExchangeLockField["Volume"] = "int"
#Request ID
CThostFtdcExchangeLockField["RequestID"] = "int"
#Business unit
CThostFtdcExchangeLockField["BusinessUnit"] = "string"
#Lock type
CThostFtdcExchangeLockField["LockType"] = "char"
#Local lock ID
CThostFtdcExchangeLockField["LockLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeLockField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeLockField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeLockField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeLockField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcExchangeLockField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeLockField["InstallID"] = "int"
#Order submit status
CThostFtdcExchangeLockField["OrderSubmitStatus"] = "char"
#Order notify sequence
CThostFtdcExchangeLockField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeLockField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeLockField["SettlementID"] = "int"
#Lock system ID
CThostFtdcExchangeLockField["LockSysID"] = "string"
#Insert date
CThostFtdcExchangeLockField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeLockField["InsertTime"] = "string"
#Cancel time
CThostFtdcExchangeLockField["CancelTime"] = "string"
#Lock status
CThostFtdcExchangeLockField["LockStatus"] = "char"
#Clearing participant ID
CThostFtdcExchangeLockField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcExchangeLockField["SequenceNo"] = "int"
#Branch ID
CThostFtdcExchangeLockField["BranchID"] = "string"
structDict['CThostFtdcExchangeLockField'] = CThostFtdcExchangeLockField
#Exchange exec order action error
CThostFtdcExchangeExecOrderActionErrorField = {}
#Exchange ID
CThostFtdcExchangeExecOrderActionErrorField["ExchangeID"] = "string"
#Exec order system ID
CThostFtdcExchangeExecOrderActionErrorField["ExecOrderSysID"] = "string"
#Exchange trader ID
CThostFtdcExchangeExecOrderActionErrorField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeExecOrderActionErrorField["InstallID"] = "int"
#Local exec order ID
CThostFtdcExchangeExecOrderActionErrorField["ExecOrderLocalID"] = "string"
#Local action ID
CThostFtdcExchangeExecOrderActionErrorField["ActionLocalID"] = "string"
#Error ID
CThostFtdcExchangeExecOrderActionErrorField["ErrorID"] = "int"
#Error message
CThostFtdcExchangeExecOrderActionErrorField["ErrorMsg"] = "string"
#Broker ID
CThostFtdcExchangeExecOrderActionErrorField["BrokerID"] = "string"
structDict['CThostFtdcExchangeExecOrderActionErrorField'] = CThostFtdcExchangeExecOrderActionErrorField
#Input batch order action
CThostFtdcInputBatchOrderActionField = {}
#Broker ID
CThostFtdcInputBatchOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputBatchOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcInputBatchOrderActionField["OrderActionRef"] = "int"
#Request ID
CThostFtdcInputBatchOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcInputBatchOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcInputBatchOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcInputBatchOrderActionField["ExchangeID"] = "string"
#User ID
CThostFtdcInputBatchOrderActionField["UserID"] = "string"
structDict['CThostFtdcInputBatchOrderActionField'] = CThostFtdcInputBatchOrderActionField
#Batch order action
CThostFtdcBatchOrderActionField = {}
#Broker ID
CThostFtdcBatchOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcBatchOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcBatchOrderActionField["OrderActionRef"] = "int"
#Request ID
CThostFtdcBatchOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcBatchOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcBatchOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcBatchOrderActionField["ExchangeID"] = "string"
#Action date
CThostFtdcBatchOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcBatchOrderActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcBatchOrderActionField["TraderID"] = "string"
#Install ID
CThostFtdcBatchOrderActionField["InstallID"] = "int"
#Local action ID
CThostFtdcBatchOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcBatchOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcBatchOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcBatchOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcBatchOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcBatchOrderActionField["UserID"] = "string"
#Status message
CThostFtdcBatchOrderActionField["StatusMsg"] = "string"
structDict['CThostFtdcBatchOrderActionField'] = CThostFtdcBatchOrderActionField
#Exchange batch order action
CThostFtdcExchangeBatchOrderActionField = {}
#Exchange ID
CThostFtdcExchangeBatchOrderActionField["ExchangeID"] = "string"
#Action date
CThostFtdcExchangeBatchOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcExchangeBatchOrderActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcExchangeBatchOrderActionField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeBatchOrderActionField["InstallID"] = "int"
#Local action ID
CThostFtdcExchangeBatchOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExchangeBatchOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeBatchOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcExchangeBatchOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExchangeBatchOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExchangeBatchOrderActionField["UserID"] = "string"
structDict['CThostFtdcExchangeBatchOrderActionField'] = CThostFtdcExchangeBatchOrderActionField
#Query of batch order actions
CThostFtdcQryBatchOrderActionField = {}
#Broker ID
CThostFtdcQryBatchOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryBatchOrderActionField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryBatchOrderActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryBatchOrderActionField'] = CThostFtdcQryBatchOrderActionField
#Investor position limit
CThostFtdcLimitPosiField = {}
#Broker ID
CThostFtdcLimitPosiField["BrokerID"] = "string"
#Investor ID
CThostFtdcLimitPosiField["InvestorID"] = "string"
#Instrument ID
CThostFtdcLimitPosiField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcLimitPosiField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcLimitPosiField["TotalVolume"] = "int"
#Long volume limit
CThostFtdcLimitPosiField["LongVolume"] = "int"
#Today's long open volume limit
CThostFtdcLimitPosiField["OpenVolume"] = "int"
#Long position amount limit
CThostFtdcLimitPosiField["LongAmount"] = "float"
#Total volume frozen
CThostFtdcLimitPosiField["TotalVolumeFrozen"] = "int"
#Long volume frozen
CThostFtdcLimitPosiField["LongVolumeFrozen"] = "int"
#Today's long open volume frozen
CThostFtdcLimitPosiField["OpenVolumeFrozen"] = "int"
#Long position amount frozen
CThostFtdcLimitPosiField["LongAmountFrozen"] = "float"
structDict['CThostFtdcLimitPosiField'] = CThostFtdcLimitPosiField
#Query of investor position limits
CThostFtdcQryLimitPosiField = {}
#Broker ID
CThostFtdcQryLimitPosiField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryLimitPosiField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryLimitPosiField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryLimitPosiField["ExchangeID"] = "string"
structDict['CThostFtdcQryLimitPosiField'] = CThostFtdcQryLimitPosiField
#Broker position limit
CThostFtdcBrokerLimitPosiField = {}
#Broker ID
CThostFtdcBrokerLimitPosiField["BrokerID"] = "string"
#Instrument ID
CThostFtdcBrokerLimitPosiField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcBrokerLimitPosiField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcBrokerLimitPosiField["TotalVolume"] = "float"
#Long volume limit
CThostFtdcBrokerLimitPosiField["LongVolume"] = "float"
#Total volume frozen
CThostFtdcBrokerLimitPosiField["TotalVolumeFrozen"] = "float"
#Long volume frozen
CThostFtdcBrokerLimitPosiField["LongVolumeFrozen"] = "float"
structDict['CThostFtdcBrokerLimitPosiField'] = CThostFtdcBrokerLimitPosiField
#Query of broker position limits
CThostFtdcQryBrokerLimitPosiField = {}
#Broker ID
CThostFtdcQryBrokerLimitPosiField["BrokerID"] = "string"
#Instrument ID
CThostFtdcQryBrokerLimitPosiField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryBrokerLimitPosiField["ExchangeID"] = "string"
structDict['CThostFtdcQryBrokerLimitPosiField'] = CThostFtdcQryBrokerLimitPosiField
#Investor securities position limit
CThostFtdcLimitPosiSField = {}
#Broker ID
CThostFtdcLimitPosiSField["BrokerID"] = "string"
#Investor ID
CThostFtdcLimitPosiSField["InvestorID"] = "string"
#Instrument ID
CThostFtdcLimitPosiSField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcLimitPosiSField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcLimitPosiSField["TotalVolume"] = "int"
#Today's open volume limit
CThostFtdcLimitPosiSField["OpenVolume"] = "int"
#Total volume frozen
CThostFtdcLimitPosiSField["TotalVolumeFrozen"] = "int"
#Today's open volume frozen
CThostFtdcLimitPosiSField["OpenVolumeFrozen"] = "int"
structDict['CThostFtdcLimitPosiSField'] = CThostFtdcLimitPosiSField
#Query of investor securities position limits
CThostFtdcQryLimitPosiSField = {}
#Broker ID
CThostFtdcQryLimitPosiSField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryLimitPosiSField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryLimitPosiSField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryLimitPosiSField["ExchangeID"] = "string"
structDict['CThostFtdcQryLimitPosiSField'] = CThostFtdcQryLimitPosiSField
#Investor position limit parameters
CThostFtdcLimitPosiParamField = {}
#Instrument ID
CThostFtdcLimitPosiParamField["InstrumentID"] = "string"
#Investor range
CThostFtdcLimitPosiParamField["InvestorRange"] = "char"
#Broker ID
CThostFtdcLimitPosiParamField["BrokerID"] = "string"
#Investor ID
CThostFtdcLimitPosiParamField["InvestorID"] = "string"
#Exchange ID
CThostFtdcLimitPosiParamField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcLimitPosiParamField["TotalVolume"] = "int"
#Long volume limit
CThostFtdcLimitPosiParamField["LongVolume"] = "int"
#Today's long open volume limit
CThostFtdcLimitPosiParamField["OpenVolume"] = "int"
#Long position amount limit
CThostFtdcLimitPosiParamField["LongAmount"] = "float"
structDict['CThostFtdcLimitPosiParamField'] = CThostFtdcLimitPosiParamField
#Broker position limit parameters
CThostFtdcBrokerLimitPosiParamField = {}
#Broker ID
CThostFtdcBrokerLimitPosiParamField["BrokerID"] = "string"
#Instrument ID
CThostFtdcBrokerLimitPosiParamField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcBrokerLimitPosiParamField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcBrokerLimitPosiParamField["TotalVolume"] = "float"
#Long volume limit
CThostFtdcBrokerLimitPosiParamField["LongVolume"] = "float"
structDict['CThostFtdcBrokerLimitPosiParamField'] = CThostFtdcBrokerLimitPosiParamField
#Investor securities position limit parameters
CThostFtdcLimitPosiParamSField = {}
#Instrument ID
CThostFtdcLimitPosiParamSField["InstrumentID"] = "string"
#Investor range
CThostFtdcLimitPosiParamSField["InvestorRange"] = "char"
#Broker ID
CThostFtdcLimitPosiParamSField["BrokerID"] = "string"
#Investor ID
CThostFtdcLimitPosiParamSField["InvestorID"] = "string"
#Exchange ID
CThostFtdcLimitPosiParamSField["ExchangeID"] = "string"
#Total volume limit
CThostFtdcLimitPosiParamSField["TotalVolume"] = "int"
#Today's open volume limit
CThostFtdcLimitPosiParamSField["OpenVolume"] = "int"
structDict['CThostFtdcLimitPosiParamSField'] = CThostFtdcLimitPosiParamSField
#Input stock disposal action
CThostFtdcInputStockDisposalActionField = {}
#Broker ID
CThostFtdcInputStockDisposalActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputStockDisposalActionField["InvestorID"] = "string"
#Stock disposal action reference
CThostFtdcInputStockDisposalActionField["StockDisposalActionRef"] = "int"
#Stock disposal reference
CThostFtdcInputStockDisposalActionField["StockDisposalRef"] = "string"
#Request ID
CThostFtdcInputStockDisposalActionField["RequestID"] = "int"
#Front ID
CThostFtdcInputStockDisposalActionField["FrontID"] = "int"
#Session ID
CThostFtdcInputStockDisposalActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcInputStockDisposalActionField["ExchangeID"] = "string"
#Stock disposal system ID
CThostFtdcInputStockDisposalActionField["StockDisposalSysID"] = "string"
#Action flag
CThostFtdcInputStockDisposalActionField["ActionFlag"] = "char"
#User ID
CThostFtdcInputStockDisposalActionField["UserID"] = "string"
#Instrument ID
CThostFtdcInputStockDisposalActionField["InstrumentID"] = "string"
structDict['CThostFtdcInputStockDisposalActionField'] = CThostFtdcInputStockDisposalActionField
#Stock disposal action
CThostFtdcStockDisposalActionField = {}
#Broker ID
CThostFtdcStockDisposalActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcStockDisposalActionField["InvestorID"] = "string"
#Stock disposal action reference
CThostFtdcStockDisposalActionField["StockDisposalActionRef"] = "int"
#Stock disposal reference
CThostFtdcStockDisposalActionField["StockDisposalRef"] = "string"
#Request ID
CThostFtdcStockDisposalActionField["RequestID"] = "int"
#Front ID
CThostFtdcStockDisposalActionField["FrontID"] = "int"
#Session ID
CThostFtdcStockDisposalActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcStockDisposalActionField["ExchangeID"] = "string"
#Stock disposal system ID
CThostFtdcStockDisposalActionField["StockDisposalSysID"] = "string"
#Action flag
CThostFtdcStockDisposalActionField["ActionFlag"] = "char"
#Action date
CThostFtdcStockDisposalActionField["ActionDate"] = "string"
#Action time
CThostFtdcStockDisposalActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcStockDisposalActionField["TraderID"] = "string"
#Install ID
CThostFtdcStockDisposalActionField["InstallID"] = "int"
#Local stock disposal ID
CThostFtdcStockDisposalActionField["StockDisposalLocalID"] = "string"
#Local action ID
CThostFtdcStockDisposalActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcStockDisposalActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcStockDisposalActionField["ClientID"] = "string"
#Business unit
CThostFtdcStockDisposalActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcStockDisposalActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcStockDisposalActionField["UserID"] = "string"
#Action type
CThostFtdcStockDisposalActionField["ActionType"] = "char"
#Status message
CThostFtdcStockDisposalActionField["StatusMsg"] = "string"
#Instrument ID
CThostFtdcStockDisposalActionField["InstrumentID"] = "string"
#Branch ID
CThostFtdcStockDisposalActionField["BranchID"] = "string"
structDict['CThostFtdcStockDisposalActionField'] = CThostFtdcStockDisposalActionField
#Stock disposal action query
CThostFtdcQryStockDisposalActionField = {}
#Broker ID
CThostFtdcQryStockDisposalActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryStockDisposalActionField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryStockDisposalActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryStockDisposalActionField'] = CThostFtdcQryStockDisposalActionField
#Exchange stock disposal action
CThostFtdcExchangeStockDisposalActionField = {}
#Exchange ID
CThostFtdcExchangeStockDisposalActionField["ExchangeID"] = "string"
#Stock disposal system ID
CThostFtdcExchangeStockDisposalActionField["StockDisposalSysID"] = "string"
#Action flag
CThostFtdcExchangeStockDisposalActionField["ActionFlag"] = "char"
#Action date
CThostFtdcExchangeStockDisposalActionField["ActionDate"] = "string"
#Action time
CThostFtdcExchangeStockDisposalActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcExchangeStockDisposalActionField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeStockDisposalActionField["InstallID"] = "int"
#Local stock disposal ID
CThostFtdcExchangeStockDisposalActionField["StockDisposalLocalID"] = "string"
#Local action ID
CThostFtdcExchangeStockDisposalActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcExchangeStockDisposalActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeStockDisposalActionField["ClientID"] = "string"
#Business unit
CThostFtdcExchangeStockDisposalActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcExchangeStockDisposalActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcExchangeStockDisposalActionField["UserID"] = "string"
#Action type
CThostFtdcExchangeStockDisposalActionField["ActionType"] = "char"
#Branch ID
CThostFtdcExchangeStockDisposalActionField["BranchID"] = "string"
structDict['CThostFtdcExchangeStockDisposalActionField'] = CThostFtdcExchangeStockDisposalActionField
#Exchange stock disposal action query
CThostFtdcQryExchangeStockDisposalActionField = {}
#Participant ID
CThostFtdcQryExchangeStockDisposalActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeStockDisposalActionField["ClientID"] = "string"
#Exchange ID
CThostFtdcQryExchangeStockDisposalActionField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeStockDisposalActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeStockDisposalActionField'] = CThostFtdcQryExchangeStockDisposalActionField
#Query of erroneous stock disposal actions
CThostFtdcQryErrStockDisposalActionField = {}
#Broker ID
CThostFtdcQryErrStockDisposalActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryErrStockDisposalActionField["InvestorID"] = "string"
structDict['CThostFtdcQryErrStockDisposalActionField'] = CThostFtdcQryErrStockDisposalActionField
#Exchange stock disposal action error
CThostFtdcExchangeStockDisposalActionErrorField = {}
#Exchange ID
CThostFtdcExchangeStockDisposalActionErrorField["ExchangeID"] = "string"
#Stock disposal system ID
CThostFtdcExchangeStockDisposalActionErrorField["StockDisposalSysID"] = "string"
#Exchange trader ID
CThostFtdcExchangeStockDisposalActionErrorField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeStockDisposalActionErrorField["InstallID"] = "int"
#Local stock disposal ID
CThostFtdcExchangeStockDisposalActionErrorField["StockDisposalLocalID"] = "string"
#Local action ID
CThostFtdcExchangeStockDisposalActionErrorField["ActionLocalID"] = "string"
#Error ID
CThostFtdcExchangeStockDisposalActionErrorField["ErrorID"] = "int"
#Error message
CThostFtdcExchangeStockDisposalActionErrorField["ErrorMsg"] = "string"
#Broker ID
CThostFtdcExchangeStockDisposalActionErrorField["BrokerID"] = "string"
structDict['CThostFtdcExchangeStockDisposalActionErrorField'] = CThostFtdcExchangeStockDisposalActionErrorField
#Erroneous stock disposal action
CThostFtdcErrStockDisposalActionField = {}
#Broker ID
CThostFtdcErrStockDisposalActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrStockDisposalActionField["InvestorID"] = "string"
#Stock disposal action reference
CThostFtdcErrStockDisposalActionField["StockDisposalActionRef"] = "int"
#Stock disposal reference
CThostFtdcErrStockDisposalActionField["StockDisposalRef"] = "string"
#Request ID
CThostFtdcErrStockDisposalActionField["RequestID"] = "int"
#Front ID
CThostFtdcErrStockDisposalActionField["FrontID"] = "int"
#Session ID
CThostFtdcErrStockDisposalActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcErrStockDisposalActionField["ExchangeID"] = "string"
#Stock disposal system ID
CThostFtdcErrStockDisposalActionField["StockDisposalSysID"] = "string"
#Action flag
CThostFtdcErrStockDisposalActionField["ActionFlag"] = "char"
#User ID
CThostFtdcErrStockDisposalActionField["UserID"] = "string"
#Instrument ID
CThostFtdcErrStockDisposalActionField["InstrumentID"] = "string"
#Error ID
CThostFtdcErrStockDisposalActionField["ErrorID"] = "int"
#Error message
CThostFtdcErrStockDisposalActionField["ErrorMsg"] = "string"
structDict['CThostFtdcErrStockDisposalActionField'] = CThostFtdcErrStockDisposalActionField
#Investor level
CThostFtdcInvestorLevelField = {}
#Broker ID
CThostFtdcInvestorLevelField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorLevelField["InvestorID"] = "string"
#Exchange ID
CThostFtdcInvestorLevelField["ExchangeID"] = "string"
#Investor level type
CThostFtdcInvestorLevelField["LevelType"] = "char"
structDict['CThostFtdcInvestorLevelField'] = CThostFtdcInvestorLevelField
#Combination instrument safety coefficient
CThostFtdcCombInstrumentGuardField = {}
#Broker ID
CThostFtdcCombInstrumentGuardField["BrokerID"] = "string"
#Instrument ID
CThostFtdcCombInstrumentGuardField["InstrumentID"] = "string"
#Guarantee ratio
CThostFtdcCombInstrumentGuardField["GuarantRatio"] = "float"
structDict['CThostFtdcCombInstrumentGuardField'] = CThostFtdcCombInstrumentGuardField
#Combination instrument safety coefficient query
CThostFtdcQryCombInstrumentGuardField = {}
#Broker ID
CThostFtdcQryCombInstrumentGuardField["BrokerID"] = "string"
#Instrument ID
CThostFtdcQryCombInstrumentGuardField["InstrumentID"] = "string"
structDict['CThostFtdcQryCombInstrumentGuardField'] = CThostFtdcQryCombInstrumentGuardField
#Input combination action
CThostFtdcInputCombActionField = {}
#Broker ID
CThostFtdcInputCombActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputCombActionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcInputCombActionField["InstrumentID"] = "string"
#Combination action reference
CThostFtdcInputCombActionField["CombActionRef"] = "string"
#User ID
CThostFtdcInputCombActionField["UserID"] = "string"
#Direction (buy/sell)
CThostFtdcInputCombActionField["Direction"] = "char"
#Volume
CThostFtdcInputCombActionField["Volume"] = "int"
#Combination direction
CThostFtdcInputCombActionField["CombDirection"] = "char"
#Hedge flag
CThostFtdcInputCombActionField["HedgeFlag"] = "char"
#Exchange ID
CThostFtdcInputCombActionField["ExchangeID"] = "string"
structDict['CThostFtdcInputCombActionField'] = CThostFtdcInputCombActionField
#Combination action
CThostFtdcCombActionField = {}
#Broker ID
CThostFtdcCombActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcCombActionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcCombActionField["InstrumentID"] = "string"
#Combination action reference
CThostFtdcCombActionField["CombActionRef"] = "string"
#User ID
CThostFtdcCombActionField["UserID"] = "string"
#Direction (buy/sell)
CThostFtdcCombActionField["Direction"] = "char"
#Volume
CThostFtdcCombActionField["Volume"] = "int"
#Combination direction
CThostFtdcCombActionField["CombDirection"] = "char"
#Hedge flag
CThostFtdcCombActionField["HedgeFlag"] = "char"
#Local combination action ID
CThostFtdcCombActionField["ActionLocalID"] = "string"
#Exchange ID
CThostFtdcCombActionField["ExchangeID"] = "string"
#Participant ID
CThostFtdcCombActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcCombActionField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcCombActionField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcCombActionField["TraderID"] = "string"
#Install ID
CThostFtdcCombActionField["InstallID"] = "int"
#Combination status
CThostFtdcCombActionField["ActionStatus"] = "char"
#Order notify sequence
CThostFtdcCombActionField["NotifySequence"] = "int"
#Trading day
CThostFtdcCombActionField["TradingDay"] = "string"
#Settlement ID
CThostFtdcCombActionField["SettlementID"] = "int"
#Sequence number
CThostFtdcCombActionField["SequenceNo"] = "int"
#Front ID
CThostFtdcCombActionField["FrontID"] = "int"
#Session ID
CThostFtdcCombActionField["SessionID"] = "int"
#User product info
CThostFtdcCombActionField["UserProductInfo"] = "string"
#Status message
CThostFtdcCombActionField["StatusMsg"] = "string"
structDict['CThostFtdcCombActionField'] = CThostFtdcCombActionField
#Combination action query
CThostFtdcQryCombActionField = {}
#Broker ID
CThostFtdcQryCombActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryCombActionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryCombActionField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryCombActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryCombActionField'] = CThostFtdcQryCombActionField
#Exchange combination action info
CThostFtdcExchangeCombActionField = {}
#Direction (buy/sell)
CThostFtdcExchangeCombActionField["Direction"] = "char"
#Volume
CThostFtdcExchangeCombActionField["Volume"] = "int"
#Combination direction
CThostFtdcExchangeCombActionField["CombDirection"] = "char"
#Hedge flag
CThostFtdcExchangeCombActionField["HedgeFlag"] = "char"
#Local combination action ID
CThostFtdcExchangeCombActionField["ActionLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeCombActionField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeCombActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeCombActionField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeCombActionField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcExchangeCombActionField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeCombActionField["InstallID"] = "int"
#Combination status
CThostFtdcExchangeCombActionField["ActionStatus"] = "char"
#Order notify sequence
CThostFtdcExchangeCombActionField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeCombActionField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeCombActionField["SettlementID"] = "int"
#Sequence number
CThostFtdcExchangeCombActionField["SequenceNo"] = "int"
structDict['CThostFtdcExchangeCombActionField'] = CThostFtdcExchangeCombActionField
#Exchange combination action query
CThostFtdcQryExchangeCombActionField = {}
#Participant ID
CThostFtdcQryExchangeCombActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcQryExchangeCombActionField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcQryExchangeCombActionField["ExchangeInstID"] = "string"
#Exchange ID
CThostFtdcQryExchangeCombActionField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcQryExchangeCombActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeCombActionField'] = CThostFtdcQryExchangeCombActionField
#Product quote exchange rate
CThostFtdcProductExchRateField = {}
#Product ID
CThostFtdcProductExchRateField["ProductID"] = "string"
#Quote currency ID
CThostFtdcProductExchRateField["QuoteCurrencyID"] = "string"
#Exchange rate
CThostFtdcProductExchRateField["ExchangeRate"] = "float"
structDict['CThostFtdcProductExchRateField'] = CThostFtdcProductExchRateField
#Product quote exchange rate query
CThostFtdcQryProductExchRateField = {}
#Product ID
CThostFtdcQryProductExchRateField["ProductID"] = "string"
structDict['CThostFtdcQryProductExchRateField'] = CThostFtdcQryProductExchRateField
#Input designation
CThostFtdcInputDesignateField = {}
#Broker ID
CThostFtdcInputDesignateField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputDesignateField["InvestorID"] = "string"
#Designation reference
CThostFtdcInputDesignateField["DesignateRef"] = "string"
#User ID
CThostFtdcInputDesignateField["UserID"] = "string"
#Designation type
CThostFtdcInputDesignateField["DesignateType"] = "char"
#Exchange ID
CThostFtdcInputDesignateField["ExchangeID"] = "string"
structDict['CThostFtdcInputDesignateField'] = CThostFtdcInputDesignateField
#Designation
CThostFtdcDesignateField = {}
#Broker ID
CThostFtdcDesignateField["BrokerID"] = "string"
#Investor ID
CThostFtdcDesignateField["InvestorID"] = "string"
#Designation reference
CThostFtdcDesignateField["DesignateRef"] = "string"
#User ID
CThostFtdcDesignateField["UserID"] = "string"
#Designation type
CThostFtdcDesignateField["DesignateType"] = "char"
#Local designation ID
CThostFtdcDesignateField["DesignateLocalID"] = "string"
#Exchange ID
CThostFtdcDesignateField["ExchangeID"] = "string"
#Participant ID
CThostFtdcDesignateField["ParticipantID"] = "string"
#Client ID
CThostFtdcDesignateField["ClientID"] = "string"
#Exchange trader ID
CThostFtdcDesignateField["TraderID"] = "string"
#Install ID
CThostFtdcDesignateField["InstallID"] = "int"
#Designation status
CThostFtdcDesignateField["DesignateStatus"] = "char"
#Order notify sequence
CThostFtdcDesignateField["NotifySequence"] = "int"
#Trading day
CThostFtdcDesignateField["TradingDay"] = "string"
#Settlement ID
CThostFtdcDesignateField["SettlementID"] = "int"
#Insert date
CThostFtdcDesignateField["InsertDate"] = "string"
#Insert time
CThostFtdcDesignateField["InsertTime"] = "string"
#Front ID
CThostFtdcDesignateField["FrontID"] = "int"
#Session ID
CThostFtdcDesignateField["SessionID"] = "int"
#User product info
CThostFtdcDesignateField["UserProductInfo"] = "string"
#Status message
CThostFtdcDesignateField["StatusMsg"] = "string"
#Branch ID
CThostFtdcDesignateField["BranchID"] = "string"
structDict['CThostFtdcDesignateField'] = CThostFtdcDesignateField
#Designation query
CThostFtdcQryDesignateField = {}
#Broker ID
CThostFtdcQryDesignateField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryDesignateField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryDesignateField["ExchangeID"] = "string"
structDict['CThostFtdcQryDesignateField'] = CThostFtdcQryDesignateField
#Exchange designation info
CThostFtdcExchangeDesignateField = {}
#Designation type
CThostFtdcExchangeDesignateField["DesignateType"] = "char"
#Local designation ID
CThostFtdcExchangeDesignateField["DesignateLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeDesignateField["ExchangeID"] = "string"
#Participant ID
CThostFtdcExchangeDesignateField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeDesignateField["ClientID"] = "string"
#Exchange trader ID
CThostFtdcExchangeDesignateField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeDesignateField["InstallID"] = "int"
#Designation status
CThostFtdcExchangeDesignateField["DesignateStatus"] = "char"
#Order notify sequence
CThostFtdcExchangeDesignateField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeDesignateField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeDesignateField["SettlementID"] = "int"
#Insert date
CThostFtdcExchangeDesignateField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeDesignateField["InsertTime"] = "string"
#Branch ID
CThostFtdcExchangeDesignateField["BranchID"] = "string"
structDict['CThostFtdcExchangeDesignateField'] = CThostFtdcExchangeDesignateField
#Input stock disposal
CThostFtdcInputStockDisposalField = {}
#Broker ID
CThostFtdcInputStockDisposalField["BrokerID"] = "string"
#Investor ID
CThostFtdcInputStockDisposalField["InvestorID"] = "string"
#Stock disposal reference
CThostFtdcInputStockDisposalField["StockDisposalRef"] = "string"
#User ID
CThostFtdcInputStockDisposalField["UserID"] = "string"
#Instrument ID
CThostFtdcInputStockDisposalField["InstrumentID"] = "string"
#Volume
CThostFtdcInputStockDisposalField["Volume"] = "int"
#Stock disposal type
CThostFtdcInputStockDisposalField["StockDisposalType"] = "char"
#Exchange ID
CThostFtdcInputStockDisposalField["ExchangeID"] = "string"
structDict['CThostFtdcInputStockDisposalField'] = CThostFtdcInputStockDisposalField
#Stock disposal
CThostFtdcStockDisposalField = {}
#Broker ID
CThostFtdcStockDisposalField["BrokerID"] = "string"
#Investor ID
CThostFtdcStockDisposalField["InvestorID"] = "string"
#Stock disposal reference
CThostFtdcStockDisposalField["StockDisposalRef"] = "string"
#User ID
CThostFtdcStockDisposalField["UserID"] = "string"
#Instrument ID
CThostFtdcStockDisposalField["InstrumentID"] = "string"
#Volume
CThostFtdcStockDisposalField["Volume"] = "int"
#Stock disposal type
CThostFtdcStockDisposalField["StockDisposalType"] = "char"
#Local stock disposal ID
CThostFtdcStockDisposalField["StockDisposalLocalID"] = "string"
#Exchange ID
CThostFtdcStockDisposalField["ExchangeID"] = "string"
#Instrument ID on the exchange
CThostFtdcStockDisposalField["ExchangeInstID"] = "string"
#Participant ID
CThostFtdcStockDisposalField["ParticipantID"] = "string"
#Client ID
CThostFtdcStockDisposalField["ClientID"] = "string"
#Exchange trader ID
CThostFtdcStockDisposalField["TraderID"] = "string"
#Install ID
CThostFtdcStockDisposalField["InstallID"] = "int"
#Stock disposal status
CThostFtdcStockDisposalField["StockDisposalStatus"] = "char"
#Order notify sequence
CThostFtdcStockDisposalField["NotifySequence"] = "int"
#Trading day
CThostFtdcStockDisposalField["TradingDay"] = "string"
#Settlement ID
CThostFtdcStockDisposalField["SettlementID"] = "int"
#Insert date
CThostFtdcStockDisposalField["InsertDate"] = "string"
#Insert time
CThostFtdcStockDisposalField["InsertTime"] = "string"
#Front ID
CThostFtdcStockDisposalField["FrontID"] = "int"
#Session ID
CThostFtdcStockDisposalField["SessionID"] = "int"
#User product info
CThostFtdcStockDisposalField["UserProductInfo"] = "string"
#Status message
CThostFtdcStockDisposalField["StatusMsg"] = "string"
#Branch ID
CThostFtdcStockDisposalField["BranchID"] = "string"
#Stock disposal system ID
CThostFtdcStockDisposalField["StockDisposalSysID"] = "string"
#Business unit
CThostFtdcStockDisposalField["BusinessUnit"] = "string"
structDict['CThostFtdcStockDisposalField'] = CThostFtdcStockDisposalField
#Stock disposal query
CThostFtdcQryStockDisposalField = {}
#Broker ID
CThostFtdcQryStockDisposalField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryStockDisposalField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryStockDisposalField["ExchangeID"] = "string"
structDict['CThostFtdcQryStockDisposalField'] = CThostFtdcQryStockDisposalField
#Exchange stock disposal info
CThostFtdcExchangeStockDisposalField = {}
#Volume
CThostFtdcExchangeStockDisposalField["Volume"] = "int"
#Stock disposal type
CThostFtdcExchangeStockDisposalField["StockDisposalType"] = "char"
#Local stock disposal ID
CThostFtdcExchangeStockDisposalField["StockDisposalLocalID"] = "string"
#Exchange ID
CThostFtdcExchangeStockDisposalField["ExchangeID"] = "string"
#Instrument ID on the exchange
CThostFtdcExchangeStockDisposalField["ExchangeInstID"] = "string"
#Participant ID
CThostFtdcExchangeStockDisposalField["ParticipantID"] = "string"
#Client ID
CThostFtdcExchangeStockDisposalField["ClientID"] = "string"
#Exchange trader ID
CThostFtdcExchangeStockDisposalField["TraderID"] = "string"
#Install ID
CThostFtdcExchangeStockDisposalField["InstallID"] = "int"
#Stock disposal status
CThostFtdcExchangeStockDisposalField["StockDisposalStatus"] = "char"
#Order notify sequence
CThostFtdcExchangeStockDisposalField["NotifySequence"] = "int"
#Trading day
CThostFtdcExchangeStockDisposalField["TradingDay"] = "string"
#Settlement ID
CThostFtdcExchangeStockDisposalField["SettlementID"] = "int"
#Insert date
CThostFtdcExchangeStockDisposalField["InsertDate"] = "string"
#Insert time
CThostFtdcExchangeStockDisposalField["InsertTime"] = "string"
#Branch ID
CThostFtdcExchangeStockDisposalField["BranchID"] = "string"
#Stock disposal system ID
CThostFtdcExchangeStockDisposalField["StockDisposalSysID"] = "string"
#Business unit
CThostFtdcExchangeStockDisposalField["BusinessUnit"] = "string"
structDict['CThostFtdcExchangeStockDisposalField'] = CThostFtdcExchangeStockDisposalField
#Query of investor levels
CThostFtdcQryInvestorLevelField = {}
#Broker ID
CThostFtdcQryInvestorLevelField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorLevelField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryInvestorLevelField["ExchangeID"] = "string"
structDict['CThostFtdcQryInvestorLevelField'] = CThostFtdcQryInvestorLevelField
#Query of RFQ price-interval parameters
CThostFtdcQryForQuoteParamField = {}
#Broker ID
CThostFtdcQryForQuoteParamField["BrokerID"] = "string"
#Instrument ID
CThostFtdcQryForQuoteParamField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryForQuoteParamField["ExchangeID"] = "string"
structDict['CThostFtdcQryForQuoteParamField'] = CThostFtdcQryForQuoteParamField
#RFQ price-interval parameters
CThostFtdcForQuoteParamField = {}
#Broker ID
CThostFtdcForQuoteParamField["BrokerID"] = "string"
#Instrument ID
CThostFtdcForQuoteParamField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcForQuoteParamField["ExchangeID"] = "string"
#Last price
CThostFtdcForQuoteParamField["LastPrice"] = "float"
#Price interval
CThostFtdcForQuoteParamField["PriceInterval"] = "float"
structDict['CThostFtdcForQuoteParamField'] = CThostFtdcForQuoteParamField
#Query of exercise freezes
CThostFtdcQryExecFreezeField = {}
#Broker ID
CThostFtdcQryExecFreezeField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryExecFreezeField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryExecFreezeField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryExecFreezeField["ExchangeID"] = "string"
structDict['CThostFtdcQryExecFreezeField'] = CThostFtdcQryExecFreezeField
#Exercise freeze
CThostFtdcExecFreezeField = {}
#Underlying instrument ID
CThostFtdcExecFreezeField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcExecFreezeField["ExchangeID"] = "string"
#Broker ID
CThostFtdcExecFreezeField["BrokerID"] = "string"
#Investor ID
CThostFtdcExecFreezeField["InvestorID"] = "string"
#Position direction (long/short)
CThostFtdcExecFreezeField["PosiDirection"] = "char"
#Option type
CThostFtdcExecFreezeField["OptionsType"] = "char"
#Frozen volume (unit: shares)
CThostFtdcExecFreezeField["Volume"] = "int"
#Frozen amount
CThostFtdcExecFreezeField["FrozenAmount"] = "float"
structDict['CThostFtdcExecFreezeField'] = CThostFtdcExecFreezeField
#ๅธๅบ่กๆ
CThostFtdcMarketDataField = {}
#ไบคๆๆฅ
CThostFtdcMarketDataField["TradingDay"] = "string"
#ๅ็บฆไปฃ็
CThostFtdcMarketDataField["InstrumentID"] = "string"
#ไบคๆๆไปฃ็
CThostFtdcMarketDataField["ExchangeID"] = "string"
#ๅ็บฆๅจไบคๆๆ็ไปฃ็
CThostFtdcMarketDataField["ExchangeInstID"] = "string"
#ๆๆฐไปท
CThostFtdcMarketDataField["LastPrice"] = "float"
#ไธๆฌก็ป็ฎไปท
CThostFtdcMarketDataField["PreSettlementPrice"] = "float"
#ๆจๆถ็
CThostFtdcMarketDataField["PreClosePrice"] = "float"
#ๆจๆไป้
CThostFtdcMarketDataField["PreOpenInterest"] = "float"
#ไปๅผ็
CThostFtdcMarketDataField["OpenPrice"] = "float"
#ๆ้ซไปท
CThostFtdcMarketDataField["HighestPrice"] = "float"
#ๆไฝไปท
CThostFtdcMarketDataField["LowestPrice"] = "float"
#ๆฐ้
CThostFtdcMarketDataField["Volume"] = "int"
#ๆไบค้้ข
CThostFtdcMarketDataField["Turnover"] = "float"
#ๆไป้
CThostFtdcMarketDataField["OpenInterest"] = "float"
#ไปๆถ็
CThostFtdcMarketDataField["ClosePrice"] = "float"
#ๆฌๆฌก็ป็ฎไปท
CThostFtdcMarketDataField["SettlementPrice"] = "float"
#ๆถจๅๆฟไปท
CThostFtdcMarketDataField["UpperLimitPrice"] = "float"
#่ทๅๆฟไปท
CThostFtdcMarketDataField["LowerLimitPrice"] = "float"
#ๆจ่ๅฎๅบฆ
CThostFtdcMarketDataField["PreDelta"] = "float"
#ไป่ๅฎๅบฆ
CThostFtdcMarketDataField["CurrDelta"] = "float"
#ๆๅไฟฎๆนๆถ้ด
CThostFtdcMarketDataField["UpdateTime"] = "string"
#ๆๅไฟฎๆนๆฏซ็ง
CThostFtdcMarketDataField["UpdateMillisec"] = "int"
#ไธๅกๆฅๆ
CThostFtdcMarketDataField["ActionDay"] = "string"
structDict['CThostFtdcMarketDataField'] = CThostFtdcMarketDataField
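
# The type strings used throughout this module ("string", "int", "float",
# "char") only loosely describe the underlying C struct layout. As a minimal
# illustrative sketch (not part of the original CTP API), the helper below
# checks a decoded record against one of these schemas; the mapping of type
# strings to Python types is an assumption chosen for illustration.
def validateRecord(structName, record):
    """Return a list of (field, declaredType, actualType) mismatches.

    Assumes "string"/"char" decode to str and "int"/"float" to int/float.
    An empty list means every supplied field matches its declared type.
    """
    typeMap = {"string": str, "char": str, "int": int, "float": float}
    mismatches = []
    for field, typeName in structDict[structName].items():
        if field in record and not isinstance(record[field], typeMap[typeName]):
            mismatches.append((field, typeName, type(record[field]).__name__))
    return mismatches

# Example with a hypothetical tick:
# validateRecord('CThostFtdcMarketDataField',
#                {'InstrumentID': 'IF1706', 'LastPrice': 3412.8, 'Volume': 120})
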
#Market data: base attributes
CThostFtdcMarketDataBaseField = {}
#Trading day
CThostFtdcMarketDataBaseField["TradingDay"] = "string"
#Pre-settlement price
CThostFtdcMarketDataBaseField["PreSettlementPrice"] = "float"
#Pre-close price
CThostFtdcMarketDataBaseField["PreClosePrice"] = "float"
#Previous open interest
CThostFtdcMarketDataBaseField["PreOpenInterest"] = "float"
#Previous delta
CThostFtdcMarketDataBaseField["PreDelta"] = "float"
structDict['CThostFtdcMarketDataBaseField'] = CThostFtdcMarketDataBaseField
#Market data: static attributes
CThostFtdcMarketDataStaticField = {}
#Open price
CThostFtdcMarketDataStaticField["OpenPrice"] = "float"
#Highest price
CThostFtdcMarketDataStaticField["HighestPrice"] = "float"
#Lowest price
CThostFtdcMarketDataStaticField["LowestPrice"] = "float"
#Close price
CThostFtdcMarketDataStaticField["ClosePrice"] = "float"
#Upper limit price
CThostFtdcMarketDataStaticField["UpperLimitPrice"] = "float"
#Lower limit price
CThostFtdcMarketDataStaticField["LowerLimitPrice"] = "float"
#Settlement price
CThostFtdcMarketDataStaticField["SettlementPrice"] = "float"
#Current delta
CThostFtdcMarketDataStaticField["CurrDelta"] = "float"
structDict['CThostFtdcMarketDataStaticField'] = CThostFtdcMarketDataStaticField
#Market data: last-trade attributes
CThostFtdcMarketDataLastMatchField = {}
#Last price
CThostFtdcMarketDataLastMatchField["LastPrice"] = "float"
#Volume
CThostFtdcMarketDataLastMatchField["Volume"] = "int"
#Turnover
CThostFtdcMarketDataLastMatchField["Turnover"] = "float"
#Open interest
CThostFtdcMarketDataLastMatchField["OpenInterest"] = "float"
structDict['CThostFtdcMarketDataLastMatchField'] = CThostFtdcMarketDataLastMatchField
#Market data: best-price attributes
CThostFtdcMarketDataBestPriceField = {}
#Bid price 1
CThostFtdcMarketDataBestPriceField["BidPrice1"] = "float"
#Bid volume 1
CThostFtdcMarketDataBestPriceField["BidVolume1"] = "int"
#Ask price 1
CThostFtdcMarketDataBestPriceField["AskPrice1"] = "float"
#Ask volume 1
CThostFtdcMarketDataBestPriceField["AskVolume1"] = "int"
structDict['CThostFtdcMarketDataBestPriceField'] = CThostFtdcMarketDataBestPriceField
#Market data: bid levels 2 and 3
CThostFtdcMarketDataBid23Field = {}
#Bid price 2
CThostFtdcMarketDataBid23Field["BidPrice2"] = "float"
#Bid volume 2
CThostFtdcMarketDataBid23Field["BidVolume2"] = "int"
#Bid price 3
CThostFtdcMarketDataBid23Field["BidPrice3"] = "float"
#Bid volume 3
CThostFtdcMarketDataBid23Field["BidVolume3"] = "int"
structDict['CThostFtdcMarketDataBid23Field'] = CThostFtdcMarketDataBid23Field
#Market data: ask levels 2 and 3
CThostFtdcMarketDataAsk23Field = {}
#Ask price 2
CThostFtdcMarketDataAsk23Field["AskPrice2"] = "float"
#Ask volume 2
CThostFtdcMarketDataAsk23Field["AskVolume2"] = "int"
#Ask price 3
CThostFtdcMarketDataAsk23Field["AskPrice3"] = "float"
#Ask volume 3
CThostFtdcMarketDataAsk23Field["AskVolume3"] = "int"
structDict['CThostFtdcMarketDataAsk23Field'] = CThostFtdcMarketDataAsk23Field
#Market data: bid levels 4 and 5
CThostFtdcMarketDataBid45Field = {}
#Bid price 4
CThostFtdcMarketDataBid45Field["BidPrice4"] = "float"
#Bid volume 4
CThostFtdcMarketDataBid45Field["BidVolume4"] = "int"
#Bid price 5
CThostFtdcMarketDataBid45Field["BidPrice5"] = "float"
#Bid volume 5
CThostFtdcMarketDataBid45Field["BidVolume5"] = "int"
structDict['CThostFtdcMarketDataBid45Field'] = CThostFtdcMarketDataBid45Field
#Market data: ask levels 4 and 5
CThostFtdcMarketDataAsk45Field = {}
#Ask price 4
CThostFtdcMarketDataAsk45Field["AskPrice4"] = "float"
#Ask volume 4
CThostFtdcMarketDataAsk45Field["AskVolume4"] = "int"
#Ask price 5
CThostFtdcMarketDataAsk45Field["AskPrice5"] = "float"
#Ask volume 5
CThostFtdcMarketDataAsk45Field["AskVolume5"] = "int"
structDict['CThostFtdcMarketDataAsk45Field'] = CThostFtdcMarketDataAsk45Field
#Market data: update-time attributes
CThostFtdcMarketDataUpdateTimeField = {}
#Instrument ID
CThostFtdcMarketDataUpdateTimeField["InstrumentID"] = "string"
#Last update time
CThostFtdcMarketDataUpdateTimeField["UpdateTime"] = "string"
#Last update millisecond
CThostFtdcMarketDataUpdateTimeField["UpdateMillisec"] = "int"
#Action day
CThostFtdcMarketDataUpdateTimeField["ActionDay"] = "string"
#Exchange ID
CThostFtdcMarketDataUpdateTimeField["ExchangeID"] = "string"
structDict['CThostFtdcMarketDataUpdateTimeField'] = CThostFtdcMarketDataUpdateTimeField
#Market data: exchange ID attribute
CThostFtdcMarketDataExchangeField = {}
#Exchange ID
CThostFtdcMarketDataExchangeField["ExchangeID"] = "string"
structDict['CThostFtdcMarketDataExchangeField'] = CThostFtdcMarketDataExchangeField
#Specific instrument
CThostFtdcSpecificInstrumentField = {}
#Instrument ID
CThostFtdcSpecificInstrumentField["InstrumentID"] = "string"
structDict['CThostFtdcSpecificInstrumentField'] = CThostFtdcSpecificInstrumentField
#Instrument status
CThostFtdcInstrumentStatusField = {}
#Exchange ID
CThostFtdcInstrumentStatusField["ExchangeID"] = "string"
#Instrument ID on the exchange
CThostFtdcInstrumentStatusField["ExchangeInstID"] = "string"
#Settlement group ID
CThostFtdcInstrumentStatusField["SettlementGroupID"] = "string"
#Instrument ID
CThostFtdcInstrumentStatusField["InstrumentID"] = "string"
#Instrument trading status
CThostFtdcInstrumentStatusField["InstrumentStatus"] = "char"
#Trading segment sequence number
CThostFtdcInstrumentStatusField["TradingSegmentSN"] = "int"
#Time of entering this status
CThostFtdcInstrumentStatusField["EnterTime"] = "string"
#Reason for entering this status
CThostFtdcInstrumentStatusField["EnterReason"] = "char"
structDict['CThostFtdcInstrumentStatusField'] = CThostFtdcInstrumentStatusField
#Query instrument status
CThostFtdcQryInstrumentStatusField = {}
#Exchange ID
CThostFtdcQryInstrumentStatusField["ExchangeID"] = "string"
#Instrument ID on the exchange
CThostFtdcQryInstrumentStatusField["ExchangeInstID"] = "string"
structDict['CThostFtdcQryInstrumentStatusField'] = CThostFtdcQryInstrumentStatusField
#Investor account
CThostFtdcInvestorAccountField = {}
#Broker ID
CThostFtdcInvestorAccountField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorAccountField["InvestorID"] = "string"
#Investor account ID
CThostFtdcInvestorAccountField["AccountID"] = "string"
#Currency ID
CThostFtdcInvestorAccountField["CurrencyID"] = "string"
structDict['CThostFtdcInvestorAccountField'] = CThostFtdcInvestorAccountField
#Floating profit algorithm
CThostFtdcPositionProfitAlgorithmField = {}
#Broker ID
CThostFtdcPositionProfitAlgorithmField["BrokerID"] = "string"
#Investor account ID
CThostFtdcPositionProfitAlgorithmField["AccountID"] = "string"
#Profit algorithm
CThostFtdcPositionProfitAlgorithmField["Algorithm"] = "char"
#Memo
CThostFtdcPositionProfitAlgorithmField["Memo"] = "string"
#Currency ID
CThostFtdcPositionProfitAlgorithmField["CurrencyID"] = "string"
structDict['CThostFtdcPositionProfitAlgorithmField'] = CThostFtdcPositionProfitAlgorithmField
#Member fund discount
CThostFtdcDiscountField = {}
#Broker ID
CThostFtdcDiscountField["BrokerID"] = "string"
#Investor range
CThostFtdcDiscountField["InvestorRange"] = "char"
#Investor ID
CThostFtdcDiscountField["InvestorID"] = "string"
#Fund discount ratio
CThostFtdcDiscountField["Discount"] = "float"
structDict['CThostFtdcDiscountField'] = CThostFtdcDiscountField
#Query transfer bank
CThostFtdcQryTransferBankField = {}
#Bank ID
CThostFtdcQryTransferBankField["BankID"] = "string"
#Bank branch ID
CThostFtdcQryTransferBankField["BankBrchID"] = "string"
structDict['CThostFtdcQryTransferBankField'] = CThostFtdcQryTransferBankField
#Transfer bank
CThostFtdcTransferBankField = {}
#Bank ID
CThostFtdcTransferBankField["BankID"] = "string"
#Bank branch ID
CThostFtdcTransferBankField["BankBrchID"] = "string"
#Bank name
CThostFtdcTransferBankField["BankName"] = "string"
#Is active
CThostFtdcTransferBankField["IsActive"] = "int"
structDict['CThostFtdcTransferBankField'] = CThostFtdcTransferBankField
#Query investor position detail
CThostFtdcQryInvestorPositionDetailField = {}
#Broker ID
CThostFtdcQryInvestorPositionDetailField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorPositionDetailField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryInvestorPositionDetailField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryInvestorPositionDetailField["ExchangeID"] = "string"
structDict['CThostFtdcQryInvestorPositionDetailField'] = CThostFtdcQryInvestorPositionDetailField
#Investor position detail
CThostFtdcInvestorPositionDetailField = {}
#Instrument ID
CThostFtdcInvestorPositionDetailField["InstrumentID"] = "string"
#Broker ID
CThostFtdcInvestorPositionDetailField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorPositionDetailField["InvestorID"] = "string"
#Hedge flag
CThostFtdcInvestorPositionDetailField["HedgeFlag"] = "char"
#Direction
CThostFtdcInvestorPositionDetailField["Direction"] = "char"
#Open date
CThostFtdcInvestorPositionDetailField["OpenDate"] = "string"
#Trade ID
CThostFtdcInvestorPositionDetailField["TradeID"] = "string"
#Volume
CThostFtdcInvestorPositionDetailField["Volume"] = "int"
#Open price
CThostFtdcInvestorPositionDetailField["OpenPrice"] = "float"
#Trading day
CThostFtdcInvestorPositionDetailField["TradingDay"] = "string"
#Settlement ID
CThostFtdcInvestorPositionDetailField["SettlementID"] = "int"
#Trade type
CThostFtdcInvestorPositionDetailField["TradeType"] = "char"
#Combination instrument ID
CThostFtdcInvestorPositionDetailField["CombInstrumentID"] = "string"
#Exchange ID
CThostFtdcInvestorPositionDetailField["ExchangeID"] = "string"
#Close profit by date (daily mark-to-market)
CThostFtdcInvestorPositionDetailField["CloseProfitByDate"] = "float"
#Close profit by trade
CThostFtdcInvestorPositionDetailField["CloseProfitByTrade"] = "float"
#Position profit by date (daily mark-to-market)
CThostFtdcInvestorPositionDetailField["PositionProfitByDate"] = "float"
#Position profit by trade
CThostFtdcInvestorPositionDetailField["PositionProfitByTrade"] = "float"
#Investor margin
CThostFtdcInvestorPositionDetailField["Margin"] = "float"
#Exchange margin
CThostFtdcInvestorPositionDetailField["ExchMargin"] = "float"
#Margin rate (by money)
CThostFtdcInvestorPositionDetailField["MarginRateByMoney"] = "float"
#Margin rate (by volume)
CThostFtdcInvestorPositionDetailField["MarginRateByVolume"] = "float"
#Last settlement price
CThostFtdcInvestorPositionDetailField["LastSettlementPrice"] = "float"
#Settlement price
CThostFtdcInvestorPositionDetailField["SettlementPrice"] = "float"
#Close volume
CThostFtdcInvestorPositionDetailField["CloseVolume"] = "int"
#Close amount
CThostFtdcInvestorPositionDetailField["CloseAmount"] = "float"
structDict['CThostFtdcInvestorPositionDetailField'] = CThostFtdcInvestorPositionDetailField
#Trading account password domain
CThostFtdcTradingAccountPasswordField = {}
#Broker ID
CThostFtdcTradingAccountPasswordField["BrokerID"] = "string"
#Investor account ID
CThostFtdcTradingAccountPasswordField["AccountID"] = "string"
#Password
CThostFtdcTradingAccountPasswordField["Password"] = "string"
#Currency ID
CThostFtdcTradingAccountPasswordField["CurrencyID"] = "string"
structDict['CThostFtdcTradingAccountPasswordField'] = CThostFtdcTradingAccountPasswordField
#Exchange market-data trader offer
CThostFtdcMDTraderOfferField = {}
#Exchange ID
CThostFtdcMDTraderOfferField["ExchangeID"] = "string"
#Exchange trader ID
CThostFtdcMDTraderOfferField["TraderID"] = "string"
#Participant ID
CThostFtdcMDTraderOfferField["ParticipantID"] = "string"
#Password
CThostFtdcMDTraderOfferField["Password"] = "string"
#Install ID
CThostFtdcMDTraderOfferField["InstallID"] = "int"
#Local order ID
CThostFtdcMDTraderOfferField["OrderLocalID"] = "string"
#Exchange trader connection status
CThostFtdcMDTraderOfferField["TraderConnectStatus"] = "char"
#Date the connect request was issued
CThostFtdcMDTraderOfferField["ConnectRequestDate"] = "string"
#Time the connect request was issued
CThostFtdcMDTraderOfferField["ConnectRequestTime"] = "string"
#Last report date
CThostFtdcMDTraderOfferField["LastReportDate"] = "string"
#Last report time
CThostFtdcMDTraderOfferField["LastReportTime"] = "string"
#Connect completion date
CThostFtdcMDTraderOfferField["ConnectDate"] = "string"
#Connect completion time
CThostFtdcMDTraderOfferField["ConnectTime"] = "string"
#Start date
CThostFtdcMDTraderOfferField["StartDate"] = "string"
#Start time
CThostFtdcMDTraderOfferField["StartTime"] = "string"
#Trading day
CThostFtdcMDTraderOfferField["TradingDay"] = "string"
#Broker ID
CThostFtdcMDTraderOfferField["BrokerID"] = "string"
#Max trade ID of this seat
CThostFtdcMDTraderOfferField["MaxTradeID"] = "string"
#Max order message reference of this seat
CThostFtdcMDTraderOfferField["MaxOrderMessageReference"] = "string"
#Business type
CThostFtdcMDTraderOfferField["BizType"] = "char"
structDict['CThostFtdcMDTraderOfferField'] = CThostFtdcMDTraderOfferField
#Query market-data trader offer
CThostFtdcQryMDTraderOfferField = {}
#Exchange ID
CThostFtdcQryMDTraderOfferField["ExchangeID"] = "string"
#Participant ID
CThostFtdcQryMDTraderOfferField["ParticipantID"] = "string"
#Exchange trader ID
CThostFtdcQryMDTraderOfferField["TraderID"] = "string"
structDict['CThostFtdcQryMDTraderOfferField'] = CThostFtdcQryMDTraderOfferField
#Query client notice
CThostFtdcQryNoticeField = {}
#Broker ID
CThostFtdcQryNoticeField["BrokerID"] = "string"
structDict['CThostFtdcQryNoticeField'] = CThostFtdcQryNoticeField
#Client notice
CThostFtdcNoticeField = {}
#Broker ID
CThostFtdcNoticeField["BrokerID"] = "string"
#Message body
CThostFtdcNoticeField["Content"] = "string"
#Broker notice content sequence label
CThostFtdcNoticeField["SequenceLabel"] = "string"
structDict['CThostFtdcNoticeField'] = CThostFtdcNoticeField
#User right
CThostFtdcUserRightField = {}
#Broker ID
CThostFtdcUserRightField["BrokerID"] = "string"
#User ID
CThostFtdcUserRightField["UserID"] = "string"
#User right type
CThostFtdcUserRightField["UserRightType"] = "char"
#Is forbidden
CThostFtdcUserRightField["IsForbidden"] = "int"
structDict['CThostFtdcUserRightField'] = CThostFtdcUserRightField
#Query settlement info confirm domain
CThostFtdcQrySettlementInfoConfirmField = {}
#Broker ID
CThostFtdcQrySettlementInfoConfirmField["BrokerID"] = "string"
#Investor ID
CThostFtdcQrySettlementInfoConfirmField["InvestorID"] = "string"
structDict['CThostFtdcQrySettlementInfoConfirmField'] = CThostFtdcQrySettlementInfoConfirmField
#Load settlement info
CThostFtdcLoadSettlementInfoField = {}
#Broker ID
CThostFtdcLoadSettlementInfoField["BrokerID"] = "string"
structDict['CThostFtdcLoadSettlementInfoField'] = CThostFtdcLoadSettlementInfoField
#Broker withdrawable-fund algorithm table
CThostFtdcBrokerWithdrawAlgorithmField = {}
#Broker ID
CThostFtdcBrokerWithdrawAlgorithmField["BrokerID"] = "string"
#Withdrawal algorithm
CThostFtdcBrokerWithdrawAlgorithmField["WithdrawAlgorithm"] = "char"
#Fund usage ratio
CThostFtdcBrokerWithdrawAlgorithmField["UsingRatio"] = "float"
#Whether withdrawable funds include close profit
CThostFtdcBrokerWithdrawAlgorithmField["IncludeCloseProfit"] = "char"
#Whether clients with no position and no trade today are subject to the withdrawal ratio limit
CThostFtdcBrokerWithdrawAlgorithmField["AllWithoutTrade"] = "char"
#Whether available funds include close profit
CThostFtdcBrokerWithdrawAlgorithmField["AvailIncludeCloseProfit"] = "char"
#Whether broker user events are enabled
CThostFtdcBrokerWithdrawAlgorithmField["IsBrokerUserEvent"] = "int"
#Currency ID
CThostFtdcBrokerWithdrawAlgorithmField["CurrencyID"] = "string"
#Currency mortgage ratio
CThostFtdcBrokerWithdrawAlgorithmField["FundMortgageRatio"] = "float"
#Balance algorithm
CThostFtdcBrokerWithdrawAlgorithmField["BalanceAlgorithm"] = "char"
structDict['CThostFtdcBrokerWithdrawAlgorithmField'] = CThostFtdcBrokerWithdrawAlgorithmField
#Trading account password update domain (V1)
CThostFtdcTradingAccountPasswordUpdateV1Field = {}
#Broker ID
CThostFtdcTradingAccountPasswordUpdateV1Field["BrokerID"] = "string"
#Investor ID
CThostFtdcTradingAccountPasswordUpdateV1Field["InvestorID"] = "string"
#Old password
CThostFtdcTradingAccountPasswordUpdateV1Field["OldPassword"] = "string"
#New password
CThostFtdcTradingAccountPasswordUpdateV1Field["NewPassword"] = "string"
structDict['CThostFtdcTradingAccountPasswordUpdateV1Field'] = CThostFtdcTradingAccountPasswordUpdateV1Field
#Trading account password update domain
CThostFtdcTradingAccountPasswordUpdateField = {}
#Broker ID
CThostFtdcTradingAccountPasswordUpdateField["BrokerID"] = "string"
#Investor account ID
CThostFtdcTradingAccountPasswordUpdateField["AccountID"] = "string"
#Old password
CThostFtdcTradingAccountPasswordUpdateField["OldPassword"] = "string"
#New password
CThostFtdcTradingAccountPasswordUpdateField["NewPassword"] = "string"
#Currency ID
CThostFtdcTradingAccountPasswordUpdateField["CurrencyID"] = "string"
structDict['CThostFtdcTradingAccountPasswordUpdateField'] = CThostFtdcTradingAccountPasswordUpdateField
#Query combination instrument leg
CThostFtdcQryCombinationLegField = {}
#Combination instrument ID
CThostFtdcQryCombinationLegField["CombInstrumentID"] = "string"
#Leg ID
CThostFtdcQryCombinationLegField["LegID"] = "int"
#Leg instrument ID
CThostFtdcQryCombinationLegField["LegInstrumentID"] = "string"
structDict['CThostFtdcQryCombinationLegField'] = CThostFtdcQryCombinationLegField
#Query data sync status
CThostFtdcQrySyncStatusField = {}
#Trading day
CThostFtdcQrySyncStatusField["TradingDay"] = "string"
structDict['CThostFtdcQrySyncStatusField'] = CThostFtdcQrySyncStatusField
#Leg of a combination instrument
CThostFtdcCombinationLegField = {}
#Combination instrument ID
CThostFtdcCombinationLegField["CombInstrumentID"] = "string"
#Leg ID
CThostFtdcCombinationLegField["LegID"] = "int"
#Leg instrument ID
CThostFtdcCombinationLegField["LegInstrumentID"] = "string"
#Direction
CThostFtdcCombinationLegField["Direction"] = "char"
#Leg multiple
CThostFtdcCombinationLegField["LegMultiple"] = "int"
#Implied level
CThostFtdcCombinationLegField["ImplyLevel"] = "int"
structDict['CThostFtdcCombinationLegField'] = CThostFtdcCombinationLegField
#Data sync status
CThostFtdcSyncStatusField = {}
#Trading day
CThostFtdcSyncStatusField["TradingDay"] = "string"
#Data sync status
CThostFtdcSyncStatusField["DataSyncStatus"] = "char"
structDict['CThostFtdcSyncStatusField'] = CThostFtdcSyncStatusField
#Query contact person
CThostFtdcQryLinkManField = {}
#Broker ID
CThostFtdcQryLinkManField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryLinkManField["InvestorID"] = "string"
structDict['CThostFtdcQryLinkManField'] = CThostFtdcQryLinkManField
#Contact person
CThostFtdcLinkManField = {}
#Broker ID
CThostFtdcLinkManField["BrokerID"] = "string"
#Investor ID
CThostFtdcLinkManField["InvestorID"] = "string"
#Person type
CThostFtdcLinkManField["PersonType"] = "char"
#ID document type
CThostFtdcLinkManField["IdentifiedCardType"] = "char"
#ID document number
CThostFtdcLinkManField["IdentifiedCardNo"] = "string"
#Name
CThostFtdcLinkManField["PersonName"] = "string"
#Telephone
CThostFtdcLinkManField["Telephone"] = "string"
#Mailing address
CThostFtdcLinkManField["Address"] = "string"
#Zip code
CThostFtdcLinkManField["ZipCode"] = "string"
#Priority
CThostFtdcLinkManField["Priority"] = "int"
#Account-opening (UOA) zip code
CThostFtdcLinkManField["UOAZipCode"] = "string"
#Full name
CThostFtdcLinkManField["PersonFullName"] = "string"
structDict['CThostFtdcLinkManField'] = CThostFtdcLinkManField
#Query broker user event
CThostFtdcQryBrokerUserEventField = {}
#Broker ID
CThostFtdcQryBrokerUserEventField["BrokerID"] = "string"
#User ID
CThostFtdcQryBrokerUserEventField["UserID"] = "string"
#User event type
CThostFtdcQryBrokerUserEventField["UserEventType"] = "char"
structDict['CThostFtdcQryBrokerUserEventField'] = CThostFtdcQryBrokerUserEventField
#Broker user event
CThostFtdcBrokerUserEventField = {}
#Broker ID
CThostFtdcBrokerUserEventField["BrokerID"] = "string"
#User ID
CThostFtdcBrokerUserEventField["UserID"] = "string"
#User event type
CThostFtdcBrokerUserEventField["UserEventType"] = "char"
#User event sequence number
CThostFtdcBrokerUserEventField["EventSequenceNo"] = "int"
#Event date
CThostFtdcBrokerUserEventField["EventDate"] = "string"
#Event time
CThostFtdcBrokerUserEventField["EventTime"] = "string"
#User event info
CThostFtdcBrokerUserEventField["UserEventInfo"] = "string"
#Investor ID
CThostFtdcBrokerUserEventField["InvestorID"] = "string"
#Instrument ID
CThostFtdcBrokerUserEventField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcBrokerUserEventField["ExchangeID"] = "string"
structDict['CThostFtdcBrokerUserEventField'] = CThostFtdcBrokerUserEventField
#Query contracted bank: request
CThostFtdcQryContractBankField = {}
#Broker ID
CThostFtdcQryContractBankField["BrokerID"] = "string"
#Bank ID
CThostFtdcQryContractBankField["BankID"] = "string"
#Bank branch ID
CThostFtdcQryContractBankField["BankBrchID"] = "string"
structDict['CThostFtdcQryContractBankField'] = CThostFtdcQryContractBankField
#Query contracted bank: response
CThostFtdcContractBankField = {}
#Broker ID
CThostFtdcContractBankField["BrokerID"] = "string"
#Bank ID
CThostFtdcContractBankField["BankID"] = "string"
#Bank branch ID
CThostFtdcContractBankField["BankBrchID"] = "string"
#Bank name
CThostFtdcContractBankField["BankName"] = "string"
structDict['CThostFtdcContractBankField'] = CThostFtdcContractBankField
#Investor combination position detail
CThostFtdcInvestorPositionCombineDetailField = {}
#Trading day
CThostFtdcInvestorPositionCombineDetailField["TradingDay"] = "string"
#Open date
CThostFtdcInvestorPositionCombineDetailField["OpenDate"] = "string"
#Exchange ID
CThostFtdcInvestorPositionCombineDetailField["ExchangeID"] = "string"
#Settlement ID
CThostFtdcInvestorPositionCombineDetailField["SettlementID"] = "int"
#Broker ID
CThostFtdcInvestorPositionCombineDetailField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorPositionCombineDetailField["InvestorID"] = "string"
#Combination trade ID
CThostFtdcInvestorPositionCombineDetailField["ComTradeID"] = "string"
#Ticket (trade) ID
CThostFtdcInvestorPositionCombineDetailField["TradeID"] = "string"
#Instrument ID
CThostFtdcInvestorPositionCombineDetailField["InstrumentID"] = "string"
#Hedge flag
CThostFtdcInvestorPositionCombineDetailField["HedgeFlag"] = "char"
#Direction
CThostFtdcInvestorPositionCombineDetailField["Direction"] = "char"
#Position volume
CThostFtdcInvestorPositionCombineDetailField["TotalAmt"] = "int"
#Investor margin
CThostFtdcInvestorPositionCombineDetailField["Margin"] = "float"
#Exchange margin
CThostFtdcInvestorPositionCombineDetailField["ExchMargin"] = "float"
#Margin rate (by money)
CThostFtdcInvestorPositionCombineDetailField["MarginRateByMoney"] = "float"
#Margin rate (by volume)
CThostFtdcInvestorPositionCombineDetailField["MarginRateByVolume"] = "float"
#Leg ID
CThostFtdcInvestorPositionCombineDetailField["LegID"] = "int"
#Leg multiple
CThostFtdcInvestorPositionCombineDetailField["LegMultiple"] = "int"
#Combination position instrument ID
CThostFtdcInvestorPositionCombineDetailField["CombInstrumentID"] = "string"
#Trade group ID
CThostFtdcInvestorPositionCombineDetailField["TradeGroupID"] = "int"
structDict['CThostFtdcInvestorPositionCombineDetailField'] = CThostFtdcInvestorPositionCombineDetailField
#Parked order
CThostFtdcParkedOrderField = {}
#Broker ID
CThostFtdcParkedOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcParkedOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcParkedOrderField["InstrumentID"] = "string"
#Order reference
CThostFtdcParkedOrderField["OrderRef"] = "string"
#User ID
CThostFtdcParkedOrderField["UserID"] = "string"
#Order price type
CThostFtdcParkedOrderField["OrderPriceType"] = "char"
#Direction
CThostFtdcParkedOrderField["Direction"] = "char"
#Combined offset flag
CThostFtdcParkedOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcParkedOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcParkedOrderField["LimitPrice"] = "float"
#Original volume
CThostFtdcParkedOrderField["VolumeTotalOriginal"] = "int"
#Time condition
CThostFtdcParkedOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcParkedOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcParkedOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcParkedOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcParkedOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcParkedOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcParkedOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcParkedOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcParkedOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcParkedOrderField["RequestID"] = "int"
#User force-close flag
CThostFtdcParkedOrderField["UserForceClose"] = "int"
#Exchange ID
CThostFtdcParkedOrderField["ExchangeID"] = "string"
#Parked order ID
CThostFtdcParkedOrderField["ParkedOrderID"] = "string"
#User type
CThostFtdcParkedOrderField["UserType"] = "char"
#Parked order status
CThostFtdcParkedOrderField["Status"] = "char"
#Error ID
CThostFtdcParkedOrderField["ErrorID"] = "int"
#Error message
CThostFtdcParkedOrderField["ErrorMsg"] = "string"
#Swap order flag
CThostFtdcParkedOrderField["IsSwapOrder"] = "int"
structDict['CThostFtdcParkedOrderField'] = CThostFtdcParkedOrderField
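
# A parked order carries several dozen fields. As a hedged convenience sketch
# (an illustration, not original CTP code), the helper below builds a
# zero-valued request template from the schema so a caller only fills in the
# fields it cares about; the per-type defaults are an assumption.
def blankRecord(structName):
    """Return a dict with every field of the schema set to a neutral default."""
    defaults = {"string": "", "char": "", "int": 0, "float": 0.0}
    return dict((field, defaults[typeName])
                for field, typeName in structDict[structName].items())

# Example:
# req = blankRecord('CThostFtdcParkedOrderField')
# req['InstrumentID'] = 'IF1706'
# req['LimitPrice'] = 3400.0
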
#Input parked order action
CThostFtdcParkedOrderActionField = {}
#Broker ID
CThostFtdcParkedOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcParkedOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcParkedOrderActionField["OrderActionRef"] = "int"
#Order reference
CThostFtdcParkedOrderActionField["OrderRef"] = "string"
#Request ID
CThostFtdcParkedOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcParkedOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcParkedOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcParkedOrderActionField["ExchangeID"] = "string"
#Order system ID
CThostFtdcParkedOrderActionField["OrderSysID"] = "string"
#Action flag
CThostFtdcParkedOrderActionField["ActionFlag"] = "char"
#Price
CThostFtdcParkedOrderActionField["LimitPrice"] = "float"
#Volume change
CThostFtdcParkedOrderActionField["VolumeChange"] = "int"
#User ID
CThostFtdcParkedOrderActionField["UserID"] = "string"
#Instrument ID
CThostFtdcParkedOrderActionField["InstrumentID"] = "string"
#Parked order action ID
CThostFtdcParkedOrderActionField["ParkedOrderActionID"] = "string"
#User type
CThostFtdcParkedOrderActionField["UserType"] = "char"
#Parked order action status
CThostFtdcParkedOrderActionField["Status"] = "char"
#Error ID
CThostFtdcParkedOrderActionField["ErrorID"] = "int"
#Error message
CThostFtdcParkedOrderActionField["ErrorMsg"] = "string"
structDict['CThostFtdcParkedOrderActionField'] = CThostFtdcParkedOrderActionField
#Query parked order
CThostFtdcQryParkedOrderField = {}
#Broker ID
CThostFtdcQryParkedOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryParkedOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryParkedOrderField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryParkedOrderField["ExchangeID"] = "string"
structDict['CThostFtdcQryParkedOrderField'] = CThostFtdcQryParkedOrderField
#Query parked order action
CThostFtdcQryParkedOrderActionField = {}
#Broker ID
CThostFtdcQryParkedOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryParkedOrderActionField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQryParkedOrderActionField["InstrumentID"] = "string"
#Exchange ID
CThostFtdcQryParkedOrderActionField["ExchangeID"] = "string"
structDict['CThostFtdcQryParkedOrderActionField'] = CThostFtdcQryParkedOrderActionField
#Remove parked order
CThostFtdcRemoveParkedOrderField = {}
#Broker ID
CThostFtdcRemoveParkedOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcRemoveParkedOrderField["InvestorID"] = "string"
#Parked order ID
CThostFtdcRemoveParkedOrderField["ParkedOrderID"] = "string"
structDict['CThostFtdcRemoveParkedOrderField'] = CThostFtdcRemoveParkedOrderField
#Remove parked order action
CThostFtdcRemoveParkedOrderActionField = {}
#Broker ID
CThostFtdcRemoveParkedOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcRemoveParkedOrderActionField["InvestorID"] = "string"
#Parked order action ID
CThostFtdcRemoveParkedOrderActionField["ParkedOrderActionID"] = "string"
structDict['CThostFtdcRemoveParkedOrderActionField'] = CThostFtdcRemoveParkedOrderActionField
#Investor withdrawable-fund algorithm table
CThostFtdcInvestorWithdrawAlgorithmField = {}
#Broker ID
CThostFtdcInvestorWithdrawAlgorithmField["BrokerID"] = "string"
#Investor range
CThostFtdcInvestorWithdrawAlgorithmField["InvestorRange"] = "char"
#Investor ID
CThostFtdcInvestorWithdrawAlgorithmField["InvestorID"] = "string"
#Withdrawable fund ratio
CThostFtdcInvestorWithdrawAlgorithmField["UsingRatio"] = "float"
#Currency ID
CThostFtdcInvestorWithdrawAlgorithmField["CurrencyID"] = "string"
#Currency mortgage ratio
CThostFtdcInvestorWithdrawAlgorithmField["FundMortgageRatio"] = "float"
structDict['CThostFtdcInvestorWithdrawAlgorithmField'] = CThostFtdcInvestorWithdrawAlgorithmField
#Query combination position detail
CThostFtdcQryInvestorPositionCombineDetailField = {}
#Broker ID
CThostFtdcQryInvestorPositionCombineDetailField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorPositionCombineDetailField["InvestorID"] = "string"
#Combination position instrument ID
CThostFtdcQryInvestorPositionCombineDetailField["CombInstrumentID"] = "string"
structDict['CThostFtdcQryInvestorPositionCombineDetailField'] = CThostFtdcQryInvestorPositionCombineDetailField
#Average trade price
CThostFtdcMarketDataAveragePriceField = {}
#Average price today
CThostFtdcMarketDataAveragePriceField["AveragePrice"] = "float"
structDict['CThostFtdcMarketDataAveragePriceField'] = CThostFtdcMarketDataAveragePriceField
#Verify investor password
CThostFtdcVerifyInvestorPasswordField = {}
#Broker ID
CThostFtdcVerifyInvestorPasswordField["BrokerID"] = "string"
#Investor ID
CThostFtdcVerifyInvestorPasswordField["InvestorID"] = "string"
#Password
CThostFtdcVerifyInvestorPasswordField["Password"] = "string"
structDict['CThostFtdcVerifyInvestorPasswordField'] = CThostFtdcVerifyInvestorPasswordField
#User IP
CThostFtdcUserIPField = {}
#Broker ID
CThostFtdcUserIPField["BrokerID"] = "string"
#User ID
CThostFtdcUserIPField["UserID"] = "string"
#IP address
CThostFtdcUserIPField["IPAddress"] = "string"
#IP address mask
CThostFtdcUserIPField["IPMask"] = "string"
#MAC address
CThostFtdcUserIPField["MacAddress"] = "string"
structDict['CThostFtdcUserIPField'] = CThostFtdcUserIPField
#Trading notice info
CThostFtdcTradingNoticeInfoField = {}
#Broker ID
CThostFtdcTradingNoticeInfoField["BrokerID"] = "string"
#Investor ID
CThostFtdcTradingNoticeInfoField["InvestorID"] = "string"
#Send time
CThostFtdcTradingNoticeInfoField["SendTime"] = "string"
#Message body
CThostFtdcTradingNoticeInfoField["FieldContent"] = "string"
#Sequence series number
CThostFtdcTradingNoticeInfoField["SequenceSeries"] = "int"
#Sequence number
CThostFtdcTradingNoticeInfoField["SequenceNo"] = "int"
structDict['CThostFtdcTradingNoticeInfoField'] = CThostFtdcTradingNoticeInfoField
#Trading notice
CThostFtdcTradingNoticeField = {}
#Broker ID
CThostFtdcTradingNoticeField["BrokerID"] = "string"
#Investor range
CThostFtdcTradingNoticeField["InvestorRange"] = "char"
#Investor ID
CThostFtdcTradingNoticeField["InvestorID"] = "string"
#Sequence series number
CThostFtdcTradingNoticeField["SequenceSeries"] = "int"
#User ID
CThostFtdcTradingNoticeField["UserID"] = "string"
#Send time
CThostFtdcTradingNoticeField["SendTime"] = "string"
#Sequence number
CThostFtdcTradingNoticeField["SequenceNo"] = "int"
#Message body
CThostFtdcTradingNoticeField["FieldContent"] = "string"
structDict['CThostFtdcTradingNoticeField'] = CThostFtdcTradingNoticeField
#Query trading notice
CThostFtdcQryTradingNoticeField = {}
#Broker ID
CThostFtdcQryTradingNoticeField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryTradingNoticeField["InvestorID"] = "string"
structDict['CThostFtdcQryTradingNoticeField'] = CThostFtdcQryTradingNoticeField
#Query error order
CThostFtdcQryErrOrderField = {}
#Broker ID
CThostFtdcQryErrOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryErrOrderField["InvestorID"] = "string"
structDict['CThostFtdcQryErrOrderField'] = CThostFtdcQryErrOrderField
#Error order
CThostFtdcErrOrderField = {}
#Broker ID
CThostFtdcErrOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcErrOrderField["InstrumentID"] = "string"
#Order reference
CThostFtdcErrOrderField["OrderRef"] = "string"
#User ID
CThostFtdcErrOrderField["UserID"] = "string"
#Order price type
CThostFtdcErrOrderField["OrderPriceType"] = "char"
#Direction
CThostFtdcErrOrderField["Direction"] = "char"
#Combined offset flag
CThostFtdcErrOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcErrOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcErrOrderField["LimitPrice"] = "float"
#Original volume
CThostFtdcErrOrderField["VolumeTotalOriginal"] = "int"
#Time condition
CThostFtdcErrOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcErrOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcErrOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcErrOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcErrOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcErrOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcErrOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcErrOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcErrOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcErrOrderField["RequestID"] = "int"
#User force-close flag
CThostFtdcErrOrderField["UserForceClose"] = "int"
#Error ID
CThostFtdcErrOrderField["ErrorID"] = "int"
#Error message
CThostFtdcErrOrderField["ErrorMsg"] = "string"
#Swap order flag
CThostFtdcErrOrderField["IsSwapOrder"] = "int"
#Exchange ID
CThostFtdcErrOrderField["ExchangeID"] = "string"
structDict['CThostFtdcErrOrderField'] = CThostFtdcErrOrderField
#Error conditional order
CThostFtdcErrorConditionalOrderField = {}
#Broker ID
CThostFtdcErrorConditionalOrderField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrorConditionalOrderField["InvestorID"] = "string"
#Instrument ID
CThostFtdcErrorConditionalOrderField["InstrumentID"] = "string"
#Order reference
CThostFtdcErrorConditionalOrderField["OrderRef"] = "string"
#User ID
CThostFtdcErrorConditionalOrderField["UserID"] = "string"
#Order price type
CThostFtdcErrorConditionalOrderField["OrderPriceType"] = "char"
#Direction
CThostFtdcErrorConditionalOrderField["Direction"] = "char"
#Combined offset flag
CThostFtdcErrorConditionalOrderField["CombOffsetFlag"] = "string"
#Combined hedge flag
CThostFtdcErrorConditionalOrderField["CombHedgeFlag"] = "string"
#Price
CThostFtdcErrorConditionalOrderField["LimitPrice"] = "float"
#Original volume
CThostFtdcErrorConditionalOrderField["VolumeTotalOriginal"] = "int"
#Time condition
CThostFtdcErrorConditionalOrderField["TimeCondition"] = "char"
#GTD date
CThostFtdcErrorConditionalOrderField["GTDDate"] = "string"
#Volume condition
CThostFtdcErrorConditionalOrderField["VolumeCondition"] = "char"
#Minimum volume
CThostFtdcErrorConditionalOrderField["MinVolume"] = "int"
#Contingent condition
CThostFtdcErrorConditionalOrderField["ContingentCondition"] = "char"
#Stop price
CThostFtdcErrorConditionalOrderField["StopPrice"] = "float"
#Force close reason
CThostFtdcErrorConditionalOrderField["ForceCloseReason"] = "char"
#Auto suspend flag
CThostFtdcErrorConditionalOrderField["IsAutoSuspend"] = "int"
#Business unit
CThostFtdcErrorConditionalOrderField["BusinessUnit"] = "string"
#Request ID
CThostFtdcErrorConditionalOrderField["RequestID"] = "int"
#Local order ID
CThostFtdcErrorConditionalOrderField["OrderLocalID"] = "string"
#Exchange ID
CThostFtdcErrorConditionalOrderField["ExchangeID"] = "string"
#Participant ID
CThostFtdcErrorConditionalOrderField["ParticipantID"] = "string"
#Client ID
CThostFtdcErrorConditionalOrderField["ClientID"] = "string"
#Instrument ID on the exchange
CThostFtdcErrorConditionalOrderField["ExchangeInstID"] = "string"
#Exchange trader ID
CThostFtdcErrorConditionalOrderField["TraderID"] = "string"
#Install ID
CThostFtdcErrorConditionalOrderField["InstallID"] = "int"
#Order submit status
CThostFtdcErrorConditionalOrderField["OrderSubmitStatus"] = "char"
#Notify sequence number
CThostFtdcErrorConditionalOrderField["NotifySequence"] = "int"
#Trading day
CThostFtdcErrorConditionalOrderField["TradingDay"] = "string"
#Settlement ID
CThostFtdcErrorConditionalOrderField["SettlementID"] = "int"
#Order system ID
CThostFtdcErrorConditionalOrderField["OrderSysID"] = "string"
#Order source
CThostFtdcErrorConditionalOrderField["OrderSource"] = "char"
#Order status
CThostFtdcErrorConditionalOrderField["OrderStatus"] = "char"
#Order type
CThostFtdcErrorConditionalOrderField["OrderType"] = "char"
#Volume traded today
CThostFtdcErrorConditionalOrderField["VolumeTraded"] = "int"
#Volume remaining
CThostFtdcErrorConditionalOrderField["VolumeTotal"] = "int"
#Insert date
CThostFtdcErrorConditionalOrderField["InsertDate"] = "string"
#Insert time
CThostFtdcErrorConditionalOrderField["InsertTime"] = "string"
#Active time
CThostFtdcErrorConditionalOrderField["ActiveTime"] = "string"
#Suspend time
CThostFtdcErrorConditionalOrderField["SuspendTime"] = "string"
#Last update time
CThostFtdcErrorConditionalOrderField["UpdateTime"] = "string"
#Cancel time
CThostFtdcErrorConditionalOrderField["CancelTime"] = "string"
#Last-modifying exchange trader ID
CThostFtdcErrorConditionalOrderField["ActiveTraderID"] = "string"
#Clearing member ID
CThostFtdcErrorConditionalOrderField["ClearingPartID"] = "string"
#Sequence number
CThostFtdcErrorConditionalOrderField["SequenceNo"] = "int"
#Front ID
CThostFtdcErrorConditionalOrderField["FrontID"] = "int"
#Session ID
CThostFtdcErrorConditionalOrderField["SessionID"] = "int"
#User product info
CThostFtdcErrorConditionalOrderField["UserProductInfo"] = "string"
#Status message
CThostFtdcErrorConditionalOrderField["StatusMsg"] = "string"
#User force-close flag
CThostFtdcErrorConditionalOrderField["UserForceClose"] = "int"
#Active user ID
CThostFtdcErrorConditionalOrderField["ActiveUserID"] = "string"
#Broker order sequence number
CThostFtdcErrorConditionalOrderField["BrokerOrderSeq"] = "int"
#Related order
CThostFtdcErrorConditionalOrderField["RelativeOrderSysID"] = "string"
#ZCE total traded volume
CThostFtdcErrorConditionalOrderField["ZCETotalTradedVolume"] = "int"
#Error ID
CThostFtdcErrorConditionalOrderField["ErrorID"] = "int"
#Error message
CThostFtdcErrorConditionalOrderField["ErrorMsg"] = "string"
#Swap order flag
CThostFtdcErrorConditionalOrderField["IsSwapOrder"] = "int"
#Branch ID
CThostFtdcErrorConditionalOrderField["BranchID"] = "string"
structDict['CThostFtdcErrorConditionalOrderField'] = CThostFtdcErrorConditionalOrderField
#Query error order action
CThostFtdcQryErrOrderActionField = {}
#Broker ID
CThostFtdcQryErrOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryErrOrderActionField["InvestorID"] = "string"
structDict['CThostFtdcQryErrOrderActionField'] = CThostFtdcQryErrOrderActionField
#Error order action
CThostFtdcErrOrderActionField = {}
#Broker ID
CThostFtdcErrOrderActionField["BrokerID"] = "string"
#Investor ID
CThostFtdcErrOrderActionField["InvestorID"] = "string"
#Order action reference
CThostFtdcErrOrderActionField["OrderActionRef"] = "int"
#Order reference
CThostFtdcErrOrderActionField["OrderRef"] = "string"
#Request ID
CThostFtdcErrOrderActionField["RequestID"] = "int"
#Front ID
CThostFtdcErrOrderActionField["FrontID"] = "int"
#Session ID
CThostFtdcErrOrderActionField["SessionID"] = "int"
#Exchange ID
CThostFtdcErrOrderActionField["ExchangeID"] = "string"
#Order system ID
CThostFtdcErrOrderActionField["OrderSysID"] = "string"
#Action flag
CThostFtdcErrOrderActionField["ActionFlag"] = "char"
#Price
CThostFtdcErrOrderActionField["LimitPrice"] = "float"
#Volume change
CThostFtdcErrOrderActionField["VolumeChange"] = "int"
#Action date
CThostFtdcErrOrderActionField["ActionDate"] = "string"
#Action time
CThostFtdcErrOrderActionField["ActionTime"] = "string"
#Exchange trader ID
CThostFtdcErrOrderActionField["TraderID"] = "string"
#Install ID
CThostFtdcErrOrderActionField["InstallID"] = "int"
#Local order ID
CThostFtdcErrOrderActionField["OrderLocalID"] = "string"
#Local action ID
CThostFtdcErrOrderActionField["ActionLocalID"] = "string"
#Participant ID
CThostFtdcErrOrderActionField["ParticipantID"] = "string"
#Client ID
CThostFtdcErrOrderActionField["ClientID"] = "string"
#Business unit
CThostFtdcErrOrderActionField["BusinessUnit"] = "string"
#Order action status
CThostFtdcErrOrderActionField["OrderActionStatus"] = "char"
#User ID
CThostFtdcErrOrderActionField["UserID"] = "string"
#Status message
CThostFtdcErrOrderActionField["StatusMsg"] = "string"
#Instrument ID
CThostFtdcErrOrderActionField["InstrumentID"] = "string"
#Branch ID
CThostFtdcErrOrderActionField["BranchID"] = "string"
#Error ID
CThostFtdcErrOrderActionField["ErrorID"] = "int"
#Error message
CThostFtdcErrOrderActionField["ErrorMsg"] = "string"
structDict['CThostFtdcErrOrderActionField'] = CThostFtdcErrOrderActionField
#Query exchange status
CThostFtdcQryExchangeSequenceField = {}
#Exchange ID
CThostFtdcQryExchangeSequenceField["ExchangeID"] = "string"
structDict['CThostFtdcQryExchangeSequenceField'] = CThostFtdcQryExchangeSequenceField
#Exchange status
CThostFtdcExchangeSequenceField = {}
#Exchange ID
CThostFtdcExchangeSequenceField["ExchangeID"] = "string"
#Sequence number
CThostFtdcExchangeSequenceField["SequenceNo"] = "int"
#Market status
CThostFtdcExchangeSequenceField["MarketStatus"] = "char"
structDict['CThostFtdcExchangeSequenceField'] = CThostFtdcExchangeSequenceField
#Query max order volume for a given price
CThostFtdcQueryMaxOrderVolumeWithPriceField = {}
#Broker ID
CThostFtdcQueryMaxOrderVolumeWithPriceField["BrokerID"] = "string"
#Investor ID
CThostFtdcQueryMaxOrderVolumeWithPriceField["InvestorID"] = "string"
#Instrument ID
CThostFtdcQueryMaxOrderVolumeWithPriceField["InstrumentID"] = "string"
#Direction
CThostFtdcQueryMaxOrderVolumeWithPriceField["Direction"] = "char"
#Offset flag
CThostFtdcQueryMaxOrderVolumeWithPriceField["OffsetFlag"] = "char"
#Hedge flag
CThostFtdcQueryMaxOrderVolumeWithPriceField["HedgeFlag"] = "char"
#Maximum allowed order volume
CThostFtdcQueryMaxOrderVolumeWithPriceField["MaxVolume"] = "int"
#Order price
CThostFtdcQueryMaxOrderVolumeWithPriceField["Price"] = "float"
#Exchange ID
CThostFtdcQueryMaxOrderVolumeWithPriceField["ExchangeID"] = "string"
structDict['CThostFtdcQueryMaxOrderVolumeWithPriceField'] = CThostFtdcQueryMaxOrderVolumeWithPriceField
#Query broker trading parameters
CThostFtdcQryBrokerTradingParamsField = {}
#Broker ID
CThostFtdcQryBrokerTradingParamsField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryBrokerTradingParamsField["InvestorID"] = "string"
#Currency ID
CThostFtdcQryBrokerTradingParamsField["CurrencyID"] = "string"
structDict['CThostFtdcQryBrokerTradingParamsField'] = CThostFtdcQryBrokerTradingParamsField
#Broker trading parameters
CThostFtdcBrokerTradingParamsField = {}
#Broker ID
CThostFtdcBrokerTradingParamsField["BrokerID"] = "string"
#Investor ID
CThostFtdcBrokerTradingParamsField["InvestorID"] = "string"
#Margin price type
CThostFtdcBrokerTradingParamsField["MarginPriceType"] = "char"
#Profit algorithm
CThostFtdcBrokerTradingParamsField["Algorithm"] = "char"
#Whether available funds include close profit
CThostFtdcBrokerTradingParamsField["AvailIncludeCloseProfit"] = "char"
#Currency ID
CThostFtdcBrokerTradingParamsField["CurrencyID"] = "string"
#Option premium price type
CThostFtdcBrokerTradingParamsField["OptionRoyaltyPriceType"] = "char"
structDict['CThostFtdcBrokerTradingParamsField'] = CThostFtdcBrokerTradingParamsField
#Query broker trading algorithms
CThostFtdcQryBrokerTradingAlgosField = {}
#Broker ID
CThostFtdcQryBrokerTradingAlgosField["BrokerID"] = "string"
#Exchange ID
CThostFtdcQryBrokerTradingAlgosField["ExchangeID"] = "string"
#Instrument ID
CThostFtdcQryBrokerTradingAlgosField["InstrumentID"] = "string"
structDict['CThostFtdcQryBrokerTradingAlgosField'] = CThostFtdcQryBrokerTradingAlgosField
#Broker trading algorithms
CThostFtdcBrokerTradingAlgosField = {}
#Broker ID
CThostFtdcBrokerTradingAlgosField["BrokerID"] = "string"
#Exchange ID
CThostFtdcBrokerTradingAlgosField["ExchangeID"] = "string"
#Instrument ID
CThostFtdcBrokerTradingAlgosField["InstrumentID"] = "string"
#Position handling algorithm ID
CThostFtdcBrokerTradingAlgosField["HandlePositionAlgoID"] = "char"
#Margin-rate lookup algorithm ID
CThostFtdcBrokerTradingAlgosField["FindMarginRateAlgoID"] = "char"
#Trading account handling algorithm ID
CThostFtdcBrokerTradingAlgosField["HandleTradingAccountAlgoID"] = "char"
structDict['CThostFtdcBrokerTradingAlgosField'] = CThostFtdcBrokerTradingAlgosField
#Query broker deposit
CThostFtdcQueryBrokerDepositField = {}
#Broker ID
CThostFtdcQueryBrokerDepositField["BrokerID"] = "string"
#Exchange ID
CThostFtdcQueryBrokerDepositField["ExchangeID"] = "string"
structDict['CThostFtdcQueryBrokerDepositField'] = CThostFtdcQueryBrokerDepositField
#Broker deposit
CThostFtdcBrokerDepositField = {}
#Trading day
CThostFtdcBrokerDepositField["TradingDay"] = "string"
#Broker ID
CThostFtdcBrokerDepositField["BrokerID"] = "string"
#Participant ID
CThostFtdcBrokerDepositField["ParticipantID"] = "string"
#Exchange ID
CThostFtdcBrokerDepositField["ExchangeID"] = "string"
#Previous settlement balance
CThostFtdcBrokerDepositField["PreBalance"] = "float"
#Current total margin
CThostFtdcBrokerDepositField["CurrMargin"] = "float"
#Close profit
CThostFtdcBrokerDepositField["CloseProfit"] = "float"
#Futures settlement balance
CThostFtdcBrokerDepositField["Balance"] = "float"
#Deposit amount
CThostFtdcBrokerDepositField["Deposit"] = "float"
#Withdrawal amount
CThostFtdcBrokerDepositField["Withdraw"] = "float"
#Available funds
CThostFtdcBrokerDepositField["Available"] = "float"
#Basic reserve
CThostFtdcBrokerDepositField["Reserve"] = "float"
#Frozen margin
CThostFtdcBrokerDepositField["FrozenMargin"] = "float"
structDict['CThostFtdcBrokerDepositField'] = CThostFtdcBrokerDepositField
#Query CFMMC (margin monitoring system) broker key
CThostFtdcQryCFMMCBrokerKeyField = {}
#Broker ID
CThostFtdcQryCFMMCBrokerKeyField["BrokerID"] = "string"
structDict['CThostFtdcQryCFMMCBrokerKeyField'] = CThostFtdcQryCFMMCBrokerKeyField
#CFMMC broker key
CThostFtdcCFMMCBrokerKeyField = {}
#Broker ID
CThostFtdcCFMMCBrokerKeyField["BrokerID"] = "string"
#Broker unified code
CThostFtdcCFMMCBrokerKeyField["ParticipantID"] = "string"
#Key creation date
CThostFtdcCFMMCBrokerKeyField["CreateDate"] = "string"
#Key creation time
CThostFtdcCFMMCBrokerKeyField["CreateTime"] = "string"
#Key ID
CThostFtdcCFMMCBrokerKeyField["KeyID"] = "int"
#Current dynamic key
CThostFtdcCFMMCBrokerKeyField["CurrentKey"] = "string"
#Dynamic key kind
CThostFtdcCFMMCBrokerKeyField["KeyKind"] = "char"
structDict['CThostFtdcCFMMCBrokerKeyField'] = CThostFtdcCFMMCBrokerKeyField
#CFMMC trading account key
CThostFtdcCFMMCTradingAccountKeyField = {}
#Broker ID
CThostFtdcCFMMCTradingAccountKeyField["BrokerID"] = "string"
#Broker unified code
CThostFtdcCFMMCTradingAccountKeyField["ParticipantID"] = "string"
#Investor account ID
CThostFtdcCFMMCTradingAccountKeyField["AccountID"] = "string"
#Key ID
CThostFtdcCFMMCTradingAccountKeyField["KeyID"] = "int"
#Current dynamic key
CThostFtdcCFMMCTradingAccountKeyField["CurrentKey"] = "string"
structDict['CThostFtdcCFMMCTradingAccountKeyField'] = CThostFtdcCFMMCTradingAccountKeyField
#Request to query the CFMMC trading account key
CThostFtdcQryCFMMCTradingAccountKeyField = {}
#Broker ID
CThostFtdcQryCFMMCTradingAccountKeyField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryCFMMCTradingAccountKeyField["InvestorID"] = "string"
structDict['CThostFtdcQryCFMMCTradingAccountKeyField'] = CThostFtdcQryCFMMCTradingAccountKeyField
#User OTP (one-time password) token parameters
CThostFtdcBrokerUserOTPParamField = {}
#Broker ID
CThostFtdcBrokerUserOTPParamField["BrokerID"] = "string"
#User ID
CThostFtdcBrokerUserOTPParamField["UserID"] = "string"
#OTP vendor ID
CThostFtdcBrokerUserOTPParamField["OTPVendorsID"] = "string"
#OTP serial number
CThostFtdcBrokerUserOTPParamField["SerialNumber"] = "string"
#Token auth key
CThostFtdcBrokerUserOTPParamField["AuthKey"] = "string"
#Last drift value
CThostFtdcBrokerUserOTPParamField["LastDrift"] = "int"
#Last success value
CThostFtdcBrokerUserOTPParamField["LastSuccess"] = "int"
#OTP type
CThostFtdcBrokerUserOTPParamField["OTPType"] = "char"
structDict['CThostFtdcBrokerUserOTPParamField'] = CThostFtdcBrokerUserOTPParamField
#Manually sync user OTP
CThostFtdcManualSyncBrokerUserOTPField = {}
#Broker ID
CThostFtdcManualSyncBrokerUserOTPField["BrokerID"] = "string"
#User ID
CThostFtdcManualSyncBrokerUserOTPField["UserID"] = "string"
#OTP type
CThostFtdcManualSyncBrokerUserOTPField["OTPType"] = "char"
#First OTP
CThostFtdcManualSyncBrokerUserOTPField["FirstOTP"] = "string"
#Second OTP
CThostFtdcManualSyncBrokerUserOTPField["SecondOTP"] = "string"
structDict['CThostFtdcManualSyncBrokerUserOTPField'] = CThostFtdcManualSyncBrokerUserOTPField
#Investor commission rate template
CThostFtdcCommRateModelField = {}
#Broker ID
CThostFtdcCommRateModelField["BrokerID"] = "string"
#Commission model ID
CThostFtdcCommRateModelField["CommModelID"] = "string"
#Model name
CThostFtdcCommRateModelField["CommModelName"] = "string"
structDict['CThostFtdcCommRateModelField'] = CThostFtdcCommRateModelField
#Request to query investor commission rate templates
CThostFtdcQryCommRateModelField = {}
#Broker ID
CThostFtdcQryCommRateModelField["BrokerID"] = "string"
#Commission model ID
CThostFtdcQryCommRateModelField["CommModelID"] = "string"
structDict['CThostFtdcQryCommRateModelField'] = CThostFtdcQryCommRateModelField
#Investor margin rate template
CThostFtdcMarginModelField = {}
#Broker ID
CThostFtdcMarginModelField["BrokerID"] = "string"
#Margin model ID
CThostFtdcMarginModelField["MarginModelID"] = "string"
#Model name
CThostFtdcMarginModelField["MarginModelName"] = "string"
structDict['CThostFtdcMarginModelField'] = CThostFtdcMarginModelField
#Request to query investor margin rate templates
CThostFtdcQryMarginModelField = {}
#Broker ID
CThostFtdcQryMarginModelField["BrokerID"] = "string"
#Margin model ID
CThostFtdcQryMarginModelField["MarginModelID"] = "string"
structDict['CThostFtdcQryMarginModelField'] = CThostFtdcQryMarginModelField
#Warehouse receipt offset info
CThostFtdcEWarrantOffsetField = {}
#Trading day
CThostFtdcEWarrantOffsetField["TradingDay"] = "string"
#Broker ID
CThostFtdcEWarrantOffsetField["BrokerID"] = "string"
#Investor ID
CThostFtdcEWarrantOffsetField["InvestorID"] = "string"
#Exchange ID
CThostFtdcEWarrantOffsetField["ExchangeID"] = "string"
#Instrument ID
CThostFtdcEWarrantOffsetField["InstrumentID"] = "string"
#Direction
CThostFtdcEWarrantOffsetField["Direction"] = "char"
#Hedge flag
CThostFtdcEWarrantOffsetField["HedgeFlag"] = "char"
#Volume
CThostFtdcEWarrantOffsetField["Volume"] = "int"
structDict['CThostFtdcEWarrantOffsetField'] = CThostFtdcEWarrantOffsetField
#Query warehouse receipt offset info
CThostFtdcQryEWarrantOffsetField = {}
#Broker ID
CThostFtdcQryEWarrantOffsetField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryEWarrantOffsetField["InvestorID"] = "string"
#Exchange ID
CThostFtdcQryEWarrantOffsetField["ExchangeID"] = "string"
#Instrument ID
CThostFtdcQryEWarrantOffsetField["InstrumentID"] = "string"
structDict['CThostFtdcQryEWarrantOffsetField'] = CThostFtdcQryEWarrantOffsetField
#Query investor intra-product/cross-product margin
CThostFtdcQryInvestorProductGroupMarginField = {}
#Broker ID
CThostFtdcQryInvestorProductGroupMarginField["BrokerID"] = "string"
#Investor ID
CThostFtdcQryInvestorProductGroupMarginField["InvestorID"] = "string"
#Intra-product/cross-product group ID
CThostFtdcQryInvestorProductGroupMarginField["ProductGroupID"] = "string"
#Hedge flag
CThostFtdcQryInvestorProductGroupMarginField["HedgeFlag"] = "char"
structDict['CThostFtdcQryInvestorProductGroupMarginField'] = CThostFtdcQryInvestorProductGroupMarginField
#Investor intra-product/cross-product margin
CThostFtdcInvestorProductGroupMarginField = {}
#Intra-product/cross-product group ID
CThostFtdcInvestorProductGroupMarginField["ProductGroupID"] = "string"
#Broker ID
CThostFtdcInvestorProductGroupMarginField["BrokerID"] = "string"
#Investor ID
CThostFtdcInvestorProductGroupMarginField["InvestorID"] = "string"
#Trading day
CThostFtdcInvestorProductGroupMarginField["TradingDay"] = "string"
#Settlement ID
CThostFtdcInvestorProductGroupMarginField["SettlementID"] = "int"
#Frozen margin
CThostFtdcInvestorProductGroupMarginField["FrozenMargin"] = "float"
#Long frozen margin
CThostFtdcInvestorProductGroupMarginField["LongFrozenMargin"] = "float"
#Short frozen margin
CThostFtdcInvestorProductGroupMarginField["ShortFrozenMargin"] = "float"
#Margin in use
CThostFtdcInvestorProductGroupMarginField["UseMargin"] = "float"
#Long margin in use
CThostFtdcInvestorProductGroupMarginField["LongUseMargin"] = "float"
#Short margin in use
CThostFtdcInvestorProductGroupMarginField["ShortUseMargin"] = "float"
#Exchange margin
CThostFtdcInvestorProductGroupMarginField["ExchMargin"] = "float"
#Exchange long margin
CThostFtdcInvestorProductGroupMarginField["LongExchMargin"] = "float"
#Exchange short margin
CThostFtdcInvestorProductGroupMarginField["ShortExchMargin"] = "float"
#Close profit
CThostFtdcInvestorProductGroupMarginField["CloseProfit"] = "float"
#Frozen commission
CThostFtdcInvestorProductGroupMarginField["FrozenCommission"] = "float"
#Commission
CThostFtdcInvestorProductGroupMarginField["Commission"] = "float"
#Frozen cash
CThostFtdcInvestorProductGroupMarginField["FrozenCash"] = "float"
#Cash in
CThostFtdcInvestorProductGroupMarginField["CashIn"] = "float"
#Position profit
CThostFtdcInvestorProductGroupMarginField["PositionProfit"] = "float"
#Total offset amount
CThostFtdcInvestorProductGroupMarginField["OffsetAmount"] = "float"
#Long offset amount
CThostFtdcInvestorProductGroupMarginField["LongOffsetAmount"] = "float"
#Short offset amount
CThostFtdcInvestorProductGroupMarginField["ShortOffsetAmount"] = "float"
#Exchange total offset amount
CThostFtdcInvestorProductGroupMarginField["ExchOffsetAmount"] = "float"
#Exchange long offset amount
CThostFtdcInvestorProductGroupMarginField["LongExchOffsetAmount"] = "float"
#Exchange short offset amount
CThostFtdcInvestorProductGroupMarginField["ShortExchOffsetAmount"] = "float"
#Hedge flag
CThostFtdcInvestorProductGroupMarginField["HedgeFlag"] = "char"
structDict['CThostFtdcInvestorProductGroupMarginField'] = CThostFtdcInvestorProductGroupMarginField
#Query monitoring center user token
CThostFtdcQueryCFMMCTradingAccountTokenField = {}
#Broker ID
CThostFtdcQueryCFMMCTradingAccountTokenField["BrokerID"] = "string"
#Investor ID
CThostFtdcQueryCFMMCTradingAccountTokenField["InvestorID"] = "string"
structDict['CThostFtdcQueryCFMMCTradingAccountTokenField'] = CThostFtdcQueryCFMMCTradingAccountTokenField
#Monitoring center user token
CThostFtdcCFMMCTradingAccountTokenField = {}
#Broker ID
CThostFtdcCFMMCTradingAccountTokenField["BrokerID"] = "string"
#Broker unified code
CThostFtdcCFMMCTradingAccountTokenField["ParticipantID"] = "string"
#Investor account ID
CThostFtdcCFMMCTradingAccountTokenField["AccountID"] = "string"
#Key ID
CThostFtdcCFMMCTradingAccountTokenField["KeyID"] = "int"
#Dynamic token
CThostFtdcCFMMCTradingAccountTokenField["Token"] = "string"
structDict['CThostFtdcCFMMCTradingAccountTokenField'] = CThostFtdcCFMMCTradingAccountTokenField
#Investor instruction right
CThostFtdcInstructionRightField = {}
#Broker ID
CThostFtdcInstructionRightField["BrokerID"] = "string"
#Exchange ID
CThostFtdcInstructionRightField["ExchangeID"] = "string"
#Investor ID
CThostFtdcInstructionRightField["InvestorID"] = "string"
#Instruction right type
CThostFtdcInstructionRightField["InstructionRight"] = "char"
#Is forbidden
CThostFtdcInstructionRightField["IsForbidden"] = "int"
structDict['CThostFtdcInstructionRightField'] = CThostFtdcInstructionRightField
#Query product group
CThostFtdcQryProductGroupField = {}
#Product ID
CThostFtdcQryProductGroupField["ProductID"] = "string"
#Exchange ID
CThostFtdcQryProductGroupField["ExchangeID"] = "string"
structDict['CThostFtdcQryProductGroupField'] = CThostFtdcQryProductGroupField
#Product group for investor intra-product/cross-product margin
CThostFtdcProductGroupField = {}
#Product ID
CThostFtdcProductGroupField["ProductID"] = "string"
#Exchange ID
CThostFtdcProductGroupField["ExchangeID"] = "string"
#Product group ID
CThostFtdcProductGroupField["ProductGroupID"] = "string"
structDict['CThostFtdcProductGroupField'] = CThostFtdcProductGroupField
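
# Because every schema registers itself in structDict, generic tooling can
# search across all of them. This small helper is illustrative only (an
# assumption, not original CTP code): it lists the schemas that declare a
# given field, e.g. every struct carrying a 'ProductGroupID'.
def structsWithField(fieldName):
    """Return the names of all registered schemas that contain fieldName."""
    return [name for name, schema in structDict.items() if fieldName in schema]

# Example:
# structsWithField('ProductGroupID')
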
#Account-opening request (bank-futures transfer system)
CThostFtdcReqOpenAccountField = {}
#Business function code
CThostFtdcReqOpenAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcReqOpenAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcReqOpenAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqOpenAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqOpenAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqOpenAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcReqOpenAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqOpenAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqOpenAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqOpenAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqOpenAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcReqOpenAccountField["SessionID"] = "int"
#Customer name
CThostFtdcReqOpenAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcReqOpenAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcReqOpenAccountField["IdentifiedCardNo"] = "string"
#Gender
CThostFtdcReqOpenAccountField["Gender"] = "char"
#Country code
CThostFtdcReqOpenAccountField["CountryCode"] = "string"
#Customer type
CThostFtdcReqOpenAccountField["CustType"] = "char"
#Address
CThostFtdcReqOpenAccountField["Address"] = "string"
#Zip code
CThostFtdcReqOpenAccountField["ZipCode"] = "string"
#Telephone number
CThostFtdcReqOpenAccountField["Telephone"] = "string"
#Mobile phone
CThostFtdcReqOpenAccountField["MobilePhone"] = "string"
#Fax
CThostFtdcReqOpenAccountField["Fax"] = "string"
#E-mail
CThostFtdcReqOpenAccountField["EMail"] = "string"
#Money account status
CThostFtdcReqOpenAccountField["MoneyAccountStatus"] = "char"
#Bank account
CThostFtdcReqOpenAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcReqOpenAccountField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqOpenAccountField["AccountID"] = "string"
#Futures password
CThostFtdcReqOpenAccountField["Password"] = "string"
#Install ID
CThostFtdcReqOpenAccountField["InstallID"] = "int"
#Verify-customer-ID-number flag
CThostFtdcReqOpenAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqOpenAccountField["CurrencyID"] = "string"
#Cash/remittance exchange flag
CThostFtdcReqOpenAccountField["CashExchangeCode"] = "char"
#Digest
CThostFtdcReqOpenAccountField["Digest"] = "string"
#Bank account type
CThostFtdcReqOpenAccountField["BankAccType"] = "char"
#Channel flag
CThostFtdcReqOpenAccountField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcReqOpenAccountField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcReqOpenAccountField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcReqOpenAccountField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcReqOpenAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqOpenAccountField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcReqOpenAccountField["OperNo"] = "string"
#Transaction ID
CThostFtdcReqOpenAccountField["TID"] = "int"
#User ID
CThostFtdcReqOpenAccountField["UserID"] = "string"
structDict['CThostFtdcReqOpenAccountField'] = CThostFtdcReqOpenAccountField
#Account-cancellation request (bank-futures transfer system)
CThostFtdcReqCancelAccountField = {}
#Business function code
CThostFtdcReqCancelAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcReqCancelAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcReqCancelAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqCancelAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqCancelAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqCancelAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcReqCancelAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqCancelAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqCancelAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqCancelAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqCancelAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcReqCancelAccountField["SessionID"] = "int"
#Customer name
CThostFtdcReqCancelAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcReqCancelAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcReqCancelAccountField["IdentifiedCardNo"] = "string"
#Gender
CThostFtdcReqCancelAccountField["Gender"] = "char"
#Country code
CThostFtdcReqCancelAccountField["CountryCode"] = "string"
#Customer type
CThostFtdcReqCancelAccountField["CustType"] = "char"
#Address
CThostFtdcReqCancelAccountField["Address"] = "string"
#Zip code
CThostFtdcReqCancelAccountField["ZipCode"] = "string"
#Telephone number
CThostFtdcReqCancelAccountField["Telephone"] = "string"
#Mobile phone
CThostFtdcReqCancelAccountField["MobilePhone"] = "string"
#Fax
CThostFtdcReqCancelAccountField["Fax"] = "string"
#E-mail
CThostFtdcReqCancelAccountField["EMail"] = "string"
#Money account status
CThostFtdcReqCancelAccountField["MoneyAccountStatus"] = "char"
#Bank account
CThostFtdcReqCancelAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcReqCancelAccountField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqCancelAccountField["AccountID"] = "string"
#Futures password
CThostFtdcReqCancelAccountField["Password"] = "string"
#Install ID
CThostFtdcReqCancelAccountField["InstallID"] = "int"
#Verify-customer-ID-number flag
CThostFtdcReqCancelAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqCancelAccountField["CurrencyID"] = "string"
#Cash/remittance exchange flag
CThostFtdcReqCancelAccountField["CashExchangeCode"] = "char"
#Digest
CThostFtdcReqCancelAccountField["Digest"] = "string"
#Bank account type
CThostFtdcReqCancelAccountField["BankAccType"] = "char"
#Channel flag
CThostFtdcReqCancelAccountField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcReqCancelAccountField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcReqCancelAccountField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcReqCancelAccountField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcReqCancelAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqCancelAccountField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcReqCancelAccountField["OperNo"] = "string"
#Transaction ID
CThostFtdcReqCancelAccountField["TID"] = "int"
#User ID
CThostFtdcReqCancelAccountField["UserID"] = "string"
structDict['CThostFtdcReqCancelAccountField'] = CThostFtdcReqCancelAccountField
#Bank account change request
CThostFtdcReqChangeAccountField = {}
#Business function code
CThostFtdcReqChangeAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcReqChangeAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcReqChangeAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqChangeAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqChangeAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqChangeAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcReqChangeAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqChangeAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqChangeAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqChangeAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqChangeAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcReqChangeAccountField["SessionID"] = "int"
#Customer name
CThostFtdcReqChangeAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcReqChangeAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcReqChangeAccountField["IdentifiedCardNo"] = "string"
#Gender
CThostFtdcReqChangeAccountField["Gender"] = "char"
#Country code
CThostFtdcReqChangeAccountField["CountryCode"] = "string"
#Customer type
CThostFtdcReqChangeAccountField["CustType"] = "char"
#Address
CThostFtdcReqChangeAccountField["Address"] = "string"
#Zip code
CThostFtdcReqChangeAccountField["ZipCode"] = "string"
#Telephone number
CThostFtdcReqChangeAccountField["Telephone"] = "string"
#Mobile phone
CThostFtdcReqChangeAccountField["MobilePhone"] = "string"
#Fax
CThostFtdcReqChangeAccountField["Fax"] = "string"
#E-mail
CThostFtdcReqChangeAccountField["EMail"] = "string"
#Money account status
CThostFtdcReqChangeAccountField["MoneyAccountStatus"] = "char"
#Bank account
CThostFtdcReqChangeAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcReqChangeAccountField["BankPassWord"] = "string"
#New bank account
CThostFtdcReqChangeAccountField["NewBankAccount"] = "string"
#New bank password
CThostFtdcReqChangeAccountField["NewBankPassWord"] = "string"
#Investor account ID
CThostFtdcReqChangeAccountField["AccountID"] = "string"
#Futures password
CThostFtdcReqChangeAccountField["Password"] = "string"
#Bank account type
CThostFtdcReqChangeAccountField["BankAccType"] = "char"
#Install ID
CThostFtdcReqChangeAccountField["InstallID"] = "int"
#Verify-customer-ID-number flag
CThostFtdcReqChangeAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqChangeAccountField["CurrencyID"] = "string"
#Futures company code assigned by the bank
CThostFtdcReqChangeAccountField["BrokerIDByBank"] = "string"
#Bank password flag
CThostFtdcReqChangeAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqChangeAccountField["SecuPwdFlag"] = "char"
#Transaction ID
CThostFtdcReqChangeAccountField["TID"] = "int"
#Digest
CThostFtdcReqChangeAccountField["Digest"] = "string"
structDict['CThostFtdcReqChangeAccountField'] = CThostFtdcReqChangeAccountField
#Transfer request
CThostFtdcReqTransferField = {}
#Business function code
CThostFtdcReqTransferField["TradeCode"] = "string"
#Bank code
CThostFtdcReqTransferField["BankID"] = "string"
#Bank branch code
CThostFtdcReqTransferField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqTransferField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqTransferField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqTransferField["TradeDate"] = "string"
#Trade time
CThostFtdcReqTransferField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqTransferField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqTransferField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqTransferField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqTransferField["LastFragment"] = "char"
#Session ID
CThostFtdcReqTransferField["SessionID"] = "int"
#Customer name
CThostFtdcReqTransferField["CustomerName"] = "string"
#ID document type
CThostFtdcReqTransferField["IdCardType"] = "char"
#ID document number
CThostFtdcReqTransferField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcReqTransferField["CustType"] = "char"
#Bank account
CThostFtdcReqTransferField["BankAccount"] = "string"
#Bank password
CThostFtdcReqTransferField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqTransferField["AccountID"] = "string"
#Futures password
CThostFtdcReqTransferField["Password"] = "string"
#Install ID
CThostFtdcReqTransferField["InstallID"] = "int"
#Futures company serial number
CThostFtdcReqTransferField["FutureSerial"] = "int"
#User ID
CThostFtdcReqTransferField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcReqTransferField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqTransferField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcReqTransferField["TradeAmount"] = "float"
#Futures withdrawable amount
CThostFtdcReqTransferField["FutureFetchAmount"] = "float"
#Fee payment flag
CThostFtdcReqTransferField["FeePayFlag"] = "char"
#Customer fee receivable
CThostFtdcReqTransferField["CustFee"] = "float"
#Futures company fee receivable
CThostFtdcReqTransferField["BrokerFee"] = "float"
#Message from sender to receiver
CThostFtdcReqTransferField["Message"] = "string"
#Digest
CThostFtdcReqTransferField["Digest"] = "string"
#Bank account type
CThostFtdcReqTransferField["BankAccType"] = "char"
#Channel flag
CThostFtdcReqTransferField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcReqTransferField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcReqTransferField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcReqTransferField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcReqTransferField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqTransferField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcReqTransferField["OperNo"] = "string"
#Request ID
CThostFtdcReqTransferField["RequestID"] = "int"
#Transaction ID
CThostFtdcReqTransferField["TID"] = "int"
#Transfer transaction status
CThostFtdcReqTransferField["TransferStatus"] = "char"
structDict['CThostFtdcReqTransferField'] = CThostFtdcReqTransferField
#Response to bank-initiated bank-to-futures fund transfer
CThostFtdcRspTransferField = {}
#Business function code
CThostFtdcRspTransferField["TradeCode"] = "string"
#Bank code
CThostFtdcRspTransferField["BankID"] = "string"
#Bank branch code
CThostFtdcRspTransferField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspTransferField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspTransferField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspTransferField["TradeDate"] = "string"
#Trade time
CThostFtdcRspTransferField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspTransferField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspTransferField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspTransferField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspTransferField["LastFragment"] = "char"
#Session ID
CThostFtdcRspTransferField["SessionID"] = "int"
#Customer name
CThostFtdcRspTransferField["CustomerName"] = "string"
#ID document type
CThostFtdcRspTransferField["IdCardType"] = "char"
#ID document number
CThostFtdcRspTransferField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcRspTransferField["CustType"] = "char"
#Bank account
CThostFtdcRspTransferField["BankAccount"] = "string"
#Bank password
CThostFtdcRspTransferField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcRspTransferField["AccountID"] = "string"
#Futures password
CThostFtdcRspTransferField["Password"] = "string"
#Install ID
CThostFtdcRspTransferField["InstallID"] = "int"
#Futures company serial number
CThostFtdcRspTransferField["FutureSerial"] = "int"
#User ID
CThostFtdcRspTransferField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcRspTransferField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcRspTransferField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcRspTransferField["TradeAmount"] = "float"
#Futures withdrawable amount
CThostFtdcRspTransferField["FutureFetchAmount"] = "float"
#Fee payment flag
CThostFtdcRspTransferField["FeePayFlag"] = "char"
#Customer fee receivable
CThostFtdcRspTransferField["CustFee"] = "float"
#Futures company fee receivable
CThostFtdcRspTransferField["BrokerFee"] = "float"
#Message from sender to receiver
CThostFtdcRspTransferField["Message"] = "string"
#Digest
CThostFtdcRspTransferField["Digest"] = "string"
#Bank account type
CThostFtdcRspTransferField["BankAccType"] = "char"
#Channel flag
CThostFtdcRspTransferField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcRspTransferField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcRspTransferField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcRspTransferField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcRspTransferField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcRspTransferField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcRspTransferField["OperNo"] = "string"
#Request ID
CThostFtdcRspTransferField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspTransferField["TID"] = "int"
#Transfer transaction status
CThostFtdcRspTransferField["TransferStatus"] = "char"
#Error code
CThostFtdcRspTransferField["ErrorID"] = "int"
#Error message
CThostFtdcRspTransferField["ErrorMsg"] = "string"
structDict['CThostFtdcRspTransferField'] = CThostFtdcRspTransferField
#Repeal (reversal) request
CThostFtdcReqRepealField = {}
#Repeal time interval
CThostFtdcReqRepealField["RepealTimeInterval"] = "int"
#Number of repeals so far
CThostFtdcReqRepealField["RepealedTimes"] = "int"
#Bank repeal flag
CThostFtdcReqRepealField["BankRepealFlag"] = "char"
#Broker repeal flag
CThostFtdcReqRepealField["BrokerRepealFlag"] = "char"
#Repealed platform serial number
CThostFtdcReqRepealField["PlateRepealSerial"] = "int"
#Repealed bank serial number
CThostFtdcReqRepealField["BankRepealSerial"] = "string"
#Repealed futures serial number
CThostFtdcReqRepealField["FutureRepealSerial"] = "int"
#Business function code
CThostFtdcReqRepealField["TradeCode"] = "string"
#Bank code
CThostFtdcReqRepealField["BankID"] = "string"
#Bank branch code
CThostFtdcReqRepealField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqRepealField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqRepealField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqRepealField["TradeDate"] = "string"
#Trade time
CThostFtdcReqRepealField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqRepealField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqRepealField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqRepealField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqRepealField["LastFragment"] = "char"
#Session ID
CThostFtdcReqRepealField["SessionID"] = "int"
#Customer name
CThostFtdcReqRepealField["CustomerName"] = "string"
#ID document type
CThostFtdcReqRepealField["IdCardType"] = "char"
#ID document number
CThostFtdcReqRepealField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcReqRepealField["CustType"] = "char"
#Bank account
CThostFtdcReqRepealField["BankAccount"] = "string"
#Bank password
CThostFtdcReqRepealField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqRepealField["AccountID"] = "string"
#Futures password
CThostFtdcReqRepealField["Password"] = "string"
#Install ID
CThostFtdcReqRepealField["InstallID"] = "int"
#Futures company serial number
CThostFtdcReqRepealField["FutureSerial"] = "int"
#User ID
CThostFtdcReqRepealField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcReqRepealField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqRepealField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcReqRepealField["TradeAmount"] = "float"
#Futures withdrawable amount
CThostFtdcReqRepealField["FutureFetchAmount"] = "float"
#Fee payment flag
CThostFtdcReqRepealField["FeePayFlag"] = "char"
#Customer fee receivable
CThostFtdcReqRepealField["CustFee"] = "float"
#Futures company fee receivable
CThostFtdcReqRepealField["BrokerFee"] = "float"
#Message from sender to receiver
CThostFtdcReqRepealField["Message"] = "string"
#Digest
CThostFtdcReqRepealField["Digest"] = "string"
#Bank account type
CThostFtdcReqRepealField["BankAccType"] = "char"
#Channel flag
CThostFtdcReqRepealField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcReqRepealField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcReqRepealField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcReqRepealField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcReqRepealField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqRepealField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcReqRepealField["OperNo"] = "string"
#Request ID
CThostFtdcReqRepealField["RequestID"] = "int"
#Transaction ID
CThostFtdcReqRepealField["TID"] = "int"
#Transfer transaction status
CThostFtdcReqRepealField["TransferStatus"] = "char"
structDict['CThostFtdcReqRepealField'] = CThostFtdcReqRepealField
#Repeal (reversal) response
CThostFtdcRspRepealField = {}
#Repeal time interval
CThostFtdcRspRepealField["RepealTimeInterval"] = "int"
#Number of repeals so far
CThostFtdcRspRepealField["RepealedTimes"] = "int"
#Bank repeal flag
CThostFtdcRspRepealField["BankRepealFlag"] = "char"
#Broker repeal flag
CThostFtdcRspRepealField["BrokerRepealFlag"] = "char"
#Repealed platform serial number
CThostFtdcRspRepealField["PlateRepealSerial"] = "int"
#Repealed bank serial number
CThostFtdcRspRepealField["BankRepealSerial"] = "string"
#Repealed futures serial number
CThostFtdcRspRepealField["FutureRepealSerial"] = "int"
#Business function code
CThostFtdcRspRepealField["TradeCode"] = "string"
#Bank code
CThostFtdcRspRepealField["BankID"] = "string"
#Bank branch code
CThostFtdcRspRepealField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspRepealField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspRepealField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspRepealField["TradeDate"] = "string"
#Trade time
CThostFtdcRspRepealField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspRepealField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspRepealField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspRepealField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspRepealField["LastFragment"] = "char"
#Session ID
CThostFtdcRspRepealField["SessionID"] = "int"
#Customer name
CThostFtdcRspRepealField["CustomerName"] = "string"
#ID document type
CThostFtdcRspRepealField["IdCardType"] = "char"
#ID document number
CThostFtdcRspRepealField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcRspRepealField["CustType"] = "char"
#Bank account
CThostFtdcRspRepealField["BankAccount"] = "string"
#Bank password
CThostFtdcRspRepealField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcRspRepealField["AccountID"] = "string"
#Futures password
CThostFtdcRspRepealField["Password"] = "string"
#Install ID
CThostFtdcRspRepealField["InstallID"] = "int"
#Futures company serial number
CThostFtdcRspRepealField["FutureSerial"] = "int"
#User ID
CThostFtdcRspRepealField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcRspRepealField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcRspRepealField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcRspRepealField["TradeAmount"] = "float"
#Futures withdrawable amount
CThostFtdcRspRepealField["FutureFetchAmount"] = "float"
#Fee payment flag
CThostFtdcRspRepealField["FeePayFlag"] = "char"
#Customer fee receivable
CThostFtdcRspRepealField["CustFee"] = "float"
#Futures company fee receivable
CThostFtdcRspRepealField["BrokerFee"] = "float"
#Message from sender to receiver
CThostFtdcRspRepealField["Message"] = "string"
#Digest
CThostFtdcRspRepealField["Digest"] = "string"
#Bank account type
CThostFtdcRspRepealField["BankAccType"] = "char"
#Channel flag
CThostFtdcRspRepealField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcRspRepealField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcRspRepealField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcRspRepealField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcRspRepealField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcRspRepealField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcRspRepealField["OperNo"] = "string"
#Request ID
CThostFtdcRspRepealField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspRepealField["TID"] = "int"
#Transfer transaction status
CThostFtdcRspRepealField["TransferStatus"] = "char"
#Error code
CThostFtdcRspRepealField["ErrorID"] = "int"
#Error message
CThostFtdcRspRepealField["ErrorMsg"] = "string"
structDict['CThostFtdcRspRepealField'] = CThostFtdcRspRepealField
#Account information query request
CThostFtdcReqQueryAccountField = {}
#Business function code
CThostFtdcReqQueryAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcReqQueryAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcReqQueryAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqQueryAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqQueryAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqQueryAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcReqQueryAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqQueryAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqQueryAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqQueryAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqQueryAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcReqQueryAccountField["SessionID"] = "int"
#Customer name
CThostFtdcReqQueryAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcReqQueryAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcReqQueryAccountField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcReqQueryAccountField["CustType"] = "char"
#Bank account
CThostFtdcReqQueryAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcReqQueryAccountField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqQueryAccountField["AccountID"] = "string"
#Futures password
CThostFtdcReqQueryAccountField["Password"] = "string"
#Futures company serial number
CThostFtdcReqQueryAccountField["FutureSerial"] = "int"
#Install ID
CThostFtdcReqQueryAccountField["InstallID"] = "int"
#User ID
CThostFtdcReqQueryAccountField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcReqQueryAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcReqQueryAccountField["CurrencyID"] = "string"
#Digest
CThostFtdcReqQueryAccountField["Digest"] = "string"
#Bank account type
CThostFtdcReqQueryAccountField["BankAccType"] = "char"
#Channel flag
CThostFtdcReqQueryAccountField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcReqQueryAccountField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcReqQueryAccountField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcReqQueryAccountField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcReqQueryAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcReqQueryAccountField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcReqQueryAccountField["OperNo"] = "string"
#Request ID
CThostFtdcReqQueryAccountField["RequestID"] = "int"
#Transaction ID
CThostFtdcReqQueryAccountField["TID"] = "int"
structDict['CThostFtdcReqQueryAccountField'] = CThostFtdcReqQueryAccountField
#Account information query response
CThostFtdcRspQueryAccountField = {}
#Business function code
CThostFtdcRspQueryAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcRspQueryAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcRspQueryAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspQueryAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspQueryAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspQueryAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcRspQueryAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspQueryAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspQueryAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspQueryAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspQueryAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcRspQueryAccountField["SessionID"] = "int"
#Customer name
CThostFtdcRspQueryAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcRspQueryAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcRspQueryAccountField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcRspQueryAccountField["CustType"] = "char"
#Bank account
CThostFtdcRspQueryAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcRspQueryAccountField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcRspQueryAccountField["AccountID"] = "string"
#Futures password
CThostFtdcRspQueryAccountField["Password"] = "string"
#Futures company serial number
CThostFtdcRspQueryAccountField["FutureSerial"] = "int"
#Install ID
CThostFtdcRspQueryAccountField["InstallID"] = "int"
#User ID
CThostFtdcRspQueryAccountField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcRspQueryAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcRspQueryAccountField["CurrencyID"] = "string"
#Digest
CThostFtdcRspQueryAccountField["Digest"] = "string"
#Bank account type
CThostFtdcRspQueryAccountField["BankAccType"] = "char"
#Channel flag
CThostFtdcRspQueryAccountField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcRspQueryAccountField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcRspQueryAccountField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcRspQueryAccountField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcRspQueryAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcRspQueryAccountField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcRspQueryAccountField["OperNo"] = "string"
#Request ID
CThostFtdcRspQueryAccountField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspQueryAccountField["TID"] = "int"
#Bank available amount
CThostFtdcRspQueryAccountField["BankUseAmount"] = "float"
#Bank withdrawable amount
CThostFtdcRspQueryAccountField["BankFetchAmount"] = "float"
structDict['CThostFtdcRspQueryAccountField'] = CThostFtdcRspQueryAccountField
#Broker sign-in/sign-out
CThostFtdcFutureSignIOField = {}
#Business function code
CThostFtdcFutureSignIOField["TradeCode"] = "string"
#Bank code
CThostFtdcFutureSignIOField["BankID"] = "string"
#Bank branch code
CThostFtdcFutureSignIOField["BankBranchID"] = "string"
#Broker code
CThostFtdcFutureSignIOField["BrokerID"] = "string"
#Broker branch code
CThostFtdcFutureSignIOField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcFutureSignIOField["TradeDate"] = "string"
#Trade time
CThostFtdcFutureSignIOField["TradeTime"] = "string"
#Bank serial number
CThostFtdcFutureSignIOField["BankSerial"] = "string"
#Trading system date
CThostFtdcFutureSignIOField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcFutureSignIOField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcFutureSignIOField["LastFragment"] = "char"
#Session ID
CThostFtdcFutureSignIOField["SessionID"] = "int"
#Install ID
CThostFtdcFutureSignIOField["InstallID"] = "int"
#User ID
CThostFtdcFutureSignIOField["UserID"] = "string"
#Digest
CThostFtdcFutureSignIOField["Digest"] = "string"
#Currency code
CThostFtdcFutureSignIOField["CurrencyID"] = "string"
#Channel flag
CThostFtdcFutureSignIOField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcFutureSignIOField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcFutureSignIOField["OperNo"] = "string"
#Request ID
CThostFtdcFutureSignIOField["RequestID"] = "int"
#Transaction ID
CThostFtdcFutureSignIOField["TID"] = "int"
structDict['CThostFtdcFutureSignIOField'] = CThostFtdcFutureSignIOField
#Broker sign-in response
CThostFtdcRspFutureSignInField = {}
#Business function code
CThostFtdcRspFutureSignInField["TradeCode"] = "string"
#Bank code
CThostFtdcRspFutureSignInField["BankID"] = "string"
#Bank branch code
CThostFtdcRspFutureSignInField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspFutureSignInField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspFutureSignInField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspFutureSignInField["TradeDate"] = "string"
#Trade time
CThostFtdcRspFutureSignInField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspFutureSignInField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspFutureSignInField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspFutureSignInField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspFutureSignInField["LastFragment"] = "char"
#Session ID
CThostFtdcRspFutureSignInField["SessionID"] = "int"
#Install ID
CThostFtdcRspFutureSignInField["InstallID"] = "int"
#User ID
CThostFtdcRspFutureSignInField["UserID"] = "string"
#Digest
CThostFtdcRspFutureSignInField["Digest"] = "string"
#Currency code
CThostFtdcRspFutureSignInField["CurrencyID"] = "string"
#Channel flag
CThostFtdcRspFutureSignInField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcRspFutureSignInField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcRspFutureSignInField["OperNo"] = "string"
#Request ID
CThostFtdcRspFutureSignInField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspFutureSignInField["TID"] = "int"
#Error code
CThostFtdcRspFutureSignInField["ErrorID"] = "int"
#Error message
CThostFtdcRspFutureSignInField["ErrorMsg"] = "string"
#PIN key
CThostFtdcRspFutureSignInField["PinKey"] = "string"
#MAC key
CThostFtdcRspFutureSignInField["MacKey"] = "string"
structDict['CThostFtdcRspFutureSignInField'] = CThostFtdcRspFutureSignInField
#Broker sign-out request
CThostFtdcReqFutureSignOutField = {}
#Business function code
CThostFtdcReqFutureSignOutField["TradeCode"] = "string"
#Bank code
CThostFtdcReqFutureSignOutField["BankID"] = "string"
#Bank branch code
CThostFtdcReqFutureSignOutField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqFutureSignOutField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqFutureSignOutField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqFutureSignOutField["TradeDate"] = "string"
#Trade time
CThostFtdcReqFutureSignOutField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqFutureSignOutField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqFutureSignOutField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqFutureSignOutField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqFutureSignOutField["LastFragment"] = "char"
#Session ID
CThostFtdcReqFutureSignOutField["SessionID"] = "int"
#Install ID
CThostFtdcReqFutureSignOutField["InstallID"] = "int"
#User ID
CThostFtdcReqFutureSignOutField["UserID"] = "string"
#Digest
CThostFtdcReqFutureSignOutField["Digest"] = "string"
#Currency code
CThostFtdcReqFutureSignOutField["CurrencyID"] = "string"
#Channel flag
CThostFtdcReqFutureSignOutField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcReqFutureSignOutField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcReqFutureSignOutField["OperNo"] = "string"
#Request ID
CThostFtdcReqFutureSignOutField["RequestID"] = "int"
#Transaction ID
CThostFtdcReqFutureSignOutField["TID"] = "int"
structDict['CThostFtdcReqFutureSignOutField'] = CThostFtdcReqFutureSignOutField
#Broker sign-out response
CThostFtdcRspFutureSignOutField = {}
#Business function code
CThostFtdcRspFutureSignOutField["TradeCode"] = "string"
#Bank code
CThostFtdcRspFutureSignOutField["BankID"] = "string"
#Bank branch code
CThostFtdcRspFutureSignOutField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspFutureSignOutField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspFutureSignOutField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspFutureSignOutField["TradeDate"] = "string"
#Trade time
CThostFtdcRspFutureSignOutField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspFutureSignOutField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspFutureSignOutField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspFutureSignOutField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspFutureSignOutField["LastFragment"] = "char"
#Session ID
CThostFtdcRspFutureSignOutField["SessionID"] = "int"
#Install ID
CThostFtdcRspFutureSignOutField["InstallID"] = "int"
#User ID
CThostFtdcRspFutureSignOutField["UserID"] = "string"
#Digest
CThostFtdcRspFutureSignOutField["Digest"] = "string"
#Currency code
CThostFtdcRspFutureSignOutField["CurrencyID"] = "string"
#Channel flag
CThostFtdcRspFutureSignOutField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcRspFutureSignOutField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcRspFutureSignOutField["OperNo"] = "string"
#Request ID
CThostFtdcRspFutureSignOutField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspFutureSignOutField["TID"] = "int"
#Error code
CThostFtdcRspFutureSignOutField["ErrorID"] = "int"
#Error message
CThostFtdcRspFutureSignOutField["ErrorMsg"] = "string"
structDict['CThostFtdcRspFutureSignOutField'] = CThostFtdcRspFutureSignOutField
#Request to query the trade result of a specified serial number
CThostFtdcReqQueryTradeResultBySerialField = {}
#Business function code
CThostFtdcReqQueryTradeResultBySerialField["TradeCode"] = "string"
#Bank code
CThostFtdcReqQueryTradeResultBySerialField["BankID"] = "string"
#Bank branch code
CThostFtdcReqQueryTradeResultBySerialField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqQueryTradeResultBySerialField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqQueryTradeResultBySerialField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqQueryTradeResultBySerialField["TradeDate"] = "string"
#Trade time
CThostFtdcReqQueryTradeResultBySerialField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqQueryTradeResultBySerialField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqQueryTradeResultBySerialField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqQueryTradeResultBySerialField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqQueryTradeResultBySerialField["LastFragment"] = "char"
#Session ID
CThostFtdcReqQueryTradeResultBySerialField["SessionID"] = "int"
#Serial number (reference)
CThostFtdcReqQueryTradeResultBySerialField["Reference"] = "int"
#Institution type of this serial number's issuer
CThostFtdcReqQueryTradeResultBySerialField["RefrenceIssureType"] = "char"
#Institution code of this serial number's issuer
CThostFtdcReqQueryTradeResultBySerialField["RefrenceIssure"] = "string"
#Customer name
CThostFtdcReqQueryTradeResultBySerialField["CustomerName"] = "string"
#ID document type
CThostFtdcReqQueryTradeResultBySerialField["IdCardType"] = "char"
#ID document number
CThostFtdcReqQueryTradeResultBySerialField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcReqQueryTradeResultBySerialField["CustType"] = "char"
#Bank account
CThostFtdcReqQueryTradeResultBySerialField["BankAccount"] = "string"
#Bank password
CThostFtdcReqQueryTradeResultBySerialField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcReqQueryTradeResultBySerialField["AccountID"] = "string"
#Futures password
CThostFtdcReqQueryTradeResultBySerialField["Password"] = "string"
#Currency code
CThostFtdcReqQueryTradeResultBySerialField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcReqQueryTradeResultBySerialField["TradeAmount"] = "float"
#Digest
CThostFtdcReqQueryTradeResultBySerialField["Digest"] = "string"
structDict['CThostFtdcReqQueryTradeResultBySerialField'] = CThostFtdcReqQueryTradeResultBySerialField
#Response to query the trade result of a specified serial number
CThostFtdcRspQueryTradeResultBySerialField = {}
#Business function code
CThostFtdcRspQueryTradeResultBySerialField["TradeCode"] = "string"
#Bank code
CThostFtdcRspQueryTradeResultBySerialField["BankID"] = "string"
#Bank branch code
CThostFtdcRspQueryTradeResultBySerialField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspQueryTradeResultBySerialField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspQueryTradeResultBySerialField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspQueryTradeResultBySerialField["TradeDate"] = "string"
#Trade time
CThostFtdcRspQueryTradeResultBySerialField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspQueryTradeResultBySerialField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspQueryTradeResultBySerialField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspQueryTradeResultBySerialField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspQueryTradeResultBySerialField["LastFragment"] = "char"
#Session ID
CThostFtdcRspQueryTradeResultBySerialField["SessionID"] = "int"
#Error code
CThostFtdcRspQueryTradeResultBySerialField["ErrorID"] = "int"
#Error message
CThostFtdcRspQueryTradeResultBySerialField["ErrorMsg"] = "string"
#Serial number (reference)
CThostFtdcRspQueryTradeResultBySerialField["Reference"] = "int"
#Institution type of this serial number's issuer
CThostFtdcRspQueryTradeResultBySerialField["RefrenceIssureType"] = "char"
#Institution code of this serial number's issuer
CThostFtdcRspQueryTradeResultBySerialField["RefrenceIssure"] = "string"
#Original return code
CThostFtdcRspQueryTradeResultBySerialField["OriginReturnCode"] = "string"
#Original return code description
CThostFtdcRspQueryTradeResultBySerialField["OriginDescrInfoForReturnCode"] = "string"
#Bank account
CThostFtdcRspQueryTradeResultBySerialField["BankAccount"] = "string"
#Bank password
CThostFtdcRspQueryTradeResultBySerialField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcRspQueryTradeResultBySerialField["AccountID"] = "string"
#Futures password
CThostFtdcRspQueryTradeResultBySerialField["Password"] = "string"
#Currency code
CThostFtdcRspQueryTradeResultBySerialField["CurrencyID"] = "string"
#Transfer amount
CThostFtdcRspQueryTradeResultBySerialField["TradeAmount"] = "float"
#Digest
CThostFtdcRspQueryTradeResultBySerialField["Digest"] = "string"
structDict['CThostFtdcRspQueryTradeResultBySerialField'] = CThostFtdcRspQueryTradeResultBySerialField
#Day-end file ready request
CThostFtdcReqDayEndFileReadyField = {}
#Business function code
CThostFtdcReqDayEndFileReadyField["TradeCode"] = "string"
#Bank code
CThostFtdcReqDayEndFileReadyField["BankID"] = "string"
#Bank branch code
CThostFtdcReqDayEndFileReadyField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqDayEndFileReadyField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqDayEndFileReadyField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqDayEndFileReadyField["TradeDate"] = "string"
#Trade time
CThostFtdcReqDayEndFileReadyField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqDayEndFileReadyField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqDayEndFileReadyField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqDayEndFileReadyField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqDayEndFileReadyField["LastFragment"] = "char"
#Session ID
CThostFtdcReqDayEndFileReadyField["SessionID"] = "int"
#File business function code
CThostFtdcReqDayEndFileReadyField["FileBusinessCode"] = "char"
#Digest
CThostFtdcReqDayEndFileReadyField["Digest"] = "string"
structDict['CThostFtdcReqDayEndFileReadyField'] = CThostFtdcReqDayEndFileReadyField
#Return result
CThostFtdcReturnResultField = {}
#Return code
CThostFtdcReturnResultField["ReturnCode"] = "string"
#Return code description
CThostFtdcReturnResultField["DescrInfoForReturnCode"] = "string"
structDict['CThostFtdcReturnResultField'] = CThostFtdcReturnResultField
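
# The same field maps can double as a lightweight schema when checking a
# reply decoded from the API. A sketch under the assumption that "string" and
# "char" values arrive as str and "int"/"float" as numbers; PYTHON_TYPES and
# check_struct are hypothetical helpers, not part of the generated API:
PYTHON_TYPES = {"string": str, "char": str, "int": int, "float": (int, float)}

def check_struct(data, field_types):
    """Yield (field, value) pairs whose value does not match its declared type."""
    for name, t in field_types.items():
        if name in data and not isinstance(data[name], PYTHON_TYPES[t]):
            yield name, data[name]

# Example: list(check_struct(reply, structDict['CThostFtdcReturnResultField']))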
#Verify futures fund password
CThostFtdcVerifyFuturePasswordField = {}
#Business function code
CThostFtdcVerifyFuturePasswordField["TradeCode"] = "string"
#Bank code
CThostFtdcVerifyFuturePasswordField["BankID"] = "string"
#Bank branch code
CThostFtdcVerifyFuturePasswordField["BankBranchID"] = "string"
#Broker code
CThostFtdcVerifyFuturePasswordField["BrokerID"] = "string"
#Broker branch code
CThostFtdcVerifyFuturePasswordField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcVerifyFuturePasswordField["TradeDate"] = "string"
#Trade time
CThostFtdcVerifyFuturePasswordField["TradeTime"] = "string"
#Bank serial number
CThostFtdcVerifyFuturePasswordField["BankSerial"] = "string"
#Trading system date
CThostFtdcVerifyFuturePasswordField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcVerifyFuturePasswordField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcVerifyFuturePasswordField["LastFragment"] = "char"
#Session ID
CThostFtdcVerifyFuturePasswordField["SessionID"] = "int"
#Investor account ID
CThostFtdcVerifyFuturePasswordField["AccountID"] = "string"
#Futures password
CThostFtdcVerifyFuturePasswordField["Password"] = "string"
#Bank account
CThostFtdcVerifyFuturePasswordField["BankAccount"] = "string"
#Bank password
CThostFtdcVerifyFuturePasswordField["BankPassWord"] = "string"
#Install ID
CThostFtdcVerifyFuturePasswordField["InstallID"] = "int"
#Transaction ID
CThostFtdcVerifyFuturePasswordField["TID"] = "int"
#Currency code
CThostFtdcVerifyFuturePasswordField["CurrencyID"] = "string"
structDict['CThostFtdcVerifyFuturePasswordField'] = CThostFtdcVerifyFuturePasswordField
#Verify customer information
CThostFtdcVerifyCustInfoField = {}
#Customer name
CThostFtdcVerifyCustInfoField["CustomerName"] = "string"
#ID document type
CThostFtdcVerifyCustInfoField["IdCardType"] = "char"
#ID document number
CThostFtdcVerifyCustInfoField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcVerifyCustInfoField["CustType"] = "char"
structDict['CThostFtdcVerifyCustInfoField'] = CThostFtdcVerifyCustInfoField
#Verify futures fund password and customer information
CThostFtdcVerifyFuturePasswordAndCustInfoField = {}
#Customer name
CThostFtdcVerifyFuturePasswordAndCustInfoField["CustomerName"] = "string"
#ID document type
CThostFtdcVerifyFuturePasswordAndCustInfoField["IdCardType"] = "char"
#ID document number
CThostFtdcVerifyFuturePasswordAndCustInfoField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcVerifyFuturePasswordAndCustInfoField["CustType"] = "char"
#Investor account ID
CThostFtdcVerifyFuturePasswordAndCustInfoField["AccountID"] = "string"
#Futures password
CThostFtdcVerifyFuturePasswordAndCustInfoField["Password"] = "string"
#Currency code
CThostFtdcVerifyFuturePasswordAndCustInfoField["CurrencyID"] = "string"
structDict['CThostFtdcVerifyFuturePasswordAndCustInfoField'] = CThostFtdcVerifyFuturePasswordAndCustInfoField
#Deposit result notification
CThostFtdcDepositResultInformField = {}
#Deposit/withdrawal serial number (the serial number returned by the bank-futures gateway)
CThostFtdcDepositResultInformField["DepositSeqNo"] = "string"
#Broker company code
CThostFtdcDepositResultInformField["BrokerID"] = "string"
#Investor ID
CThostFtdcDepositResultInformField["InvestorID"] = "string"
#Deposit amount
CThostFtdcDepositResultInformField["Deposit"] = "float"
#Request ID
CThostFtdcDepositResultInformField["RequestID"] = "int"
#Return code
CThostFtdcDepositResultInformField["ReturnCode"] = "string"
#Return code description
CThostFtdcDepositResultInformField["DescrInfoForReturnCode"] = "string"
structDict['CThostFtdcDepositResultInformField'] = CThostFtdcDepositResultInformField
#Key synchronization request from the trading core to the bank-futures gateway
CThostFtdcReqSyncKeyField = {}
#Business function code
CThostFtdcReqSyncKeyField["TradeCode"] = "string"
#Bank code
CThostFtdcReqSyncKeyField["BankID"] = "string"
#Bank branch code
CThostFtdcReqSyncKeyField["BankBranchID"] = "string"
#Broker code
CThostFtdcReqSyncKeyField["BrokerID"] = "string"
#Broker branch code
CThostFtdcReqSyncKeyField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcReqSyncKeyField["TradeDate"] = "string"
#Trade time
CThostFtdcReqSyncKeyField["TradeTime"] = "string"
#Bank serial number
CThostFtdcReqSyncKeyField["BankSerial"] = "string"
#Trading system date
CThostFtdcReqSyncKeyField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcReqSyncKeyField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcReqSyncKeyField["LastFragment"] = "char"
#Session ID
CThostFtdcReqSyncKeyField["SessionID"] = "int"
#Install ID
CThostFtdcReqSyncKeyField["InstallID"] = "int"
#User ID
CThostFtdcReqSyncKeyField["UserID"] = "string"
#Message from the trading core to the bank-futures gateway
CThostFtdcReqSyncKeyField["Message"] = "string"
#Channel flag
CThostFtdcReqSyncKeyField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcReqSyncKeyField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcReqSyncKeyField["OperNo"] = "string"
#Request ID
CThostFtdcReqSyncKeyField["RequestID"] = "int"
#Transaction ID
CThostFtdcReqSyncKeyField["TID"] = "int"
structDict['CThostFtdcReqSyncKeyField'] = CThostFtdcReqSyncKeyField
#Key synchronization response from the trading core to the bank-futures gateway
CThostFtdcRspSyncKeyField = {}
#Business function code
CThostFtdcRspSyncKeyField["TradeCode"] = "string"
#Bank code
CThostFtdcRspSyncKeyField["BankID"] = "string"
#Bank branch code
CThostFtdcRspSyncKeyField["BankBranchID"] = "string"
#Broker code
CThostFtdcRspSyncKeyField["BrokerID"] = "string"
#Broker branch code
CThostFtdcRspSyncKeyField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcRspSyncKeyField["TradeDate"] = "string"
#Trade time
CThostFtdcRspSyncKeyField["TradeTime"] = "string"
#Bank serial number
CThostFtdcRspSyncKeyField["BankSerial"] = "string"
#Trading system date
CThostFtdcRspSyncKeyField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcRspSyncKeyField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcRspSyncKeyField["LastFragment"] = "char"
#Session ID
CThostFtdcRspSyncKeyField["SessionID"] = "int"
#Install ID
CThostFtdcRspSyncKeyField["InstallID"] = "int"
#User ID
CThostFtdcRspSyncKeyField["UserID"] = "string"
#Message from the trading core to the bank-futures gateway
CThostFtdcRspSyncKeyField["Message"] = "string"
#Channel flag
CThostFtdcRspSyncKeyField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcRspSyncKeyField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcRspSyncKeyField["OperNo"] = "string"
#Request ID
CThostFtdcRspSyncKeyField["RequestID"] = "int"
#Transaction ID
CThostFtdcRspSyncKeyField["TID"] = "int"
#Error code
CThostFtdcRspSyncKeyField["ErrorID"] = "int"
#Error message
CThostFtdcRspSyncKeyField["ErrorMsg"] = "string"
structDict['CThostFtdcRspSyncKeyField'] = CThostFtdcRspSyncKeyField
#Account information query notification
CThostFtdcNotifyQueryAccountField = {}
#Business function code
CThostFtdcNotifyQueryAccountField["TradeCode"] = "string"
#Bank code
CThostFtdcNotifyQueryAccountField["BankID"] = "string"
#Bank branch code
CThostFtdcNotifyQueryAccountField["BankBranchID"] = "string"
#Broker code
CThostFtdcNotifyQueryAccountField["BrokerID"] = "string"
#Broker branch code
CThostFtdcNotifyQueryAccountField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcNotifyQueryAccountField["TradeDate"] = "string"
#Trade time
CThostFtdcNotifyQueryAccountField["TradeTime"] = "string"
#Bank serial number
CThostFtdcNotifyQueryAccountField["BankSerial"] = "string"
#Trading system date
CThostFtdcNotifyQueryAccountField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcNotifyQueryAccountField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcNotifyQueryAccountField["LastFragment"] = "char"
#Session ID
CThostFtdcNotifyQueryAccountField["SessionID"] = "int"
#Customer name
CThostFtdcNotifyQueryAccountField["CustomerName"] = "string"
#ID document type
CThostFtdcNotifyQueryAccountField["IdCardType"] = "char"
#ID document number
CThostFtdcNotifyQueryAccountField["IdentifiedCardNo"] = "string"
#Customer type
CThostFtdcNotifyQueryAccountField["CustType"] = "char"
#Bank account
CThostFtdcNotifyQueryAccountField["BankAccount"] = "string"
#Bank password
CThostFtdcNotifyQueryAccountField["BankPassWord"] = "string"
#Investor account ID
CThostFtdcNotifyQueryAccountField["AccountID"] = "string"
#Futures password
CThostFtdcNotifyQueryAccountField["Password"] = "string"
#Futures company serial number
CThostFtdcNotifyQueryAccountField["FutureSerial"] = "int"
#Install ID
CThostFtdcNotifyQueryAccountField["InstallID"] = "int"
#User ID
CThostFtdcNotifyQueryAccountField["UserID"] = "string"
#Verify-customer-ID-number flag
CThostFtdcNotifyQueryAccountField["VerifyCertNoFlag"] = "char"
#Currency code
CThostFtdcNotifyQueryAccountField["CurrencyID"] = "string"
#Digest
CThostFtdcNotifyQueryAccountField["Digest"] = "string"
#Bank account type
CThostFtdcNotifyQueryAccountField["BankAccType"] = "char"
#Channel flag
CThostFtdcNotifyQueryAccountField["DeviceID"] = "string"
#Futures unit account type
CThostFtdcNotifyQueryAccountField["BankSecuAccType"] = "char"
#Futures company code assigned by the bank
CThostFtdcNotifyQueryAccountField["BrokerIDByBank"] = "string"
#Futures unit account
CThostFtdcNotifyQueryAccountField["BankSecuAcc"] = "string"
#Bank password flag
CThostFtdcNotifyQueryAccountField["BankPwdFlag"] = "char"
#Futures fund password verification flag
CThostFtdcNotifyQueryAccountField["SecuPwdFlag"] = "char"
#Teller number
CThostFtdcNotifyQueryAccountField["OperNo"] = "string"
#Request ID
CThostFtdcNotifyQueryAccountField["RequestID"] = "int"
#Transaction ID
CThostFtdcNotifyQueryAccountField["TID"] = "int"
#Bank available amount
CThostFtdcNotifyQueryAccountField["BankUseAmount"] = "float"
#Bank withdrawable amount
CThostFtdcNotifyQueryAccountField["BankFetchAmount"] = "float"
#Error code
CThostFtdcNotifyQueryAccountField["ErrorID"] = "int"
#Error message
CThostFtdcNotifyQueryAccountField["ErrorMsg"] = "string"
structDict['CThostFtdcNotifyQueryAccountField'] = CThostFtdcNotifyQueryAccountField
#Bank-futures transfer transaction journal
CThostFtdcTransferSerialField = {}
#Platform serial number
CThostFtdcTransferSerialField["PlateSerial"] = "int"
#Trade initiator date
CThostFtdcTransferSerialField["TradeDate"] = "string"
#Trading day
CThostFtdcTransferSerialField["TradingDay"] = "string"
#Trade time
CThostFtdcTransferSerialField["TradeTime"] = "string"
#Trade code
CThostFtdcTransferSerialField["TradeCode"] = "string"
#Session number
CThostFtdcTransferSerialField["SessionID"] = "int"
#Bank code
CThostFtdcTransferSerialField["BankID"] = "string"
#Bank branch code
CThostFtdcTransferSerialField["BankBranchID"] = "string"
#Bank account type
CThostFtdcTransferSerialField["BankAccType"] = "char"
#Bank account
CThostFtdcTransferSerialField["BankAccount"] = "string"
#Bank serial number
CThostFtdcTransferSerialField["BankSerial"] = "string"
#Futures company code
CThostFtdcTransferSerialField["BrokerID"] = "string"
#Broker branch code
CThostFtdcTransferSerialField["BrokerBranchID"] = "string"
#Futures company account type
CThostFtdcTransferSerialField["FutureAccType"] = "char"
#Investor account ID
CThostFtdcTransferSerialField["AccountID"] = "string"
#Investor ID
CThostFtdcTransferSerialField["InvestorID"] = "string"
#Futures company serial number
CThostFtdcTransferSerialField["FutureSerial"] = "int"
#ID document type
CThostFtdcTransferSerialField["IdCardType"] = "char"
#ID document number
CThostFtdcTransferSerialField["IdentifiedCardNo"] = "string"
#Currency code
CThostFtdcTransferSerialField["CurrencyID"] = "string"
#Trade amount
CThostFtdcTransferSerialField["TradeAmount"] = "float"
#Customer fee receivable
CThostFtdcTransferSerialField["CustFee"] = "float"
#Futures company fee receivable
CThostFtdcTransferSerialField["BrokerFee"] = "float"
#Availability flag
CThostFtdcTransferSerialField["AvailabilityFlag"] = "char"
#Operator code
CThostFtdcTransferSerialField["OperatorCode"] = "string"
#New bank account
CThostFtdcTransferSerialField["BankNewAccount"] = "string"
#Error code
CThostFtdcTransferSerialField["ErrorID"] = "int"
#Error message
CThostFtdcTransferSerialField["ErrorMsg"] = "string"
structDict['CThostFtdcTransferSerialField'] = CThostFtdcTransferSerialField
#Request to query transfer journal
CThostFtdcQryTransferSerialField = {}
#Broker company code
CThostFtdcQryTransferSerialField["BrokerID"] = "string"
#Investor account ID
CThostFtdcQryTransferSerialField["AccountID"] = "string"
#Bank code
CThostFtdcQryTransferSerialField["BankID"] = "string"
#Currency code
CThostFtdcQryTransferSerialField["CurrencyID"] = "string"
structDict['CThostFtdcQryTransferSerialField'] = CThostFtdcQryTransferSerialField
#Broker sign-in notification
CThostFtdcNotifyFutureSignInField = {}
#Business function code
CThostFtdcNotifyFutureSignInField["TradeCode"] = "string"
#Bank code
CThostFtdcNotifyFutureSignInField["BankID"] = "string"
#Bank branch code
CThostFtdcNotifyFutureSignInField["BankBranchID"] = "string"
#Broker code
CThostFtdcNotifyFutureSignInField["BrokerID"] = "string"
#Broker branch code
CThostFtdcNotifyFutureSignInField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcNotifyFutureSignInField["TradeDate"] = "string"
#Trade time
CThostFtdcNotifyFutureSignInField["TradeTime"] = "string"
#Bank serial number
CThostFtdcNotifyFutureSignInField["BankSerial"] = "string"
#Trading system date
CThostFtdcNotifyFutureSignInField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcNotifyFutureSignInField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcNotifyFutureSignInField["LastFragment"] = "char"
#Session ID
CThostFtdcNotifyFutureSignInField["SessionID"] = "int"
#Install ID
CThostFtdcNotifyFutureSignInField["InstallID"] = "int"
#User ID
CThostFtdcNotifyFutureSignInField["UserID"] = "string"
#Digest
CThostFtdcNotifyFutureSignInField["Digest"] = "string"
#Currency code
CThostFtdcNotifyFutureSignInField["CurrencyID"] = "string"
#Channel flag
CThostFtdcNotifyFutureSignInField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcNotifyFutureSignInField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcNotifyFutureSignInField["OperNo"] = "string"
#Request ID
CThostFtdcNotifyFutureSignInField["RequestID"] = "int"
#Transaction ID
CThostFtdcNotifyFutureSignInField["TID"] = "int"
#Error code
CThostFtdcNotifyFutureSignInField["ErrorID"] = "int"
#Error message
CThostFtdcNotifyFutureSignInField["ErrorMsg"] = "string"
#PIN key
CThostFtdcNotifyFutureSignInField["PinKey"] = "string"
#MAC key
CThostFtdcNotifyFutureSignInField["MacKey"] = "string"
structDict['CThostFtdcNotifyFutureSignInField'] = CThostFtdcNotifyFutureSignInField
#Broker sign-out notification
CThostFtdcNotifyFutureSignOutField = {}
#Business function code
CThostFtdcNotifyFutureSignOutField["TradeCode"] = "string"
#Bank code
CThostFtdcNotifyFutureSignOutField["BankID"] = "string"
#Bank branch code
CThostFtdcNotifyFutureSignOutField["BankBranchID"] = "string"
#Broker code
CThostFtdcNotifyFutureSignOutField["BrokerID"] = "string"
#Broker branch code
CThostFtdcNotifyFutureSignOutField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcNotifyFutureSignOutField["TradeDate"] = "string"
#Trade time
CThostFtdcNotifyFutureSignOutField["TradeTime"] = "string"
#Bank serial number
CThostFtdcNotifyFutureSignOutField["BankSerial"] = "string"
#Trading system date
CThostFtdcNotifyFutureSignOutField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcNotifyFutureSignOutField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcNotifyFutureSignOutField["LastFragment"] = "char"
#Session ID
CThostFtdcNotifyFutureSignOutField["SessionID"] = "int"
#Install ID
CThostFtdcNotifyFutureSignOutField["InstallID"] = "int"
#User ID
CThostFtdcNotifyFutureSignOutField["UserID"] = "string"
#Digest
CThostFtdcNotifyFutureSignOutField["Digest"] = "string"
#Currency code
CThostFtdcNotifyFutureSignOutField["CurrencyID"] = "string"
#Channel flag
CThostFtdcNotifyFutureSignOutField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcNotifyFutureSignOutField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcNotifyFutureSignOutField["OperNo"] = "string"
#Request ID
CThostFtdcNotifyFutureSignOutField["RequestID"] = "int"
#Transaction ID
CThostFtdcNotifyFutureSignOutField["TID"] = "int"
#Error code
CThostFtdcNotifyFutureSignOutField["ErrorID"] = "int"
#Error message
CThostFtdcNotifyFutureSignOutField["ErrorMsg"] = "string"
structDict['CThostFtdcNotifyFutureSignOutField'] = CThostFtdcNotifyFutureSignOutField
#Notification of the key synchronization result from the trading core to the bank-futures gateway
CThostFtdcNotifySyncKeyField = {}
#Business function code
CThostFtdcNotifySyncKeyField["TradeCode"] = "string"
#Bank code
CThostFtdcNotifySyncKeyField["BankID"] = "string"
#Bank branch code
CThostFtdcNotifySyncKeyField["BankBranchID"] = "string"
#Broker code
CThostFtdcNotifySyncKeyField["BrokerID"] = "string"
#Broker branch code
CThostFtdcNotifySyncKeyField["BrokerBranchID"] = "string"
#Trade date
CThostFtdcNotifySyncKeyField["TradeDate"] = "string"
#Trade time
CThostFtdcNotifySyncKeyField["TradeTime"] = "string"
#Bank serial number
CThostFtdcNotifySyncKeyField["BankSerial"] = "string"
#Trading system date
CThostFtdcNotifySyncKeyField["TradingDay"] = "string"
#Bank-futures platform message serial number
CThostFtdcNotifySyncKeyField["PlateSerial"] = "int"
#Last fragment flag
CThostFtdcNotifySyncKeyField["LastFragment"] = "char"
#Session ID
CThostFtdcNotifySyncKeyField["SessionID"] = "int"
#Install ID
CThostFtdcNotifySyncKeyField["InstallID"] = "int"
#User ID
CThostFtdcNotifySyncKeyField["UserID"] = "string"
#Message from the trading core to the bank-futures gateway
CThostFtdcNotifySyncKeyField["Message"] = "string"
#Channel flag
CThostFtdcNotifySyncKeyField["DeviceID"] = "string"
#Futures company code assigned by the bank
CThostFtdcNotifySyncKeyField["BrokerIDByBank"] = "string"
#Teller number
CThostFtdcNotifySyncKeyField["OperNo"] = "string"
#Request ID
CThostFtdcNotifySyncKeyField["RequestID"] = "int"
#Transaction ID
CThostFtdcNotifySyncKeyField["TID"] = "int"
#Error code
CThostFtdcNotifySyncKeyField["ErrorID"] = "int"
#Error message
CThostFtdcNotifySyncKeyField["ErrorMsg"] = "string"
structDict['CThostFtdcNotifySyncKeyField'] = CThostFtdcNotifySyncKeyField
#Request to query bank-futures registration relationship
CThostFtdcQryAccountregisterField = {}
#Broker company code
CThostFtdcQryAccountregisterField["BrokerID"] = "string"
#Investor account ID
CThostFtdcQryAccountregisterField["AccountID"] = "string"
#Bank code
CThostFtdcQryAccountregisterField["BankID"] = "string"
#Bank branch code
CThostFtdcQryAccountregisterField["BankBranchID"] = "string"
#Currency code
CThostFtdcQryAccountregisterField["CurrencyID"] = "string"
structDict['CThostFtdcQryAccountregisterField'] = CThostFtdcQryAccountregisterField
#Customer account open/close registration record
CThostFtdcAccountregisterField = {}
#Trade day
CThostFtdcAccountregisterField["TradeDay"] = "string"
#Bank code
CThostFtdcAccountregisterField["BankID"] = "string"
#Bank branch code
CThostFtdcAccountregisterField["BankBranchID"] = "string"
#Bank account
CThostFtdcAccountregisterField["BankAccount"] = "string"
#Futures company code
CThostFtdcAccountregisterField["BrokerID"] = "string"
#Futures company branch code
CThostFtdcAccountregisterField["BrokerBranchID"] = "string"
#Investor account ID
CThostFtdcAccountregisterField["AccountID"] = "string"
#ID document type
CThostFtdcAccountregisterField["IdCardType"] = "char"
#ID document number
CThostFtdcAccountregisterField["IdentifiedCardNo"] = "string"
#Customer name
CThostFtdcAccountregisterField["CustomerName"] = "string"
#Currency code
CThostFtdcAccountregisterField["CurrencyID"] = "string"
#Account open/close category
CThostFtdcAccountregisterField["OpenOrDestroy"] = "char"
#Registration date
CThostFtdcAccountregisterField["RegDate"] = "string"
#Cancellation date
CThostFtdcAccountregisterField["OutDate"] = "string"
#Transaction ID
CThostFtdcAccountregisterField["TID"] = "int"
#Customer type
CThostFtdcAccountregisterField["CustType"] = "char"
#Bank account type
CThostFtdcAccountregisterField["BankAccType"] = "char"
structDict['CThostFtdcAccountregisterField'] = CThostFtdcAccountregisterField
# Bank-futures account opening information
CThostFtdcOpenAccountField = {}
# Business function code
CThostFtdcOpenAccountField["TradeCode"] = "string"
# Bank code
CThostFtdcOpenAccountField["BankID"] = "string"
# Bank branch code
CThostFtdcOpenAccountField["BankBranchID"] = "string"
# Broker code
CThostFtdcOpenAccountField["BrokerID"] = "string"
# Broker branch code
CThostFtdcOpenAccountField["BrokerBranchID"] = "string"
# Trade date
CThostFtdcOpenAccountField["TradeDate"] = "string"
# Trade time
CThostFtdcOpenAccountField["TradeTime"] = "string"
# Bank serial number
CThostFtdcOpenAccountField["BankSerial"] = "string"
# Trading system date
CThostFtdcOpenAccountField["TradingDay"] = "string"
# Bank-futures platform message serial number
CThostFtdcOpenAccountField["PlateSerial"] = "int"
# Last fragment flag
CThostFtdcOpenAccountField["LastFragment"] = "char"
# Session ID
CThostFtdcOpenAccountField["SessionID"] = "int"
# Customer name
CThostFtdcOpenAccountField["CustomerName"] = "string"
# ID document type
CThostFtdcOpenAccountField["IdCardType"] = "char"
# ID document number
CThostFtdcOpenAccountField["IdentifiedCardNo"] = "string"
# Gender
CThostFtdcOpenAccountField["Gender"] = "char"
# Country code
CThostFtdcOpenAccountField["CountryCode"] = "string"
# Customer type
CThostFtdcOpenAccountField["CustType"] = "char"
# Address
CThostFtdcOpenAccountField["Address"] = "string"
# Zip code
CThostFtdcOpenAccountField["ZipCode"] = "string"
# Telephone number
CThostFtdcOpenAccountField["Telephone"] = "string"
# Mobile phone
CThostFtdcOpenAccountField["MobilePhone"] = "string"
# Fax
CThostFtdcOpenAccountField["Fax"] = "string"
# E-mail
CThostFtdcOpenAccountField["EMail"] = "string"
# Fund account status
CThostFtdcOpenAccountField["MoneyAccountStatus"] = "char"
# Bank account
CThostFtdcOpenAccountField["BankAccount"] = "string"
# Bank password
CThostFtdcOpenAccountField["BankPassWord"] = "string"
# Investor account ID
CThostFtdcOpenAccountField["AccountID"] = "string"
# Futures password
CThostFtdcOpenAccountField["Password"] = "string"
# Install ID
CThostFtdcOpenAccountField["InstallID"] = "int"
# Verify-customer-ID-number flag
CThostFtdcOpenAccountField["VerifyCertNoFlag"] = "char"
# Currency code
CThostFtdcOpenAccountField["CurrencyID"] = "string"
# Cash exchange flag
CThostFtdcOpenAccountField["CashExchangeCode"] = "char"
# Digest
CThostFtdcOpenAccountField["Digest"] = "string"
# Bank account type
CThostFtdcOpenAccountField["BankAccType"] = "char"
# Channel flag
CThostFtdcOpenAccountField["DeviceID"] = "string"
# Futures unit account type
CThostFtdcOpenAccountField["BankSecuAccType"] = "char"
# Futures company code assigned by the bank
CThostFtdcOpenAccountField["BrokerIDByBank"] = "string"
# Futures unit account
CThostFtdcOpenAccountField["BankSecuAcc"] = "string"
# Bank password flag
CThostFtdcOpenAccountField["BankPwdFlag"] = "char"
# Futures fund password verify flag
CThostFtdcOpenAccountField["SecuPwdFlag"] = "char"
# Bank teller number
CThostFtdcOpenAccountField["OperNo"] = "string"
# Transaction ID
CThostFtdcOpenAccountField["TID"] = "int"
# User ID
CThostFtdcOpenAccountField["UserID"] = "string"
# Error code
CThostFtdcOpenAccountField["ErrorID"] = "int"
# Error message
CThostFtdcOpenAccountField["ErrorMsg"] = "string"
structDict['CThostFtdcOpenAccountField'] = CThostFtdcOpenAccountField
# Bank-futures account cancellation information
CThostFtdcCancelAccountField = {}
# Business function code
CThostFtdcCancelAccountField["TradeCode"] = "string"
# Bank code
CThostFtdcCancelAccountField["BankID"] = "string"
# Bank branch code
CThostFtdcCancelAccountField["BankBranchID"] = "string"
# Broker code
CThostFtdcCancelAccountField["BrokerID"] = "string"
# Broker branch code
CThostFtdcCancelAccountField["BrokerBranchID"] = "string"
# Trade date
CThostFtdcCancelAccountField["TradeDate"] = "string"
# Trade time
CThostFtdcCancelAccountField["TradeTime"] = "string"
# Bank serial number
CThostFtdcCancelAccountField["BankSerial"] = "string"
# Trading system date
CThostFtdcCancelAccountField["TradingDay"] = "string"
# Bank-futures platform message serial number
CThostFtdcCancelAccountField["PlateSerial"] = "int"
# Last fragment flag
CThostFtdcCancelAccountField["LastFragment"] = "char"
# Session ID
CThostFtdcCancelAccountField["SessionID"] = "int"
# Customer name
CThostFtdcCancelAccountField["CustomerName"] = "string"
# ID document type
CThostFtdcCancelAccountField["IdCardType"] = "char"
# ID document number
CThostFtdcCancelAccountField["IdentifiedCardNo"] = "string"
# Gender
CThostFtdcCancelAccountField["Gender"] = "char"
# Country code
CThostFtdcCancelAccountField["CountryCode"] = "string"
# Customer type
CThostFtdcCancelAccountField["CustType"] = "char"
# Address
CThostFtdcCancelAccountField["Address"] = "string"
# Zip code
CThostFtdcCancelAccountField["ZipCode"] = "string"
# Telephone number
CThostFtdcCancelAccountField["Telephone"] = "string"
# Mobile phone
CThostFtdcCancelAccountField["MobilePhone"] = "string"
# Fax
CThostFtdcCancelAccountField["Fax"] = "string"
# E-mail
CThostFtdcCancelAccountField["EMail"] = "string"
# Fund account status
CThostFtdcCancelAccountField["MoneyAccountStatus"] = "char"
# Bank account
CThostFtdcCancelAccountField["BankAccount"] = "string"
# Bank password
CThostFtdcCancelAccountField["BankPassWord"] = "string"
# Investor account ID
CThostFtdcCancelAccountField["AccountID"] = "string"
# Futures password
CThostFtdcCancelAccountField["Password"] = "string"
# Install ID
CThostFtdcCancelAccountField["InstallID"] = "int"
# Verify-customer-ID-number flag
CThostFtdcCancelAccountField["VerifyCertNoFlag"] = "char"
# Currency code
CThostFtdcCancelAccountField["CurrencyID"] = "string"
# Cash exchange flag
CThostFtdcCancelAccountField["CashExchangeCode"] = "char"
# Digest
CThostFtdcCancelAccountField["Digest"] = "string"
# Bank account type
CThostFtdcCancelAccountField["BankAccType"] = "char"
# Channel flag
CThostFtdcCancelAccountField["DeviceID"] = "string"
# Futures unit account type
CThostFtdcCancelAccountField["BankSecuAccType"] = "char"
# Futures company code assigned by the bank
CThostFtdcCancelAccountField["BrokerIDByBank"] = "string"
# Futures unit account
CThostFtdcCancelAccountField["BankSecuAcc"] = "string"
# Bank password flag
CThostFtdcCancelAccountField["BankPwdFlag"] = "char"
# Futures fund password verify flag
CThostFtdcCancelAccountField["SecuPwdFlag"] = "char"
# Bank teller number
CThostFtdcCancelAccountField["OperNo"] = "string"
# Transaction ID
CThostFtdcCancelAccountField["TID"] = "int"
# User ID
CThostFtdcCancelAccountField["UserID"] = "string"
# Error code
CThostFtdcCancelAccountField["ErrorID"] = "int"
# Error message
CThostFtdcCancelAccountField["ErrorMsg"] = "string"
structDict['CThostFtdcCancelAccountField'] = CThostFtdcCancelAccountField
# Bank-futures bank account change information
CThostFtdcChangeAccountField = {}
# Business function code
CThostFtdcChangeAccountField["TradeCode"] = "string"
# Bank code
CThostFtdcChangeAccountField["BankID"] = "string"
# Bank branch code
CThostFtdcChangeAccountField["BankBranchID"] = "string"
# Broker code
CThostFtdcChangeAccountField["BrokerID"] = "string"
# Broker branch code
CThostFtdcChangeAccountField["BrokerBranchID"] = "string"
# Trade date
CThostFtdcChangeAccountField["TradeDate"] = "string"
# Trade time
CThostFtdcChangeAccountField["TradeTime"] = "string"
# Bank serial number
CThostFtdcChangeAccountField["BankSerial"] = "string"
# Trading system date
CThostFtdcChangeAccountField["TradingDay"] = "string"
# Bank-futures platform message serial number
CThostFtdcChangeAccountField["PlateSerial"] = "int"
# Last fragment flag
CThostFtdcChangeAccountField["LastFragment"] = "char"
# Session ID
CThostFtdcChangeAccountField["SessionID"] = "int"
# Customer name
CThostFtdcChangeAccountField["CustomerName"] = "string"
# ID document type
CThostFtdcChangeAccountField["IdCardType"] = "char"
# ID document number
CThostFtdcChangeAccountField["IdentifiedCardNo"] = "string"
# Gender
CThostFtdcChangeAccountField["Gender"] = "char"
# Country code
CThostFtdcChangeAccountField["CountryCode"] = "string"
# Customer type
CThostFtdcChangeAccountField["CustType"] = "char"
# Address
CThostFtdcChangeAccountField["Address"] = "string"
# Zip code
CThostFtdcChangeAccountField["ZipCode"] = "string"
# Telephone number
CThostFtdcChangeAccountField["Telephone"] = "string"
# Mobile phone
CThostFtdcChangeAccountField["MobilePhone"] = "string"
# Fax
CThostFtdcChangeAccountField["Fax"] = "string"
# E-mail
CThostFtdcChangeAccountField["EMail"] = "string"
# Fund account status
CThostFtdcChangeAccountField["MoneyAccountStatus"] = "char"
# Bank account
CThostFtdcChangeAccountField["BankAccount"] = "string"
# Bank password
CThostFtdcChangeAccountField["BankPassWord"] = "string"
# New bank account
CThostFtdcChangeAccountField["NewBankAccount"] = "string"
# New bank password
CThostFtdcChangeAccountField["NewBankPassWord"] = "string"
# Investor account ID
CThostFtdcChangeAccountField["AccountID"] = "string"
# Futures password
CThostFtdcChangeAccountField["Password"] = "string"
# Bank account type
CThostFtdcChangeAccountField["BankAccType"] = "char"
# Install ID
CThostFtdcChangeAccountField["InstallID"] = "int"
# Verify-customer-ID-number flag
CThostFtdcChangeAccountField["VerifyCertNoFlag"] = "char"
# Currency code
CThostFtdcChangeAccountField["CurrencyID"] = "string"
# Futures company code assigned by the bank
CThostFtdcChangeAccountField["BrokerIDByBank"] = "string"
# Bank password flag
CThostFtdcChangeAccountField["BankPwdFlag"] = "char"
# Futures fund password verify flag
CThostFtdcChangeAccountField["SecuPwdFlag"] = "char"
# Transaction ID
CThostFtdcChangeAccountField["TID"] = "int"
# Digest
CThostFtdcChangeAccountField["Digest"] = "string"
# Error code
CThostFtdcChangeAccountField["ErrorID"] = "int"
# Error message
CThostFtdcChangeAccountField["ErrorMsg"] = "string"
structDict['CThostFtdcChangeAccountField'] = CThostFtdcChangeAccountField
# Secondary-agent operator bank-futures permission
CThostFtdcSecAgentACIDMapField = {}
# Broker code
CThostFtdcSecAgentACIDMapField["BrokerID"] = "string"
# User ID
CThostFtdcSecAgentACIDMapField["UserID"] = "string"
# Fund account
CThostFtdcSecAgentACIDMapField["AccountID"] = "string"
# Currency
CThostFtdcSecAgentACIDMapField["CurrencyID"] = "string"
# Overseas intermediary fund account
CThostFtdcSecAgentACIDMapField["BrokerSecAgentID"] = "string"
structDict['CThostFtdcSecAgentACIDMapField'] = CThostFtdcSecAgentACIDMapField
# Query of secondary-agent operator bank-futures permission
CThostFtdcQrySecAgentACIDMapField = {}
# Broker code
CThostFtdcQrySecAgentACIDMapField["BrokerID"] = "string"
# User ID
CThostFtdcQrySecAgentACIDMapField["UserID"] = "string"
# Fund account
CThostFtdcQrySecAgentACIDMapField["AccountID"] = "string"
# Currency
CThostFtdcQrySecAgentACIDMapField["CurrencyID"] = "string"
structDict['CThostFtdcQrySecAgentACIDMapField'] = CThostFtdcQrySecAgentACIDMapField
# Disaster-recovery center trading permission
CThostFtdcUserRightsAssignField = {}
# Application unit code
CThostFtdcUserRightsAssignField["BrokerID"] = "string"
# User ID
CThostFtdcUserRightsAssignField["UserID"] = "string"
# Trading center code
CThostFtdcUserRightsAssignField["DRIdentityID"] = "int"
structDict['CThostFtdcUserRightsAssignField'] = CThostFtdcUserRightsAssignField
# Whether the broker has trading permission at the indicated trading center
CThostFtdcBrokerUserRightAssignField = {}
# Application unit code
CThostFtdcBrokerUserRightAssignField["BrokerID"] = "string"
# Trading center code
CThostFtdcBrokerUserRightAssignField["DRIdentityID"] = "int"
# Tradeable flag
CThostFtdcBrokerUserRightAssignField["Tradeable"] = "int"
structDict['CThostFtdcBrokerUserRightAssignField'] = CThostFtdcBrokerUserRightAssignField
# Disaster-recovery transfer message
CThostFtdcDRTransferField = {}
# Origin trading center code
CThostFtdcDRTransferField["OrigDRIdentityID"] = "int"
# Destination trading center code
CThostFtdcDRTransferField["DestDRIdentityID"] = "int"
# Origin application unit code
CThostFtdcDRTransferField["OrigBrokerID"] = "string"
# Destination application unit code
CThostFtdcDRTransferField["DestBrokerID"] = "string"
structDict['CThostFtdcDRTransferField'] = CThostFtdcDRTransferField
# Fens user information
CThostFtdcFensUserInfoField = {}
# Broker code
CThostFtdcFensUserInfoField["BrokerID"] = "string"
# User ID
CThostFtdcFensUserInfoField["UserID"] = "string"
# Login mode
CThostFtdcFensUserInfoField["LoginMode"] = "char"
structDict['CThostFtdcFensUserInfoField'] = CThostFtdcFensUserInfoField
# Trading center the bank-futures transfer currently belongs to
CThostFtdcCurrTransferIdentityField = {}
# Trading center code
CThostFtdcCurrTransferIdentityField["IdentityID"] = "int"
structDict['CThostFtdcCurrTransferIdentityField'] = CThostFtdcCurrTransferIdentityField
# Login-forbidden user
CThostFtdcLoginForbiddenUserField = {}
# Broker code
CThostFtdcLoginForbiddenUserField["BrokerID"] = "string"
# User ID
CThostFtdcLoginForbiddenUserField["UserID"] = "string"
# IP address
CThostFtdcLoginForbiddenUserField["IPAddress"] = "string"
structDict['CThostFtdcLoginForbiddenUserField'] = CThostFtdcLoginForbiddenUserField
# Query of login-forbidden users
CThostFtdcQryLoginForbiddenUserField = {}
# Broker code
CThostFtdcQryLoginForbiddenUserField["BrokerID"] = "string"
# User ID
CThostFtdcQryLoginForbiddenUserField["UserID"] = "string"
structDict['CThostFtdcQryLoginForbiddenUserField'] = CThostFtdcQryLoginForbiddenUserField
# UDP multicast group information
CThostFtdcMulticastGroupInfoField = {}
# Multicast group IP address
CThostFtdcMulticastGroupInfoField["GroupIP"] = "string"
# Multicast group IP port
CThostFtdcMulticastGroupInfoField["GroupPort"] = "int"
# Source address
CThostFtdcMulticastGroupInfoField["SourceIP"] = "string"
structDict['CThostFtdcMulticastGroupInfoField'] = CThostFtdcMulticastGroupInfoField
# Basic reserve of a fund account
CThostFtdcTradingAccountReserveField = {}
# Broker code
CThostFtdcTradingAccountReserveField["BrokerID"] = "string"
# Investor account ID
CThostFtdcTradingAccountReserveField["AccountID"] = "string"
# Basic reserve
CThostFtdcTradingAccountReserveField["Reserve"] = "float"
# Currency code
CThostFtdcTradingAccountReserveField["CurrencyID"] = "string"
structDict['CThostFtdcTradingAccountReserveField'] = CThostFtdcTradingAccountReserveField
# DBF record
CThostFtdcDBFRecordField = {}
# DBF command type
CThostFtdcDBFRecordField["DBFComdType"] = "string"
# DBF time type
CThostFtdcDBFRecordField["DBFComTime"] = "string"
# DBF original serial number type
CThostFtdcDBFRecordField["DBFOComNo"] = "string"
# DBF serial number type
CThostFtdcDBFRecordField["DBFComNo"] = "string"
# DBF field name type
CThostFtdcDBFRecordField["DBFFdName1"] = "string"
# DBF field content type
CThostFtdcDBFRecordField["DBFFdContent1"] = "string"
# DBF field name type
CThostFtdcDBFRecordField["DBFFdName2"] = "string"
# DBF field content type
CThostFtdcDBFRecordField["DBFFdContent2"] = "string"
# DBF field name type
CThostFtdcDBFRecordField["DBFFdName3"] = "string"
# DBF field content type
CThostFtdcDBFRecordField["DBFFdContent3"] = "string"
# DBF field name type
CThostFtdcDBFRecordField["DBFFdName4"] = "string"
# DBF field content type
CThostFtdcDBFRecordField["DBFFdContent4"] = "string"
structDict['CThostFtdcDBFRecordField'] = CThostFtdcDBFRecordField
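# Illustrative sketch (not part of the original API): structDict maps each
# CTP struct name to its field-name -> declared-type dictionary, so type
# metadata can be looked up generically. The helper name below is
# hypothetical.
def _lookup_field_type(struct_name, field_name):
    """Return the declared type ("string", "int", "char" or "float") of a
    struct field, or None if the struct or field is unknown."""
    return structDict.get(struct_name, {}).get(field_name)

# Example: _lookup_field_type('CThostFtdcDBFRecordField', 'DBFComdType')
# evaluates to "string".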
| mit | 3,841,622,015,476,943,400 | 29.514254 | 111 | 0.817871 | false |
chen0510566/MissionPlanner | Lib/distutils/ccompiler.py | 50 | 49641 | """distutils.ccompiler
Contains CCompiler, an abstract base class that defines the interface
for the Distutils compiler abstraction model."""
__revision__ = "$Id$"
import sys
import os
import re
from distutils.errors import (CompileError, LinkError, UnknownFileError,
DistutilsPlatformError, DistutilsModuleError)
from distutils.spawn import spawn
from distutils.file_util import move_file
from distutils.dir_util import mkpath
from distutils.dep_util import newer_group
from distutils.util import split_quoted, execute
from distutils import log
_sysconfig = __import__('sysconfig')
def customize_compiler(compiler):
"""Do any platform-specific customization of a CCompiler instance.
Mainly needed on Unix, so we can plug in the information that
varies across Unices and is stored in Python's Makefile.
"""
if compiler.compiler_type == "unix":
(cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
_sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
'CCSHARED', 'LDSHARED', 'SO', 'AR',
'ARFLAGS')
if 'CC' in os.environ:
cc = os.environ['CC']
if 'CXX' in os.environ:
cxx = os.environ['CXX']
if 'LDSHARED' in os.environ:
ldshared = os.environ['LDSHARED']
if 'CPP' in os.environ:
cpp = os.environ['CPP']
else:
cpp = cc + " -E" # not always
if 'LDFLAGS' in os.environ:
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
if 'CFLAGS' in os.environ:
cflags = opt + ' ' + os.environ['CFLAGS']
ldshared = ldshared + ' ' + os.environ['CFLAGS']
if 'CPPFLAGS' in os.environ:
cpp = cpp + ' ' + os.environ['CPPFLAGS']
cflags = cflags + ' ' + os.environ['CPPFLAGS']
ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
if 'AR' in os.environ:
ar = os.environ['AR']
if 'ARFLAGS' in os.environ:
archiver = ar + ' ' + os.environ['ARFLAGS']
else:
archiver = ar + ' ' + ar_flags
cc_cmd = cc + ' ' + cflags
compiler.set_executables(
preprocessor=cpp,
compiler=cc_cmd,
compiler_so=cc_cmd + ' ' + ccshared,
compiler_cxx=cxx,
linker_so=ldshared,
linker_exe=cc,
archiver=archiver)
compiler.shared_lib_extension = so_ext
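# Hedged usage sketch (not part of the original module): customize_compiler()
# is normally applied to a freshly created compiler object before any
# compile/link call, as the distutils build commands do. new_compiler() is
# defined later in this module, so it is available by the time this runs.
def _customized_compiler_example():
    compiler = new_compiler()        # platform-default compiler instance
    customize_compiler(compiler)     # plug in Makefile/environment settings
    return compiler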
class CCompiler:
"""Abstract base class to define the interface that must be implemented
by real compiler classes. Also has some utility methods used by
several compiler classes.
The basic idea behind a compiler abstraction class is that each
instance can be used for all the compile/link steps in building a
single project. Thus, attributes common to all of those compile and
link steps -- include directories, macros to define, libraries to link
against, etc. -- are attributes of the compiler instance. To allow for
variability in how individual files are treated, most of those
attributes may be varied on a per-compilation or per-link basis.
"""
# 'compiler_type' is a class attribute that identifies this class. It
# keeps code that wants to know what kind of compiler it's dealing with
# from having to import all possible compiler classes just to do an
# 'isinstance'. In concrete CCompiler subclasses, 'compiler_type'
# should really, really be one of the keys of the 'compiler_class'
# dictionary (see below -- used by the 'new_compiler()' factory
# function) -- authors of new compiler interface classes are
# responsible for updating 'compiler_class'!
compiler_type = None
# XXX things not handled by this compiler abstraction model:
# * client can't provide additional options for a compiler,
# e.g. warning, optimization, debugging flags. Perhaps this
# should be the domain of concrete compiler abstraction classes
# (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
# class should have methods for the common ones.
    #   * can't completely override the include or library search
# path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
# I'm not sure how widely supported this is even by Unix
# compilers, much less on other platforms. And I'm even less
# sure how useful it is; maybe for cross-compiling, but
# support for that is a ways off. (And anyways, cross
# compilers probably have a dedicated binary with the
# right paths compiled in. I hope.)
# * can't do really freaky things with the library list/library
# dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
# different versions of libfoo.a in different locations. I
# think this is useless without the ability to null out the
# library search path anyways.
# Subclasses that rely on the standard filename generation methods
# implemented below should override these; see the comment near
# those methods ('object_filenames()' et. al.) for details:
src_extensions = None # list of strings
obj_extension = None # string
static_lib_extension = None
shared_lib_extension = None # string
static_lib_format = None # format string
shared_lib_format = None # prob. same as static_lib_format
exe_extension = None # string
# Default language settings. language_map is used to detect a source
# file or Extension target language, checking source filenames.
# language_order is used to detect the language precedence, when deciding
# what language to use when mixing source types. For example, if some
# extension has two files with ".c" extension, and one with ".cpp", it
# is still linked as c++.
language_map = {".c" : "c",
".cc" : "c++",
".cpp" : "c++",
".cxx" : "c++",
".m" : "objc",
}
language_order = ["c++", "objc", "c"]
def __init__ (self, verbose=0, dry_run=0, force=0):
self.dry_run = dry_run
self.force = force
self.verbose = verbose
# 'output_dir': a common output directory for object, library,
# shared object, and shared library files
self.output_dir = None
# 'macros': a list of macro definitions (or undefinitions). A
# macro definition is a 2-tuple (name, value), where the value is
# either a string or None (no explicit value). A macro
# undefinition is a 1-tuple (name,).
self.macros = []
# 'include_dirs': a list of directories to search for include files
self.include_dirs = []
# 'libraries': a list of libraries to include in any link
# (library names, not filenames: eg. "foo" not "libfoo.a")
self.libraries = []
# 'library_dirs': a list of directories to search for libraries
self.library_dirs = []
# 'runtime_library_dirs': a list of directories to search for
# shared libraries/objects at runtime
self.runtime_library_dirs = []
# 'objects': a list of object files (or similar, such as explicitly
# named library files) to include on any link
self.objects = []
for key in self.executables.keys():
self.set_executable(key, self.executables[key])
def set_executables(self, **args):
"""Define the executables (and options for them) that will be run
to perform the various stages of compilation. The exact set of
executables that may be specified here depends on the compiler
class (via the 'executables' class attribute), but most will have:
compiler the C/C++ compiler
linker_so linker used to create shared objects and libraries
linker_exe linker used to create binary executables
archiver static library creator
On platforms with a command-line (Unix, DOS/Windows), each of these
is a string that will be split into executable name and (optional)
list of arguments. (Splitting the string is done similarly to how
Unix shells operate: words are delimited by spaces, but quotes and
backslashes can override this. See
'distutils.util.split_quoted()'.)
"""
# Note that some CCompiler implementation classes will define class
# attributes 'cpp', 'cc', etc. with hard-coded executable names;
# this is appropriate when a compiler class is for exactly one
# compiler/OS combination (eg. MSVCCompiler). Other compiler
# classes (UnixCCompiler, in particular) are driven by information
# discovered at run-time, since there are many different ways to do
# basically the same things with Unix C compilers.
for key in args.keys():
if key not in self.executables:
raise ValueError, \
"unknown executable '%s' for class %s" % \
(key, self.__class__.__name__)
self.set_executable(key, args[key])
def set_executable(self, key, value):
if isinstance(value, str):
setattr(self, key, split_quoted(value))
else:
setattr(self, key, value)
def _find_macro(self, name):
i = 0
for defn in self.macros:
if defn[0] == name:
return i
i = i + 1
return None
def _check_macro_definitions(self, definitions):
"""Ensures that every element of 'definitions' is a valid macro
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
nothing if all definitions are OK, raise TypeError otherwise.
"""
for defn in definitions:
if not (isinstance(defn, tuple) and
(len (defn) == 1 or
(len (defn) == 2 and
(isinstance(defn[1], str) or defn[1] is None))) and
isinstance(defn[0], str)):
raise TypeError, \
("invalid macro definition '%s': " % defn) + \
"must be tuple (string,), (string, string), or " + \
"(string, None)"
# -- Bookkeeping methods -------------------------------------------
def define_macro(self, name, value=None):
"""Define a preprocessor macro for all compilations driven by this
compiler object. The optional parameter 'value' should be a
string; if it is not supplied, then the macro will be defined
without an explicit value and the exact outcome depends on the
compiler used (XXX true? does ANSI say anything about this?)
"""
# Delete from the list of macro definitions/undefinitions if
# already there (so that this one will take precedence).
i = self._find_macro (name)
if i is not None:
del self.macros[i]
defn = (name, value)
self.macros.append (defn)
def undefine_macro(self, name):
"""Undefine a preprocessor macro for all compilations driven by
this compiler object. If the same macro is defined by
'define_macro()' and undefined by 'undefine_macro()' the last call
takes precedence (including multiple redefinitions or
undefinitions). If the macro is redefined/undefined on a
per-compilation basis (ie. in the call to 'compile()'), then that
takes precedence.
"""
# Delete from the list of macro definitions/undefinitions if
# already there (so that this one will take precedence).
i = self._find_macro (name)
if i is not None:
del self.macros[i]
undefn = (name,)
self.macros.append (undefn)
def add_include_dir(self, dir):
"""Add 'dir' to the list of directories that will be searched for
header files. The compiler is instructed to search directories in
the order in which they are supplied by successive calls to
'add_include_dir()'.
"""
self.include_dirs.append (dir)
def set_include_dirs(self, dirs):
"""Set the list of directories that will be searched to 'dirs' (a
list of strings). Overrides any preceding calls to
        'add_include_dir()'; subsequent calls to 'add_include_dir()' add
to the list passed to 'set_include_dirs()'. This does not affect
any list of standard include directories that the compiler may
search by default.
"""
self.include_dirs = dirs[:]
def add_library(self, libname):
"""Add 'libname' to the list of libraries that will be included in
all links driven by this compiler object. Note that 'libname'
should *not* be the name of a file containing a library, but the
name of the library itself: the actual filename will be inferred by
the linker, the compiler, or the compiler class (depending on the
platform).
The linker will be instructed to link against libraries in the
order they were supplied to 'add_library()' and/or
'set_libraries()'. It is perfectly valid to duplicate library
names; the linker will be instructed to link against libraries as
many times as they are mentioned.
"""
self.libraries.append (libname)
def set_libraries(self, libnames):
"""Set the list of libraries to be included in all links driven by
this compiler object to 'libnames' (a list of strings). This does
not affect any standard system libraries that the linker may
include by default.
"""
self.libraries = libnames[:]
def add_library_dir(self, dir):
"""Add 'dir' to the list of directories that will be searched for
libraries specified to 'add_library()' and 'set_libraries()'. The
linker will be instructed to search for libraries in the order they
are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
"""
self.library_dirs.append(dir)
def set_library_dirs(self, dirs):
"""Set the list of library search directories to 'dirs' (a list of
strings). This does not affect any standard library search path
that the linker may search by default.
"""
self.library_dirs = dirs[:]
def add_runtime_library_dir(self, dir):
"""Add 'dir' to the list of directories that will be searched for
shared libraries at runtime.
"""
self.runtime_library_dirs.append(dir)
def set_runtime_library_dirs(self, dirs):
"""Set the list of directories to search for shared libraries at
runtime to 'dirs' (a list of strings). This does not affect any
standard search path that the runtime linker may search by
default.
"""
self.runtime_library_dirs = dirs[:]
def add_link_object(self, object):
"""Add 'object' to the list of object files (or analogues, such as
explicitly named library files or the output of "resource
compilers") to be included in every link driven by this compiler
object.
"""
self.objects.append(object)
def set_link_objects(self, objects):
"""Set the list of object files (or analogues) to be included in
every link to 'objects'. This does not affect any standard object
files that the linker may include by default (such as system
libraries).
"""
self.objects = objects[:]
# -- Private utility methods --------------------------------------
# (here for the convenience of subclasses)
# Helper method to prep compiler in subclass compile() methods
def _setup_compile(self, outdir, macros, incdirs, sources, depends,
extra):
"""Process arguments and decide which source files to compile."""
if outdir is None:
outdir = self.output_dir
elif not isinstance(outdir, str):
raise TypeError, "'output_dir' must be a string or None"
if macros is None:
macros = self.macros
elif isinstance(macros, list):
macros = macros + (self.macros or [])
else:
raise TypeError, "'macros' (if supplied) must be a list of tuples"
if incdirs is None:
incdirs = self.include_dirs
elif isinstance(incdirs, (list, tuple)):
incdirs = list(incdirs) + (self.include_dirs or [])
else:
raise TypeError, \
"'include_dirs' (if supplied) must be a list of strings"
if extra is None:
extra = []
# Get the list of expected output (object) files
objects = self.object_filenames(sources,
strip_dir=0,
output_dir=outdir)
assert len(objects) == len(sources)
pp_opts = gen_preprocess_options(macros, incdirs)
build = {}
for i in range(len(sources)):
src = sources[i]
obj = objects[i]
ext = os.path.splitext(src)[1]
self.mkpath(os.path.dirname(obj))
build[obj] = (src, ext)
return macros, objects, extra, pp_opts, build
def _get_cc_args(self, pp_opts, debug, before):
# works for unixccompiler, emxccompiler, cygwinccompiler
cc_args = pp_opts + ['-c']
if debug:
cc_args[:0] = ['-g']
if before:
cc_args[:0] = before
return cc_args
def _fix_compile_args(self, output_dir, macros, include_dirs):
"""Typecheck and fix-up some of the arguments to the 'compile()'
method, and return fixed-up values. Specifically: if 'output_dir'
is None, replaces it with 'self.output_dir'; ensures that 'macros'
is a list, and augments it with 'self.macros'; ensures that
'include_dirs' is a list, and augments it with 'self.include_dirs'.
Guarantees that the returned values are of the correct type,
i.e. for 'output_dir' either string or None, and for 'macros' and
'include_dirs' either list or None.
"""
if output_dir is None:
output_dir = self.output_dir
elif not isinstance(output_dir, str):
raise TypeError, "'output_dir' must be a string or None"
if macros is None:
macros = self.macros
elif isinstance(macros, list):
macros = macros + (self.macros or [])
else:
raise TypeError, "'macros' (if supplied) must be a list of tuples"
if include_dirs is None:
include_dirs = self.include_dirs
elif isinstance(include_dirs, (list, tuple)):
include_dirs = list (include_dirs) + (self.include_dirs or [])
else:
raise TypeError, \
"'include_dirs' (if supplied) must be a list of strings"
return output_dir, macros, include_dirs
def _fix_object_args(self, objects, output_dir):
"""Typecheck and fix up some arguments supplied to various methods.
Specifically: ensure that 'objects' is a list; if output_dir is
None, replace with self.output_dir. Return fixed versions of
'objects' and 'output_dir'.
"""
if not isinstance(objects, (list, tuple)):
raise TypeError, \
"'objects' must be a list or tuple of strings"
objects = list (objects)
if output_dir is None:
output_dir = self.output_dir
elif not isinstance(output_dir, str):
raise TypeError, "'output_dir' must be a string or None"
return (objects, output_dir)
def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
"""Typecheck and fix up some of the arguments supplied to the
'link_*' methods. Specifically: ensure that all arguments are
lists, and augment them with their permanent versions
(eg. 'self.libraries' augments 'libraries'). Return a tuple with
fixed versions of all arguments.
"""
if libraries is None:
libraries = self.libraries
elif isinstance(libraries, (list, tuple)):
libraries = list (libraries) + (self.libraries or [])
else:
raise TypeError, \
"'libraries' (if supplied) must be a list of strings"
if library_dirs is None:
library_dirs = self.library_dirs
elif isinstance(library_dirs, (list, tuple)):
library_dirs = list (library_dirs) + (self.library_dirs or [])
else:
raise TypeError, \
"'library_dirs' (if supplied) must be a list of strings"
if runtime_library_dirs is None:
runtime_library_dirs = self.runtime_library_dirs
elif isinstance(runtime_library_dirs, (list, tuple)):
runtime_library_dirs = (list (runtime_library_dirs) +
(self.runtime_library_dirs or []))
else:
raise TypeError, \
"'runtime_library_dirs' (if supplied) " + \
"must be a list of strings"
return (libraries, library_dirs, runtime_library_dirs)
def _need_link(self, objects, output_file):
"""Return true if we need to relink the files listed in 'objects'
to recreate 'output_file'.
"""
if self.force:
return 1
else:
if self.dry_run:
newer = newer_group (objects, output_file, missing='newer')
else:
newer = newer_group (objects, output_file)
return newer
def detect_language(self, sources):
"""Detect the language of a given file, or list of files. Uses
language_map, and language_order to do the job.
"""
if not isinstance(sources, list):
sources = [sources]
lang = None
index = len(self.language_order)
for source in sources:
base, ext = os.path.splitext(source)
extlang = self.language_map.get(ext)
try:
extindex = self.language_order.index(extlang)
if extindex < index:
lang = extlang
index = extindex
except ValueError:
pass
return lang
# -- Worker methods ------------------------------------------------
# (must be implemented by subclasses)
def preprocess(self, source, output_file=None, macros=None,
include_dirs=None, extra_preargs=None, extra_postargs=None):
"""Preprocess a single C/C++ source file, named in 'source'.
Output will be written to file named 'output_file', or stdout if
'output_file' not supplied. 'macros' is a list of macro
definitions as for 'compile()', which will augment the macros set
with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a
list of directory names that will be added to the default list.
Raises PreprocessError on failure.
"""
pass
def compile(self, sources, output_dir=None, macros=None,
include_dirs=None, debug=0, extra_preargs=None,
extra_postargs=None, depends=None):
"""Compile one or more source files.
'sources' must be a list of filenames, most likely C/C++
files, but in reality anything that can be handled by a
particular compiler and compiler class (eg. MSVCCompiler can
handle resource files in 'sources'). Return a list of object
filenames, one per source filename in 'sources'. Depending on
the implementation, not all source files will necessarily be
compiled, but all corresponding object filenames will be
returned.
If 'output_dir' is given, object files will be put under it, while
retaining their original path component. That is, "foo/bar.c"
normally compiles to "foo/bar.o" (for a Unix implementation); if
'output_dir' is "build", then it would compile to
"build/foo/bar.o".
'macros', if given, must be a list of macro definitions. A macro
definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
The former defines a macro; if the value is None, the macro is
defined without an explicit value. The 1-tuple case undefines a
macro. Later definitions/redefinitions/ undefinitions take
precedence.
'include_dirs', if given, must be a list of strings, the
directories to add to the default include file search path for this
compilation only.
'debug' is a boolean; if true, the compiler will be instructed to
output debug symbols in (or alongside) the object file(s).
        'extra_preargs' and 'extra_postargs' are implementation-dependent.
        On platforms that have the notion of a command-line (e.g. Unix,
        DOS/Windows), they are most likely lists of strings: extra
        command-line arguments to prepend/append to the compiler command
line. On other platforms, consult the implementation class
documentation. In any event, they are intended as an escape hatch
for those occasions when the abstract compiler framework doesn't
cut the mustard.
'depends', if given, is a list of filenames that all targets
depend on. If a source file is older than any file in
depends, then the source file will be recompiled. This
supports dependency tracking, but only at a coarse
granularity.
Raises CompileError on failure.
"""
# A concrete compiler class can either override this method
# entirely or implement _compile().
macros, objects, extra_postargs, pp_opts, build = \
self._setup_compile(output_dir, macros, include_dirs, sources,
depends, extra_postargs)
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
# Return *all* object filenames, not just the ones we just built.
return objects
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
"""Compile 'src' to product 'obj'."""
# A concrete compiler class that does not override compile()
# should implement _compile().
pass
def create_static_lib(self, objects, output_libname, output_dir=None,
debug=0, target_lang=None):
"""Link a bunch of stuff together to create a static library file.
The "bunch of stuff" consists of the list of object files supplied
as 'objects', the extra object files supplied to
'add_link_object()' and/or 'set_link_objects()', the libraries
supplied to 'add_library()' and/or 'set_libraries()', and the
libraries supplied as 'libraries' (if any).
'output_libname' should be a library name, not a filename; the
filename will be inferred from the library name. 'output_dir' is
the directory where the library file will be put.
'debug' is a boolean; if true, debugging information will be
included in the library (note that on most platforms, it is the
compile step where this matters: the 'debug' flag is included here
just for consistency).
'target_lang' is the target language for which the given objects
are being compiled. This allows specific linkage time treatment of
certain languages.
Raises LibError on failure.
"""
pass
# values for target_desc parameter in link()
SHARED_OBJECT = "shared_object"
SHARED_LIBRARY = "shared_library"
EXECUTABLE = "executable"
def link(self, target_desc, objects, output_filename, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
"""Link a bunch of stuff together to create an executable or
shared library file.
The "bunch of stuff" consists of the list of object files supplied
as 'objects'. 'output_filename' should be a filename. If
'output_dir' is supplied, 'output_filename' is relative to it
(i.e. 'output_filename' can provide directory components if
needed).
'libraries' is a list of libraries to link against. These are
library names, not filenames, since they're translated into
filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
on Unix and "foo.lib" on DOS/Windows). However, they can include a
directory component, which means the linker will look in that
specific directory rather than searching all the normal locations.
'library_dirs', if supplied, should be a list of directories to
search for libraries that were specified as bare library names
(ie. no directory component). These are on top of the system
default and those supplied to 'add_library_dir()' and/or
'set_library_dirs()'. 'runtime_library_dirs' is a list of
directories that will be embedded into the shared library and used
to search for other shared libraries that *it* depends on at
run-time. (This may only be relevant on Unix.)
'export_symbols' is a list of symbols that the shared library will
export. (This appears to be relevant only on Windows.)
'debug' is as for 'compile()' and 'create_static_lib()', with the
slight distinction that it actually matters on most platforms (as
opposed to 'create_static_lib()', which includes a 'debug' flag
mostly for form's sake).
'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
of course that they supply command-line arguments for the
particular linker being used).
'target_lang' is the target language for which the given objects
are being compiled. This allows specific linkage time treatment of
certain languages.
Raises LinkError on failure.
"""
raise NotImplementedError
# Old 'link_*()' methods, rewritten to use the new 'link()' method.
def link_shared_lib(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None,
runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None,
build_temp=None, target_lang=None):
self.link(CCompiler.SHARED_LIBRARY, objects,
self.library_filename(output_libname, lib_type='shared'),
output_dir,
libraries, library_dirs, runtime_library_dirs,
export_symbols, debug,
extra_preargs, extra_postargs, build_temp, target_lang)
def link_shared_object(self, objects, output_filename, output_dir=None,
libraries=None, library_dirs=None,
runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None,
build_temp=None, target_lang=None):
self.link(CCompiler.SHARED_OBJECT, objects,
output_filename, output_dir,
libraries, library_dirs, runtime_library_dirs,
export_symbols, debug,
extra_preargs, extra_postargs, build_temp, target_lang)
def link_executable(self, objects, output_progname, output_dir=None,
libraries=None, library_dirs=None,
runtime_library_dirs=None, debug=0, extra_preargs=None,
extra_postargs=None, target_lang=None):
self.link(CCompiler.EXECUTABLE, objects,
self.executable_filename(output_progname), output_dir,
libraries, library_dirs, runtime_library_dirs, None,
debug, extra_preargs, extra_postargs, None, target_lang)
# -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function; there is
# no appropriate default implementation so subclasses should
# implement all of these.
def library_dir_option(self, dir):
"""Return the compiler option to add 'dir' to the list of
directories searched for libraries.
"""
raise NotImplementedError
def runtime_library_dir_option(self, dir):
"""Return the compiler option to add 'dir' to the list of
directories searched for runtime libraries.
"""
raise NotImplementedError
def library_option(self, lib):
"""Return the compiler option to add 'dir' to the list of libraries
linked into the shared library or executable.
"""
raise NotImplementedError
def has_function(self, funcname, includes=None, include_dirs=None,
libraries=None, library_dirs=None):
"""Return a boolean indicating whether funcname is supported on
the current platform. The optional arguments can be used to
augment the compilation environment.
"""
# this can't be included at module scope because it tries to
# import math which might not be available at that point - maybe
# the necessary logic should just be inlined?
import tempfile
if includes is None:
includes = []
if include_dirs is None:
include_dirs = []
if libraries is None:
libraries = []
if library_dirs is None:
library_dirs = []
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
f = os.fdopen(fd, "w")
try:
for incl in includes:
f.write("""#include "%s"\n""" % incl)
f.write("""\
main (int argc, char **argv) {
%s();
}
""" % funcname)
finally:
f.close()
try:
objects = self.compile([fname], include_dirs=include_dirs)
except CompileError:
return False
try:
self.link_executable(objects, "a.out",
libraries=libraries,
library_dirs=library_dirs)
except (LinkError, TypeError):
return False
return True
def find_library_file (self, dirs, lib, debug=0):
"""Search the specified list of directories for a static or shared
library file 'lib' and return the full path to that file. If
'debug' true, look for a debugging version (if that makes sense on
the current platform). Return None if 'lib' wasn't found in any of
the specified directories.
"""
raise NotImplementedError
# -- Filename generation methods -----------------------------------
# The default implementation of the filename generating methods are
# prejudiced towards the Unix/DOS/Windows view of the world:
# * object files are named by replacing the source file extension
# (eg. .c/.cpp -> .o/.obj)
# * library files (shared or static) are named by plugging the
# library name and extension into a format string, eg.
# "lib%s.%s" % (lib_name, ".a") for Unix static libraries
# * executables are named by appending an extension (possibly
# empty) to the program name: eg. progname + ".exe" for
# Windows
#
# To reduce redundant code, these methods expect to find
# several attributes in the current object (presumably defined
# as class attributes):
# * src_extensions -
# list of C/C++ source file extensions, eg. ['.c', '.cpp']
# * obj_extension -
# object file extension, eg. '.o' or '.obj'
# * static_lib_extension -
# extension for static library files, eg. '.a' or '.lib'
# * shared_lib_extension -
# extension for shared library/object files, eg. '.so', '.dll'
# * static_lib_format -
# format string for generating static library filenames,
# eg. 'lib%s.%s' or '%s.%s'
# * shared_lib_format
# format string for generating shared library filenames
# (probably same as static_lib_format, since the extension
# is one of the intended parameters to the format string)
# * exe_extension -
# extension for executable files, eg. '' or '.exe'
def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
if output_dir is None:
output_dir = ''
obj_names = []
for src_name in source_filenames:
base, ext = os.path.splitext(src_name)
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if ext not in self.src_extensions:
raise UnknownFileError, \
"unknown file type '%s' (from '%s')" % (ext, src_name)
if strip_dir:
base = os.path.basename(base)
obj_names.append(os.path.join(output_dir,
base + self.obj_extension))
return obj_names
def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
assert output_dir is not None
if strip_dir:
basename = os.path.basename (basename)
return os.path.join(output_dir, basename + self.shared_lib_extension)
def executable_filename(self, basename, strip_dir=0, output_dir=''):
assert output_dir is not None
if strip_dir:
basename = os.path.basename (basename)
return os.path.join(output_dir, basename + (self.exe_extension or ''))
def library_filename(self, libname, lib_type='static', # or 'shared'
strip_dir=0, output_dir=''):
assert output_dir is not None
if lib_type not in ("static", "shared", "dylib"):
raise ValueError, "'lib_type' must be \"static\", \"shared\" or \"dylib\""
fmt = getattr(self, lib_type + "_lib_format")
ext = getattr(self, lib_type + "_lib_extension")
dir, base = os.path.split (libname)
filename = fmt % (base, ext)
if strip_dir:
dir = ''
return os.path.join(output_dir, dir, filename)
# -- Utility methods -----------------------------------------------
def announce(self, msg, level=1):
log.debug(msg)
def debug_print(self, msg):
from distutils.debug import DEBUG
if DEBUG:
print msg
def warn(self, msg):
sys.stderr.write("warning: %s\n" % msg)
def execute(self, func, args, msg=None, level=1):
execute(func, args, msg, self.dry_run)
def spawn(self, cmd):
spawn(cmd, dry_run=self.dry_run)
def move_file(self, src, dst):
return move_file(src, dst, dry_run=self.dry_run)
def mkpath(self, name, mode=0777):
mkpath(name, mode, dry_run=self.dry_run)
# class CCompiler
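# Hedged sketch (not part of the original module): detect_language() works on
# filename extensions alone, so it can be demonstrated without compiling
# anything; mixing C and C++ sources resolves to "c++" per language_order.
def _detect_language_example():
    cc = new_compiler()              # factory defined further below
    return cc.detect_language(['foo.c', 'bar.cpp'])   # -> 'c++'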
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
# type for that platform. Keys are interpreted as re match
# patterns. Order is important; platform mappings are preferred over
# OS names.
_default_compilers = (
# Platform string mappings
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
('os2emx', 'emx'),
# OS name mappings
('posix', 'unix'),
('nt', 'msvc'),
)
def get_default_compiler(osname=None, platform=None):
""" Determine the default compiler to use for the given platform.
osname should be one of the standard Python OS names (i.e. the
ones returned by os.name) and platform the common value
returned by sys.platform for the platform in question.
The default values are os.name and sys.platform in case the
parameters are not given.
"""
if osname is None:
osname = os.name
if platform is None:
platform = sys.platform
for pattern, compiler in _default_compilers:
if re.match(pattern, platform) is not None or \
re.match(pattern, osname) is not None:
return compiler
# Default to Unix compiler
return 'unix'
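# Hedged worked examples of the mapping above (results depend only on the
# arguments passed, not on the machine actually running this):
def _default_compiler_examples():
    # 'cygwin.*' platform patterns win over the generic 'posix' OS name.
    assert get_default_compiler('posix', 'cygwin') == 'unix'
    assert get_default_compiler('nt', 'win32') == 'msvc'
    assert get_default_compiler('posix', 'linux2') == 'unix'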
# Map compiler types to (module_name, class_name) pairs -- ie. where to
# find the code that implements an interface to this compiler. (The module
# is assumed to be in the 'distutils' package.)
compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler',
"standard UNIX-style compiler"),
'msvc': ('msvccompiler', 'MSVCCompiler',
"Microsoft Visual C++"),
'cygwin': ('cygwinccompiler', 'CygwinCCompiler',
"Cygwin port of GNU C Compiler for Win32"),
'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
"Mingw32 port of GNU C Compiler for Win32"),
'bcpp': ('bcppcompiler', 'BCPPCompiler',
"Borland C++ Compiler"),
'emx': ('emxccompiler', 'EMXCCompiler',
"EMX port of GNU C Compiler for OS/2"),
}
def show_compilers():
"""Print list of available compilers (used by the "--help-compiler"
options to "build", "build_ext", "build_clib").
"""
# XXX this "knows" that the compiler option it's describing is
# "--compiler", which just happens to be the case for the three
# commands that use it.
from distutils.fancy_getopt import FancyGetopt
compilers = []
for compiler in compiler_class.keys():
compilers.append(("compiler="+compiler, None,
compiler_class[compiler][2]))
compilers.sort()
pretty_printer = FancyGetopt(compilers)
pretty_printer.print_help("List of available compilers:")
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
"""Generate an instance of some CCompiler subclass for the supplied
platform/compiler combination. 'plat' defaults to 'os.name'
(eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
for that platform. Currently only 'posix' and 'nt' are supported, and
the default compilers are "traditional Unix interface" (UnixCCompiler
class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
possible to ask for a Unix compiler object under Windows, and a
Microsoft compiler object under Unix -- if you supply a value for
'compiler', 'plat' is ignored.
"""
if plat is None:
plat = os.name
try:
if compiler is None:
compiler = get_default_compiler(plat)
(module_name, class_name, long_description) = compiler_class[compiler]
except KeyError:
msg = "don't know how to compile C/C++ code on platform '%s'" % plat
if compiler is not None:
msg = msg + " with '%s' compiler" % compiler
raise DistutilsPlatformError, msg
try:
module_name = "distutils." + module_name
__import__ (module_name)
module = sys.modules[module_name]
klass = vars(module)[class_name]
except ImportError:
raise DistutilsModuleError, \
"can't compile C/C++ code: unable to load module '%s'" % \
module_name
except KeyError:
raise DistutilsModuleError, \
("can't compile C/C++ code: unable to find class '%s' " +
"in module '%s'") % (class_name, module_name)
# XXX The None is necessary to preserve backwards compatibility
# with classes that expect verbose to be the first positional
# argument.
return klass(None, dry_run, force)
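# Hedged end-to-end sketch of the factory (the file names below are
# hypothetical and nothing runs at import time):
def _build_shared_object_example():
    cc = new_compiler(verbose=1)
    customize_compiler(cc)                       # pick up CC/CFLAGS etc.
    objs = cc.compile(['demo.c'], output_dir='build',
                      macros=[('NDEBUG', None)], include_dirs=['include'])
    cc.link_shared_object(objs, 'demo.so', output_dir='build',
                          libraries=['m'])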
def gen_preprocess_options(macros, include_dirs):
"""Generate C pre-processor options (-D, -U, -I) as used by at least
two types of compilers: the typical Unix compiler and Visual C++.
'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
means undefine (-U) macro 'name', and (name,value) means define (-D)
macro 'name' to 'value'. 'include_dirs' is just a list of directory
names to be added to the header file search path (-I). Returns a list
of command-line options suitable for either Unix compilers or Visual
C++.
"""
# XXX it would be nice (mainly aesthetic, and so we don't generate
# stupid-looking command lines) to go over 'macros' and eliminate
# redundant definitions/undefinitions (ie. ensure that only the
# latest mention of a particular macro winds up on the command
# line). I don't think it's essential, though, since most (all?)
# Unix C compilers only pay attention to the latest -D or -U
# mention of a macro on their command line. Similar situation for
# 'include_dirs'. I'm punting on both for now. Anyways, weeding out
# redundancies like this should probably be the province of
# CCompiler, since the data structures used are inherited from it
# and therefore common to all CCompiler classes.
pp_opts = []
for macro in macros:
if not (isinstance(macro, tuple) and
1 <= len (macro) <= 2):
raise TypeError, \
("bad macro definition '%s': " +
"each element of 'macros' list must be a 1- or 2-tuple") % \
macro
if len (macro) == 1: # undefine this macro
pp_opts.append ("-U%s" % macro[0])
elif len (macro) == 2:
if macro[1] is None: # define with no explicit value
pp_opts.append ("-D%s" % macro[0])
else:
# XXX *don't* need to be clever about quoting the
# macro value here, because we're going to avoid the
# shell at all costs when we spawn the command!
pp_opts.append ("-D%s=%s" % macro)
for dir in include_dirs:
pp_opts.append ("-I%s" % dir)
return pp_opts
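# Hedged worked example of the option generation above:
def _gen_preprocess_options_example():
    opts = gen_preprocess_options(
        [('NDEBUG', None), ('VERSION', '2'), ('OLD',)], ['include'])
    assert opts == ['-DNDEBUG', '-DVERSION=2', '-UOLD', '-Iinclude']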
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
"""Generate linker options for searching library directories and
linking with specific libraries.
'libraries' and 'library_dirs' are, respectively, lists of library names
(not filenames!) and search directories. Returns a list of command-line
    options suitable for use with the given compiler instance.
"""
lib_opts = []
for dir in library_dirs:
lib_opts.append(compiler.library_dir_option(dir))
for dir in runtime_library_dirs:
opt = compiler.runtime_library_dir_option(dir)
if isinstance(opt, list):
lib_opts.extend(opt)
else:
lib_opts.append(opt)
# XXX it's important that we *not* remove redundant library mentions!
# sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
# resolve all symbols. I just hope we never have to say "-lfoo obj.o
# -lbar" to get things to work -- that's certainly a possibility, but a
# pretty nasty way to arrange your C code.
for lib in libraries:
lib_dir, lib_name = os.path.split(lib)
if lib_dir != '':
lib_file = compiler.find_library_file([lib_dir], lib_name)
if lib_file is not None:
lib_opts.append(lib_file)
else:
compiler.warn("no library file corresponding to "
"'%s' found (skipping)" % lib)
else:
lib_opts.append(compiler.library_option(lib))
return lib_opts
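# Hedged sketch: with a Unix-style compiler the call below yields something
# like ['-Lbuild/lib', '-R/opt/lib', '-lfoo'] (the runtime-dir option
# spelling varies by compiler class and platform).
def _gen_lib_options_example():
    cc = new_compiler()
    return gen_lib_options(cc, ['build/lib'], ['/opt/lib'], ['foo'])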
| gpl-3.0 | -1,847,854,513,878,652,000 | 41.354585 | 86 | 0.590742 | false |
michaelkuty/django-oscar | tests/integration/basket/form_tests.py | 45 | 3927 | from django.test import TestCase
from django.conf import settings
import mock
from oscar.apps.basket import forms
from oscar.test import factories
class TestBasketLineForm(TestCase):
def setUp(self):
self.basket = factories.create_basket()
self.line = self.basket.all_lines()[0]
def mock_availability_return_value(self, is_available, reason=''):
policy = self.line.purchase_info.availability
policy.is_purchase_permitted = mock.MagicMock(
return_value=(is_available, reason))
def build_form(self, quantity=None):
if quantity is None:
quantity = self.line.quantity
return forms.BasketLineForm(
strategy=self.basket.strategy,
data={'quantity': quantity},
instance=self.line)
def test_enforces_availability_policy_for_valid_quantities(self):
self.mock_availability_return_value(True)
form = self.build_form()
self.assertTrue(form.is_valid())
def test_enforces_availability_policy_for_invalid_quantities(self):
self.mock_availability_return_value(False, "Some reason")
form = self.build_form()
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['quantity'], ['Some reason'])
def test_skips_availability_policy_for_zero_quantities(self):
self.mock_availability_return_value(True)
form = self.build_form(quantity=0)
self.assertTrue(form.is_valid())
def test_enforces_max_line_quantity(self):
invalid_qty = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD + 1
form = self.build_form(quantity=invalid_qty)
self.assertFalse(form.is_valid())
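# Hedged helper sketch (not in the original tests): the same mocking pattern
# can force any line to look unavailable; all names below are hypothetical.
def make_unavailable_line_form(basket, line, reason='Out of stock'):
    policy = line.purchase_info.availability
    policy.is_purchase_permitted = mock.MagicMock(return_value=(False, reason))
    return forms.BasketLineForm(strategy=basket.strategy,
                                data={'quantity': line.quantity},
                                instance=line)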
class TestAddToBasketForm(TestCase):
def test_allows_a_product_quantity_to_be_increased(self):
basket = factories.create_basket()
product = basket.all_lines()[0].product
# Add more of the same product
data = {'quantity': 1}
form = forms.AddToBasketForm(
basket=basket, product=product, data=data)
self.assertTrue(form.is_valid())
    def test_rejects_a_negative_quantity(self):
basket = factories.create_basket()
product = basket.all_lines()[0].product
        # Try to add a negative quantity of the same product
data = {'quantity': -1}
form = forms.AddToBasketForm(
basket=basket, product=product, data=data)
self.assertFalse(form.is_valid())
def test_checks_if_purchase_is_permitted(self):
basket = factories.BasketFactory()
product = factories.ProductFactory()
# Build a 4-level mock monster so we can force the return value of
# whether the product is available to buy. This is a serious code smell
# and needs to be remedied.
info = mock.Mock()
info.availability = mock.Mock()
info.availability.is_purchase_permitted = mock.Mock(
return_value=(False, "Not on your nelly!"))
basket.strategy.fetch_for_product = mock.Mock(
return_value=info)
data = {'quantity': 1}
form = forms.AddToBasketForm(
basket=basket, product=product, data=data)
self.assertFalse(form.is_valid())
self.assertEqual('Not on your nelly!', form.errors['__all__'][0])
def test_mixed_currency_baskets_are_not_permitted(self):
# Ensure basket is one currency
basket = mock.Mock()
basket.currency = 'GBP'
basket.num_items = 1
# Ensure new product has different currency
info = mock.Mock()
info.price.currency = 'EUR'
basket.strategy.fetch_for_product = mock.Mock(
return_value=info)
product = factories.ProductFactory()
data = {'quantity': 1}
form = forms.AddToBasketForm(
basket=basket, product=product, data=data)
self.assertFalse(form.is_valid())
| bsd-3-clause | -5,335,687,175,264,762,000 | 34.7 | 79 | 0.639929 | false |
sofianehaddad/ot-svn | python/test/t_FORM_sensitivity.py | 2 | 3930 | #! /usr/bin/env python
from openturns import *
from math import *
def printNumericalPoint(point, digits):
oss = "["
eps = pow(0.1, digits)
for i in range(point.getDimension()):
if i == 0:
sep = ""
else:
sep = ","
if fabs(point[i]) < eps:
oss += sep + '%.6f' % fabs(point[i])
else:
oss += sep + '%.6f' % point[i]
sep = ","
oss += "]"
return oss
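# Illustrative sketch (hypothetical point): formatting the values
# [0.0, -1.5] with digits=5 yields "[0.000000,-1.500000]"; values whose
# magnitude is below 10^-digits are printed via fabs() to avoid a spurious
# "-0.000000".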
TESTPREAMBLE()
try:
# We create a numerical math function
# Analytical construction
inputFunc = Description(2)
inputFunc[0] = "x0"
inputFunc[1] = "x1"
outputFunc = Description(1)
outputFunc[0] = "y0"
formulas = Description(outputFunc.getSize())
formulas[0] = "-(6+x0^2-x1)"
print "formulas=", formulas
myFunction = NumericalMathFunction(inputFunc, outputFunc, formulas)
dim = myFunction.getInputDimension()
# We create a normal distribution point of dimension 1
mean = NumericalPoint(dim, 0.0)
# x0
mean[0] = 5.0
# x1
mean[1] = 2.1
sigma = NumericalPoint(dim, 0.0)
# x0
sigma[0] = 3.3
# x1
sigma[1] = 3.0
R = IdentityMatrix(dim)
#
testDistributions = DistributionCollection(2)
testDistributions[0] = Normal(mean, sigma, R)
marginals = DistributionCollection(2)
marginals[0] = testDistributions[0].getMarginal(0)
marginals[1] = testDistributions[0].getMarginal(1)
testDistributions[1] = ComposedDistribution(marginals, NormalCopula(R))
for i in range(1):
myDistribution = testDistributions[i]
# We name the components of the distribution
componentDescription = Description(dim)
componentDescription[0] = "Marginal 1"
componentDescription[1] = "Marginal 2"
myDistribution.setDescription(componentDescription)
# We create a 'usual' RandomVector from the Distribution
vect = RandomVector(myDistribution)
# We create a composite random vector
output = RandomVector(myFunction, vect)
outputDescription = Description(1)
outputDescription[0] = "Interest Variable 1"
output.setDescription(outputDescription)
# We create an Event from this RandomVector
myEvent = Event(output, Greater(), 0.0)
# We create a NearestPoint algorithm
myCobyla = Cobyla()
myCobyla.setSpecificParameters(CobylaSpecificParameters())
myCobyla.setMaximumIterationsNumber(100)
myCobyla.setMaximumAbsoluteError(1.0e-10)
myCobyla.setMaximumRelativeError(1.0e-10)
myCobyla.setMaximumResidualError(1.0e-10)
myCobyla.setMaximumConstraintError(1.0e-10)
print "myCobyla=", myCobyla
# We create a FORM algorithm
# The first parameter is a NearestPointAlgorithm
# The second parameter is an event
# The third parameter is a starting point for the design point research
myAlgo = FORM(myCobyla, myEvent, mean)
print "FORM=", myAlgo
# Perform the simulation
myAlgo.run()
# Stream out the result
result = FORMResult(myAlgo.getResult())
digits = 5
print "importance factors=", printNumericalPoint(result.getImportanceFactors(), digits)
print "Hasofer reliability index=%.6f" % result.getHasoferReliabilityIndex()
print "result=", result
# Hasofer Reliability Index Sensitivity
        hasoferReliabilityIndexSensitivity = \
            result.getHasoferReliabilityIndexSensitivity()
print "hasoferReliabilityIndexSensitivity = ", repr(hasoferReliabilityIndexSensitivity)
# Event Probability Sensitivity
eventProbabilitySensitivity = result.getEventProbabilitySensitivity()
print "eventProbabilitySensitivity = ", repr(eventProbabilitySensitivity)
except:
import sys
print "t_FORM_sensitivity.py", sys.exc_type, sys.exc_value
| mit | -928,647,824,808,329,200 | 30.95122 | 95 | 0.654453 | false |
cmshobe/landlab | landlab/io/esri_ascii.py | 3 | 17226 | #! /usr/bin/env python
"""Read/write data from an ESRI ASCII file into a RasterModelGrid.
ESRI ASCII functions
++++++++++++++++++++
.. autosummary::
~landlab.io.esri_ascii.read_asc_header
~landlab.io.esri_ascii.read_esri_ascii
~landlab.io.esri_ascii.write_esri_ascii
"""
import os
import pathlib
import re
import numpy as np
from landlab.utils import add_halo
_VALID_HEADER_KEYS = [
"ncols",
"nrows",
"xllcorner",
"xllcenter",
"yllcorner",
"yllcenter",
"cellsize",
"nodata_value",
]
_HEADER_KEY_REGEX_PATTERN = re.compile(r"\s*(?P<key>[a-zA-Z]\w+)")
_HEADER_REGEX_PATTERN = re.compile(r"\s*(?P<key>[a-zA-Z]\w+)\s+(?P<value>[\w.+-]+)")
_HEADER_VALUE_TESTS = {
"nrows": (int, lambda x: x > 0),
"ncols": (int, lambda x: x > 0),
"cellsize": (float, lambda x: x > 0),
"xllcorner": (float, lambda x: True),
"xllcenter": (float, lambda x: True),
"yllcorner": (float, lambda x: True),
"yllcenter": (float, lambda x: True),
"nodata_value": (float, lambda x: True),
}
class Error(Exception):
"""Base class for errors in this module."""
pass
class BadHeaderLineError(Error):
"""Raise this error for a bad header is line."""
def __init__(self, line):
self._line = line
def __str__(self):
return self._line # this line not yet tested
class MissingRequiredKeyError(Error):
"""Raise this error when a header is missing a required key."""
def __init__(self, key):
self._key = key
def __str__(self):
return self._key
class KeyTypeError(Error):
"""Raise this error when a header's key value is of the wrong type."""
def __init__(self, key, expected_type):
self._key = key
self._type = str(expected_type)
def __str__(self):
return "Unable to convert %s to %s" % (self._key, self._type)
class KeyValueError(Error):
"""Raise this error when a header's key value has a bad value."""
def __init__(self, key, message):
self._key = key
self._msg = message
def __str__(self):
return "%s: %s" % (self._key, self._msg) # this line not yet tested
class DataSizeError(Error):
"""Raise this error if the size of data does not match the header."""
def __init__(self, size, expected_size):
self._actual = size
self._expected = expected_size
def __str__(self):
return "%s != %s" % (self._actual, self._expected) # this line not yet tested
class MismatchGridDataSizeError(Error):
"""Raise this error if the data size does not match the grid size."""
def __init__(self, size, expected_size):
self._actual = size
self._expected = expected_size
def __str__(self):
return "(data size) %s != %s (grid size)" % (
self._actual,
self._expected,
) # this line not yet tested
class MismatchGridXYSpacing(Error):
"""Raise this error if the file cell size does not match the grid dx."""
def __init__(self, dx, expected_dx):
self._actual = dx
self._expected = expected_dx
def __str__(self):
return "(data dx) %s != %s (grid dx)" % (
self._actual,
self._expected,
) # this line not yet tested
class MismatchGridXYLowerLeft(Error):
"""Raise this error if the file lower left does not match the grid."""
def __init__(self, llc, expected_llc):
self._actual = llc
self._expected = expected_llc
def __str__(self):
return "(data lower-left) %s != %s (grid lower-left)" % (
self._actual,
self._expected,
) # this line not yet tested
def _parse_header_key_value(line):
"""Parse a header line into a key-value pair.
Parameters
----------
line : str
Header line.
Returns
-------
(str, str)
Header key-value pair
Raises
------
BadHeaderLineError
        There is something wrong with the header line.
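
    Examples
    --------
    An illustrative doctest sketch (these example inputs are assumptions,
    derived from the regex patterns defined above):

    >>> _parse_header_key_value('ncols 100')
    ('ncols', '100')
    >>> _parse_header_key_value('! not a header') is None
    True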
"""
match = _HEADER_KEY_REGEX_PATTERN.match(line)
if match is None:
return None
# raise BadHeaderLineError(line)
match = _HEADER_REGEX_PATTERN.match(line)
if match is None:
raise BadHeaderLineError(line)
(key, value) = (match.group("key").lower(), match.group("value"))
if key in _VALID_HEADER_KEYS:
return (key, value)
else:
raise BadHeaderLineError(line)
def _header_lines(asc_file):
"""Iterate over header lines for a ESRI ASCII file.
Parameters
----------
asc_file : file_like
File-like object for an ESRI ASCII file.
Yields
------
str
Header line.
"""
pos = asc_file.tell()
line = asc_file.readline()
while len(line) > 0:
if len(line.strip()) > 0:
item = _parse_header_key_value(line)
if item:
yield item
else:
asc_file.seek(pos, 0)
break
pos = asc_file.tell()
line = asc_file.readline()
def _header_is_valid(header):
"""Check if the ESRI ASCII header is valid.
Parameters
----------
header : dict
Header as key-values pairs.
Raises
------
MissingRequiredKeyError
The header is missing a required key.
KeyTypeError
        The header has the key but its value is of the wrong type.
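
    Examples
    --------
    An illustrative doctest sketch (the header values here are assumed):

    >>> hdr = {'nrows': '4', 'ncols': '3', 'cellsize': '10.',
    ...        'xllcorner': '1.', 'yllcorner': '2.'}
    >>> _header_is_valid(hdr)
    True
    >>> hdr['nrows']  # values are coerced to their expected types
    4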
"""
header_keys = set(header)
required_keys = set(["ncols", "nrows", "cellsize"])
if not required_keys.issubset(header_keys):
raise MissingRequiredKeyError(", ".join(required_keys - header_keys))
for keys in [("xllcenter", "xllcorner"), ("yllcenter", "yllcorner")]:
if len(set(keys) & header_keys) != 1:
raise MissingRequiredKeyError("|".join(keys))
for (key, requires) in _HEADER_VALUE_TESTS.items():
to_type, is_valid = requires
if key not in header:
continue
try:
header[key] = to_type(header[key])
except ValueError:
raise KeyTypeError(key, to_type)
if not is_valid(header[key]):
raise KeyValueError(key, "Bad value")
return True
def read_asc_header(asc_file):
"""Read header information from an ESRI ASCII raster file.
The header contains the following variables,
- *ncols*: Number of cell columns
- *nrows*: Number of cell rows
- *xllcenter* or *xllcorner*: X (column) coordinate of lower-left
coordinate of grid (by center or lower-left corner of the cell)
- *yllcenter*, *yllcorner*: Y (row) coordinate of lower-left
coordinate of grid (by center or lower-left corner of the cell)
- *cellsize*: Grid spacing between rows and columns
- *nodata_value*: No-data value (optional)
Parameters
----------
asc_file : file_like
File-like object from which to read header.
Returns
-------
dict
Header as key-value pairs.
Raises
------
MissingRequiredKeyError
The header is missing a required key.
KeyTypeError
        The header has the key but its value is of the wrong type.
Examples
--------
>>> from io import StringIO
>>> from landlab.io.esri_ascii import read_asc_header
>>> contents = StringIO('''
... nrows 100
... ncols 200
... cellsize 1.5
... xllcenter 0.5
... yllcenter -0.5
... ''')
>>> hdr = read_asc_header(contents)
>>> hdr['nrows'], hdr['ncols']
(100, 200)
>>> hdr['cellsize']
1.5
>>> hdr['xllcenter'], hdr['yllcenter']
(0.5, -0.5)
    ``MissingRequiredKeyError`` is raised if the header does not contain all
    of the necessary keys.
>>> contents = StringIO('''
... ncols 200
... cellsize 1.5
... xllcenter 0.5
... yllcenter -0.5
... ''')
>>> read_asc_header(contents) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
MissingRequiredKeyError: nrows
    ``KeyTypeError`` is raised if a value is of the wrong type. For instance,
``nrows`` and ``ncols`` must be ``int``.
>>> contents = StringIO('''
... nrows 100.5
... ncols 200
... cellsize 1.5
... xllcenter 0.5
... yllcenter -0.5
... ''')
>>> read_asc_header(contents) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
KeyTypeError: Unable to convert nrows to <type 'int'>
"""
header = dict()
for (key, value) in _header_lines(asc_file):
header[key] = value
_header_is_valid(header)
return header
def _read_asc_data(asc_file):
"""Read gridded data from an ESRI ASCII data file.
Parameters
----------
asc_file : file-like
File-like object of the data file pointing to the start of the data.
.. note::
First row of the data is at the top of the raster grid, the second
row is the second from the top, and so on.
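
    Examples
    --------
    A sketch of the expected behavior (the example data here is assumed):

    >>> from io import StringIO
    >>> contents = StringIO('''
    ... 0. 1. 2.
    ... 3. 4. 5.
    ... ''')
    >>> _read_asc_data(contents).shape
    (2, 3)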
"""
return np.loadtxt(asc_file)
def read_esri_ascii(asc_file, grid=None, reshape=False, name=None, halo=0):
"""Read :py:class:`~landlab.RasterModelGrid` from an ESRI ASCII file.
Read data from *asc_file*, an ESRI_ ASCII file, into a
:py:class:`~landlab.RasterModelGrid`. *asc_file* is either the name of
the data file or is a file-like object.
The grid and data read from the file are returned as a tuple
(*grid*, *data*) where *grid* is an instance of
:py:class:`~landlab.RasterModelGrid` and *data* is a numpy
array of doubles with that has been reshaped to have the number of rows
and columns given in the header.
.. _ESRI: http://resources.esri.com/help/9.3/arcgisengine/java/GP_ToolRef/spatial_analyst_tools/esri_ascii_raster_format.htm
Parameters
----------
asc_file : str of file-like
Data file to read.
reshape : boolean, optional
Reshape the returned array, otherwise return a flattened array.
name : str, optional
Add data to the grid as a named field.
    grid : *grid*, optional
Adds data to an existing *grid* instead of creating a new one.
halo : integer, optional
Adds outer border of depth halo to the *grid*.
Returns
-------
(grid, data) : tuple
A newly-created RasterModel grid and the associated node data.
Raises
------
DataSizeError
Data are not the same size as indicated by the header file.
MismatchGridDataSizeError
If a grid is passed, and the size of the grid does not agree with the
size of the data.
MismatchGridXYSpacing
If a grid is passed, and the cellsize listed in the heading does not
match the grid dx and dy.
MismatchGridXYLowerLeft
If a grid is passed and the xllcorner and yllcorner do not match that
of the grid.
Examples
--------
    Assume that ``fop`` is the name of a file that contains the text below
    (make sure your path is correct):
ncols 3
nrows 4
xllcorner 1.
yllcorner 2.
cellsize 10.
NODATA_value -9999
0. 1. 2.
3. 4. 5.
6. 7. 8.
9. 10. 11.
>>> from landlab.io import read_esri_ascii
>>> (grid, data) = read_esri_ascii('fop') # doctest: +SKIP
>>> #grid is an object of type RasterModelGrid with 4 rows and 3 cols
>>> #data contains an array of length 4*3 that is equal to
>>> # [9., 10., 11., 6., 7., 8., 3., 4., 5., 0., 1., 2.]
>>> (grid, data) = read_esri_ascii('fop', halo=1) # doctest: +SKIP
>>> #now the data has a nodata_value ring of -9999 around it. So array is
>>> # [-9999, -9999, -9999, -9999, -9999, -9999,
>>> # -9999, 9., 10., 11., -9999,
>>> # -9999, 6., 7., 8., -9999,
>>> # -9999, 3., 4., 5., -9999,
>>> # -9999, 0., 1., 2. -9999,
>>> # -9999, -9999, -9999, -9999, -9999, -9999]
"""
from ..grid import RasterModelGrid
# if the asc_file is provided as a string, open it and pass the pointer to
    # read_asc_header and _read_asc_data
if isinstance(asc_file, (str, pathlib.Path)):
with open(asc_file, "r") as f:
header = read_asc_header(f)
data = _read_asc_data(f)
# otherwise, pass asc_file directly.
else:
header = read_asc_header(asc_file)
data = _read_asc_data(asc_file)
# There is no reason for halo to be negative.
# Assume that if a negative value is given it should be 0.
if halo <= 0:
shape = (header["nrows"], header["ncols"])
if data.size != shape[0] * shape[1]:
raise DataSizeError(shape[0] * shape[1], data.size)
else:
shape = (header["nrows"] + 2 * halo, header["ncols"] + 2 * halo)
# check to see if a nodata_value was given. If not, assign -9999.
if "nodata_value" in header.keys():
nodata_value = header["nodata_value"]
else:
header["nodata_value"] = -9999.0
nodata_value = header["nodata_value"]
if data.size != (shape[0] - 2 * halo) * (shape[1] - 2 * halo):
            raise DataSizeError(
                (shape[0] - 2 * halo) * (shape[1] - 2 * halo), data.size)
xy_spacing = (header["cellsize"], header["cellsize"])
xy_of_lower_left = (
header["xllcorner"] - halo * header["cellsize"],
header["yllcorner"] - halo * header["cellsize"],
)
data = np.flipud(data)
if halo > 0:
data = add_halo(
data.reshape(header["nrows"], header["ncols"]),
halo=halo,
halo_value=nodata_value,
).reshape((-1,))
if not reshape:
data = data.flatten()
if grid is not None:
if (grid.number_of_node_rows != shape[0]) or (
grid.number_of_node_columns != shape[1]
):
raise MismatchGridDataSizeError(
shape[0] * shape[1],
grid.number_of_node_rows * grid.number_of_node_columns,
)
if (grid.dx, grid.dy) != xy_spacing:
raise MismatchGridXYSpacing((grid.dx, grid.dy), xy_spacing)
if grid.xy_of_lower_left != xy_of_lower_left:
raise MismatchGridXYLowerLeft(grid.xy_of_lower_left, xy_of_lower_left)
if grid is None:
grid = RasterModelGrid(
shape, xy_spacing=xy_spacing, xy_of_lower_left=xy_of_lower_left
)
if name:
grid.add_field(name, data, at="node")
return (grid, data)
def write_esri_ascii(path, fields, names=None, clobber=False):
"""Write landlab fields to ESRI ASCII.
Write the data and grid information for *fields* to *path* in the ESRI
ASCII format.
Parameters
----------
path : str
Path to output file.
fields : field-like
Landlab field object that holds a grid and associated values.
names : iterable of str, optional
Names of the fields to include in the output file. If not provided,
write all fields.
clobber : boolean
If *path* exists, clobber the existing file, otherwise raise an
exception.
Examples
--------
>>> import numpy as np
>>> import os
>>> import tempfile
>>> from landlab import RasterModelGrid
>>> from landlab.io.esri_ascii import write_esri_ascii
>>> grid = RasterModelGrid((4, 5), xy_spacing=(2., 2.))
>>> _ = grid.add_field("air__temperature", np.arange(20.), at="node")
>>> with tempfile.TemporaryDirectory() as tmpdirname:
... fname = os.path.join(tmpdirname, 'test.asc')
... files = write_esri_ascii(fname, grid)
>>> for file in files:
... print(os.path.basename(file))
test.asc
>>> _ = grid.add_field("land_surface__elevation", np.arange(20.), at="node")
>>> with tempfile.TemporaryDirectory() as tmpdirname:
... fname = os.path.join(tmpdirname, 'test.asc')
... files = write_esri_ascii(fname, grid)
>>> files.sort()
>>> for file in files:
... print(os.path.basename(file))
test_air__temperature.asc
test_land_surface__elevation.asc
"""
if os.path.exists(path) and not clobber:
raise ValueError("file exists")
if isinstance(names, (str, pathlib.Path)):
names = [names]
names = names or fields.at_node.keys()
if len(names) == 1:
paths = [path]
elif len(names) > 1:
(base, ext) = os.path.splitext(path)
paths = [base + "_" + name + ext for name in names]
else:
raise ValueError("no node fields to write")
bad_names = set(names) - set(fields.at_node.keys())
if len(bad_names) > 0:
raise ValueError("unknown field name(s): %s" % ",".join(bad_names))
header = {
"ncols": fields.number_of_node_columns,
"nrows": fields.number_of_node_rows,
"xllcorner": fields.node_x[0],
"yllcorner": fields.node_y[0],
"cellsize": fields.dx,
}
for path, name in zip(paths, names):
header_lines = ["%s %s" % (key, str(val)) for key, val in list(header.items())]
data = fields.at_node[name].reshape(header["nrows"], header["ncols"])
np.savetxt(
path, np.flipud(data), header=os.linesep.join(header_lines), comments=""
)
return paths
| mit | -1,968,568,468,763,445,800 | 28.24618 | 128 | 0.582492 | false |
swiftstack/swift | swift/obj/server.py | 1 | 67311 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Object Server for Swift """
import six
import six.moves.cPickle as pickle
from six.moves.urllib.parse import unquote
import json
import os
import multiprocessing
import time
import traceback
import socket
import math
from swift import gettext_ as _
from eventlet import sleep, wsgi, Timeout, tpool
from eventlet.greenthread import spawn
from swift.common.utils import public, get_logger, \
config_true_value, timing_stats, replication, \
normalize_delete_at_timestamp, get_log_line, Timestamp, \
get_expirer_container, parse_mime_headers, \
iter_multipart_mime_documents, extract_swift_bytes, safe_json_loads, \
config_auto_int_value, split_path, get_redirect_data, \
normalize_timestamp, md5
from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_object_creation, \
valid_timestamp, check_utf8, AUTO_CREATE_ACCOUNT_PREFIX
from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \
DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \
DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout, \
ChunkReadError, DiskFileXattrNotSupported
from swift.common.request_helpers import \
OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX
from swift.obj import ssync_receiver
from swift.common.http import is_success, HTTP_MOVED_PERMANENTLY
from swift.common.base_storage_server import BaseStorageServer
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.request_helpers import get_name_and_placement, \
is_user_meta, is_sys_or_user_meta, is_object_transient_sysmeta, \
resolve_etag_is_at_header, is_sys_meta, validate_internal_obj
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPCreated, \
HTTPInternalServerError, HTTPNoContent, HTTPNotFound, \
HTTPPreconditionFailed, HTTPRequestTimeout, HTTPUnprocessableEntity, \
HTTPClientDisconnect, HTTPMethodNotAllowed, Request, Response, \
HTTPInsufficientStorage, HTTPForbidden, HTTPException, HTTPConflict, \
HTTPServerError, wsgi_to_bytes, wsgi_to_str, normalize_etag
from swift.obj.diskfile import RESERVED_DATAFILE_META, DiskFileRouter
from swift.obj.expirer import build_task_obj
def iter_mime_headers_and_bodies(wsgi_input, mime_boundary, read_chunk_size):
mime_documents_iter = iter_multipart_mime_documents(
wsgi_input, mime_boundary, read_chunk_size)
for file_like in mime_documents_iter:
hdrs = parse_mime_headers(file_like)
yield (hdrs, file_like)
def drain(file_like, read_size, timeout):
"""
Read and discard any bytes from file_like.
:param file_like: file-like object to read from
:param read_size: how big a chunk to read at a time
:param timeout: how long to wait for a read (use None for no timeout)
:raises ChunkReadTimeout: if no chunk was read in time
"""
while True:
with ChunkReadTimeout(timeout):
chunk = file_like.read(read_size)
if not chunk:
break
def get_obj_name_and_placement(request):
"""
Split and validate path for an object.
:param request: a swob request
:returns: a tuple of path parts and storage policy
"""
device, partition, account, container, obj, policy = \
get_name_and_placement(request, 5, 5, True)
validate_internal_obj(account, container, obj)
return device, partition, account, container, obj, policy
def _make_backend_fragments_header(fragments):
if fragments:
result = {}
for ts, frag_list in fragments.items():
result[ts.internal] = frag_list
return json.dumps(result)
return None
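# Illustrative sketch (hypothetical input): a fragments dict such as
#     {Timestamp('1234567890.12345'): [0, 2]}
# serializes, via each timestamp's internal form, to the JSON string
# '{"1234567890.12345": [0, 2]}'.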
class EventletPlungerString(bytes):
"""
Eventlet won't send headers until it's accumulated at least
eventlet.wsgi.MINIMUM_CHUNK_SIZE bytes or the app iter is exhausted.
If we want to send the response body behind Eventlet's back, perhaps
with some zero-copy wizardry, then we have to unclog the plumbing in
eventlet.wsgi to force the headers out, so we use an
EventletPlungerString to empty out all of Eventlet's buffers.
"""
def __len__(self):
return wsgi.MINIMUM_CHUNK_SIZE + 1
class ObjectController(BaseStorageServer):
"""Implements the WSGI application for the Swift Object Server."""
server_type = 'object-server'
def __init__(self, conf, logger=None):
"""
Creates a new WSGI application for the Swift Object Server. An
example configuration is given at
<source-dir>/etc/object-server.conf-sample or
/etc/swift/object-server.conf-sample.
"""
super(ObjectController, self).__init__(conf)
self.logger = logger or get_logger(conf, log_route='object-server')
self.node_timeout = float(conf.get('node_timeout', 3))
self.container_update_timeout = float(
conf.get('container_update_timeout', 1))
self.conn_timeout = float(conf.get('conn_timeout', 0.5))
self.client_timeout = float(conf.get('client_timeout', 60))
self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536))
self.network_chunk_size = int(conf.get('network_chunk_size', 65536))
self.log_requests = config_true_value(conf.get('log_requests', 'true'))
self.max_upload_time = int(conf.get('max_upload_time', 86400))
self.slow = int(conf.get('slow', 0))
self.keep_cache_private = \
config_true_value(conf.get('keep_cache_private', 'false'))
default_allowed_headers = '''
content-disposition,
content-encoding,
x-delete-at,
x-object-manifest,
x-static-large-object,
cache-control,
content-language,
expires,
x-robots-tag
'''
extra_allowed_headers = [
header.strip().lower() for header in conf.get(
'allowed_headers', default_allowed_headers).split(',')
if header.strip()
]
self.allowed_headers = set()
for header in extra_allowed_headers:
if header not in RESERVED_DATAFILE_META:
self.allowed_headers.add(header)
if conf.get('auto_create_account_prefix'):
self.logger.warning('Option auto_create_account_prefix is '
'deprecated. Configure '
'auto_create_account_prefix under the '
'swift-constraints section of '
'swift.conf. This option will '
'be ignored in a future release.')
self.auto_create_account_prefix = \
conf['auto_create_account_prefix']
else:
self.auto_create_account_prefix = AUTO_CREATE_ACCOUNT_PREFIX
self.expiring_objects_account = self.auto_create_account_prefix + \
(conf.get('expiring_objects_account_name') or 'expiring_objects')
self.expiring_objects_container_divisor = \
int(conf.get('expiring_objects_container_divisor') or 86400)
# Initialization was successful, so now apply the network chunk size
# parameter as the default read / write buffer size for the network
# sockets.
#
# NOTE WELL: This is a class setting, so until we get set this on a
# per-connection basis, this affects reading and writing on ALL
# sockets, those between the proxy servers and external clients, and
# those between the proxy servers and the other internal servers.
#
# ** Because the primary motivation for this is to optimize how data
# is written back to the proxy server, we could use the value from the
# disk_chunk_size parameter. However, it affects all created sockets
# using this class so we have chosen to tie it to the
# network_chunk_size parameter value instead.
if six.PY2:
socket._fileobject.default_bufsize = self.network_chunk_size
# TODO: find a way to enable similar functionality in py3
# Provide further setup specific to an object server implementation.
self.setup(conf)
def setup(self, conf):
"""
Implementation specific setup. This method is called at the very end
by the constructor to allow a specific implementation to modify
existing attributes or add its own attributes.
:param conf: WSGI configuration parameter
"""
# Common on-disk hierarchy shared across account, container and object
# servers.
self._diskfile_router = DiskFileRouter(conf, self.logger)
# This is populated by global_conf_callback way below as the semaphore
# is shared by all workers.
if 'replication_semaphore' in conf:
# The value was put in a list so it could get past paste
self.replication_semaphore = conf['replication_semaphore'][0]
else:
self.replication_semaphore = None
self.replication_failure_threshold = int(
conf.get('replication_failure_threshold') or 100)
self.replication_failure_ratio = float(
conf.get('replication_failure_ratio') or 1.0)
servers_per_port = int(conf.get('servers_per_port', '0') or 0)
if servers_per_port:
# The typical servers-per-port deployment also uses one port per
# disk, so you really get N servers per disk. In that case,
# having a pool of 20 threads per server per disk is far too
# much. For example, given a 60-disk chassis and 4 servers per
# disk, the default configuration will give us 21 threads per
# server (the main thread plus the twenty tpool threads), for a
# total of around 60 * 21 * 4 = 5040 threads. This is clearly
# too high.
#
# Instead, we use a tpool size of 1, giving us 2 threads per
# process. In the example above, that's 60 * 2 * 4 = 480
# threads, which is reasonable since there are 240 processes.
default_tpool_size = 1
else:
# If we're not using servers-per-port, then leave the tpool size
# alone. The default (20) is typically good enough for one
# object server handling requests for many disks.
default_tpool_size = None
tpool_size = config_auto_int_value(
conf.get('eventlet_tpool_num_threads'),
default_tpool_size)
if tpool_size:
tpool.set_num_threads(tpool_size)
def get_diskfile(self, device, partition, account, container, obj,
policy, **kwargs):
"""
Utility method for instantiating a DiskFile object supporting a given
REST API.
An implementation of the object server that wants to use a different
DiskFile class would simply over-ride this method to provide that
behavior.
"""
return self._diskfile_router[policy].get_diskfile(
device, partition, account, container, obj, policy, **kwargs)
def async_update(self, op, account, container, obj, host, partition,
contdevice, headers_out, objdevice, policy,
logger_thread_locals=None, container_path=None):
"""
Sends or saves an async update.
:param op: operation performed (ex: 'PUT', or 'DELETE')
:param account: account name for the object
:param container: container name for the object
:param obj: object name
:param host: host that the container is on
:param partition: partition that the container is on
:param contdevice: device name that the container is on
:param headers_out: dictionary of headers to send in the container
request
:param objdevice: device name that the object is in
:param policy: the associated BaseStoragePolicy instance
:param logger_thread_locals: The thread local values to be set on the
self.logger to retain transaction
logging information.
:param container_path: optional path in the form `<account/container>`
to which the update should be sent. If given this path will be used
instead of constructing a path from the ``account`` and
``container`` params.
"""
if logger_thread_locals:
self.logger.thread_locals = logger_thread_locals
headers_out['user-agent'] = 'object-server %s' % os.getpid()
if container_path:
# use explicitly specified container path
full_path = '/%s/%s' % (container_path, obj)
else:
full_path = '/%s/%s/%s' % (account, container, obj)
redirect_data = None
if all([host, partition, contdevice]):
try:
with ConnectionTimeout(self.conn_timeout):
ip, port = host.rsplit(':', 1)
conn = http_connect(ip, port, contdevice, partition, op,
full_path, headers_out)
with Timeout(self.node_timeout):
response = conn.getresponse()
response.read()
if is_success(response.status):
return
if response.status == HTTP_MOVED_PERMANENTLY:
try:
redirect_data = get_redirect_data(response)
except ValueError as err:
self.logger.error(
'Container update failed for %r; problem with '
'redirect location: %s' % (obj, err))
else:
self.logger.error(_(
'ERROR Container update failed '
'(saving for async update later): %(status)d '
'response from %(ip)s:%(port)s/%(dev)s'),
{'status': response.status, 'ip': ip, 'port': port,
'dev': contdevice})
except (Exception, Timeout):
self.logger.exception(_(
'ERROR container update failed with '
'%(ip)s:%(port)s/%(dev)s (saving for async update later)'),
{'ip': ip, 'port': port, 'dev': contdevice})
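        # Either no container endpoint was supplied or the synchronous update
        # failed; fall through and persist the update as an async_pending so
        # the object-updater daemon can retry it later.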
data = {'op': op, 'account': account, 'container': container,
'obj': obj, 'headers': headers_out}
if redirect_data:
self.logger.debug(
'Update to %(path)s redirected to %(redirect)s',
{'path': full_path, 'redirect': redirect_data[0]})
container_path = redirect_data[0]
if container_path:
data['container_path'] = container_path
timestamp = headers_out.get('x-meta-timestamp',
headers_out.get('x-timestamp'))
self._diskfile_router[policy].pickle_async_update(
objdevice, account, container, obj, data, timestamp, policy)
def container_update(self, op, account, container, obj, request,
headers_out, objdevice, policy):
"""
Update the container when objects are updated.
:param op: operation performed (ex: 'PUT', or 'DELETE')
:param account: account name for the object
:param container: container name for the object
:param obj: object name
:param request: the original request object driving the update
:param headers_out: dictionary of headers to send in the container
request(s)
:param objdevice: device name that the object is in
:param policy: the BaseStoragePolicy instance
"""
headers_in = request.headers
conthosts = [h.strip() for h in
headers_in.get('X-Container-Host', '').split(',')]
contdevices = [d.strip() for d in
headers_in.get('X-Container-Device', '').split(',')]
contpartition = headers_in.get('X-Container-Partition', '')
if len(conthosts) != len(contdevices):
# This shouldn't happen unless there's a bug in the proxy,
# but if there is, we want to know about it.
self.logger.error(_(
'ERROR Container update failed: different '
'numbers of hosts and devices in request: '
'"%(hosts)s" vs "%(devices)s"') % {
'hosts': headers_in.get('X-Container-Host', ''),
'devices': headers_in.get('X-Container-Device', '')})
return
contpath = headers_in.get('X-Backend-Quoted-Container-Path')
if contpath:
contpath = unquote(contpath)
else:
contpath = headers_in.get('X-Backend-Container-Path')
if contpath:
try:
# TODO: this is very late in request handling to be validating
# a header - if we did *not* check and the header was bad
# presumably the update would fail and we would fall back to an
                # async update to the root container, which might be the best
                # course of action rather than aborting the update altogether?
split_path('/' + contpath, minsegs=2, maxsegs=2)
except ValueError:
self.logger.error(
"Invalid X-Backend-Container-Path, should be of the form "
"'account/container' but got %r." % contpath)
# fall back to updating root container
contpath = None
if contpartition:
# In py3, zip() continues to work for our purposes... But when
            # we want to log an error, consumed items are no longer present
# in the zip, making the logs useless for operators. So, list().
updates = list(zip(conthosts, contdevices))
else:
updates = []
headers_out['x-trans-id'] = headers_in.get('x-trans-id', '-')
headers_out['referer'] = request.as_referer()
headers_out['X-Backend-Storage-Policy-Index'] = int(policy)
update_greenthreads = []
for conthost, contdevice in updates:
gt = spawn(self.async_update, op, account, container, obj,
conthost, contpartition, contdevice, headers_out,
objdevice, policy,
logger_thread_locals=self.logger.thread_locals,
container_path=contpath)
update_greenthreads.append(gt)
# Wait a little bit to see if the container updates are successful.
# If we immediately return after firing off the greenthread above, then
# we're more likely to confuse the end-user who does a listing right
# after getting a successful response to the object create. The
# `container_update_timeout` bounds the length of time we wait so that
# one slow container server doesn't make the entire request lag.
try:
with Timeout(self.container_update_timeout):
for gt in update_greenthreads:
gt.wait()
except Timeout:
# updates didn't go through, log it and return
self.logger.debug(
'Container update timeout (%.4fs) waiting for %s',
self.container_update_timeout, updates)
def delete_at_update(self, op, delete_at, account, container, obj,
request, objdevice, policy):
"""
Update the expiring objects container when objects are updated.
:param op: operation performed (ex: 'PUT', or 'DELETE')
:param delete_at: scheduled delete in UNIX seconds, int
:param account: account name for the object
:param container: container name for the object
:param obj: object name
:param request: the original request driving the update
:param objdevice: device name that the object is in
:param policy: the BaseStoragePolicy instance (used for tmp dir)
"""
if config_true_value(
request.headers.get('x-backend-replication', 'f')):
return
delete_at = normalize_delete_at_timestamp(delete_at)
updates = [(None, None)]
partition = None
hosts = contdevices = [None]
headers_in = request.headers
headers_out = HeaderKeyDict({
# system accounts are always Policy-0
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': request.timestamp.internal,
'x-trans-id': headers_in.get('x-trans-id', '-'),
'referer': request.as_referer()})
if op != 'DELETE':
hosts = headers_in.get('X-Delete-At-Host', None)
if hosts is None:
# If header is missing, no update needed as sufficient other
# object servers should perform the required update.
return
delete_at_container = headers_in.get('X-Delete-At-Container', None)
if not delete_at_container:
# older proxy servers did not send X-Delete-At-Container so for
# backwards compatibility calculate the value here, but also
# log a warning because this is prone to inconsistent
# expiring_objects_container_divisor configurations.
# See https://bugs.launchpad.net/swift/+bug/1187200
self.logger.warning(
'X-Delete-At-Container header must be specified for '
'expiring objects background %s to work properly. Making '
'best guess as to the container name for now.' % op)
delete_at_container = get_expirer_container(
delete_at, self.expiring_objects_container_divisor,
account, container, obj)
partition = headers_in.get('X-Delete-At-Partition', None)
contdevices = headers_in.get('X-Delete-At-Device', '')
updates = [upd for upd in
zip((h.strip() for h in hosts.split(',')),
(c.strip() for c in contdevices.split(',')))
if all(upd) and partition]
if not updates:
updates = [(None, None)]
headers_out['x-size'] = '0'
headers_out['x-content-type'] = 'text/plain'
headers_out['x-etag'] = 'd41d8cd98f00b204e9800998ecf8427e'
else:
if not config_true_value(
request.headers.get(
'X-Backend-Clean-Expiring-Object-Queue', 't')):
return
# DELETEs of old expiration data have no way of knowing what the
# old X-Delete-At-Container was at the time of the initial setting
# of the data, so a best guess is made here.
# Worst case is a DELETE is issued now for something that doesn't
# exist there and the original data is left where it is, where
# it will be ignored when the expirer eventually tries to issue the
# object DELETE later since the X-Delete-At value won't match up.
delete_at_container = get_expirer_container(
delete_at, self.expiring_objects_container_divisor,
account, container, obj)
delete_at_container = normalize_delete_at_timestamp(
delete_at_container)
for host, contdevice in updates:
self.async_update(
op, self.expiring_objects_account, delete_at_container,
build_task_obj(delete_at, account, container, obj),
host, partition, contdevice, headers_out, objdevice,
policy)
def _make_timeout_reader(self, file_like):
def timeout_reader():
with ChunkReadTimeout(self.client_timeout):
try:
return file_like.read(self.network_chunk_size)
except (IOError, ValueError):
raise ChunkReadError
return timeout_reader
def _read_put_commit_message(self, mime_documents_iter):
rcvd_commit = False
try:
with ChunkReadTimeout(self.client_timeout):
commit_hdrs, commit_iter = next(mime_documents_iter)
if commit_hdrs.get('X-Document', None) == "put commit":
rcvd_commit = True
drain(commit_iter, self.network_chunk_size, self.client_timeout)
except ChunkReadError:
raise HTTPClientDisconnect()
except ChunkReadTimeout:
raise HTTPRequestTimeout()
except StopIteration:
raise HTTPBadRequest(body="couldn't find PUT commit MIME doc")
return rcvd_commit
def _read_metadata_footer(self, mime_documents_iter):
try:
with ChunkReadTimeout(self.client_timeout):
footer_hdrs, footer_iter = next(mime_documents_iter)
except ChunkReadError:
raise HTTPClientDisconnect()
except ChunkReadTimeout:
raise HTTPRequestTimeout()
except StopIteration:
raise HTTPBadRequest(body="couldn't find footer MIME doc")
return self._parse_footer(footer_hdrs, footer_iter)
def _parse_footer(self, footer_hdrs, footer_iter):
"""
Validate footer metadata and translate JSON body into HeaderKeyDict.
"""
timeout_reader = self._make_timeout_reader(footer_iter)
try:
footer_body = b''.join(iter(timeout_reader, b''))
except ChunkReadError:
raise HTTPClientDisconnect()
except ChunkReadTimeout:
raise HTTPRequestTimeout()
footer_md5 = footer_hdrs.get('Content-MD5')
if not footer_md5:
raise HTTPBadRequest(body="no Content-MD5 in footer")
if footer_md5 != md5(footer_body, usedforsecurity=False).hexdigest():
raise HTTPUnprocessableEntity(body="footer MD5 mismatch")
try:
return HeaderKeyDict(json.loads(footer_body))
except ValueError:
raise HTTPBadRequest("invalid JSON for footer doc")
def _check_container_override(self, update_headers, metadata,
footers=None):
"""
Applies any overrides to the container update headers.
Overrides may be in the x-object-sysmeta-container-update- namespace or
the x-backend-container-update-override- namespace. The former is
preferred and is used by proxy middlewares. The latter is historical
but is still used with EC policy PUT requests; for backwards
compatibility the header names used with EC policy requests have not
been changed to the sysmeta namespace - that way the EC PUT path of a
newer proxy will remain compatible with an object server that pre-dates
the introduction of the x-object-sysmeta-container-update- namespace
and vice-versa.
:param update_headers: a dict of headers used in the container update
        :param metadata: a dict that may contain override items
        :param footers: another dict that may contain override items, at a
            higher priority than metadata
"""
footers = footers or {}
# the order of this list is significant:
# x-object-sysmeta-container-update-override-* headers take precedence
# over x-backend-container-update-override-* headers
override_prefixes = ['x-backend-container-update-override-',
OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX]
for override_prefix in override_prefixes:
for key, val in metadata.items():
if key.lower().startswith(override_prefix):
override = key.lower().replace(override_prefix, 'x-')
update_headers[override] = val
# apply x-backend-container-update-override* from footers *before*
# x-object-sysmeta-container-update-override-* from headers
for key, val in footers.items():
if key.lower().startswith(override_prefix):
override = key.lower().replace(override_prefix, 'x-')
update_headers[override] = val
@public
@timing_stats()
def POST(self, request):
"""Handle HTTP POST requests for the Swift Object Server."""
device, partition, account, container, obj, policy = \
get_obj_name_and_placement(request)
req_timestamp = valid_timestamp(request)
new_delete_at = int(request.headers.get('X-Delete-At') or 0)
if new_delete_at and new_delete_at < req_timestamp:
return HTTPBadRequest(body='X-Delete-At in past', request=request,
content_type='text/plain')
next_part_power = request.headers.get('X-Backend-Next-Part-Power')
try:
disk_file = self.get_diskfile(
device, partition, account, container, obj,
policy=policy, open_expired=config_true_value(
request.headers.get('x-backend-replication', 'false')),
next_part_power=next_part_power)
except DiskFileDeviceUnavailable:
return HTTPInsufficientStorage(drive=device, request=request)
try:
orig_metadata = disk_file.read_metadata(current_time=req_timestamp)
except DiskFileXattrNotSupported:
return HTTPInsufficientStorage(drive=device, request=request)
except (DiskFileNotExist, DiskFileQuarantined):
return HTTPNotFound(request=request)
orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
orig_ctype_timestamp = disk_file.content_type_timestamp
req_ctype_time = '0'
req_ctype = request.headers.get('Content-Type')
if req_ctype:
req_ctype_time = request.headers.get('Content-Type-Timestamp',
req_timestamp.internal)
req_ctype_timestamp = Timestamp(req_ctype_time)
if orig_timestamp >= req_timestamp \
and orig_ctype_timestamp >= req_ctype_timestamp:
return HTTPConflict(
request=request,
headers={'X-Backend-Timestamp': orig_timestamp.internal})
if req_timestamp > orig_timestamp:
metadata = {'X-Timestamp': req_timestamp.internal}
metadata.update(val for val in request.headers.items()
if (is_user_meta('object', val[0]) or
is_object_transient_sysmeta(val[0])))
headers_to_copy = (
request.headers.get(
'X-Backend-Replication-Headers', '').split() +
list(self.allowed_headers))
for header_key in headers_to_copy:
if header_key in request.headers:
header_caps = header_key.title()
metadata[header_caps] = request.headers[header_key]
orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0)
if orig_delete_at != new_delete_at:
if new_delete_at:
self.delete_at_update(
'PUT', new_delete_at, account, container, obj, request,
device, policy)
if orig_delete_at:
self.delete_at_update('DELETE', orig_delete_at, account,
container, obj, request, device,
policy)
else:
# preserve existing metadata, only content-type may be updated
metadata = dict(disk_file.get_metafile_metadata())
if req_ctype_timestamp > orig_ctype_timestamp:
# we have a new content-type, add to metadata and container update
content_type_headers = {
'Content-Type': request.headers['Content-Type'],
'Content-Type-Timestamp': req_ctype_timestamp.internal
}
metadata.update(content_type_headers)
else:
# send existing content-type with container update
content_type_headers = {
'Content-Type': disk_file.content_type,
'Content-Type-Timestamp': orig_ctype_timestamp.internal
}
if orig_ctype_timestamp != disk_file.data_timestamp:
# only add to metadata if it's not the datafile content-type
metadata.update(content_type_headers)
try:
disk_file.write_metadata(metadata)
except (DiskFileXattrNotSupported, DiskFileNoSpace):
return HTTPInsufficientStorage(drive=device, request=request)
if (content_type_headers['Content-Type-Timestamp']
!= disk_file.data_timestamp):
# Current content-type is not from the datafile, but the datafile
# content-type may have a swift_bytes param that was appended by
# SLO and we must continue to send that with the container update.
# Do this (rather than use a separate header) for backwards
# compatibility because there may be 'legacy' container updates in
# async pending that have content-types with swift_bytes params, so
            # we have to be able to handle those in the container server
            # anyway.
_, swift_bytes = extract_swift_bytes(
disk_file.get_datafile_metadata()['Content-Type'])
if swift_bytes:
content_type_headers['Content-Type'] += (';swift_bytes=%s'
% swift_bytes)
update_headers = HeaderKeyDict({
'x-size': orig_metadata['Content-Length'],
'x-content-type': content_type_headers['Content-Type'],
'x-timestamp': disk_file.data_timestamp.internal,
'x-content-type-timestamp':
content_type_headers['Content-Type-Timestamp'],
'x-meta-timestamp': metadata['X-Timestamp'],
'x-etag': orig_metadata['ETag']})
# Special cases for backwards compatibility.
# For EC policy, send X-Object-Sysmeta-Ec-Etag which is same as the
# X-Backend-Container-Update-Override-Etag value sent with the original
# PUT. Similarly send X-Object-Sysmeta-Ec-Content-Length which is the
# same as the X-Backend-Container-Update-Override-Size value. We have
# to send Etag and size with a POST container update because the
# original PUT container update may have failed or be in async_pending.
if 'X-Object-Sysmeta-Ec-Etag' in orig_metadata:
update_headers['X-Etag'] = orig_metadata[
'X-Object-Sysmeta-Ec-Etag']
if 'X-Object-Sysmeta-Ec-Content-Length' in orig_metadata:
update_headers['X-Size'] = orig_metadata[
'X-Object-Sysmeta-Ec-Content-Length']
self._check_container_override(update_headers, orig_metadata)
# object POST updates are PUT to the container server
self.container_update(
'PUT', account, container, obj, request, update_headers,
device, policy)
# Add current content-type and sysmeta to response
resp_headers = {
'X-Backend-Content-Type': content_type_headers['Content-Type']}
for key, value in orig_metadata.items():
if is_sys_meta('object', key):
resp_headers[key] = value
return HTTPAccepted(request=request, headers=resp_headers)
def _pre_create_checks(self, request, device, partition,
account, container, obj, policy):
req_timestamp = valid_timestamp(request)
error_response = check_object_creation(request, obj)
if error_response:
raise error_response
try:
fsize = request.message_length()
except ValueError as e:
raise HTTPBadRequest(body=str(e), request=request,
content_type='text/plain')
# In case of multipart-MIME put, the proxy sends a chunked request,
# but may let us know the real content length so we can verify that
# we have enough disk space to hold the object.
if fsize is None:
fsize = request.headers.get('X-Backend-Obj-Content-Length')
if fsize is not None:
try:
fsize = int(fsize)
except ValueError as e:
raise HTTPBadRequest(body=str(e), request=request,
content_type='text/plain')
# SSYNC will include Frag-Index header for subrequests, in which case
# get_diskfile will ignore non-matching on-disk data files
frag_index = request.headers.get('X-Backend-Ssync-Frag-Index')
next_part_power = request.headers.get('X-Backend-Next-Part-Power')
try:
disk_file = self.get_diskfile(
device, partition, account, container, obj,
policy=policy, frag_index=frag_index,
next_part_power=next_part_power)
except DiskFileDeviceUnavailable:
raise HTTPInsufficientStorage(drive=device, request=request)
try:
orig_metadata = disk_file.read_metadata(current_time=req_timestamp)
orig_timestamp = disk_file.data_timestamp
except DiskFileXattrNotSupported:
raise HTTPInsufficientStorage(drive=device, request=request)
except DiskFileDeleted as e:
orig_metadata = {}
orig_timestamp = e.timestamp
except (DiskFileNotExist, DiskFileQuarantined):
orig_metadata = {}
orig_timestamp = Timestamp(0)
# Checks for If-None-Match
if request.if_none_match is not None and orig_metadata:
if '*' in request.if_none_match:
# File exists already so return 412
raise HTTPPreconditionFailed(request=request)
if orig_metadata.get('ETag') in request.if_none_match:
# The current ETag matches, so raise 412
raise HTTPPreconditionFailed(request=request)
if orig_timestamp >= req_timestamp:
raise HTTPConflict(
request=request,
headers={'X-Backend-Timestamp': orig_timestamp.internal})
return disk_file, fsize, orig_metadata
def _do_multi_stage_mime_continue_headers(self, request, obj_input):
"""
If the proxy wants to send us object metadata after the object body, it
sets some headers. We have to tell the proxy, in the 100 Continue
response, that we're able to parse a multipart MIME document and
extract the object and metadata from it. If we don't, then the proxy
won't actually send the footer metadata.
If the proxy doesn't want to do any of that, this is the identity
function for obj_input and multi_stage_mime_state will be False-y.
:returns: a tuple, (obj_input, multi_stage_mime_state)
"""
have_metadata_footer = False
use_multiphase_commit = False
hundred_continue_headers = []
if config_true_value(
request.headers.get(
'X-Backend-Obj-Multiphase-Commit')):
use_multiphase_commit = True
hundred_continue_headers.append(
('X-Obj-Multiphase-Commit', 'yes'))
if config_true_value(
request.headers.get('X-Backend-Obj-Metadata-Footer')):
have_metadata_footer = True
hundred_continue_headers.append(
('X-Obj-Metadata-Footer', 'yes'))
if have_metadata_footer or use_multiphase_commit:
obj_input.set_hundred_continue_response_headers(
hundred_continue_headers)
mime_boundary = wsgi_to_bytes(request.headers.get(
'X-Backend-Obj-Multipart-Mime-Boundary'))
if not mime_boundary:
raise HTTPBadRequest("no MIME boundary")
with ChunkReadTimeout(self.client_timeout):
mime_documents_iter = iter_mime_headers_and_bodies(
request.environ['wsgi.input'],
mime_boundary, self.network_chunk_size)
_junk_hdrs, obj_input = next(mime_documents_iter)
multi_stage_mime_state = {
'have_metadata_footer': have_metadata_footer,
'use_multiphase_commit': use_multiphase_commit,
'mime_documents_iter': mime_documents_iter,
}
else:
multi_stage_mime_state = {}
return obj_input, multi_stage_mime_state
def _stage_obj_data(self, request, device, obj_input, writer, fsize):
"""
Feed the object_input into the writer.
:returns: a tuple, (upload_size, etag)
"""
writer.open()
elapsed_time = 0
upload_expiration = time.time() + self.max_upload_time
timeout_reader = self._make_timeout_reader(obj_input)
for chunk in iter(timeout_reader, b''):
start_time = time.time()
if start_time > upload_expiration:
self.logger.increment('PUT.timeouts')
raise HTTPRequestTimeout(request=request)
writer.write(chunk)
elapsed_time += time.time() - start_time
upload_size, etag = writer.chunks_finished()
if fsize is not None and fsize != upload_size:
raise HTTPClientDisconnect(request=request)
if upload_size:
self.logger.transfer_rate(
'PUT.' + device + '.timing', elapsed_time,
upload_size)
return upload_size, etag
def _get_request_metadata(self, request, upload_size, etag):
"""
Pull object metadata off the request.
:returns: metadata, a dict of object metadata
"""
metadata = {
'X-Timestamp': request.timestamp.internal,
'Content-Type': request.headers['content-type'],
'Content-Length': str(upload_size),
'ETag': etag,
}
metadata.update(val for val in request.headers.items()
if (is_sys_or_user_meta('object', val[0]) or
is_object_transient_sysmeta(val[0])))
headers_to_copy = (
request.headers.get(
'X-Backend-Replication-Headers', '').split() +
list(self.allowed_headers))
for header_key in headers_to_copy:
if header_key in request.headers:
header_caps = header_key.title()
metadata[header_caps] = request.headers[header_key]
return metadata
def _read_mime_footers_metadata(self, have_metadata_footer,
mime_documents_iter, **kwargs):
"""
Read footer metadata from the bottom of the multi-stage MIME body.
:returns: metadata, a dict
"""
if have_metadata_footer:
metadata = self._read_metadata_footer(
mime_documents_iter)
footer_etag = metadata.pop('etag', '').lower()
if footer_etag:
metadata['ETag'] = footer_etag
else:
metadata = {}
return metadata
def _apply_extra_metadata(self, request, metadata, footers_metadata):
"""
Apply extra metadata precedence to prepare metadata for storage.
"""
metadata.update(val for val in footers_metadata.items()
if (is_sys_or_user_meta('object', val[0]) or
is_object_transient_sysmeta(val[0])))
# N.B. footers_metadata is a HeaderKeyDict
received_etag = normalize_etag(footers_metadata.get(
'etag', request.headers.get('etag', '')))
if received_etag and received_etag != metadata['ETag']:
raise HTTPUnprocessableEntity(request=request)
def _send_multi_stage_continue_headers(self, request,
use_multiphase_commit,
mime_documents_iter, **kwargs):
"""
If the PUT requires a two-phase commit (a data and a commit phase) send
the proxy server another 100-continue response to indicate that we are
finished writing object data
"""
if use_multiphase_commit:
request.environ['wsgi.input'].\
send_hundred_continue_response()
if not self._read_put_commit_message(mime_documents_iter):
raise HTTPServerError(request=request)
def _drain_mime_request(self, mime_documents_iter, **kwargs):
"""
Drain any remaining MIME docs from the socket. There shouldn't be any,
but we must read the whole request body.
"""
try:
while True:
with ChunkReadTimeout(self.client_timeout):
_junk_hdrs, _junk_body = next(mime_documents_iter)
drain(_junk_body, self.network_chunk_size,
self.client_timeout)
except ChunkReadError:
raise HTTPClientDisconnect()
except ChunkReadTimeout:
raise HTTPRequestTimeout()
except StopIteration:
pass
def _post_commit_updates(self, request, device,
account, container, obj, policy,
orig_metadata, footers_metadata, metadata):
orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0)
new_delete_at = int(request.headers.get('X-Delete-At') or 0)
if orig_delete_at != new_delete_at:
if new_delete_at:
self.delete_at_update(
'PUT', new_delete_at, account, container, obj, request,
device, policy)
if orig_delete_at:
self.delete_at_update(
'DELETE', orig_delete_at, account, container, obj,
request, device, policy)
update_headers = HeaderKeyDict({
'x-size': metadata['Content-Length'],
'x-content-type': metadata['Content-Type'],
'x-timestamp': metadata['X-Timestamp'],
'x-etag': metadata['ETag']})
# apply any container update header overrides sent with request
self._check_container_override(update_headers, request.headers,
footers_metadata)
self.container_update(
'PUT', account, container, obj, request,
update_headers, device, policy)
@public
@timing_stats()
def PUT(self, request):
"""Handle HTTP PUT requests for the Swift Object Server."""
device, partition, account, container, obj, policy = \
get_obj_name_and_placement(request)
disk_file, fsize, orig_metadata = self._pre_create_checks(
request, device, partition, account, container, obj, policy)
writer = disk_file.writer(size=fsize)
try:
obj_input = request.environ['wsgi.input']
obj_input, multi_stage_mime_state = \
self._do_multi_stage_mime_continue_headers(request, obj_input)
upload_size, etag = self._stage_obj_data(
request, device, obj_input, writer, fsize)
metadata = self._get_request_metadata(request, upload_size, etag)
if multi_stage_mime_state:
footers_metadata = self._read_mime_footers_metadata(
**multi_stage_mime_state)
else:
footers_metadata = {}
self._apply_extra_metadata(request, metadata, footers_metadata)
writer.put(metadata)
if multi_stage_mime_state:
self._send_multi_stage_continue_headers(
request, **multi_stage_mime_state)
writer.commit(request.timestamp)
if multi_stage_mime_state:
self._drain_mime_request(**multi_stage_mime_state)
except (DiskFileXattrNotSupported, DiskFileNoSpace):
return HTTPInsufficientStorage(drive=device, request=request)
except ChunkReadError:
return HTTPClientDisconnect(request=request)
except ChunkReadTimeout:
return HTTPRequestTimeout(request=request)
finally:
writer.close()
self._post_commit_updates(request, device,
account, container, obj, policy,
orig_metadata, footers_metadata, metadata)
return HTTPCreated(request=request, etag=etag)
@public
@timing_stats()
def GET(self, request):
"""Handle HTTP GET requests for the Swift Object Server."""
device, partition, account, container, obj, policy = \
get_obj_name_and_placement(request)
request.headers.setdefault('X-Timestamp',
normalize_timestamp(time.time()))
req_timestamp = valid_timestamp(request)
frag_prefs = safe_json_loads(
request.headers.get('X-Backend-Fragment-Preferences'))
try:
disk_file = self.get_diskfile(
device, partition, account, container, obj,
policy=policy, frag_prefs=frag_prefs,
open_expired=config_true_value(
request.headers.get('x-backend-replication', 'false')))
except DiskFileDeviceUnavailable:
return HTTPInsufficientStorage(drive=device, request=request)
try:
with disk_file.open(current_time=req_timestamp):
metadata = disk_file.get_metadata()
ignore_range_headers = set(
h.strip().lower()
for h in request.headers.get(
'X-Backend-Ignore-Range-If-Metadata-Present',
'').split(','))
if ignore_range_headers.intersection(
h.lower() for h in metadata):
request.headers.pop('Range', None)
obj_size = int(metadata['Content-Length'])
file_x_ts = Timestamp(metadata['X-Timestamp'])
keep_cache = (self.keep_cache_private or
('X-Auth-Token' not in request.headers and
'X-Storage-Token' not in request.headers))
conditional_etag = resolve_etag_is_at_header(request, metadata)
response = Response(
app_iter=disk_file.reader(keep_cache=keep_cache),
request=request, conditional_response=True,
conditional_etag=conditional_etag)
response.headers['Content-Type'] = metadata.get(
'Content-Type', 'application/octet-stream')
for key, value in metadata.items():
if (is_sys_or_user_meta('object', key) or
is_object_transient_sysmeta(key) or
key.lower() in self.allowed_headers):
response.headers[key] = value
response.etag = metadata['ETag']
response.last_modified = math.ceil(float(file_x_ts))
response.content_length = obj_size
try:
response.content_encoding = metadata[
'Content-Encoding']
except KeyError:
pass
response.headers['X-Timestamp'] = file_x_ts.normal
response.headers['X-Backend-Timestamp'] = file_x_ts.internal
response.headers['X-Backend-Data-Timestamp'] = \
disk_file.data_timestamp.internal
if disk_file.durable_timestamp:
response.headers['X-Backend-Durable-Timestamp'] = \
disk_file.durable_timestamp.internal
response.headers['X-Backend-Fragments'] = \
_make_backend_fragments_header(disk_file.fragments)
resp = request.get_response(response)
except DiskFileXattrNotSupported:
return HTTPInsufficientStorage(drive=device, request=request)
except (DiskFileNotExist, DiskFileQuarantined) as e:
headers = {}
if hasattr(e, 'timestamp'):
headers['X-Backend-Timestamp'] = e.timestamp.internal
resp = HTTPNotFound(request=request, headers=headers,
conditional_response=True)
return resp
@public
@timing_stats(sample_rate=0.8)
def HEAD(self, request):
"""Handle HTTP HEAD requests for the Swift Object Server."""
device, partition, account, container, obj, policy = \
get_obj_name_and_placement(request)
request.headers.setdefault('X-Timestamp',
normalize_timestamp(time.time()))
req_timestamp = valid_timestamp(request)
frag_prefs = safe_json_loads(
request.headers.get('X-Backend-Fragment-Preferences'))
try:
disk_file = self.get_diskfile(
device, partition, account, container, obj,
policy=policy, frag_prefs=frag_prefs,
open_expired=config_true_value(
request.headers.get('x-backend-replication', 'false')))
except DiskFileDeviceUnavailable:
return HTTPInsufficientStorage(drive=device, request=request)
try:
metadata = disk_file.read_metadata(current_time=req_timestamp)
except DiskFileXattrNotSupported:
return HTTPInsufficientStorage(drive=device, request=request)
except (DiskFileNotExist, DiskFileQuarantined) as e:
headers = {}
if hasattr(e, 'timestamp'):
headers['X-Backend-Timestamp'] = e.timestamp.internal
return HTTPNotFound(request=request, headers=headers,
conditional_response=True)
conditional_etag = resolve_etag_is_at_header(request, metadata)
response = Response(request=request, conditional_response=True,
conditional_etag=conditional_etag)
response.headers['Content-Type'] = metadata.get(
'Content-Type', 'application/octet-stream')
for key, value in metadata.items():
if (is_sys_or_user_meta('object', key) or
is_object_transient_sysmeta(key) or
key.lower() in self.allowed_headers):
response.headers[key] = value
response.etag = metadata['ETag']
ts = Timestamp(metadata['X-Timestamp'])
response.last_modified = math.ceil(float(ts))
# Needed for container sync feature
response.headers['X-Timestamp'] = ts.normal
response.headers['X-Backend-Timestamp'] = ts.internal
response.headers['X-Backend-Data-Timestamp'] = \
disk_file.data_timestamp.internal
if disk_file.durable_timestamp:
response.headers['X-Backend-Durable-Timestamp'] = \
disk_file.durable_timestamp.internal
response.headers['X-Backend-Fragments'] = \
_make_backend_fragments_header(disk_file.fragments)
response.content_length = int(metadata['Content-Length'])
try:
response.content_encoding = metadata['Content-Encoding']
except KeyError:
pass
return response
@public
@timing_stats()
def DELETE(self, request):
"""Handle HTTP DELETE requests for the Swift Object Server."""
device, partition, account, container, obj, policy = \
get_obj_name_and_placement(request)
req_timestamp = valid_timestamp(request)
next_part_power = request.headers.get('X-Backend-Next-Part-Power')
try:
disk_file = self.get_diskfile(
device, partition, account, container, obj,
policy=policy, next_part_power=next_part_power)
except DiskFileDeviceUnavailable:
return HTTPInsufficientStorage(drive=device, request=request)
try:
orig_metadata = disk_file.read_metadata(current_time=req_timestamp)
except DiskFileXattrNotSupported:
return HTTPInsufficientStorage(drive=device, request=request)
except DiskFileExpired as e:
orig_timestamp = e.timestamp
orig_metadata = e.metadata
response_class = HTTPNotFound
except DiskFileDeleted as e:
orig_timestamp = e.timestamp
orig_metadata = {}
response_class = HTTPNotFound
except (DiskFileNotExist, DiskFileQuarantined):
orig_timestamp = 0
orig_metadata = {}
response_class = HTTPNotFound
else:
orig_timestamp = disk_file.data_timestamp
if orig_timestamp < req_timestamp:
response_class = HTTPNoContent
else:
response_class = HTTPConflict
response_timestamp = max(orig_timestamp, req_timestamp)
orig_delete_at = Timestamp(orig_metadata.get('X-Delete-At') or 0)
try:
req_if_delete_at_val = request.headers['x-if-delete-at']
req_if_delete_at = Timestamp(req_if_delete_at_val)
except KeyError:
pass
except ValueError:
return HTTPBadRequest(
request=request,
body='Bad X-If-Delete-At header value')
else:
# request includes x-if-delete-at; we must not place a tombstone
            # if we cannot verify the x-if-delete-at time
if not orig_timestamp:
# no object found at all
return HTTPNotFound()
if orig_timestamp >= req_timestamp:
# Found a newer object -- return 409 as work item is stale
return HTTPConflict()
if orig_delete_at != req_if_delete_at:
return HTTPPreconditionFailed(
request=request,
body='X-If-Delete-At and X-Delete-At do not match')
else:
# differentiate success from no object at all
response_class = HTTPNoContent
if orig_delete_at:
self.delete_at_update('DELETE', orig_delete_at, account,
container, obj, request, device,
policy)
if orig_timestamp < req_timestamp:
try:
disk_file.delete(req_timestamp)
except DiskFileNoSpace:
return HTTPInsufficientStorage(drive=device, request=request)
self.container_update(
'DELETE', account, container, obj, request,
HeaderKeyDict({'x-timestamp': req_timestamp.internal}),
device, policy)
return response_class(
request=request,
headers={'X-Backend-Timestamp': response_timestamp.internal,
'X-Backend-Content-Type': orig_metadata.get(
'Content-Type', '')})
@public
@replication
@timing_stats(sample_rate=0.1)
def REPLICATE(self, request):
"""
Handle REPLICATE requests for the Swift Object Server. This is used
by the object replicator to get hashes for directories.
Note that the name REPLICATE is preserved for historical reasons as
this verb really just returns the hashes information for the specified
parameters and is used, for example, by both replication and EC.
"""
device, partition, suffix_parts, policy = \
get_name_and_placement(request, 2, 3, True)
suffixes = suffix_parts.split('-') if suffix_parts else []
try:
hashes = self._diskfile_router[policy].get_hashes(
device, partition, suffixes, policy,
skip_rehash=bool(suffixes))
except DiskFileDeviceUnavailable:
resp = HTTPInsufficientStorage(drive=device, request=request)
else:
# force pickle protocol for compatibility with py2 nodes
resp = Response(body=pickle.dumps(hashes, protocol=2))
return resp
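    # Example (illustrative): "REPLICATE /sda1/1234/abc-def" asks this method
    # for the hashes of suffix directories 'abc' and 'def' in partition 1234
    # on device sda1; with no suffixes given, all hashes for the partition
    # are rehashed and returned.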
@public
@replication
@timing_stats(sample_rate=0.1)
def SSYNC(self, request):
return Response(app_iter=ssync_receiver.Receiver(self, request)())
def __call__(self, env, start_response):
"""WSGI Application entry point for the Swift Object Server."""
start_time = time.time()
req = Request(env)
self.logger.txn_id = req.headers.get('x-trans-id', None)
if not check_utf8(wsgi_to_str(req.path_info), internal=True):
res = HTTPPreconditionFailed(body='Invalid UTF8 or contains NULL')
else:
try:
# disallow methods which have not been marked 'public'
if req.method not in self.allowed_methods:
res = HTTPMethodNotAllowed()
else:
res = getattr(self, req.method)(req)
except DiskFileCollision:
res = HTTPForbidden(request=req)
except HTTPException as error_response:
res = error_response
except (Exception, Timeout):
self.logger.exception(_(
'ERROR __call__ error with %(method)s'
' %(path)s '), {'method': req.method, 'path': req.path})
res = HTTPInternalServerError(body=traceback.format_exc())
trans_time = time.time() - start_time
res.fix_conditional_response()
if self.log_requests:
log_line = get_log_line(req, res, trans_time, '', self.log_format,
self.anonymization_method,
self.anonymization_salt)
if req.method in ('REPLICATE', 'SSYNC') or \
'X-Backend-Replication' in req.headers:
self.logger.debug(log_line)
else:
self.logger.info(log_line)
if req.method in ('PUT', 'DELETE'):
slow = self.slow - trans_time
if slow > 0:
sleep(slow)
# To be able to zero-copy send the object, we need a few things.
# First, we have to be responding successfully to a GET, or else we're
# not sending the object. Second, we have to be able to extract the
# socket file descriptor from the WSGI input object. Third, the
# diskfile has to support zero-copy send.
#
# There's a good chance that this could work for 206 responses too,
# but the common case is sending the whole object, so we'll start
# there.
if req.method == 'GET' and res.status_int == 200 and \
isinstance(env['wsgi.input'], wsgi.Input):
app_iter = getattr(res, 'app_iter', None)
checker = getattr(app_iter, 'can_zero_copy_send', None)
if checker and checker():
# For any kind of zero-copy thing like sendfile or splice, we
# need the file descriptor. Eventlet doesn't provide a clean
# way of getting that, so we resort to this.
wsock = env['wsgi.input'].get_socket()
wsockfd = wsock.fileno()
# Don't call zero_copy_send() until after we force the HTTP
# headers out of Eventlet and into the socket.
def zero_copy_iter():
# If possible, set TCP_CORK so that headers don't
# immediately go on the wire, but instead, wait for some
# response body to make the TCP frames as large as
# possible (and hence as few packets as possible).
#
# On non-Linux systems, we might consider TCP_NODELAY, but
# since the only known zero-copy-capable diskfile uses
# Linux-specific syscalls, we'll defer that work until
# someone needs it.
if hasattr(socket, 'TCP_CORK'):
wsock.setsockopt(socket.IPPROTO_TCP,
socket.TCP_CORK, 1)
yield EventletPlungerString()
try:
app_iter.zero_copy_send(wsockfd)
except Exception:
self.logger.exception("zero_copy_send() blew up")
raise
yield b''
# Get headers ready to go out
res(env, start_response)
return zero_copy_iter()
else:
return res(env, start_response)
else:
return res(env, start_response)
def global_conf_callback(preloaded_app_conf, global_conf):
"""
Callback for swift.common.wsgi.run_wsgi during the global_conf
creation so that we can add our replication_semaphore, used to
    limit the number of concurrent SSYNC requests across all
workers.
:param preloaded_app_conf: The preloaded conf for the WSGI app.
This conf instance will go away, so
just read from it, don't write.
:param global_conf: The global conf that will eventually be
passed to the app_factory function later.
This conf is created before the worker
subprocesses are forked, so can be useful to
set up semaphores, shared memory, etc.
"""
replication_concurrency = int(
preloaded_app_conf.get('replication_concurrency') or 4)
if replication_concurrency:
# Have to put the value in a list so it can get past paste
global_conf['replication_semaphore'] = [
multiprocessing.BoundedSemaphore(replication_concurrency)]
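# A minimal sketch of how this callback is exercised (hypothetical values;
# swift.common.wsgi.run_wsgi invokes it once, before forking workers):
#
#     preloaded_app_conf = {'replication_concurrency': '2'}
#     global_conf = {}
#     global_conf_callback(preloaded_app_conf, global_conf)
#     sem = global_conf['replication_semaphore'][0]
#     assert sem.acquire(block=False) and sem.acquire(block=False)
#     assert not sem.acquire(block=False)  # a third holder would have to wait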
def app_factory(global_conf, **local_conf):
"""paste.deploy app factory for creating WSGI object server apps"""
conf = global_conf.copy()
conf.update(local_conf)
return ObjectController(conf)
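# For reference, a minimal paste.deploy configuration that would reach this
# factory (assuming the standard "egg:swift#object" entry point name):
#
#     [app:object-server]
#     use = egg:swift#object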
| apache-2.0 | 5,072,200,039,205,637,000 | 45.90662 | 79 | 0.588091 | false |
Zarokka/exaile | plugins/lastfmlove/__init__.py | 4 | 10746 | # Copyright (C) 2011 Mathias Brodala <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
import logging
import os.path
from threading import (
Thread,
Timer
)
import pylast
from xl import (
common,
event,
player,
providers,
settings
)
from xl.nls import gettext as _
from xlgui import icons
from xlgui.widgets.menu import MenuItem
from xlgui.widgets.playlist_columns import (
Column,
ColumnMenuItem
)
import lastfmlove_preferences
from cellrenderertoggleimage import CellRendererToggleImage
LASTFMLOVER = None
logger = logging.getLogger(__name__)
basedir = os.path.dirname(os.path.realpath(__file__))
icons.MANAGER.add_icon_name_from_directory('love',
os.path.join(basedir, 'icons'))
icons.MANAGER.add_icon_name_from_directory('send-receive',
os.path.join(basedir, 'icons'))
def enable(exaile):
"""
Handles the deferred enable call
"""
global LASTFMLOVER
LASTFMLOVER = LastFMLover()
def disable(exaile):
"""
    Disables the Last.fm love plugin
"""
global LASTFMLOVER
LASTFMLOVER.destroy()
def get_preferences_pane():
return lastfmlove_preferences
class LoveColumn(Column):
name = 'loved'
display = _('Loved')
menu_title = _('Last.fm Loved')
size = 50
renderer = CellRendererToggleImage
datatype = bool
dataproperty = 'active'
def __init__(self, *args):
Column.__init__(self, *args)
self.model = None
pixbuf = icons.MANAGER.pixbuf_from_icon_name('love', self.get_icon_height())
self.cellrenderer.props.pixbuf = pixbuf
self.cellrenderer.connect('toggled', self.on_toggled)
def data_func(self, column, cellrenderer, model, iter):
"""
Displays the loved state
"""
global LASTFMLOVER
track = model.get_value(iter, 0)
lastfm_track = pylast.Track(
track.get_tag_display('artist'),
track.get_tag_display('title'),
LASTFMLOVER.network
)
cellrenderer.props.active = lastfm_track in LASTFMLOVER.loved_tracks
if LASTFMLOVER.network is None:
cellrenderer.props.sensitive = False
cellrenderer.props.render_prelit = False
else:
cellrenderer.props.sensitive = True
cellrenderer.props.render_prelit = True
self.model = model
def on_toggled(self, cellrenderer, path):
"""
Loves or unloves the selected track
"""
global LASTFMLOVER
if cellrenderer.props.sensitive and LASTFMLOVER.network is not None:
track = self.model.get_value(self.model.get_iter(path), 0)
LASTFMLOVER.toggle_loved(track)
class LoveMenuItem(MenuItem):
"""
A menu item representing the loved state of a
track and allowing for loving and unloving it
"""
def __init__(self, after, get_tracks_function=None):
MenuItem.__init__(self, 'loved', None, after)
self.get_tracks_function = get_tracks_function
def factory(self, menu, parent, context):
"""
Sets up the menu item
"""
global LASTFMLOVER
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Love This Track'))
item.set_image(Gtk.Image.new_from_icon_name(
'love', Gtk.IconSize.MENU))
if self.get_tracks_function is not None:
tracks = self.get_tracks_function()
empty = len(tracks) == 0
else:
empty = context.get('selection-empty', True)
if not empty:
tracks = context.get('selected-tracks', [])
if not empty and LASTFMLOVER.network is not None:
# We only care about the first track
track = tracks[0]
lastfm_track = pylast.Track(
track.get_tag_display('artist'),
track.get_tag_display('title'),
LASTFMLOVER.network
)
            if lastfm_track in LASTFMLOVER.loved_tracks:
                item.set_label(_('Unlove This Track'))
            item.connect('activate', self.on_activate, track)
else:
item.set_sensitive(False)
return item
def on_activate(self, menuitem, track):
"""
Loves or unloves the selected track
"""
global LASTFMLOVER
LASTFMLOVER.toggle_loved(track)
class LastFMLover(object):
"""
Allows for retrieval and setting
of loved tracks via Last.fm
"""
def __init__(self):
"""
Sets up the connection to Last.fm
as well as the graphical interface
"""
self.network = None
self.user = None
self.loved_tracks = []
self.timer = None
self.column_menu_item = ColumnMenuItem(column=LoveColumn, after=['__rating'])
self.menu_item = LoveMenuItem(after=['rating'])
def get_tracks_function():
"""
            Drop-in replacement for menu item context
to retrieve the currently playing track
"""
current_track = player.PLAYER.current
if current_track is not None:
return [current_track]
return []
self.tray_menu_item = LoveMenuItem(
after=['rating'],
get_tracks_function=get_tracks_function
)
self.setup_network()
        providers.register('playlist-columns', LoveColumn)
providers.register('playlist-columns-menu', self.column_menu_item)
providers.register('playlist-context-menu', self.menu_item)
providers.register('tray-icon-context', self.tray_menu_item)
event.add_ui_callback(self.on_option_set, 'plugin_lastfmlove_option_set')
def destroy(self):
"""
        Cleans up registered providers and the running timer
"""
event.remove_callback(self.on_option_set, 'plugin_lastfmlove_option_set')
providers.unregister('tray-icon-context', self.tray_menu_item)
providers.unregister('playlist-context-menu', self.menu_item)
providers.unregister('playlist-columns-menu', self.column_menu_item)
providers.unregister('playlist-columns', LoveColumn)
if self.timer is not None and self.timer.is_alive():
self.timer.cancel()
def setup_network(self):
"""
Tries to set up the network, retrieve the user
and the initial list of loved tracks
"""
try:
self.network = pylast.LastFMNetwork(
api_key=settings.get_option('plugin/lastfmlove/api_key', 'K'),
api_secret=settings.get_option('plugin/lastfmlove/api_secret', 'S'),
username=settings.get_option('plugin/ascrobbler/user', ''),
password_hash=settings.get_option('plugin/ascrobbler/password', '')
)
self.user = self.network.get_user(self.network.username)
except Exception as e:
self.network = None
self.user = None
if self.timer is not None and self.timer.is_alive():
self.timer.cancel()
logger.warning('Error while connecting to Last.fm network: {0}'.format(e))
else:
thread = Thread(target=self.get_loved_tracks)
thread.daemon = True
thread.start()
logger.info('Connection to Last.fm network successful')
def restart_timer(self):
"""
        Restarts the timer that schedules the retrieval of loved tracks
"""
if self.timer is not None and self.timer.is_alive():
self.timer.cancel()
self.timer = Timer(
settings.get_option('plugin/lastfmlove/refresh_interval', 3600),
self.get_loved_tracks
)
self.timer.daemon = True
self.timer.start()
def get_loved_tracks(self):
"""
Updates the list of loved tracks
"""
logger.debug('Retrieving list of loved tracks...')
try:
tracks = self.user.get_loved_tracks(limit=None)
# Unwrap pylast.Track from pylast.LovedTrack
self.loved_tracks = [l.track for l in tracks]
except Exception as e:
logger.warning('Failed to retrieve list of loved tracks: {0}'.format(e))
self.restart_timer()
def toggle_loved(self, track):
"""
Toggles the loved state of a track
:param track: the track to love/unlove
:type track: `xl.trax.Track`
"""
lastfm_track = pylast.Track(
track.get_tag_display('artist'),
track.get_tag_display('title'),
LASTFMLOVER.network
)
if lastfm_track in self.loved_tracks:
self.unlove_track(lastfm_track)
else:
self.love_track(lastfm_track)
@common.threaded
def love_track(self, track):
"""
Loves a track
:param track: the track to love
:type track: `pylast.Track`
"""
try:
track.love()
except Exception as e:
logger.warning('Error while loving track {0}: {1}'.format(track, e))
else:
self.loved_tracks.append(track)
logger.info('Loved track {0}'.format(track))
@common.threaded
def unlove_track(self, track):
"""
Unloves a track
:param track: the track to unlove
:type track: `pylast.Track`
"""
try:
track.unlove()
except Exception as e:
logger.warning('Error while unloving track {0}: {1}'.format(track, e))
else:
self.loved_tracks.remove(track)
logger.info('Unloved track {0}'.format(track))
def on_option_set(self, event, settings, option):
"""
Takes action upon setting changes
"""
if option in ('plugin/lastfmlove/api_key', 'plugin/lastfmlove/api_secret'):
self.setup_network()
elif option == 'plugin/lastfmlove/refresh_interval':
self.restart_timer()
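# Rough lifecycle sketch (illustrative): Exaile calls enable(exaile) when the
# plugin is loaded, which builds a LastFMLover that registers the column and
# menu items; disable(exaile) tears them down again:
#
#     enable(exaile)    # -> LASTFMLOVER = LastFMLover()
#     disable(exaile)   # -> LASTFMLOVER.destroy()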
| gpl-2.0 | -6,094,412,419,796,677,000 | 29.9683 | 86 | 0.594268 | false |
40223149/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/zipfile.py | 620 | 66368 | """
Read and write ZIP files.
XXX references to utf-8 need further investigation.
"""
import io
import os
import re
import imp
import sys
import time
import stat
import shutil
import struct
import binascii
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
try:
import bz2 # We may need its compression method
except ImportError:
bz2 = None
try:
import lzma # We may need its compression method
except ImportError:
lzma = None
__all__ = ["BadZipFile", "BadZipfile", "error",
"ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
"is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile"]
class BadZipFile(Exception):
pass
class LargeZipFile(Exception):
"""
Raised when writing a zipfile, the zipfile requires ZIP64 extensions
and those extensions are disabled.
"""
error = BadZipfile = BadZipFile # Pre-3.2 compatibility names
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
ZIP_BZIP2 = 12
ZIP_LZMA = 14
# Other ZIP compression methods not supported
DEFAULT_VERSION = 20
ZIP64_VERSION = 45
BZIP2_VERSION = 46
LZMA_VERSION = 63
# we recognize (but not necessarily support) all features up to that version
MAX_EXTRACT_VERSION = 63
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = b"<4s4H2LH"
stringEndArchive = b"PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = b"PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = b"PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = b"PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = b"PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
except IOError:
pass
return False
def is_zipfile(filename):
"""Quickly see if a file is a ZIP file by checking the magic number.
The filename argument may be a file or file-like object too.
"""
result = False
try:
if hasattr(filename, "read"):
result = _check_zipfile(fp=filename)
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
except IOError:
pass
return result
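# Usage sketch (illustrative; the file name is hypothetical):
#
#     if is_zipfile('archive.zip'):
#         with ZipFile('archive.zip') as zf:
#             print(zf.namelist())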
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
except IOError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
data = fpin.read(sizeEndCentDir64Locator)
if len(data) != sizeEndCentDir64Locator:
return endrec
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipFile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
data = fpin.read(sizeEndCentDir64)
if len(data) != sizeEndCentDir64:
return endrec
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
endrec[_ECD_DISK_NUMBER] = disk_num
endrec[_ECD_DISK_START] = disk_dir
endrec[_ECD_ENTRIES_THIS_DISK] = dircount
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
# Determine file size
fpin.seek(0, 2)
filesize = fpin.tell()
# Check to see if this is ZIP file with no archive comment (the
# "end of central directory" structure should be the last item in the
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
except IOError:
return None
data = fpin.read()
if (len(data) == sizeEndCentDir and
data[0:4] == stringEndArchive and
data[-2:] == b"\000\000"):
# the signature is correct and there's no comment, unpack structure
endrec = struct.unpack(structEndArchive, data)
endrec=list(endrec)
# Append a blank comment and record start offset
endrec.append(b"")
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, -sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
# record signature. The comment is the last item in the ZIP file and may be
# up to 64K long. It is assumed that the "end of central directory" magic
# number does not appear in the comment.
maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
fpin.seek(maxCommentStart, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0:
# found the magic number; attempt to unpack and interpret
recData = data[start:start+sizeEndCentDir]
if len(recData) != sizeEndCentDir:
# Zip file is corrupted.
return None
endrec = list(struct.unpack(structEndArchive, recData))
commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
endrec.append(comment)
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, maxCommentStart + start - filesize,
endrec)
# Unable to find a valid end of central directory structure
return None
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'_raw_time',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
if date_time[0] < 1980:
raise ValueError('ZIP does not support timestamps before 1980')
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = b"" # Comment for each file
self.extra = b"" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = DEFAULT_VERSION # Version which created ZIP archive
self.extract_version = DEFAULT_VERSION # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self, zip64=None):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
min_version = 0
if zip64 is None:
zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
if zip64:
fmt = '<HHQQ'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
if not zip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
file_size = 0xffffffff
compress_size = 0xffffffff
min_version = ZIP64_VERSION
if self.compress_type == ZIP_BZIP2:
min_version = max(BZIP2_VERSION, min_version)
elif self.compress_type == ZIP_LZMA:
min_version = max(LZMA_VERSION, min_version)
self.extract_version = max(min_version, self.extract_version)
self.create_version = max(min_version, self.create_version)
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def _encodeFilenameFlags(self):
try:
return self.filename.encode('ascii'), self.flag_bits
except UnicodeEncodeError:
return self.filename.encode('utf-8'), self.flag_bits | 0x800
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while extra:
tp, ln = unpack('<HH', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<QQQ', extra[4:28])
elif ln == 16:
counts = unpack('<QQ', extra[4:20])
elif ln == 8:
counts = unpack('<Q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError("Corrupt extra field %s"%(ln,))
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size in (0xffffffffffffffff, 0xffffffff):
self.file_size = counts[idx]
idx += 1
if self.compress_size == 0xFFFFFFFF:
self.compress_size = counts[idx]
idx += 1
                if self.header_offset == 0xffffffff:
                    self.header_offset = counts[idx]
                    idx += 1
extra = extra[ln+4:]
class _ZipDecrypter:
"""Class to handle decryption of files stored within a ZIP archive.
ZIP supports a password-based form of encryption. Even though known
plaintext attacks have been found against it, it is still useful
to be able to get data out of such a file.
Usage:
zd = _ZipDecrypter(mypwd)
plain_char = zd(cypher_char)
plain_text = map(zd, cypher_text)
"""
def _GenerateCRCTable():
"""Generate a CRC-32 table.
ZIP encryption uses the CRC32 one-byte primitive for scrambling some
internal keys. We noticed that a direct implementation is faster than
relying on binascii.crc32().
"""
poly = 0xedb88320
table = [0] * 256
for i in range(256):
crc = i
for j in range(8):
if crc & 1:
crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
else:
crc = ((crc >> 1) & 0x7FFFFFFF)
table[i] = crc
return table
crctable = _GenerateCRCTable()
def _crc32(self, ch, crc):
"""Compute the CRC32 primitive on one byte."""
return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ch) & 0xff]
def __init__(self, pwd):
self.key0 = 305419896
self.key1 = 591751049
self.key2 = 878082192
for p in pwd:
self._UpdateKeys(p)
def _UpdateKeys(self, c):
self.key0 = self._crc32(c, self.key0)
self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
self.key1 = (self.key1 * 134775813 + 1) & 4294967295
self.key2 = self._crc32((self.key1 >> 24) & 255, self.key2)
def __call__(self, c):
"""Decrypt a single character."""
assert isinstance(c, int)
k = self.key2 | 2
c = c ^ (((k * (k^1)) >> 8) & 255)
self._UpdateKeys(c)
return c
class LZMACompressor:
def __init__(self):
self._comp = None
def _init(self):
props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1})
self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[
lzma._decode_filter_properties(lzma.FILTER_LZMA1, props)
])
return struct.pack('<BBH', 9, 4, len(props)) + props
def compress(self, data):
if self._comp is None:
return self._init() + self._comp.compress(data)
return self._comp.compress(data)
def flush(self):
if self._comp is None:
return self._init() + self._comp.flush()
return self._comp.flush()
class LZMADecompressor:
def __init__(self):
self._decomp = None
self._unconsumed = b''
self.eof = False
def decompress(self, data):
if self._decomp is None:
self._unconsumed += data
if len(self._unconsumed) <= 4:
return b''
psize, = struct.unpack('<H', self._unconsumed[2:4])
if len(self._unconsumed) <= 4 + psize:
return b''
self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW, filters=[
lzma._decode_filter_properties(lzma.FILTER_LZMA1,
self._unconsumed[4:4 + psize])
])
data = self._unconsumed[4 + psize:]
del self._unconsumed
result = self._decomp.decompress(data)
self.eof = self._decomp.eof
return result
compressor_names = {
0: 'store',
1: 'shrink',
2: 'reduce',
3: 'reduce',
4: 'reduce',
5: 'reduce',
6: 'implode',
7: 'tokenize',
8: 'deflate',
9: 'deflate64',
10: 'implode',
12: 'bzip2',
14: 'lzma',
18: 'terse',
19: 'lz77',
97: 'wavpack',
98: 'ppmd',
}
def _check_compression(compression):
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError(
"Compression requires the (missing) zlib module")
elif compression == ZIP_BZIP2:
if not bz2:
raise RuntimeError(
"Compression requires the (missing) bz2 module")
elif compression == ZIP_LZMA:
if not lzma:
raise RuntimeError(
"Compression requires the (missing) lzma module")
else:
raise RuntimeError("That compression method is not supported")
def _get_compressor(compress_type):
if compress_type == ZIP_DEFLATED:
return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
elif compress_type == ZIP_BZIP2:
return bz2.BZ2Compressor()
elif compress_type == ZIP_LZMA:
return LZMACompressor()
else:
return None
def _get_decompressor(compress_type):
if compress_type == ZIP_STORED:
return None
elif compress_type == ZIP_DEFLATED:
return zlib.decompressobj(-15)
elif compress_type == ZIP_BZIP2:
return bz2.BZ2Decompressor()
elif compress_type == ZIP_LZMA:
return LZMADecompressor()
else:
descr = compressor_names.get(compress_type)
if descr:
raise NotImplementedError("compression type %d (%s)" % (compress_type, descr))
else:
raise NotImplementedError("compression type %d" % (compress_type,))
class ZipExtFile(io.BufferedIOBase):
"""File-like object for reading an archive member.
Is returned by ZipFile.open().
"""
# Max size supported by decompressor.
    MAX_N = (1 << 31) - 1
# Read from compressed files in 4k blocks.
MIN_READ_SIZE = 4096
# Search for universal newlines or line chunks.
PATTERN = re.compile(br'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
def __init__(self, fileobj, mode, zipinfo, decrypter=None,
close_fileobj=False):
self._fileobj = fileobj
self._decrypter = decrypter
self._close_fileobj = close_fileobj
self._compress_type = zipinfo.compress_type
self._compress_left = zipinfo.compress_size
self._left = zipinfo.file_size
self._decompressor = _get_decompressor(self._compress_type)
self._eof = False
self._readbuffer = b''
self._offset = 0
self._universal = 'U' in mode
self.newlines = None
# Adjust read size for encrypted files since the first 12 bytes
# are for the encryption/password information.
if self._decrypter is not None:
self._compress_left -= 12
self.mode = mode
self.name = zipinfo.filename
if hasattr(zipinfo, 'CRC'):
self._expected_crc = zipinfo.CRC
self._running_crc = crc32(b'') & 0xffffffff
else:
self._expected_crc = None
def readline(self, limit=-1):
"""Read and return a line from the stream.
If limit is specified, at most limit bytes will be read.
"""
if not self._universal and limit < 0:
# Shortcut common case - newline found in buffer.
i = self._readbuffer.find(b'\n', self._offset) + 1
if i > 0:
line = self._readbuffer[self._offset: i]
self._offset = i
return line
if not self._universal:
return io.BufferedIOBase.readline(self, limit)
line = b''
while limit < 0 or len(line) < limit:
readahead = self.peek(2)
if readahead == b'':
return line
#
# Search for universal newlines or line chunks.
#
# The pattern returns either a line chunk or a newline, but not
# both. Combined with peek(2), we are assured that the sequence
# '\r\n' is always retrieved completely and never split into
# separate newlines - '\r', '\n' due to coincidental readaheads.
#
match = self.PATTERN.search(readahead)
newline = match.group('newline')
if newline is not None:
if self.newlines is None:
self.newlines = []
if newline not in self.newlines:
self.newlines.append(newline)
self._offset += len(newline)
return line + b'\n'
chunk = match.group('chunk')
if limit >= 0:
chunk = chunk[: limit - len(line)]
self._offset += len(chunk)
line += chunk
return line
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
if len(chunk) > self._offset:
self._readbuffer = chunk + self._readbuffer[self._offset:]
self._offset = 0
else:
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
def readable(self):
return True
def read(self, n=-1):
"""Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and
        returned until EOF is reached.
"""
if n is None or n < 0:
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
while not self._eof:
buf += self._read1(self.MAX_N)
return buf
end = n + self._offset
if end < len(self._readbuffer):
buf = self._readbuffer[self._offset:end]
self._offset = end
return buf
n = end - len(self._readbuffer)
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
while n > 0 and not self._eof:
data = self._read1(n)
if n < len(data):
self._readbuffer = data
self._offset = n
buf += data[:n]
break
buf += data
n -= len(data)
return buf
def _update_crc(self, newdata):
# Update the CRC using the given data.
if self._expected_crc is None:
# No need to compute the CRC if we don't have a reference value
return
self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
# Check the CRC if we're at the end of the file
if self._eof and self._running_crc != self._expected_crc:
raise BadZipFile("Bad CRC-32 for file %r" % self.name)
def read1(self, n):
"""Read up to n bytes with at most one read() system call."""
if n is None or n < 0:
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
while not self._eof:
data = self._read1(self.MAX_N)
if data:
buf += data
break
return buf
end = n + self._offset
if end < len(self._readbuffer):
buf = self._readbuffer[self._offset:end]
self._offset = end
return buf
n = end - len(self._readbuffer)
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
if n > 0:
while not self._eof:
data = self._read1(n)
if n < len(data):
self._readbuffer = data
self._offset = n
buf += data[:n]
break
if data:
buf += data
break
return buf
def _read1(self, n):
# Read up to n compressed bytes with at most one read() system call,
# decrypt and decompress them.
if self._eof or n <= 0:
return b''
# Read from file.
if self._compress_type == ZIP_DEFLATED:
## Handle unconsumed data.
data = self._decompressor.unconsumed_tail
if n > len(data):
data += self._read2(n - len(data))
else:
data = self._read2(n)
if self._compress_type == ZIP_STORED:
self._eof = self._compress_left <= 0
elif self._compress_type == ZIP_DEFLATED:
n = max(n, self.MIN_READ_SIZE)
data = self._decompressor.decompress(data, n)
self._eof = (self._decompressor.eof or
self._compress_left <= 0 and
not self._decompressor.unconsumed_tail)
if self._eof:
data += self._decompressor.flush()
else:
data = self._decompressor.decompress(data)
self._eof = self._decompressor.eof or self._compress_left <= 0
data = data[:self._left]
self._left -= len(data)
if self._left <= 0:
self._eof = True
self._update_crc(data)
return data
def _read2(self, n):
if self._compress_left <= 0:
return b''
n = max(n, self.MIN_READ_SIZE)
n = min(n, self._compress_left)
data = self._fileobj.read(n)
self._compress_left -= len(data)
if self._decrypter is not None:
data = bytes(map(self._decrypter, data))
return data
def close(self):
try:
if self._close_fileobj:
self._fileobj.close()
finally:
super().close()
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
_windows_illegal_name_trans_table = None
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
_check_compression(compression)
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
self.pwd = None
self._comment = b''
# Check if we were passed a file-like object
if isinstance(file, str):
# No, it's a filename
self._filePassed = 0
self.filename = file
            modeDict = {'r': 'rb', 'w': 'wb', 'a': 'r+b'}
try:
self.fp = io.open(file, modeDict[mode])
except IOError:
if mode == 'a':
mode = key = 'w'
self.fp = io.open(file, modeDict[mode])
else:
raise
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
try:
if key == 'r':
self._RealGetContents()
elif key == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
elif key == 'a':
try:
# See if file is a zip file
self._RealGetContents()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipFile:
# file is not a zip file, just append
self.fp.seek(0, 2)
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
else:
raise RuntimeError('Mode must be "r", "w" or "a"')
except:
fp = self.fp
self.fp = None
if not self._filePassed:
fp.close()
raise
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
try:
endrec = _EndRecData(fp)
except IOError:
raise BadZipFile("File is not a zip file")
if not endrec:
raise BadZipFile("File is not a zip file")
if self.debug > 1:
print(endrec)
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self._comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print("given, inferred, offset", offset_cd, inferred, concat)
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = io.BytesIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if len(centdir) != sizeCentralDir:
raise BadZipFile("Truncated central directory")
centdir = struct.unpack(structCentralDir, centdir)
if centdir[_CD_SIGNATURE] != stringCentralDir:
raise BadZipFile("Bad magic number for central directory")
if self.debug > 2:
print(centdir)
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
flags = centdir[5]
if flags & 0x800:
# UTF-8 file names extension
filename = filename.decode('utf-8')
else:
# Historical ZIP filename encoding
filename = filename.decode('cp437')
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
if x.extract_version > MAX_EXTRACT_VERSION:
raise NotImplementedError("zip file version %.1f" %
(x.extract_version / 10))
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print("total", total)
def namelist(self):
"""Return a list of file names in the archive."""
return [data.filename for data in self.filelist]
def infolist(self):
"""Return a list of class ZipInfo instances for files in the
archive."""
return self.filelist
def printdir(self, file=None):
"""Print a table of contents for the zip file."""
print("%-46s %19s %12s" % ("File Name", "Modified ", "Size"),
file=file)
for zinfo in self.filelist:
date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size),
file=file)
def testzip(self):
"""Read all the files and check the CRC."""
chunk_size = 2 ** 20
for zinfo in self.filelist:
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
with self.open(zinfo.filename, "r") as f:
while f.read(chunk_size): # Check CRC-32
pass
except BadZipFile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
def setpassword(self, pwd):
"""Set default password for encrypted files."""
if pwd and not isinstance(pwd, bytes):
raise TypeError("pwd: expected bytes, got %s" % type(pwd))
if pwd:
self.pwd = pwd
else:
self.pwd = None
@property
def comment(self):
"""The comment text associated with the ZIP file."""
return self._comment
@comment.setter
def comment(self, comment):
if not isinstance(comment, bytes):
raise TypeError("comment: expected bytes, got %s" % type(comment))
# check for valid comment length
if len(comment) >= ZIP_MAX_COMMENT:
if self.debug:
print('Archive comment is too long; truncating to %d bytes'
% ZIP_MAX_COMMENT)
comment = comment[:ZIP_MAX_COMMENT]
self._comment = comment
self._didModify = True
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
with self.open(name, "r", pwd) as fp:
return fp.read()
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError('open() requires mode "r", "U", or "rU"')
if pwd and not isinstance(pwd, bytes):
raise TypeError("pwd: expected bytes, got %s" % type(pwd))
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed")
# Only open a new file for instances where we were not
# given a file object in the constructor
if self._filePassed:
zef_file = self.fp
else:
zef_file = io.open(self.filename, 'rb')
try:
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
zef_file.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if len(fheader) != sizeFileHeader:
raise BadZipFile("Truncated file header")
fheader = struct.unpack(structFileHeader, fheader)
if fheader[_FH_SIGNATURE] != stringFileHeader:
raise BadZipFile("Bad magic number for file header")
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if zinfo.flag_bits & 0x20:
# Zip 2.7: compressed patched data
raise NotImplementedError("compressed patched data (flag bit 5)")
if zinfo.flag_bits & 0x40:
# strong encryption
raise NotImplementedError("strong encryption (flag bit 6)")
if zinfo.flag_bits & 0x800:
# UTF-8 filename
fname_str = fname.decode("utf-8")
else:
fname_str = fname.decode("cp437")
if fname_str != zinfo.orig_filename:
raise BadZipFile(
'File name in directory %r and header %r differ.'
% (zinfo.orig_filename, fname))
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError("File %s is encrypted, password "
"required for extraction" % name)
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
header = zef_file.read(12)
h = list(map(zd, header[0:12]))
if zinfo.flag_bits & 0x8:
# compare against the file type from extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if h[11] != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd,
close_fileobj=not self._filePassed)
except:
if not self._filePassed:
zef_file.close()
raise
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
@classmethod
def _sanitize_windows_name(cls, arcname, pathsep):
"""Replace bad characters and remove trailing dots from parts."""
table = cls._windows_illegal_name_trans_table
if not table:
illegal = ':<>|"?*'
table = str.maketrans(illegal, '_' * len(illegal))
cls._windows_illegal_name_trans_table = table
arcname = arcname.translate(table)
# remove trailing dots
arcname = (x.rstrip('.') for x in arcname.split(pathsep))
# rejoin, removing empty parts.
arcname = pathsep.join(x for x in arcname if x)
return arcname
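    # Example (illustrative): with '\' as pathsep, an arcname of
    # dir:\file?.txt.. becomes dir_\file_.txt -- illegal characters map to
    # '_', trailing dots are stripped from each part, empty parts are dropped.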
def _extract_member(self, member, targetpath, pwd):
"""Extract the ZipInfo object 'member' to a physical
file on the path targetpath.
"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
arcname = member.filename.replace('/', os.path.sep)
if os.path.altsep:
arcname = arcname.replace(os.path.altsep, os.path.sep)
# interpret absolute pathname as relative, remove drive letter or
# UNC path, redundant separators, "." and ".." components.
arcname = os.path.splitdrive(arcname)[1]
invalid_path_parts = ('', os.path.curdir, os.path.pardir)
arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
if x not in invalid_path_parts)
if os.path.sep == '\\':
# filter illegal characters on Windows
arcname = self._sanitize_windows_name(arcname, os.path.sep)
targetpath = os.path.join(targetpath, arcname)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
with self.open(member, pwd=pwd) as source, \
open(targetpath, "wb") as target:
shutil.copyfileobj(source, target)
return targetpath
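    # For example (illustrative), the sanitising above reduces a member
    # named '/tmp/../evil.txt' to 'tmp/evil.txt' before it is joined to
    # targetpath: the leading separator and the '..' component are both
    # dropped, so members cannot escape the extraction directory.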
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print("Duplicate name:", zinfo.filename)
if self.mode not in ("w", "a"):
raise RuntimeError('write() requires mode "w" or "a"')
if not self.fp:
raise RuntimeError(
"Attempt to write ZIP archive that was already closed")
_check_compression(zinfo.compress_type)
if zinfo.file_size > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
if zinfo.header_offset > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile(
"Zipfile size would require ZIP64 extensions")
def write(self, filename, arcname=None, compress_type=None):
"""Put the bytes from filename into the archive under the name
arcname."""
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
st = os.stat(filename)
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
if isdir:
arcname += '/'
zinfo = ZipInfo(arcname, date_time)
zinfo.external_attr = (st[0] & 0xFFFF) << 16 # Unix attributes
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = st.st_size
zinfo.flag_bits = 0x00
zinfo.header_offset = self.fp.tell() # Start of header bytes
if zinfo.compress_type == ZIP_LZMA:
# Compressed data includes an end-of-stream (EOS) marker
zinfo.flag_bits |= 0x02
self._writecheck(zinfo)
self._didModify = True
if isdir:
zinfo.file_size = 0
zinfo.compress_size = 0
zinfo.CRC = 0
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
self.fp.write(zinfo.FileHeader(False))
return
cmpr = _get_compressor(zinfo.compress_type)
with open(filename, "rb") as fp:
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
# Compressed size can be larger than uncompressed size
zip64 = self._allowZip64 and \
zinfo.file_size * 1.05 > ZIP64_LIMIT
self.fp.write(zinfo.FileHeader(zip64))
file_size = 0
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
self.fp.write(buf)
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
if not zip64 and self._allowZip64:
if file_size > ZIP64_LIMIT:
raise RuntimeError('File size has increased during compressing')
if compress_size > ZIP64_LIMIT:
raise RuntimeError('Compressed size larger than uncompressed size')
# Seek backwards and write file header (which will now include
# correct CRC and file sizes)
position = self.fp.tell() # Preserve current position in file
self.fp.seek(zinfo.header_offset, 0)
self.fp.write(zinfo.FileHeader(zip64))
self.fp.seek(position, 0)
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
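    # Illustrative usage sketch for write() (file names are hypothetical):
    #
    #     with ZipFile('out.zip', 'w') as zf:
    #         zf.write('report.txt')                       # archive default
    #         zf.write('app.log', arcname='logs/app.log',
    #                  compress_type=ZIP_DEFLATED)         # per-file override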
def writestr(self, zinfo_or_arcname, data, compress_type=None):
"""Write a file into the archive. The contents is 'data', which
may be either a 'str' or a 'bytes' instance; if it is a 'str',
it is encoded as UTF-8 first.
'zinfo_or_arcname' is either a ZipInfo instance or
the name of the file in the archive."""
if isinstance(data, str):
data = data.encode("utf-8")
if not isinstance(zinfo_or_arcname, ZipInfo):
zinfo = ZipInfo(filename=zinfo_or_arcname,
date_time=time.localtime(time.time())[:6])
zinfo.compress_type = self.compression
zinfo.external_attr = 0o600 << 16
else:
zinfo = zinfo_or_arcname
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
zinfo.file_size = len(data) # Uncompressed size
zinfo.header_offset = self.fp.tell() # Start of header data
if compress_type is not None:
zinfo.compress_type = compress_type
if zinfo.compress_type == ZIP_LZMA:
# Compressed data includes an end-of-stream (EOS) marker
zinfo.flag_bits |= 0x02
self._writecheck(zinfo)
self._didModify = True
zinfo.CRC = crc32(data) & 0xffffffff # CRC-32 checksum
co = _get_compressor(zinfo.compress_type)
if co:
data = co.compress(data) + co.flush()
zinfo.compress_size = len(data) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
zip64 = zinfo.file_size > ZIP64_LIMIT or \
zinfo.compress_size > ZIP64_LIMIT
if zip64 and not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
self.fp.write(zinfo.FileHeader(zip64))
self.fp.write(data)
if zinfo.flag_bits & 0x08:
# Write CRC and file sizes after the file data
fmt = '<LQQ' if zip64 else '<LLL'
self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.fp.flush()
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
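    # Illustrative usage sketch for writestr():
    #
    #     with ZipFile('out.zip', 'w') as zf:
    #         zf.writestr('hello.txt', 'hello')            # str -> UTF-8
    #         info = ZipInfo('raw.bin', date_time=(1980, 1, 1, 0, 0, 0))
    #         zf.writestr(info, b'\x00\x01')               # explicit ZipInfo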
def __del__(self):
"""Call the "close()" method in case the user forgot."""
self.close()
def close(self):
"""Close the file, and for mode "w" and "a" write the ending
records."""
if self.fp is None:
return
try:
if self.mode in ("w", "a") and self._didModify: # write ending records
count = 0
pos1 = self.fp.tell()
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = 0xffffffff
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
min_version = 0
if extra:
                        # Append a ZIP64 field to the extra data
extra_data = struct.pack(
'<HH' + 'Q'*len(extra),
1, 8*len(extra), *extra) + extra_data
min_version = ZIP64_VERSION
if zinfo.compress_type == ZIP_BZIP2:
min_version = max(BZIP2_VERSION, min_version)
elif zinfo.compress_type == ZIP_LZMA:
min_version = max(LZMA_VERSION, min_version)
extract_version = max(min_version, zinfo.extract_version)
create_version = max(min_version, zinfo.create_version)
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print((structCentralDir, stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset), file=sys.stderr)
raise
self.fp.write(centdir)
self.fp.write(filename)
self.fp.write(extra_data)
self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
centDirCount = count
centDirSize = pos2 - pos1
centDirOffset = pos1
if (centDirCount >= ZIP_FILECOUNT_LIMIT or
centDirOffset > ZIP64_LIMIT or
centDirSize > ZIP64_LIMIT):
# Need to write the ZIP64 end-of-archive records
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset)
self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
self.fp.write(zip64locrec)
centDirCount = min(centDirCount, 0xFFFF)
centDirSize = min(centDirSize, 0xFFFFFFFF)
centDirOffset = min(centDirOffset, 0xFFFFFFFF)
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self._comment))
self.fp.write(endrec)
self.fp.write(self._comment)
self.fp.flush()
finally:
fp = self.fp
self.fp = None
if not self._filePassed:
fp.close()
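    # Resulting archive layout (informal sketch of what close() wrote):
    #     [local headers + file data] [central directory]
    #     [ZIP64 end-of-archive records, only when needed] [end record]
    # The ZIP64 records are emitted above only when the entry count, the
    # central directory size or its offset exceeds the classic limits.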
class PyZipFile(ZipFile):
"""Class to create ZIP archives with Python library files and packages."""
def __init__(self, file, mode="r", compression=ZIP_STORED,
allowZip64=False, optimize=-1):
ZipFile.__init__(self, file, mode=mode, compression=compression,
allowZip64=allowZip64)
self._optimize = optimize
def writepy(self, pathname, basename=""):
"""Add all files from "pathname" to the ZIP archive.
If pathname is a package directory, search the directory and
all package subdirectories recursively for all *.py and enter
        the modules into the archive. If pathname is a plain
        directory, add the *.py modules found directly in it. Else, pathname
must be a Python *.py file and the module will be put into the
archive. Added modules are always module.pyo or module.pyc.
This method will compile the module.py into module.pyc if
necessary.
"""
dir, name = os.path.split(pathname)
if os.path.isdir(pathname):
initname = os.path.join(pathname, "__init__.py")
if os.path.isfile(initname):
# This is a package directory, add it
if basename:
basename = "%s/%s" % (basename, name)
else:
basename = name
if self.debug:
print("Adding package in", pathname, "as", basename)
fname, arcname = self._get_codename(initname[0:-3], basename)
if self.debug:
print("Adding", arcname)
self.write(fname, arcname)
dirlist = os.listdir(pathname)
dirlist.remove("__init__.py")
# Add all *.py files and package subdirectories
for filename in dirlist:
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if os.path.isdir(path):
if os.path.isfile(os.path.join(path, "__init__.py")):
# This is a package directory, add it
self.writepy(path, basename) # Recursive call
elif ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print("Adding", arcname)
self.write(fname, arcname)
else:
# This is NOT a package directory, add its files at top level
if self.debug:
print("Adding files from directory", pathname)
for filename in os.listdir(pathname):
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print("Adding", arcname)
self.write(fname, arcname)
else:
if pathname[-3:] != ".py":
raise RuntimeError(
'Files added with writepy() must end with ".py"')
fname, arcname = self._get_codename(pathname[0:-3], basename)
if self.debug:
print("Adding file", arcname)
self.write(fname, arcname)
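    # Illustrative usage sketch (the package path is hypothetical):
    #
    #     with PyZipFile('lib.zip', 'w', optimize=0) as pzf:
    #         pzf.writepy('/path/to/mypackage')   # stores mypackage/*.pyc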
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
def _compile(file, optimize=-1):
import py_compile
if self.debug:
print("Compiling", file)
try:
py_compile.compile(file, doraise=True, optimize=optimize)
except py_compile.PyCompileError as err:
print(err.msg)
return False
return True
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
pycache_pyc = imp.cache_from_source(file_py, True)
pycache_pyo = imp.cache_from_source(file_py, False)
if self._optimize == -1:
# legacy mode: use whatever file is present
if (os.path.isfile(file_pyo) and
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime):
# Use .pyo file.
arcname = fname = file_pyo
elif (os.path.isfile(file_pyc) and
os.stat(file_pyc).st_mtime >= os.stat(file_py).st_mtime):
# Use .pyc file.
arcname = fname = file_pyc
elif (os.path.isfile(pycache_pyc) and
os.stat(pycache_pyc).st_mtime >= os.stat(file_py).st_mtime):
# Use the __pycache__/*.pyc file, but write it to the legacy pyc
# file name in the archive.
fname = pycache_pyc
arcname = file_pyc
elif (os.path.isfile(pycache_pyo) and
os.stat(pycache_pyo).st_mtime >= os.stat(file_py).st_mtime):
# Use the __pycache__/*.pyo file, but write it to the legacy pyo
# file name in the archive.
fname = pycache_pyo
arcname = file_pyo
else:
# Compile py into PEP 3147 pyc file.
if _compile(file_py):
fname = (pycache_pyc if __debug__ else pycache_pyo)
arcname = (file_pyc if __debug__ else file_pyo)
else:
fname = arcname = file_py
else:
# new mode: use given optimization level
if self._optimize == 0:
fname = pycache_pyc
arcname = file_pyc
else:
fname = pycache_pyo
arcname = file_pyo
if not (os.path.isfile(fname) and
os.stat(fname).st_mtime >= os.stat(file_py).st_mtime):
if not _compile(file_py, optimize=self._optimize):
fname = arcname = file_py
archivename = os.path.split(arcname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
def main(args=None):
    import textwrap
    USAGE = textwrap.dedent("""\
Usage:
zipfile.py -l zipfile.zip # Show listing of a zipfile
zipfile.py -t zipfile.zip # Test if a zipfile is valid
zipfile.py -e zipfile.zip target # Extract zipfile into target dir
zipfile.py -c zipfile.zip src ... # Create zipfile from sources
""")
if args is None:
args = sys.argv[1:]
if not args or args[0] not in ('-l', '-c', '-e', '-t'):
print(USAGE)
sys.exit(1)
if args[0] == '-l':
if len(args) != 2:
print(USAGE)
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
zf.printdir()
elif args[0] == '-t':
if len(args) != 2:
print(USAGE)
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
badfile = zf.testzip()
if badfile:
print("The following enclosed file is corrupted: {!r}".format(badfile))
print("Done testing")
elif args[0] == '-e':
if len(args) != 3:
print(USAGE)
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
out = args[2]
for path in zf.namelist():
if path.startswith('./'):
tgt = os.path.join(out, path[2:])
else:
tgt = os.path.join(out, path)
tgtdir = os.path.dirname(tgt)
if not os.path.exists(tgtdir):
os.makedirs(tgtdir)
with open(tgt, 'wb') as fp:
fp.write(zf.read(path))
elif args[0] == '-c':
if len(args) < 3:
print(USAGE)
sys.exit(1)
def addToZip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, ZIP_DEFLATED)
elif os.path.isdir(path):
for nm in os.listdir(path):
addToZip(zf,
os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
with ZipFile(args[1], 'w', allowZip64=True) as zf:
for src in args[2:]:
addToZip(zf, src, os.path.basename(src))
if __name__ == "__main__":
main()
| gpl-3.0 | -8,797,345,527,266,180,000 | 36.390423 | 103 | 0.547432 | false |
double12gzh/nova | nova/api/openstack/compute/contrib/os_tenant_networks.py | 8 | 8472 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import netaddr.core as netexc
from oslo_config import cfg
from oslo_log import log as logging
import six
import webob
from webob import exc
from nova.api.openstack import extensions
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova.i18n import _LE
import nova.network
from nova import quota
CONF = cfg.CONF
os_network_opts = [
cfg.BoolOpt("enable_network_quota",
default=False,
help='Enables or disables quota checking for tenant '
'networks'),
cfg.StrOpt('use_neutron_default_nets',
default="False",
help='Control for checking for default networks'),
cfg.StrOpt('neutron_default_tenant_id',
default="default",
help='Default tenant id when creating neutron '
'networks'),
cfg.IntOpt('quota_networks',
default=3,
help='Number of private networks allowed per project'),
]
CONF.register_opts(os_network_opts)
QUOTAS = quota.QUOTAS
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'os-tenant-networks')
def network_dict(network):
# NOTE(danms): Here, network should be an object, which could have come
# from neutron and thus be missing most of the attributes. Providing a
# default to get() avoids trying to lazy-load missing attributes.
return {"id": network.get("uuid", None) or network.get("id", None),
"cidr": str(network.get("cidr", None)),
"label": network.get("label", None)}
class NetworkController(object):
def __init__(self, network_api=None):
        self.network_api = network_api or nova.network.API()
self._default_networks = []
def _refresh_default_networks(self):
self._default_networks = []
if CONF.use_neutron_default_nets == "True":
try:
self._default_networks = self._get_default_networks()
except Exception:
LOG.exception(_LE("Failed to get default networks"))
def _get_default_networks(self):
project_id = CONF.neutron_default_tenant_id
ctx = nova_context.RequestContext(user_id=None,
project_id=project_id)
networks = {}
for n in self.network_api.get_all(ctx):
networks[n['id']] = n['label']
return [{'id': k, 'label': v} for k, v in six.iteritems(networks)]
def index(self, req):
context = req.environ['nova.context']
authorize(context)
networks = list(self.network_api.get_all(context))
if not self._default_networks:
self._refresh_default_networks()
networks.extend(self._default_networks)
return {'networks': [network_dict(n) for n in networks]}
def show(self, req, id):
context = req.environ['nova.context']
authorize(context)
try:
network = self.network_api.get(context, id)
except exception.NetworkNotFound:
msg = _("Network not found")
raise exc.HTTPNotFound(explanation=msg)
return {'network': network_dict(network)}
def delete(self, req, id):
context = req.environ['nova.context']
authorize(context)
reservation = None
try:
if CONF.enable_network_quota:
reservation = QUOTAS.reserve(context, networks=-1)
except Exception:
reservation = None
LOG.exception(_LE("Failed to update usages deallocating "
"network."))
def _rollback_quota(reservation):
if CONF.enable_network_quota and reservation:
QUOTAS.rollback(context, reservation)
try:
self.network_api.delete(context, id)
except exception.PolicyNotAuthorized as e:
_rollback_quota(reservation)
raise exc.HTTPForbidden(explanation=six.text_type(e))
except exception.NetworkInUse as e:
_rollback_quota(reservation)
raise exc.HTTPConflict(explanation=e.format_message())
except exception.NetworkNotFound:
_rollback_quota(reservation)
msg = _("Network not found")
raise exc.HTTPNotFound(explanation=msg)
if CONF.enable_network_quota and reservation:
QUOTAS.commit(context, reservation)
response = webob.Response(status_int=202)
return response
def create(self, req, body):
if not body:
raise exc.HTTPUnprocessableEntity()
context = req.environ["nova.context"]
authorize(context)
network = body["network"]
keys = ["cidr", "cidr_v6", "ipam", "vlan_start", "network_size",
"num_networks"]
kwargs = {k: network.get(k) for k in keys}
if not network.get("label"):
msg = _("Network label is required")
raise exc.HTTPBadRequest(explanation=msg)
label = network["label"]
if not (kwargs["cidr"] or kwargs["cidr_v6"]):
msg = _("No CIDR requested")
raise exc.HTTPBadRequest(explanation=msg)
if kwargs["cidr"]:
try:
net = netaddr.IPNetwork(kwargs["cidr"])
if net.size < 4:
msg = _("Requested network does not contain "
"enough (2+) usable hosts")
raise exc.HTTPBadRequest(explanation=msg)
except netexc.AddrFormatError:
msg = _("CIDR is malformed.")
raise exc.HTTPBadRequest(explanation=msg)
except netexc.AddrConversionError:
msg = _("Address could not be converted.")
raise exc.HTTPBadRequest(explanation=msg)
networks = []
try:
if CONF.enable_network_quota:
reservation = QUOTAS.reserve(context, networks=1)
except exception.OverQuota:
msg = _("Quota exceeded, too many networks.")
raise exc.HTTPBadRequest(explanation=msg)
try:
networks = self.network_api.create(context,
label=label, **kwargs)
if CONF.enable_network_quota:
QUOTAS.commit(context, reservation)
except exception.PolicyNotAuthorized as e:
raise exc.HTTPForbidden(explanation=six.text_type(e))
except Exception:
if CONF.enable_network_quota:
QUOTAS.rollback(context, reservation)
msg = _("Create networks failed")
LOG.exception(msg, extra=network)
raise exc.HTTPServiceUnavailable(explanation=msg)
return {"network": network_dict(networks[0])}
class Os_tenant_networks(extensions.ExtensionDescriptor):
"""Tenant-based Network Management Extension."""
name = "OSTenantNetworks"
alias = "os-tenant-networks"
namespace = ("http://docs.openstack.org/compute/"
"ext/os-tenant-networks/api/v2")
updated = "2012-03-07T14:46:43Z"
def get_resources(self):
ext = extensions.ResourceExtension('os-tenant-networks',
NetworkController())
return [ext]
def _sync_networks(context, project_id, session):
ctx = nova_context.RequestContext(user_id=None, project_id=project_id)
ctx = ctx.elevated()
networks = nova.network.api.API().get_all(ctx)
return dict(networks=len(networks))
if CONF.enable_network_quota:
QUOTAS.register_resource(quota.ReservableResource('networks',
_sync_networks,
'quota_networks'))
| apache-2.0 | -5,706,419,390,907,437,000 | 36.321586 | 78 | 0.596553 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/importlib/test/source/test_abc_loader.py | 51 | 31363 | import importlib
from importlib import abc
from .. import abc as testing_abc
from .. import util
from . import util as source_util
import imp
import inspect
import io
import marshal
import os
import sys
import types
import unittest
import warnings
class SourceOnlyLoaderMock(abc.SourceLoader):
# Globals that should be defined for all modules.
source = (b"_ = '::'.join([__name__, __file__, __cached__, __package__, "
b"repr(__loader__)])")
def __init__(self, path):
self.path = path
def get_data(self, path):
assert self.path == path
return self.source
def get_filename(self, fullname):
return self.path
class SourceLoaderMock(SourceOnlyLoaderMock):
source_mtime = 1
def __init__(self, path, magic=imp.get_magic()):
super().__init__(path)
self.bytecode_path = imp.cache_from_source(self.path)
data = bytearray(magic)
data.extend(marshal._w_long(self.source_mtime))
code_object = compile(self.source, self.path, 'exec',
dont_inherit=True)
data.extend(marshal.dumps(code_object))
self.bytecode = bytes(data)
self.written = {}
def get_data(self, path):
if path == self.path:
return super().get_data(path)
elif path == self.bytecode_path:
return self.bytecode
else:
raise IOError
def path_mtime(self, path):
assert path == self.path
return self.source_mtime
def set_data(self, path, data):
self.written[path] = bytes(data)
return path == self.bytecode_path
class PyLoaderMock(abc.PyLoader):
# Globals that should be defined for all modules.
source = (b"_ = '::'.join([__name__, __file__, __package__, "
b"repr(__loader__)])")
def __init__(self, data):
"""Take a dict of 'module_name: path' pairings.
Paths should have no file extension, allowing packages to be denoted by
ending in '__init__'.
"""
self.module_paths = data
self.path_to_module = {val:key for key,val in data.items()}
def get_data(self, path):
if path not in self.path_to_module:
raise IOError
return self.source
def is_package(self, name):
filename = os.path.basename(self.get_filename(name))
return os.path.splitext(filename)[0] == '__init__'
def source_path(self, name):
try:
return self.module_paths[name]
except KeyError:
raise ImportError
def get_filename(self, name):
"""Silence deprecation warning."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
path = super().get_filename(name)
assert len(w) == 1
assert issubclass(w[0].category, PendingDeprecationWarning)
return path
class PyLoaderCompatMock(PyLoaderMock):
"""Mock that matches what is suggested to have a loader that is compatible
from Python 3.1 onwards."""
def get_filename(self, fullname):
try:
return self.module_paths[fullname]
except KeyError:
raise ImportError
def source_path(self, fullname):
try:
return self.get_filename(fullname)
except ImportError:
return None
class PyPycLoaderMock(abc.PyPycLoader, PyLoaderMock):
default_mtime = 1
def __init__(self, source, bc={}):
"""Initialize mock.
        'bc' is a dict keyed on a module's name. The value is a dict with
        possible keys of 'path', 'mtime', 'magic', and 'bc'. Except for
        'path', each of those keys controls whether any part of the created
        bytecode deviates from the default values.
"""
super().__init__(source)
self.module_bytecode = {}
self.path_to_bytecode = {}
self.bytecode_to_path = {}
for name, data in bc.items():
self.path_to_bytecode[data['path']] = name
self.bytecode_to_path[name] = data['path']
magic = data.get('magic', imp.get_magic())
mtime = importlib._w_long(data.get('mtime', self.default_mtime))
if 'bc' in data:
bc = data['bc']
else:
bc = self.compile_bc(name)
self.module_bytecode[name] = magic + mtime + bc
def compile_bc(self, name):
source_path = self.module_paths.get(name, '<test>') or '<test>'
code = compile(self.source, source_path, 'exec')
return marshal.dumps(code)
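    # Layout of the mock bytecode assembled in __init__ above (matching what
    # CPython expected before 3.3):
    #     magic number (4 bytes) | source mtime (4 bytes, little-endian)
    #     | marshalled code object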
def source_mtime(self, name):
if name in self.module_paths:
return self.default_mtime
elif name in self.module_bytecode:
return None
else:
raise ImportError
def bytecode_path(self, name):
try:
return self.bytecode_to_path[name]
except KeyError:
if name in self.module_paths:
return None
else:
raise ImportError
def write_bytecode(self, name, bytecode):
self.module_bytecode[name] = bytecode
return True
def get_data(self, path):
if path in self.path_to_module:
return super().get_data(path)
elif path in self.path_to_bytecode:
name = self.path_to_bytecode[path]
return self.module_bytecode[name]
else:
raise IOError
def is_package(self, name):
try:
return super().is_package(name)
except TypeError:
return '__init__' in self.bytecode_to_path[name]
def get_code(self, name):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
code_object = super().get_code(name)
assert len(w) == 1
assert issubclass(w[0].category, PendingDeprecationWarning)
return code_object
class PyLoaderTests(testing_abc.LoaderTests):
"""Tests for importlib.abc.PyLoader."""
mocker = PyLoaderMock
def eq_attrs(self, ob, **kwargs):
for attr, val in kwargs.items():
found = getattr(ob, attr)
self.assertEqual(found, val,
"{} attribute: {} != {}".format(attr, found, val))
def test_module(self):
name = '<module>'
path = os.path.join('', 'path', 'to', 'module')
mock = self.mocker({name: path})
with util.uncache(name):
module = mock.load_module(name)
self.assertTrue(name in sys.modules)
self.eq_attrs(module, __name__=name, __file__=path, __package__='',
__loader__=mock)
self.assertTrue(not hasattr(module, '__path__'))
return mock, name
def test_package(self):
name = '<pkg>'
path = os.path.join('path', 'to', name, '__init__')
mock = self.mocker({name: path})
with util.uncache(name):
module = mock.load_module(name)
self.assertTrue(name in sys.modules)
self.eq_attrs(module, __name__=name, __file__=path,
__path__=[os.path.dirname(path)], __package__=name,
__loader__=mock)
return mock, name
def test_lacking_parent(self):
name = 'pkg.mod'
path = os.path.join('path', 'to', 'pkg', 'mod')
mock = self.mocker({name: path})
with util.uncache(name):
module = mock.load_module(name)
self.assertIn(name, sys.modules)
self.eq_attrs(module, __name__=name, __file__=path, __package__='pkg',
__loader__=mock)
self.assertFalse(hasattr(module, '__path__'))
return mock, name
def test_module_reuse(self):
name = 'mod'
path = os.path.join('path', 'to', 'mod')
module = imp.new_module(name)
mock = self.mocker({name: path})
with util.uncache(name):
sys.modules[name] = module
loaded_module = mock.load_module(name)
self.assertTrue(loaded_module is module)
self.assertTrue(sys.modules[name] is module)
return mock, name
def test_state_after_failure(self):
name = "mod"
module = imp.new_module(name)
module.blah = None
mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
mock.source = b"1/0"
with util.uncache(name):
sys.modules[name] = module
with self.assertRaises(ZeroDivisionError):
mock.load_module(name)
self.assertTrue(sys.modules[name] is module)
self.assertTrue(hasattr(module, 'blah'))
return mock
def test_unloadable(self):
name = "mod"
mock = self.mocker({name: os.path.join('path', 'to', 'mod')})
mock.source = b"1/0"
with util.uncache(name):
with self.assertRaises(ZeroDivisionError):
mock.load_module(name)
self.assertTrue(name not in sys.modules)
return mock
class PyLoaderCompatTests(PyLoaderTests):
"""Test that the suggested code to make a loader that is compatible from
Python 3.1 forward works."""
mocker = PyLoaderCompatMock
class PyLoaderInterfaceTests(unittest.TestCase):
"""Tests for importlib.abc.PyLoader to make sure that when source_path()
doesn't return a path everything works as expected."""
def test_no_source_path(self):
# No source path should lead to ImportError.
name = 'mod'
mock = PyLoaderMock({})
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
def test_source_path_is_None(self):
name = 'mod'
mock = PyLoaderMock({name: None})
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
def test_get_filename_with_source_path(self):
# get_filename() should return what source_path() returns.
name = 'mod'
path = os.path.join('path', 'to', 'source')
mock = PyLoaderMock({name: path})
with util.uncache(name):
self.assertEqual(mock.get_filename(name), path)
def test_get_filename_no_source_path(self):
# get_filename() should raise ImportError if source_path returns None.
name = 'mod'
mock = PyLoaderMock({name: None})
with util.uncache(name), self.assertRaises(ImportError):
mock.get_filename(name)
class PyPycLoaderTests(PyLoaderTests):
"""Tests for importlib.abc.PyPycLoader."""
mocker = PyPycLoaderMock
@source_util.writes_bytecode_files
def verify_bytecode(self, mock, name):
assert name in mock.module_paths
self.assertIn(name, mock.module_bytecode)
magic = mock.module_bytecode[name][:4]
self.assertEqual(magic, imp.get_magic())
mtime = importlib._r_long(mock.module_bytecode[name][4:8])
self.assertEqual(mtime, 1)
bc = mock.module_bytecode[name][8:]
self.assertEqual(bc, mock.compile_bc(name))
def test_module(self):
mock, name = super().test_module()
self.verify_bytecode(mock, name)
def test_package(self):
mock, name = super().test_package()
self.verify_bytecode(mock, name)
def test_lacking_parent(self):
mock, name = super().test_lacking_parent()
self.verify_bytecode(mock, name)
def test_module_reuse(self):
mock, name = super().test_module_reuse()
self.verify_bytecode(mock, name)
def test_state_after_failure(self):
super().test_state_after_failure()
def test_unloadable(self):
super().test_unloadable()
class PyPycLoaderInterfaceTests(unittest.TestCase):
"""Test for the interface of importlib.abc.PyPycLoader."""
def get_filename_check(self, src_path, bc_path, expect):
name = 'mod'
mock = PyPycLoaderMock({name: src_path}, {name: {'path': bc_path}})
with util.uncache(name):
assert mock.source_path(name) == src_path
assert mock.bytecode_path(name) == bc_path
self.assertEqual(mock.get_filename(name), expect)
def test_filename_with_source_bc(self):
# When source and bytecode paths present, return the source path.
self.get_filename_check('source_path', 'bc_path', 'source_path')
def test_filename_with_source_no_bc(self):
# With source but no bc, return source path.
self.get_filename_check('source_path', None, 'source_path')
def test_filename_with_no_source_bc(self):
        # With no source but bytecode, return the bytecode path.
self.get_filename_check(None, 'bc_path', 'bc_path')
def test_filename_with_no_source_or_bc(self):
# With no source or bc, raise ImportError.
name = 'mod'
mock = PyPycLoaderMock({name: None}, {name: {'path': None}})
with util.uncache(name), self.assertRaises(ImportError):
mock.get_filename(name)
class SkipWritingBytecodeTests(unittest.TestCase):
"""Test that bytecode is properly handled based on
sys.dont_write_bytecode."""
@source_util.writes_bytecode_files
def run_test(self, dont_write_bytecode):
name = 'mod'
mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
sys.dont_write_bytecode = dont_write_bytecode
with util.uncache(name):
mock.load_module(name)
self.assertTrue((name in mock.module_bytecode) is not
dont_write_bytecode)
def test_no_bytecode_written(self):
self.run_test(True)
def test_bytecode_written(self):
self.run_test(False)
class RegeneratedBytecodeTests(unittest.TestCase):
"""Test that bytecode is regenerated as expected."""
@source_util.writes_bytecode_files
def test_different_magic(self):
# A different magic number should lead to new bytecode.
name = 'mod'
bad_magic = b'\x00\x00\x00\x00'
assert bad_magic != imp.get_magic()
mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
{name: {'path': os.path.join('path', 'to',
'mod.bytecode'),
'magic': bad_magic}})
with util.uncache(name):
mock.load_module(name)
self.assertTrue(name in mock.module_bytecode)
magic = mock.module_bytecode[name][:4]
self.assertEqual(magic, imp.get_magic())
@source_util.writes_bytecode_files
def test_old_mtime(self):
# Bytecode with an older mtime should be regenerated.
name = 'mod'
old_mtime = PyPycLoaderMock.default_mtime - 1
mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')},
{name: {'path': 'path/to/mod.bytecode', 'mtime': old_mtime}})
with util.uncache(name):
mock.load_module(name)
self.assertTrue(name in mock.module_bytecode)
mtime = importlib._r_long(mock.module_bytecode[name][4:8])
self.assertEqual(mtime, PyPycLoaderMock.default_mtime)
class BadBytecodeFailureTests(unittest.TestCase):
"""Test import failures when there is no source and parts of the bytecode
is bad."""
def test_bad_magic(self):
# A bad magic number should lead to an ImportError.
name = 'mod'
bad_magic = b'\x00\x00\x00\x00'
bc = {name:
{'path': os.path.join('path', 'to', 'mod'),
'magic': bad_magic}}
mock = PyPycLoaderMock({name: None}, bc)
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
def test_no_bytecode(self):
# Missing code object bytecode should lead to an EOFError.
name = 'mod'
bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b''}}
mock = PyPycLoaderMock({name: None}, bc)
with util.uncache(name), self.assertRaises(EOFError):
mock.load_module(name)
def test_bad_bytecode(self):
# Malformed code object bytecode should lead to a ValueError.
name = 'mod'
bc = {name: {'path': os.path.join('path', 'to', 'mod'), 'bc': b'1234'}}
mock = PyPycLoaderMock({name: None}, bc)
with util.uncache(name), self.assertRaises(ValueError):
mock.load_module(name)
def raise_ImportError(*args, **kwargs):
raise ImportError
class MissingPathsTests(unittest.TestCase):
"""Test what happens when a source or bytecode path does not exist (either
from *_path returning None or raising ImportError)."""
def test_source_path_None(self):
# Bytecode should be used when source_path returns None, along with
# __file__ being set to the bytecode path.
name = 'mod'
bytecode_path = 'path/to/mod'
mock = PyPycLoaderMock({name: None}, {name: {'path': bytecode_path}})
with util.uncache(name):
module = mock.load_module(name)
self.assertEqual(module.__file__, bytecode_path)
# Testing for bytecode_path returning None handled by all tests where no
# bytecode initially exists.
def test_all_paths_None(self):
# If all *_path methods return None, raise ImportError.
name = 'mod'
mock = PyPycLoaderMock({name: None})
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
def test_source_path_ImportError(self):
# An ImportError from source_path should trigger an ImportError.
name = 'mod'
mock = PyPycLoaderMock({}, {name: {'path': os.path.join('path', 'to',
'mod')}})
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
def test_bytecode_path_ImportError(self):
# An ImportError from bytecode_path should trigger an ImportError.
name = 'mod'
mock = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
bad_meth = types.MethodType(raise_ImportError, mock)
mock.bytecode_path = bad_meth
with util.uncache(name), self.assertRaises(ImportError):
mock.load_module(name)
class SourceLoaderTestHarness(unittest.TestCase):
def setUp(self, *, is_package=True, **kwargs):
self.package = 'pkg'
if is_package:
self.path = os.path.join(self.package, '__init__.py')
self.name = self.package
else:
module_name = 'mod'
self.path = os.path.join(self.package, '.'.join(['mod', 'py']))
self.name = '.'.join([self.package, module_name])
self.cached = imp.cache_from_source(self.path)
self.loader = self.loader_mock(self.path, **kwargs)
def verify_module(self, module):
self.assertEqual(module.__name__, self.name)
self.assertEqual(module.__file__, self.path)
self.assertEqual(module.__cached__, self.cached)
self.assertEqual(module.__package__, self.package)
self.assertEqual(module.__loader__, self.loader)
values = module._.split('::')
self.assertEqual(values[0], self.name)
self.assertEqual(values[1], self.path)
self.assertEqual(values[2], self.cached)
self.assertEqual(values[3], self.package)
self.assertEqual(values[4], repr(self.loader))
def verify_code(self, code_object):
module = imp.new_module(self.name)
module.__file__ = self.path
module.__cached__ = self.cached
module.__package__ = self.package
module.__loader__ = self.loader
module.__path__ = []
exec(code_object, module.__dict__)
self.verify_module(module)
class SourceOnlyLoaderTests(SourceLoaderTestHarness):
"""Test importlib.abc.SourceLoader for source-only loading.
Reload testing is subsumed by the tests for
importlib.util.module_for_loader.
"""
loader_mock = SourceOnlyLoaderMock
def test_get_source(self):
# Verify the source code is returned as a string.
# If an IOError is raised by get_data then raise ImportError.
expected_source = self.loader.source.decode('utf-8')
self.assertEqual(self.loader.get_source(self.name), expected_source)
def raise_IOError(path):
raise IOError
self.loader.get_data = raise_IOError
with self.assertRaises(ImportError):
self.loader.get_source(self.name)
def test_is_package(self):
# Properly detect when loading a package.
self.setUp(is_package=True)
self.assertTrue(self.loader.is_package(self.name))
self.setUp(is_package=False)
self.assertFalse(self.loader.is_package(self.name))
def test_get_code(self):
# Verify the code object is created.
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
def test_load_module(self):
# Loading a module should set __name__, __loader__, __package__,
# __path__ (for packages), __file__, and __cached__.
# The module should also be put into sys.modules.
with util.uncache(self.name):
module = self.loader.load_module(self.name)
self.verify_module(module)
self.assertEqual(module.__path__, [os.path.dirname(self.path)])
self.assertTrue(self.name in sys.modules)
def test_package_settings(self):
        # __package__ needs to be set, while __path__ is only set if the
        # module is a package.
        # Testing the values for a package is covered by test_load_module.
self.setUp(is_package=False)
with util.uncache(self.name):
module = self.loader.load_module(self.name)
self.verify_module(module)
self.assertTrue(not hasattr(module, '__path__'))
def test_get_source_encoding(self):
# Source is considered encoded in UTF-8 by default unless otherwise
# specified by an encoding line.
source = "_ = 'รผ'"
self.loader.source = source.encode('utf-8')
returned_source = self.loader.get_source(self.name)
self.assertEqual(returned_source, source)
source = "# coding: latin-1\n_ = รผ"
self.loader.source = source.encode('latin-1')
returned_source = self.loader.get_source(self.name)
self.assertEqual(returned_source, source)
@unittest.skipIf(sys.dont_write_bytecode, "sys.dont_write_bytecode is true")
class SourceLoaderBytecodeTests(SourceLoaderTestHarness):
"""Test importlib.abc.SourceLoader's use of bytecode.
Source-only testing handled by SourceOnlyLoaderTests.
"""
loader_mock = SourceLoaderMock
def verify_code(self, code_object, *, bytecode_written=False):
super().verify_code(code_object)
if bytecode_written:
self.assertIn(self.cached, self.loader.written)
data = bytearray(imp.get_magic())
data.extend(marshal._w_long(self.loader.source_mtime))
data.extend(marshal.dumps(code_object))
self.assertEqual(self.loader.written[self.cached], bytes(data))
def test_code_with_everything(self):
# When everything should work.
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
def test_no_bytecode(self):
# If no bytecode exists then move on to the source.
self.loader.bytecode_path = "<does not exist>"
# Sanity check
with self.assertRaises(IOError):
bytecode_path = imp.cache_from_source(self.path)
self.loader.get_data(bytecode_path)
code_object = self.loader.get_code(self.name)
self.verify_code(code_object, bytecode_written=True)
def test_code_bad_timestamp(self):
# Bytecode is only used when the timestamp matches the source EXACTLY.
for source_mtime in (0, 2):
assert source_mtime != self.loader.source_mtime
original = self.loader.source_mtime
self.loader.source_mtime = source_mtime
# If bytecode is used then EOFError would be raised by marshal.
self.loader.bytecode = self.loader.bytecode[8:]
code_object = self.loader.get_code(self.name)
self.verify_code(code_object, bytecode_written=True)
self.loader.source_mtime = original
def test_code_bad_magic(self):
# Skip over bytecode with a bad magic number.
self.setUp(magic=b'0000')
# If bytecode is used then EOFError would be raised by marshal.
self.loader.bytecode = self.loader.bytecode[8:]
code_object = self.loader.get_code(self.name)
self.verify_code(code_object, bytecode_written=True)
def test_dont_write_bytecode(self):
# Bytecode is not written if sys.dont_write_bytecode is true.
# Can assume it is false already thanks to the skipIf class decorator.
try:
sys.dont_write_bytecode = True
self.loader.bytecode_path = "<does not exist>"
code_object = self.loader.get_code(self.name)
self.assertNotIn(self.cached, self.loader.written)
finally:
sys.dont_write_bytecode = False
def test_no_set_data(self):
# If set_data is not defined, one can still read bytecode.
self.setUp(magic=b'0000')
original_set_data = self.loader.__class__.set_data
try:
del self.loader.__class__.set_data
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
finally:
self.loader.__class__.set_data = original_set_data
def test_set_data_raises_exceptions(self):
# Raising NotImplementedError or IOError is okay for set_data.
def raise_exception(exc):
def closure(*args, **kwargs):
raise exc
return closure
self.setUp(magic=b'0000')
self.loader.set_data = raise_exception(NotImplementedError)
code_object = self.loader.get_code(self.name)
self.verify_code(code_object)
class SourceLoaderGetSourceTests(unittest.TestCase):
"""Tests for importlib.abc.SourceLoader.get_source()."""
def test_default_encoding(self):
# Should have no problems with UTF-8 text.
name = 'mod'
mock = SourceOnlyLoaderMock('mod.file')
source = 'x = "รผ"'
mock.source = source.encode('utf-8')
returned_source = mock.get_source(name)
self.assertEqual(returned_source, source)
def test_decoded_source(self):
# Decoding should work.
name = 'mod'
mock = SourceOnlyLoaderMock("mod.file")
source = "# coding: Latin-1\nx='รผ'"
assert source.encode('latin-1') != source.encode('utf-8')
mock.source = source.encode('latin-1')
returned_source = mock.get_source(name)
self.assertEqual(returned_source, source)
def test_universal_newlines(self):
# PEP 302 says universal newlines should be used.
name = 'mod'
mock = SourceOnlyLoaderMock('mod.file')
source = "x = 42\r\ny = -13\r\n"
mock.source = source.encode('utf-8')
expect = io.IncrementalNewlineDecoder(None, True).decode(source)
self.assertEqual(mock.get_source(name), expect)
class AbstractMethodImplTests(unittest.TestCase):
"""Test the concrete abstractmethod implementations."""
class Loader(abc.Loader):
def load_module(self, fullname):
super().load_module(fullname)
class Finder(abc.Finder):
def find_module(self, _):
super().find_module(_)
class ResourceLoader(Loader, abc.ResourceLoader):
def get_data(self, _):
super().get_data(_)
class InspectLoader(Loader, abc.InspectLoader):
def is_package(self, _):
super().is_package(_)
def get_code(self, _):
super().get_code(_)
def get_source(self, _):
super().get_source(_)
class ExecutionLoader(InspectLoader, abc.ExecutionLoader):
def get_filename(self, _):
super().get_filename(_)
class SourceLoader(ResourceLoader, ExecutionLoader, abc.SourceLoader):
pass
class PyLoader(ResourceLoader, InspectLoader, abc.PyLoader):
def source_path(self, _):
super().source_path(_)
class PyPycLoader(PyLoader, abc.PyPycLoader):
def bytecode_path(self, _):
super().bytecode_path(_)
def source_mtime(self, _):
super().source_mtime(_)
def write_bytecode(self, _, _2):
super().write_bytecode(_, _2)
def raises_NotImplementedError(self, ins, *args):
for method_name in args:
method = getattr(ins, method_name)
arg_count = len(inspect.getfullargspec(method)[0]) - 1
args = [''] * arg_count
try:
method(*args)
except NotImplementedError:
pass
else:
msg = "{}.{} did not raise NotImplementedError"
self.fail(msg.format(ins.__class__.__name__, method_name))
def test_Loader(self):
self.raises_NotImplementedError(self.Loader(), 'load_module')
# XXX misplaced; should be somewhere else
def test_Finder(self):
self.raises_NotImplementedError(self.Finder(), 'find_module')
def test_ResourceLoader(self):
self.raises_NotImplementedError(self.ResourceLoader(), 'load_module',
'get_data')
def test_InspectLoader(self):
self.raises_NotImplementedError(self.InspectLoader(), 'load_module',
'is_package', 'get_code', 'get_source')
def test_ExecutionLoader(self):
self.raises_NotImplementedError(self.ExecutionLoader(), 'load_module',
'is_package', 'get_code', 'get_source',
'get_filename')
def test_SourceLoader(self):
ins = self.SourceLoader()
# Required abstractmethods.
self.raises_NotImplementedError(ins, 'get_filename', 'get_data')
# Optional abstractmethods.
        self.raises_NotImplementedError(ins, 'path_mtime', 'set_data')
def test_PyLoader(self):
self.raises_NotImplementedError(self.PyLoader(), 'source_path',
'get_data', 'is_package')
def test_PyPycLoader(self):
self.raises_NotImplementedError(self.PyPycLoader(), 'source_path',
'source_mtime', 'bytecode_path',
'write_bytecode')
def test_main():
from test.support import run_unittest
run_unittest(PyLoaderTests, PyLoaderCompatTests,
PyLoaderInterfaceTests,
PyPycLoaderTests, PyPycLoaderInterfaceTests,
SkipWritingBytecodeTests, RegeneratedBytecodeTests,
BadBytecodeFailureTests, MissingPathsTests,
SourceOnlyLoaderTests,
SourceLoaderBytecodeTests,
SourceLoaderGetSourceTests,
AbstractMethodImplTests)
if __name__ == '__main__':
test_main()
| mit | 741,450,913,225,220,600 | 34.797945 | 79 | 0.598744 | false |
cccfran/sympy | sympy/polys/tests/test_modulargcd.py | 125 | 9007 | from sympy.polys.rings import ring
from sympy.polys.domains import ZZ, QQ, AlgebraicField
from sympy.polys.modulargcd import (
modgcd_univariate,
modgcd_bivariate,
_chinese_remainder_reconstruction_multivariate,
modgcd_multivariate,
_to_ZZ_poly,
_to_ANP_poly,
func_field_modgcd,
_func_field_modgcd_m)
from sympy import sqrt
def test_modgcd_univariate_integers():
R, x = ring("x", ZZ)
f, g = R.zero, R.zero
assert modgcd_univariate(f, g) == (0, 0, 0)
f, g = R.zero, x
assert modgcd_univariate(f, g) == (x, 0, 1)
assert modgcd_univariate(g, f) == (x, 1, 0)
f, g = R.zero, -x
assert modgcd_univariate(f, g) == (x, 0, -1)
assert modgcd_univariate(g, f) == (x, -1, 0)
f, g = 2*x, R(2)
assert modgcd_univariate(f, g) == (2, x, 1)
f, g = 2*x + 2, 6*x**2 - 6
assert modgcd_univariate(f, g) == (2*x + 2, 1, 3*x - 3)
f = x**4 + 8*x**3 + 21*x**2 + 22*x + 8
g = x**3 + 6*x**2 + 11*x + 6
h = x**2 + 3*x + 2
cff = x**2 + 5*x + 4
cfg = x + 3
assert modgcd_univariate(f, g) == (h, cff, cfg)
f = x**4 - 4
g = x**4 + 4*x**2 + 4
h = x**2 + 2
cff = x**2 - 2
cfg = x**2 + 2
assert modgcd_univariate(f, g) == (h, cff, cfg)
f = x**8 + x**6 - 3*x**4 - 3*x**3 + 8*x**2 + 2*x - 5
g = 3*x**6 + 5*x**4 - 4*x**2 - 9*x + 21
h = 1
cff = f
cfg = g
assert modgcd_univariate(f, g) == (h, cff, cfg)
f = - 352518131239247345597970242177235495263669787845475025293906825864749649589178600387510272*x**49 \
+ 46818041807522713962450042363465092040687472354933295397472942006618953623327997952*x**42 \
+ 378182690892293941192071663536490788434899030680411695933646320291525827756032*x**35 \
+ 112806468807371824947796775491032386836656074179286744191026149539708928*x**28 \
- 12278371209708240950316872681744825481125965781519138077173235712*x**21 \
+ 289127344604779611146960547954288113529690984687482920704*x**14 \
+ 19007977035740498977629742919480623972236450681*x**7 \
+ 311973482284542371301330321821976049
g = 365431878023781158602430064717380211405897160759702125019136*x**21 \
+ 197599133478719444145775798221171663643171734081650688*x**14 \
- 9504116979659010018253915765478924103928886144*x**7 \
- 311973482284542371301330321821976049
assert modgcd_univariate(f, f.diff(x))[0] == g
f = 1317378933230047068160*x + 2945748836994210856960
g = 120352542776360960*x + 269116466014453760
h = 120352542776360960*x + 269116466014453760
cff = 10946
cfg = 1
assert modgcd_univariate(f, g) == (h, cff, cfg)
def test_modgcd_bivariate_integers():
R, x, y = ring("x,y", ZZ)
f, g = R.zero, R.zero
assert modgcd_bivariate(f, g) == (0, 0, 0)
f, g = 2*x, R(2)
assert modgcd_bivariate(f, g) == (2, x, 1)
f, g = x + 2*y, x + y
assert modgcd_bivariate(f, g) == (1, f, g)
f, g = x**2 + 2*x*y + y**2, x**3 + y**3
assert modgcd_bivariate(f, g) == (x + y, x + y, x**2 - x*y + y**2)
f, g = x*y**2 + 2*x*y + x, x*y**3 + x
assert modgcd_bivariate(f, g) == (x*y + x, y + 1, y**2 - y + 1)
f, g = x**2*y**2 + x**2*y + 1, x*y**2 + x*y + 1
assert modgcd_bivariate(f, g) == (1, f, g)
f = 2*x*y**2 + 4*x*y + 2*x + y**2 + 2*y + 1
g = 2*x*y**3 + 2*x + y**3 + 1
assert modgcd_bivariate(f, g) == (2*x*y + 2*x + y + 1, y + 1, y**2 - y + 1)
f, g = 2*x**2 + 4*x + 2, x + 1
assert modgcd_bivariate(f, g) == (x + 1, 2*x + 2, 1)
f, g = x + 1, 2*x**2 + 4*x + 2
assert modgcd_bivariate(f, g) == (x + 1, 1, 2*x + 2)
f = 2*x**2 + 4*x*y - 2*x - 4*y
g = x**2 + x - 2
assert modgcd_bivariate(f, g) == (x - 1, 2*x + 4*y, x + 2)
f = 2*x**2 + 2*x*y - 3*x - 3*y
g = 4*x*y - 2*x + 4*y**2 - 2*y
assert modgcd_bivariate(f, g) == (x + y, 2*x - 3, 4*y - 2)
def test_chinese_remainder():
R, x, y = ring("x, y", ZZ)
p, q = 3, 5
hp = x**3*y - x**2 - 1
hq = -x**3*y - 2*x*y**2 + 2
hpq = _chinese_remainder_reconstruction_multivariate(hp, hq, p, q)
assert hpq.trunc_ground(p) == hp
assert hpq.trunc_ground(q) == hq
T, z = ring("z", R)
p, q = 3, 7
hp = (x*y + 1)*z**2 + x
hq = (x**2 - 3*y)*z + 2
hpq = _chinese_remainder_reconstruction_multivariate(hp, hq, p, q)
assert hpq.trunc_ground(p) == hp
assert hpq.trunc_ground(q) == hq
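# Background note: _chinese_remainder_reconstruction_multivariate lifts the
# two modular images into the unique polynomial modulo p*q (with symmetric
# coefficients), so truncating hpq by p or q recovers hp and hq -- which is
# exactly what the asserts above check.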
def test_modgcd_multivariate_integers():
R, x, y = ring("x,y", ZZ)
f, g = R.zero, R.zero
assert modgcd_multivariate(f, g) == (0, 0, 0)
f, g = 2*x**2 + 4*x + 2, x + 1
assert modgcd_multivariate(f, g) == (x + 1, 2*x + 2, 1)
f, g = x + 1, 2*x**2 + 4*x + 2
assert modgcd_multivariate(f, g) == (x + 1, 1, 2*x + 2)
f = 2*x**2 + 2*x*y - 3*x - 3*y
g = 4*x*y - 2*x + 4*y**2 - 2*y
assert modgcd_multivariate(f, g) == (x + y, 2*x - 3, 4*y - 2)
f, g = x*y**2 + 2*x*y + x, x*y**3 + x
assert modgcd_multivariate(f, g) == (x*y + x, y + 1, y**2 - y + 1)
f, g = x**2*y**2 + x**2*y + 1, x*y**2 + x*y + 1
assert modgcd_multivariate(f, g) == (1, f, g)
f = x**4 + 8*x**3 + 21*x**2 + 22*x + 8
g = x**3 + 6*x**2 + 11*x + 6
h = x**2 + 3*x + 2
cff = x**2 + 5*x + 4
cfg = x + 3
assert modgcd_multivariate(f, g) == (h, cff, cfg)
R, x, y, z, u = ring("x,y,z,u", ZZ)
f, g = x + y + z, -x - y - z - u
assert modgcd_multivariate(f, g) == (1, f, g)
f, g = u**2 + 2*u + 1, 2*u + 2
assert modgcd_multivariate(f, g) == (u + 1, u + 1, 2)
f, g = z**2*u**2 + 2*z**2*u + z**2 + z*u + z, u**2 + 2*u + 1
h, cff, cfg = u + 1, z**2*u + z**2 + z, u + 1
assert modgcd_multivariate(f, g) == (h, cff, cfg)
assert modgcd_multivariate(g, f) == (h, cfg, cff)
R, x, y, z = ring("x,y,z", ZZ)
f, g = x - y*z, x - y*z
assert modgcd_multivariate(f, g) == (x - y*z, 1, 1)
f, g, h = R.fateman_poly_F_1()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
R, x, y, z, u, v = ring("x,y,z,u,v", ZZ)
f, g, h = R.fateman_poly_F_1()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
R, x, y, z, u, v, a, b = ring("x,y,z,u,v,a,b", ZZ)
f, g, h = R.fateman_poly_F_1()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
R, x, y, z, u, v, a, b, c, d = ring("x,y,z,u,v,a,b,c,d", ZZ)
f, g, h = R.fateman_poly_F_1()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
R, x, y, z = ring("x,y,z", ZZ)
f, g, h = R.fateman_poly_F_2()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
f, g, h = R.fateman_poly_F_3()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
R, x, y, z, t = ring("x,y,z,t", ZZ)
f, g, h = R.fateman_poly_F_3()
H, cff, cfg = modgcd_multivariate(f, g)
assert H == h and H*cff == f and H*cfg == g
def test_to_ZZ_ANP_poly():
A = AlgebraicField(QQ, sqrt(2))
R, x = ring("x", A)
f = x*(sqrt(2) + 1)
T, x_, z_ = ring("x_, z_", ZZ)
f_ = x_*z_ + x_
assert _to_ZZ_poly(f, T) == f_
assert _to_ANP_poly(f_, R) == f
R, x, t, s = ring("x, t, s", A)
f = x*t**2 + x*s + sqrt(2)
D, t_, s_ = ring("t_, s_", ZZ)
T, x_, z_ = ring("x_, z_", D)
f_ = (t_**2 + s_)*x_ + z_
assert _to_ZZ_poly(f, T) == f_
assert _to_ANP_poly(f_, R) == f
def test_modgcd_algebraic_field():
A = AlgebraicField(QQ, sqrt(2))
R, x = ring("x", A)
one = A.one
f, g = 2*x, R(2)
assert func_field_modgcd(f, g) == (one, f, g)
f, g = 2*x, R(sqrt(2))
assert func_field_modgcd(f, g) == (one, f, g)
f, g = 2*x + 2, 6*x**2 - 6
assert func_field_modgcd(f, g) == (x + 1, R(2), 6*x - 6)
R, x, y = ring("x, y", A)
f, g = x + sqrt(2)*y, x + y
assert func_field_modgcd(f, g) == (one, f, g)
f, g = x*y + sqrt(2)*y**2, R(sqrt(2))*y
assert func_field_modgcd(f, g) == (y, x + sqrt(2)*y, R(sqrt(2)))
f, g = x**2 + 2*sqrt(2)*x*y + 2*y**2, x + sqrt(2)*y
assert func_field_modgcd(f, g) == (g, g, one)
A = AlgebraicField(QQ, sqrt(2), sqrt(3))
R, x, y, z = ring("x, y, z", A)
h = x**2*y**7 + sqrt(6)/21*z
f, g = h*(27*y**3 + 1), h*(y + x)
    assert func_field_modgcd(f, g) == (h, 27*y**3 + 1, y + x)
h = x**13*y**3 + 1/2*x**10 + 1/sqrt(2)
f, g = h*(x + 1), h*sqrt(2)/sqrt(3)
assert func_field_modgcd(f, g) == (h, x + 1, R(sqrt(2)/sqrt(3)))
A = AlgebraicField(QQ, sqrt(2)**(-1)*sqrt(3))
R, x = ring("x", A)
f, g = x + 1, x - 1
assert func_field_modgcd(f, g) == (A.one, f, g)
# when func_field_modgcd supports function fields, this test can be changed
def test_modgcd_func_field():
D, t = ring("t", ZZ)
R, x, z = ring("x, z", D)
minpoly = (z**2*t**2 + z**2*t - 1).drop(0)
f, g = x + 1, x - 1
assert _func_field_modgcd_m(f, g, minpoly) == R.one
| bsd-3-clause | 3,054,125,712,414,328,300 | 26.713846 | 108 | 0.512157 | false |
e-q/scipy | scipy/sparse/linalg/tests/test_pydata_sparse.py | 19 | 5954 | import pytest
import numpy as np
import scipy.sparse as sp
import scipy.sparse.linalg as splin
from numpy.testing import assert_allclose
try:
import sparse
except Exception:
sparse = None
pytestmark = pytest.mark.skipif(sparse is None,
reason="pydata/sparse not installed")
msg = "pydata/sparse (0.8) does not implement necessary operations"
sparse_params = [pytest.param("COO"),
pytest.param("DOK", marks=[pytest.mark.xfail(reason=msg)])]
scipy_sparse_classes = [
sp.bsr_matrix,
sp.csr_matrix,
sp.coo_matrix,
sp.csc_matrix,
sp.dia_matrix,
sp.dok_matrix
]
@pytest.fixture(params=sparse_params)
def sparse_cls(request):
return getattr(sparse, request.param)
@pytest.fixture(params=scipy_sparse_classes)
def sp_sparse_cls(request):
return request.param
@pytest.fixture
def same_matrix(sparse_cls, sp_sparse_cls):
np.random.seed(1234)
A_dense = np.random.rand(9, 9)
return sp_sparse_cls(A_dense), sparse_cls(A_dense)
@pytest.fixture
def matrices(sparse_cls):
np.random.seed(1234)
A_dense = np.random.rand(9, 9)
A_dense = A_dense @ A_dense.T
A_sparse = sparse_cls(A_dense)
b = np.random.rand(9)
return A_dense, A_sparse, b
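# Note: A_dense @ A_dense.T makes the test matrix symmetric positive
# (semi-)definite, which the eigsh/lobpcg-based tests below rely on.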
def test_isolve_gmres(matrices):
# Several of the iterative solvers use the same
# isolve.utils.make_system wrapper code, so test just one of them.
A_dense, A_sparse, b = matrices
x, info = splin.gmres(A_sparse, b, atol=1e-15)
assert info == 0
assert isinstance(x, np.ndarray)
assert_allclose(A_sparse @ x, b)
def test_lsmr(matrices):
A_dense, A_sparse, b = matrices
res0 = splin.lsmr(A_dense, b)
res = splin.lsmr(A_sparse, b)
assert_allclose(res[0], res0[0], atol=1.8e-5)
def test_lsqr(matrices):
A_dense, A_sparse, b = matrices
res0 = splin.lsqr(A_dense, b)
res = splin.lsqr(A_sparse, b)
assert_allclose(res[0], res0[0], atol=1e-5)
def test_eigs(matrices):
A_dense, A_sparse, v0 = matrices
M_dense = np.diag(v0**2)
M_sparse = A_sparse.__class__(M_dense)
w_dense, v_dense = splin.eigs(A_dense, k=3, v0=v0)
w, v = splin.eigs(A_sparse, k=3, v0=v0)
assert_allclose(w, w_dense)
assert_allclose(v, v_dense)
for M in [M_sparse, M_dense]:
w_dense, v_dense = splin.eigs(A_dense, M=M_dense, k=3, v0=v0)
w, v = splin.eigs(A_sparse, M=M, k=3, v0=v0)
assert_allclose(w, w_dense)
assert_allclose(v, v_dense)
w_dense, v_dense = splin.eigsh(A_dense, M=M_dense, k=3, v0=v0)
w, v = splin.eigsh(A_sparse, M=M, k=3, v0=v0)
assert_allclose(w, w_dense)
assert_allclose(v, v_dense)
def test_svds(matrices):
A_dense, A_sparse, v0 = matrices
u0, s0, vt0 = splin.svds(A_dense, k=2, v0=v0)
u, s, vt = splin.svds(A_sparse, k=2, v0=v0)
assert_allclose(s, s0)
assert_allclose(u, u0)
assert_allclose(vt, vt0)
def test_lobpcg(matrices):
A_dense, A_sparse, x = matrices
    X = x[:, None]
w_dense, v_dense = splin.lobpcg(A_dense, X)
w, v = splin.lobpcg(A_sparse, X)
assert_allclose(w, w_dense)
assert_allclose(v, v_dense)
def test_spsolve(matrices):
A_dense, A_sparse, b = matrices
b2 = np.random.rand(len(b), 3)
x0 = splin.spsolve(sp.csc_matrix(A_dense), b)
x = splin.spsolve(A_sparse, b)
assert isinstance(x, np.ndarray)
assert_allclose(x, x0)
x0 = splin.spsolve(sp.csc_matrix(A_dense), b)
x = splin.spsolve(A_sparse, b, use_umfpack=True)
assert isinstance(x, np.ndarray)
assert_allclose(x, x0)
x0 = splin.spsolve(sp.csc_matrix(A_dense), b2)
x = splin.spsolve(A_sparse, b2)
assert isinstance(x, np.ndarray)
assert_allclose(x, x0)
x0 = splin.spsolve(sp.csc_matrix(A_dense),
sp.csc_matrix(A_dense))
x = splin.spsolve(A_sparse, A_sparse)
assert isinstance(x, type(A_sparse))
assert_allclose(x.todense(), x0.todense())
def test_splu(matrices):
A_dense, A_sparse, b = matrices
n = len(b)
sparse_cls = type(A_sparse)
lu = splin.splu(A_sparse)
assert isinstance(lu.L, sparse_cls)
assert isinstance(lu.U, sparse_cls)
Pr = sparse_cls(sp.csc_matrix((np.ones(n), (lu.perm_r, np.arange(n)))))
Pc = sparse_cls(sp.csc_matrix((np.ones(n), (np.arange(n), lu.perm_c))))
A2 = Pr.T @ lu.L @ lu.U @ Pc.T
assert_allclose(A2.todense(), A_sparse.todense())
z = lu.solve(A_sparse.todense())
assert_allclose(z, np.eye(n), atol=1e-10)
def test_spilu(matrices):
A_dense, A_sparse, b = matrices
sparse_cls = type(A_sparse)
lu = splin.spilu(A_sparse)
assert isinstance(lu.L, sparse_cls)
assert isinstance(lu.U, sparse_cls)
z = lu.solve(A_sparse.todense())
assert_allclose(z, np.eye(len(b)), atol=1e-3)
def test_spsolve_triangular(matrices):
A_dense, A_sparse, b = matrices
A_sparse = sparse.tril(A_sparse)
x = splin.spsolve_triangular(A_sparse, b)
assert_allclose(A_sparse @ x, b)
def test_onenormest(matrices):
A_dense, A_sparse, b = matrices
est0 = splin.onenormest(A_dense)
est = splin.onenormest(A_sparse)
assert_allclose(est, est0)
def test_inv(matrices):
A_dense, A_sparse, b = matrices
x0 = splin.inv(sp.csc_matrix(A_dense))
x = splin.inv(A_sparse)
assert_allclose(x.todense(), x0.todense())
def test_expm(matrices):
A_dense, A_sparse, b = matrices
x0 = splin.expm(sp.csc_matrix(A_dense))
x = splin.expm(A_sparse)
assert_allclose(x.todense(), x0.todense())
def test_expm_multiply(matrices):
A_dense, A_sparse, b = matrices
x0 = splin.expm_multiply(A_dense, b)
x = splin.expm_multiply(A_sparse, b)
assert_allclose(x, x0)
def test_eq(same_matrix):
sp_sparse, pd_sparse = same_matrix
assert (sp_sparse == pd_sparse).all()
def test_ne(same_matrix):
sp_sparse, pd_sparse = same_matrix
assert not (sp_sparse != pd_sparse).any()
| bsd-3-clause | 6,461,715,653,502,411,000 | 24.33617 | 76 | 0.633692 | false |
bright-sparks/chromium-spacewalk | chrome/browser/metrics/variations/generate_resources_map_unittest.py | 16 | 3094 | #!/usr/bin/python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for generate_resources_map.py"""
import unittest
import generate_resources_map
class GenerateResourcesMapUnittest(unittest.TestCase):
TEST_INPUT = """
// This file is automatically generated by GRIT. Do not edit.
#pragma once
#define IDS_BOOKMARKS_NO_ITEMS 12500
#define IDS_BOOKMARK_BAR_IMPORT_LINK 12501
#define IDS_BOOKMARK_GROUP_FROM_IE 12502
#define IDS_BOOKMARK_GROUP_FROM_FIREFOX 12503
"""
def testGetResourceListFromString(self):
expected_tuples = [(301430091, "IDS_BOOKMARKS_NO_ITEMS", "12500"),
(2654138887, "IDS_BOOKMARK_BAR_IMPORT_LINK", "12501"),
(2894469061, "IDS_BOOKMARK_GROUP_FROM_IE", "12502"),
(3847176170, "IDS_BOOKMARK_GROUP_FROM_FIREFOX", "12503")]
expected = [generate_resources_map.Resource(*t) for t in expected_tuples]
actual_tuples = generate_resources_map._GetResourceListFromString(
self.TEST_INPUT)
    self.assertEqual(expected, actual_tuples)
def testCheckForHashCollisions(self):
collisions_tuples = [(123, "IDS_FOO", "12500"),
(456, "IDS_BAR", "12501"),
(456, "IDS_BAZ", "12502"),
(890, "IDS_QUX", "12503"),
(899, "IDS_NO", "12504"),
(899, "IDS_YES", "12505")]
list_with_collisions = [generate_resources_map.Resource(*t)
for t in collisions_tuples]
expected_collision_tuples = [(456, "IDS_BAR", "12501"),
(456, "IDS_BAZ", "12502"),
(899, "IDS_NO", "12504"),
(899, "IDS_YES", "12505")]
expected_collisions = [generate_resources_map.Resource(*t)
for t in expected_collision_tuples]
actual_collisions = sorted(
generate_resources_map._CheckForHashCollisions(list_with_collisions))
self.assertEqual(expected_collisions, actual_collisions)
def testGenerateFileContent(self):
expected = (
"""// This file was generated by generate_resources_map.py. Do not edit.
#include "chrome/browser/metrics/variations/generated_resources_map.h"
namespace chrome_variations {
const uint32_t kResourceHashes[] = {
301430091U, // IDS_BOOKMARKS_NO_ITEMS
2654138887U, // IDS_BOOKMARK_BAR_IMPORT_LINK
2894469061U, // IDS_BOOKMARK_GROUP_FROM_IE
3847176170U, // IDS_BOOKMARK_GROUP_FROM_FIREFOX
};
const int kResourceIndices[] = {
12500, // IDS_BOOKMARKS_NO_ITEMS
12501, // IDS_BOOKMARK_BAR_IMPORT_LINK
12502, // IDS_BOOKMARK_GROUP_FROM_IE
12503, // IDS_BOOKMARK_GROUP_FROM_FIREFOX
};
} // namespace chrome_variations
""")
actual = generate_resources_map._GenerateFileContent(self.TEST_INPUT)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -721,776,451,401,817,700 | 33 | 80 | 0.62702 | false |
deshipu/micropython | tests/basics/int1.py | 46 | 1581 | print(int(False))
print(int(True))
print(int(0))
print(int(1))
print(int(+1))
print(int(-1))
print(int('0'))
print(int('+0'))
print(int('-0'))
print(int('1'))
print(int('+1'))
print(int('-1'))
print(int('01'))
print(int('9'))
print(int('10'))
print(int('+10'))
print(int('-10'))
print(int('12'))
print(int('-12'))
print(int('99'))
print(int('100'))
print(int('314'))
print(int(' 314'))
print(int('314 '))
print(int(' \t\t 314 \t\t '))
print(int(' 1 '))
print(int(' -3 '))
print(int('0', 10))
print(int('1', 10))
print(int(' \t 1 \t ', 10))
print(int('11', 10))
print(int('11', 16))
print(int('11', 8))
print(int('11', 2))
print(int('11', 36))
print(int('xyz', 36))
print(int('0o123', 0))
print(int('8388607'))
print(int('0x123', 16))
print(int('0X123', 16))
print(int('0A', 16))
print(int('0o123', 8))
print(int('0O123', 8))
print(int('0123', 8))
print(int('0b100', 2))
print(int('0B100', 2))
print(int('0100', 2))
print(int(' \t 0o12', 8))
print(int('0o12 \t ', 8))
print(int(b"12", 10))
print(int(b"12"))
def test(value, base):
try:
print(int(value, base))
except ValueError:
print('ValueError')
test('x', 0)
test('1x', 0)
test(' 1x', 0)
test(' 1' + chr(2) + ' ', 0)
test('', 0)
test(' ', 0)
test(' \t\t ', 0)
test('0x', 16)
test('0x', 0)
test('0o', 8)
test('0o', 0)
test('0b', 2)
test('0b', 0)
test('0b2', 2)
test('0o8', 8)
test('0xg', 16)
test('1 1', 16)
test('123', 37)
# check that we don't parse this as a floating point number
print(0x1e+1)
# can't convert list to int
try:
int([])
except TypeError:
print("TypeError")
| mit | -1,526,860,424,142,122,800 | 16.566667 | 59 | 0.5642 | false |
jayme-github/CouchPotatoServer | libs/enzyme/real.py | 180 | 4547 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <[email protected]>
# Copyright 2003-2006 Thomas Schueppel <[email protected]>
# Copyright 2003-2006 Dirk Meyer <[email protected]>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
__all__ = ['Parser']
import struct
import logging
from exceptions import ParseError
import core
# http://www.pcisys.net/~melanson/codecs/rmff.htm
# http://www.pcisys.net/~melanson/codecs/
# get logging object
log = logging.getLogger(__name__)
class RealVideo(core.AVContainer):
def __init__(self, file):
core.AVContainer.__init__(self)
self.mime = 'video/real'
self.type = 'Real Video'
h = file.read(10)
try:
(object_id, object_size, object_version) = struct.unpack('>4sIH', h)
except struct.error:
# EOF.
raise ParseError()
if not object_id == '.RMF':
raise ParseError()
file_version, num_headers = struct.unpack('>II', file.read(8))
log.debug(u'size: %d, ver: %d, headers: %d' % \
(object_size, file_version, num_headers))
for _ in range(0, num_headers):
try:
oi = struct.unpack('>4sIH', file.read(10))
except (struct.error, IOError):
# Header data we expected wasn't there. File may be
# only partially complete.
break
if object_id == 'DATA' and oi[0] != 'INDX':
log.debug(u'INDX chunk expected after DATA but not found -- file corrupt')
break
(object_id, object_size, object_version) = oi
if object_id == 'DATA':
# Seek over the data chunk rather than reading it in.
file.seek(object_size - 10, 1)
else:
self._read_header(object_id, file.read(object_size - 10))
log.debug(u'%r [%d]' % (object_id, object_size - 10))
# Read all the following headers
def _read_header(self, object_id, s):
if object_id == 'PROP':
prop = struct.unpack('>9IHH', s)
log.debug(u'PROP: %r' % prop)
if object_id == 'MDPR':
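            # MDPR layout (per the RMFF spec linked above): stream_number
            # (uint16) followed by seven uint32 fields; mdpr[2] is the
            # average bit rate and mdpr[7] the duration in milliseconds.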
mdpr = struct.unpack('>H7I', s[:30])
log.debug(u'MDPR: %r' % mdpr)
self.length = mdpr[7] / 1000.0
(stream_name_size,) = struct.unpack('>B', s[30:31])
stream_name = s[31:31 + stream_name_size]
pos = 31 + stream_name_size
(mime_type_size,) = struct.unpack('>B', s[pos:pos + 1])
mime = s[pos + 1:pos + 1 + mime_type_size]
pos += mime_type_size + 1
(type_specific_len,) = struct.unpack('>I', s[pos:pos + 4])
type_specific = s[pos + 4:pos + 4 + type_specific_len]
pos += 4 + type_specific_len
if mime[:5] == 'audio':
ai = core.AudioStream()
ai.id = mdpr[0]
ai.bitrate = mdpr[2]
self.audio.append(ai)
elif mime[:5] == 'video':
vi = core.VideoStream()
vi.id = mdpr[0]
vi.bitrate = mdpr[2]
self.video.append(vi)
else:
log.debug(u'Unknown: %r' % mime)
if object_id == 'CONT':
pos = 0
(title_len,) = struct.unpack('>H', s[pos:pos + 2])
self.title = s[2:title_len + 2]
pos += title_len + 2
(author_len,) = struct.unpack('>H', s[pos:pos + 2])
self.artist = s[pos + 2:pos + author_len + 2]
pos += author_len + 2
(copyright_len,) = struct.unpack('>H', s[pos:pos + 2])
self.copyright = s[pos + 2:pos + copyright_len + 2]
pos += copyright_len + 2
(comment_len,) = struct.unpack('>H', s[pos:pos + 2])
self.comment = s[pos + 2:pos + comment_len + 2]
Parser = RealVideo
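# Illustrative usage sketch (assumption -- 'clip.rm' is a hypothetical path;
# enzyme normally reaches this class through its top-level parse dispatcher):
#
#   with open('clip.rm', 'rb') as f:
#       info = Parser(f)
#       print(info.length, [v.bitrate for v in info.video])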
| gpl-3.0 | 1,306,237,727,436,258,000 | 37.533898 | 90 | 0.545415 | false |
pwoodworth/intellij-community | python/helpers/docutils/parsers/rst/directives/html.py | 61 | 3223 | # $Id: html.py 4667 2006-07-12 21:40:56Z wiemann $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Directives for typically HTML-specific constructs.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import nodes, utils
from docutils.parsers.rst import Directive
from docutils.parsers.rst import states
from docutils.transforms import components
class MetaBody(states.SpecializedBody):
class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
"""HTML-specific "meta" element."""
pass
def field_marker(self, match, context, next_state):
"""Meta element."""
node, blank_finish = self.parsemeta(match)
self.parent += node
return [], next_state, []
def parsemeta(self, match):
name = self.parse_field_marker(match)
indented, indent, line_offset, blank_finish = \
self.state_machine.get_first_known_indented(match.end())
node = self.meta()
pending = nodes.pending(components.Filter,
{'component': 'writer',
'format': 'html',
'nodes': [node]})
node['content'] = ' '.join(indented)
if not indented:
line = self.state_machine.line
msg = self.reporter.info(
'No content for meta tag "%s".' % name,
nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
tokens = name.split()
try:
attname, val = utils.extract_name_value(tokens[0])[0]
node[attname.lower()] = val
except utils.NameValueError:
node['name'] = tokens[0]
for token in tokens[1:]:
try:
attname, val = utils.extract_name_value(token)[0]
node[attname.lower()] = val
            except utils.NameValueError as detail:
line = self.state_machine.line
msg = self.reporter.error(
'Error parsing meta tag attribute "%s": %s.'
% (token, detail), nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
self.document.note_pending(pending)
return pending, blank_finish
class Meta(Directive):
has_content = True
SMkwargs = {'state_classes': (MetaBody,)}
def run(self):
self.assert_has_content()
node = nodes.Element()
new_line_offset, blank_finish = self.state.nested_list_parse(
self.content, self.content_offset, node,
initial_state='MetaBody', blank_finish=1,
state_machine_kwargs=self.SMkwargs)
if (new_line_offset - self.content_offset) != len(self.content):
# incomplete parse of block?
error = self.state_machine.reporter.error(
'Invalid meta directive.',
nodes.literal_block(self.block_text, self.block_text),
line=self.lineno)
node += error
return node.children
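# Example of the reStructuredText input this directive consumes (sketch):
#
#   .. meta::
#      :description: The reStructuredText plaintext markup language
#      :keywords: plaintext, markup language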
| apache-2.0 | -8,302,856,566,071,981,000 | 35.625 | 73 | 0.572448 | false |
cbertinato/pandas | pandas/tests/frame/test_axis_select_reindex.py | 1 | 44030 | from datetime import datetime
import numpy as np
import pytest
from pandas.errors import PerformanceWarning
import pandas as pd
from pandas import (
Categorical, DataFrame, Index, MultiIndex, Series, date_range, isna)
from pandas.tests.frame.common import TestData
import pandas.util.testing as tm
from pandas.util.testing import assert_frame_equal
class TestDataFrameSelectReindex(TestData):
# These are specific reindex-based tests; other indexing tests should go in
# test_indexing
def test_drop_names(self):
df = DataFrame([[1, 2, 3], [3, 4, 5], [5, 6, 7]],
index=['a', 'b', 'c'],
columns=['d', 'e', 'f'])
df.index.name, df.columns.name = 'first', 'second'
df_dropped_b = df.drop('b')
df_dropped_e = df.drop('e', axis=1)
df_inplace_b, df_inplace_e = df.copy(), df.copy()
df_inplace_b.drop('b', inplace=True)
df_inplace_e.drop('e', axis=1, inplace=True)
for obj in (df_dropped_b, df_dropped_e, df_inplace_b, df_inplace_e):
assert obj.index.name == 'first'
assert obj.columns.name == 'second'
assert list(df.columns) == ['d', 'e', 'f']
msg = r"\['g'\] not found in axis"
with pytest.raises(KeyError, match=msg):
df.drop(['g'])
with pytest.raises(KeyError, match=msg):
df.drop(['g'], 1)
# errors = 'ignore'
dropped = df.drop(['g'], errors='ignore')
expected = Index(['a', 'b', 'c'], name='first')
tm.assert_index_equal(dropped.index, expected)
dropped = df.drop(['b', 'g'], errors='ignore')
expected = Index(['a', 'c'], name='first')
tm.assert_index_equal(dropped.index, expected)
dropped = df.drop(['g'], axis=1, errors='ignore')
expected = Index(['d', 'e', 'f'], name='second')
tm.assert_index_equal(dropped.columns, expected)
dropped = df.drop(['d', 'g'], axis=1, errors='ignore')
expected = Index(['e', 'f'], name='second')
tm.assert_index_equal(dropped.columns, expected)
# GH 16398
dropped = df.drop([], errors='ignore')
expected = Index(['a', 'b', 'c'], name='first')
tm.assert_index_equal(dropped.index, expected)
def test_drop_col_still_multiindex(self):
arrays = [['a', 'b', 'c', 'top'],
['', '', '', 'OD'],
['', '', '', 'wx']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(np.random.randn(3, 4), columns=index)
del df[('a', '', '')]
        assert isinstance(df.columns, MultiIndex)
def test_drop(self):
simple = DataFrame({"A": [1, 2, 3, 4], "B": [0, 1, 2, 3]})
assert_frame_equal(simple.drop("A", axis=1), simple[['B']])
assert_frame_equal(simple.drop(["A", "B"], axis='columns'),
simple[[]])
assert_frame_equal(simple.drop([0, 1, 3], axis=0), simple.loc[[2], :])
assert_frame_equal(simple.drop(
[0, 3], axis='index'), simple.loc[[1, 2], :])
with pytest.raises(KeyError, match=r"\[5\] not found in axis"):
simple.drop(5)
with pytest.raises(KeyError, match=r"\['C'\] not found in axis"):
simple.drop('C', 1)
with pytest.raises(KeyError, match=r"\[5\] not found in axis"):
simple.drop([1, 5])
with pytest.raises(KeyError, match=r"\['C'\] not found in axis"):
simple.drop(['A', 'C'], 1)
# errors = 'ignore'
assert_frame_equal(simple.drop(5, errors='ignore'), simple)
assert_frame_equal(simple.drop([0, 5], errors='ignore'),
simple.loc[[1, 2, 3], :])
assert_frame_equal(simple.drop('C', axis=1, errors='ignore'), simple)
assert_frame_equal(simple.drop(['A', 'C'], axis=1, errors='ignore'),
simple[['B']])
# non-unique - wheee!
nu_df = DataFrame(list(zip(range(3), range(-3, 1), list('abc'))),
columns=['a', 'a', 'b'])
assert_frame_equal(nu_df.drop('a', axis=1), nu_df[['b']])
assert_frame_equal(nu_df.drop('b', axis='columns'), nu_df['a'])
assert_frame_equal(nu_df.drop([]), nu_df) # GH 16398
nu_df = nu_df.set_index(pd.Index(['X', 'Y', 'X']))
nu_df.columns = list('abc')
assert_frame_equal(nu_df.drop('X', axis='rows'), nu_df.loc[["Y"], :])
assert_frame_equal(nu_df.drop(['X', 'Y'], axis=0), nu_df.loc[[], :])
# inplace cache issue
# GH 5628
df = pd.DataFrame(np.random.randn(10, 3), columns=list('abc'))
expected = df[~(df.b > 0)]
df.drop(labels=df[df.b > 0].index, inplace=True)
assert_frame_equal(df, expected)
def test_drop_multiindex_not_lexsorted(self):
# GH 11640
# define the lexsorted version
lexsorted_mi = MultiIndex.from_tuples(
[('a', ''), ('b1', 'c1'), ('b2', 'c2')], names=['b', 'c'])
lexsorted_df = DataFrame([[1, 3, 4]], columns=lexsorted_mi)
assert lexsorted_df.columns.is_lexsorted()
# define the non-lexsorted version
not_lexsorted_df = DataFrame(columns=['a', 'b', 'c', 'd'],
data=[[1, 'b1', 'c1', 3],
[1, 'b2', 'c2', 4]])
not_lexsorted_df = not_lexsorted_df.pivot_table(
index='a', columns=['b', 'c'], values='d')
not_lexsorted_df = not_lexsorted_df.reset_index()
assert not not_lexsorted_df.columns.is_lexsorted()
# compare the results
tm.assert_frame_equal(lexsorted_df, not_lexsorted_df)
expected = lexsorted_df.drop('a', axis=1)
with tm.assert_produces_warning(PerformanceWarning):
result = not_lexsorted_df.drop('a', axis=1)
tm.assert_frame_equal(result, expected)
def test_drop_api_equivalence(self):
# equivalence of the labels/axis and index/columns API's (GH12392)
df = DataFrame([[1, 2, 3], [3, 4, 5], [5, 6, 7]],
index=['a', 'b', 'c'],
columns=['d', 'e', 'f'])
res1 = df.drop('a')
res2 = df.drop(index='a')
tm.assert_frame_equal(res1, res2)
res1 = df.drop('d', 1)
res2 = df.drop(columns='d')
tm.assert_frame_equal(res1, res2)
res1 = df.drop(labels='e', axis=1)
res2 = df.drop(columns='e')
tm.assert_frame_equal(res1, res2)
res1 = df.drop(['a'], axis=0)
res2 = df.drop(index=['a'])
tm.assert_frame_equal(res1, res2)
res1 = df.drop(['a'], axis=0).drop(['d'], axis=1)
res2 = df.drop(index=['a'], columns=['d'])
tm.assert_frame_equal(res1, res2)
with pytest.raises(ValueError):
df.drop(labels='a', index='b')
with pytest.raises(ValueError):
df.drop(labels='a', columns='b')
with pytest.raises(ValueError):
df.drop(axis=1)
def test_merge_join_different_levels(self):
# GH 9455
# first dataframe
df1 = DataFrame(columns=['a', 'b'], data=[[1, 11], [0, 22]])
# second dataframe
columns = MultiIndex.from_tuples([('a', ''), ('c', 'c1')])
df2 = DataFrame(columns=columns, data=[[1, 33], [0, 44]])
# merge
columns = ['a', 'b', ('c', 'c1')]
expected = DataFrame(columns=columns, data=[[1, 11, 33], [0, 22, 44]])
with tm.assert_produces_warning(UserWarning):
result = pd.merge(df1, df2, on='a')
tm.assert_frame_equal(result, expected)
# join, see discussion in GH 12219
columns = ['a', 'b', ('a', ''), ('c', 'c1')]
expected = DataFrame(columns=columns,
data=[[1, 11, 0, 44], [0, 22, 1, 33]])
with tm.assert_produces_warning(UserWarning):
result = df1.join(df2, on='a')
tm.assert_frame_equal(result, expected)
def test_reindex(self):
newFrame = self.frame.reindex(self.ts1.index)
for col in newFrame.columns:
for idx, val in newFrame[col].items():
if idx in self.frame.index:
if np.isnan(val):
assert np.isnan(self.frame[col][idx])
else:
assert val == self.frame[col][idx]
else:
assert np.isnan(val)
for col, series in newFrame.items():
assert tm.equalContents(series.index, newFrame.index)
emptyFrame = self.frame.reindex(Index([]))
assert len(emptyFrame.index) == 0
# Cython code should be unit-tested directly
nonContigFrame = self.frame.reindex(self.ts1.index[::2])
for col in nonContigFrame.columns:
for idx, val in nonContigFrame[col].items():
if idx in self.frame.index:
if np.isnan(val):
assert np.isnan(self.frame[col][idx])
else:
assert val == self.frame[col][idx]
else:
assert np.isnan(val)
for col, series in nonContigFrame.items():
assert tm.equalContents(series.index, nonContigFrame.index)
# corner cases
# Same index, copies values but not index if copy=False
newFrame = self.frame.reindex(self.frame.index, copy=False)
assert newFrame.index is self.frame.index
# length zero
newFrame = self.frame.reindex([])
assert newFrame.empty
assert len(newFrame.columns) == len(self.frame.columns)
# length zero with columns reindexed with non-empty index
newFrame = self.frame.reindex([])
newFrame = newFrame.reindex(self.frame.index)
assert len(newFrame.index) == len(self.frame.index)
assert len(newFrame.columns) == len(self.frame.columns)
# pass non-Index
newFrame = self.frame.reindex(list(self.ts1.index))
tm.assert_index_equal(newFrame.index, self.ts1.index)
# copy with no axes
result = self.frame.reindex()
assert_frame_equal(result, self.frame)
assert result is not self.frame
def test_reindex_nan(self):
df = pd.DataFrame([[1, 2], [3, 5], [7, 11], [9, 23]],
index=[2, np.nan, 1, 5],
columns=['joe', 'jim'])
i, j = [np.nan, 5, 5, np.nan, 1, 2, np.nan], [1, 3, 3, 1, 2, 0, 1]
assert_frame_equal(df.reindex(i), df.iloc[j])
df.index = df.index.astype('object')
assert_frame_equal(df.reindex(i), df.iloc[j], check_index_type=False)
# GH10388
df = pd.DataFrame({'other': ['a', 'b', np.nan, 'c'],
'date': ['2015-03-22', np.nan,
'2012-01-08', np.nan],
'amount': [2, 3, 4, 5]})
df['date'] = pd.to_datetime(df.date)
df['delta'] = (pd.to_datetime('2015-06-18') - df['date']).shift(1)
left = df.set_index(['delta', 'other', 'date']).reset_index()
right = df.reindex(columns=['delta', 'other', 'date', 'amount'])
assert_frame_equal(left, right)
def test_reindex_name_remains(self):
s = Series(np.random.rand(10))
df = DataFrame(s, index=np.arange(len(s)))
i = Series(np.arange(10), name='iname')
df = df.reindex(i)
assert df.index.name == 'iname'
df = df.reindex(Index(np.arange(10), name='tmpname'))
assert df.index.name == 'tmpname'
s = Series(np.random.rand(10))
df = DataFrame(s.T, index=np.arange(len(s)))
i = Series(np.arange(10), name='iname')
df = df.reindex(columns=i)
assert df.columns.name == 'iname'
def test_reindex_int(self):
smaller = self.intframe.reindex(self.intframe.index[::2])
assert smaller['A'].dtype == np.int64
bigger = smaller.reindex(self.intframe.index)
assert bigger['A'].dtype == np.float64
smaller = self.intframe.reindex(columns=['A', 'B'])
assert smaller['A'].dtype == np.int64
def test_reindex_like(self):
other = self.frame.reindex(index=self.frame.index[:10],
columns=['C', 'B'])
assert_frame_equal(other, self.frame.reindex_like(other))
def test_reindex_columns(self):
new_frame = self.frame.reindex(columns=['A', 'B', 'E'])
tm.assert_series_equal(new_frame['B'], self.frame['B'])
assert np.isnan(new_frame['E']).all()
assert 'C' not in new_frame
# Length zero
new_frame = self.frame.reindex(columns=[])
assert new_frame.empty
def test_reindex_columns_method(self):
# GH 14992, reindexing over columns ignored method
df = DataFrame(data=[[11, 12, 13], [21, 22, 23], [31, 32, 33]],
index=[1, 2, 4],
columns=[1, 2, 4],
dtype=float)
# default method
result = df.reindex(columns=range(6))
expected = DataFrame(data=[[np.nan, 11, 12, np.nan, 13, np.nan],
[np.nan, 21, 22, np.nan, 23, np.nan],
[np.nan, 31, 32, np.nan, 33, np.nan]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
# method='ffill'
result = df.reindex(columns=range(6), method='ffill')
expected = DataFrame(data=[[np.nan, 11, 12, 12, 13, 13],
[np.nan, 21, 22, 22, 23, 23],
[np.nan, 31, 32, 32, 33, 33]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
# method='bfill'
result = df.reindex(columns=range(6), method='bfill')
expected = DataFrame(data=[[11, 11, 12, 13, 13, np.nan],
[21, 21, 22, 23, 23, np.nan],
[31, 31, 32, 33, 33, np.nan]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
def test_reindex_axes(self):
# GH 3317, reindexing by both axes loses freq of the index
df = DataFrame(np.ones((3, 3)),
index=[datetime(2012, 1, 1),
datetime(2012, 1, 2),
datetime(2012, 1, 3)],
columns=['a', 'b', 'c'])
time_freq = date_range('2012-01-01', '2012-01-03', freq='d')
some_cols = ['a', 'b']
index_freq = df.reindex(index=time_freq).index.freq
both_freq = df.reindex(index=time_freq, columns=some_cols).index.freq
seq_freq = df.reindex(index=time_freq).reindex(
columns=some_cols).index.freq
assert index_freq == both_freq
assert index_freq == seq_freq
def test_reindex_fill_value(self):
df = DataFrame(np.random.randn(10, 4))
# axis=0
result = df.reindex(list(range(15)))
assert np.isnan(result.values[-5:]).all()
result = df.reindex(range(15), fill_value=0)
expected = df.reindex(range(15)).fillna(0)
assert_frame_equal(result, expected)
# axis=1
result = df.reindex(columns=range(5), fill_value=0.)
expected = df.copy()
expected[4] = 0.
assert_frame_equal(result, expected)
result = df.reindex(columns=range(5), fill_value=0)
expected = df.copy()
expected[4] = 0
assert_frame_equal(result, expected)
result = df.reindex(columns=range(5), fill_value='foo')
expected = df.copy()
expected[4] = 'foo'
assert_frame_equal(result, expected)
# reindex_axis
with tm.assert_produces_warning(FutureWarning):
result = df.reindex_axis(range(15), fill_value=0., axis=0)
expected = df.reindex(range(15)).fillna(0)
assert_frame_equal(result, expected)
with tm.assert_produces_warning(FutureWarning):
result = df.reindex_axis(range(5), fill_value=0., axis=1)
expected = df.reindex(columns=range(5)).fillna(0)
assert_frame_equal(result, expected)
# other dtypes
df['foo'] = 'foo'
result = df.reindex(range(15), fill_value=0)
expected = df.reindex(range(15)).fillna(0)
assert_frame_equal(result, expected)
def test_reindex_dups(self):
# GH4746, reindex on duplicate index error messages
arr = np.random.randn(10)
df = DataFrame(arr, index=[1, 2, 3, 4, 5, 1, 2, 3, 4, 5])
# set index is ok
result = df.copy()
result.index = list(range(len(df)))
expected = DataFrame(arr, index=list(range(len(df))))
assert_frame_equal(result, expected)
# reindex fails
msg = "cannot reindex from a duplicate axis"
with pytest.raises(ValueError, match=msg):
df.reindex(index=list(range(len(df))))
def test_reindex_axis_style(self):
# https://github.com/pandas-dev/pandas/issues/12392
df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
expected = pd.DataFrame({"A": [1, 2, np.nan], "B": [4, 5, np.nan]},
index=[0, 1, 3])
result = df.reindex([0, 1, 3])
assert_frame_equal(result, expected)
result = df.reindex([0, 1, 3], axis=0)
assert_frame_equal(result, expected)
result = df.reindex([0, 1, 3], axis='index')
assert_frame_equal(result, expected)
def test_reindex_positional_warns(self):
# https://github.com/pandas-dev/pandas/issues/12392
df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
expected = pd.DataFrame({"A": [1., 2], 'B': [4., 5],
"C": [np.nan, np.nan]})
with tm.assert_produces_warning(FutureWarning):
result = df.reindex([0, 1], ['A', 'B', 'C'])
assert_frame_equal(result, expected)
def test_reindex_axis_style_raises(self):
# https://github.com/pandas-dev/pandas/issues/12392
df = pd.DataFrame({"A": [1, 2, 3], 'B': [4, 5, 6]})
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex([0, 1], ['A'], axis=1)
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex([0, 1], ['A'], axis='index')
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(index=[0, 1], axis='index')
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(index=[0, 1], axis='columns')
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(columns=[0, 1], axis='columns')
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(index=[0, 1], columns=[0, 1], axis='columns')
with pytest.raises(TypeError, match='Cannot specify all'):
df.reindex([0, 1], [0], ['A'])
# Mixing styles
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(index=[0, 1], axis='index')
with pytest.raises(TypeError, match="Cannot specify both 'axis'"):
df.reindex(index=[0, 1], axis='columns')
# Duplicates
with pytest.raises(TypeError, match="multiple values"):
df.reindex([0, 1], labels=[0, 1])
def test_reindex_single_named_indexer(self):
# https://github.com/pandas-dev/pandas/issues/12392
df = pd.DataFrame({"A": [1, 2, 3], "B": [1, 2, 3]})
result = df.reindex([0, 1], columns=['A'])
expected = pd.DataFrame({"A": [1, 2]})
assert_frame_equal(result, expected)
def test_reindex_api_equivalence(self):
# https://github.com/pandas-dev/pandas/issues/12392
# equivalence of the labels/axis and index/columns API's
df = DataFrame([[1, 2, 3], [3, 4, 5], [5, 6, 7]],
index=['a', 'b', 'c'],
columns=['d', 'e', 'f'])
res1 = df.reindex(['b', 'a'])
res2 = df.reindex(index=['b', 'a'])
res3 = df.reindex(labels=['b', 'a'])
res4 = df.reindex(labels=['b', 'a'], axis=0)
res5 = df.reindex(['b', 'a'], axis=0)
for res in [res2, res3, res4, res5]:
tm.assert_frame_equal(res1, res)
res1 = df.reindex(columns=['e', 'd'])
res2 = df.reindex(['e', 'd'], axis=1)
res3 = df.reindex(labels=['e', 'd'], axis=1)
for res in [res2, res3]:
tm.assert_frame_equal(res1, res)
with tm.assert_produces_warning(FutureWarning) as m:
res1 = df.reindex(['b', 'a'], ['e', 'd'])
assert 'reindex' in str(m[0].message)
res2 = df.reindex(columns=['e', 'd'], index=['b', 'a'])
res3 = df.reindex(labels=['b', 'a'], axis=0).reindex(labels=['e', 'd'],
axis=1)
for res in [res2, res3]:
tm.assert_frame_equal(res1, res)
def test_align(self):
af, bf = self.frame.align(self.frame)
assert af._data is not self.frame._data
af, bf = self.frame.align(self.frame, copy=False)
assert af._data is self.frame._data
# axis = 0
other = self.frame.iloc[:-5, :3]
af, bf = self.frame.align(other, axis=0, fill_value=-1)
tm.assert_index_equal(bf.columns, other.columns)
# test fill value
join_idx = self.frame.index.join(other.index)
diff_a = self.frame.index.difference(join_idx)
diff_b = other.index.difference(join_idx)
diff_a_vals = af.reindex(diff_a).values
diff_b_vals = bf.reindex(diff_b).values
assert (diff_a_vals == -1).all()
af, bf = self.frame.align(other, join='right', axis=0)
tm.assert_index_equal(bf.columns, other.columns)
tm.assert_index_equal(bf.index, other.index)
tm.assert_index_equal(af.index, other.index)
# axis = 1
other = self.frame.iloc[:-5, :3].copy()
af, bf = self.frame.align(other, axis=1)
tm.assert_index_equal(bf.columns, self.frame.columns)
tm.assert_index_equal(bf.index, other.index)
# test fill value
join_idx = self.frame.index.join(other.index)
diff_a = self.frame.index.difference(join_idx)
diff_b = other.index.difference(join_idx)
diff_a_vals = af.reindex(diff_a).values
# TODO(wesm): unused?
diff_b_vals = bf.reindex(diff_b).values # noqa
assert (diff_a_vals == -1).all()
af, bf = self.frame.align(other, join='inner', axis=1)
tm.assert_index_equal(bf.columns, other.columns)
af, bf = self.frame.align(other, join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, other.columns)
# test other non-float types
af, bf = self.intframe.align(other, join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, other.columns)
af, bf = self.mixed_frame.align(self.mixed_frame,
join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, self.mixed_frame.columns)
af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=None)
tm.assert_index_equal(bf.index, Index([]))
af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
# mixed floats/ints
af, bf = self.mixed_float.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
af, bf = self.mixed_int.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
# Try to align DataFrame to Series along bad axis
with pytest.raises(ValueError):
self.frame.align(af.iloc[0, :3], join='inner', axis=2)
# align dataframe to series with broadcast or not
idx = self.frame.index
s = Series(range(len(idx)), index=idx)
left, right = self.frame.align(s, axis=0)
tm.assert_index_equal(left.index, self.frame.index)
tm.assert_index_equal(right.index, self.frame.index)
assert isinstance(right, Series)
left, right = self.frame.align(s, broadcast_axis=1)
tm.assert_index_equal(left.index, self.frame.index)
expected = {c: s for c in self.frame.columns}
expected = DataFrame(expected, index=self.frame.index,
columns=self.frame.columns)
tm.assert_frame_equal(right, expected)
# see gh-9558
df = DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
result = df[df['a'] == 2]
expected = DataFrame([[2, 5]], index=[1], columns=['a', 'b'])
tm.assert_frame_equal(result, expected)
result = df.where(df['a'] == 2, 0)
expected = DataFrame({'a': [0, 2, 0], 'b': [0, 5, 0]})
tm.assert_frame_equal(result, expected)
def _check_align(self, a, b, axis, fill_axis, how, method, limit=None):
aa, ab = a.align(b, axis=axis, join=how, method=method, limit=limit,
fill_axis=fill_axis)
join_index, join_columns = None, None
ea, eb = a, b
if axis is None or axis == 0:
join_index = a.index.join(b.index, how=how)
ea = ea.reindex(index=join_index)
eb = eb.reindex(index=join_index)
if axis is None or axis == 1:
join_columns = a.columns.join(b.columns, how=how)
ea = ea.reindex(columns=join_columns)
eb = eb.reindex(columns=join_columns)
ea = ea.fillna(axis=fill_axis, method=method, limit=limit)
eb = eb.fillna(axis=fill_axis, method=method, limit=limit)
assert_frame_equal(aa, ea)
assert_frame_equal(ab, eb)
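    # _check_align recomputes the expected result by hand: join the two
    # indexes/columns with the requested how, reindex both frames onto the
    # joined labels, then fill along fill_axis -- and compares to align().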
@pytest.mark.parametrize('meth', ['pad', 'bfill'])
@pytest.mark.parametrize('ax', [0, 1, None])
@pytest.mark.parametrize('fax', [0, 1])
@pytest.mark.parametrize('how', ['inner', 'outer', 'left', 'right'])
def test_align_fill_method(self, how, meth, ax, fax):
self._check_align_fill(how, meth, ax, fax)
def _check_align_fill(self, kind, meth, ax, fax):
left = self.frame.iloc[0:4, :10]
right = self.frame.iloc[2:, 6:]
empty = self.frame.iloc[:0, :0]
self._check_align(left, right, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(left, right, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# empty left
self._check_align(empty, right, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(empty, right, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# empty right
self._check_align(left, empty, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(left, empty, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# both empty
self._check_align(empty, empty, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(empty, empty, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
def test_align_int_fill_bug(self):
# GH #910
X = np.arange(10 * 10, dtype='float64').reshape(10, 10)
Y = np.ones((10, 1), dtype=int)
df1 = DataFrame(X)
df1['0.X'] = Y.squeeze()
df2 = df1.astype(float)
result = df1 - df1.mean()
expected = df2 - df2.mean()
assert_frame_equal(result, expected)
def test_align_multiindex(self):
# GH 10665
# same test cases as test_align_multiindex in test_series.py
midx = pd.MultiIndex.from_product([range(2), range(3), range(2)],
names=('a', 'b', 'c'))
idx = pd.Index(range(2), name='b')
df1 = pd.DataFrame(np.arange(12, dtype='int64'), index=midx)
df2 = pd.DataFrame(np.arange(2, dtype='int64'), index=idx)
# these must be the same results (but flipped)
res1l, res1r = df1.align(df2, join='left')
res2l, res2r = df2.align(df1, join='right')
expl = df1
assert_frame_equal(expl, res1l)
assert_frame_equal(expl, res2r)
expr = pd.DataFrame([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx)
assert_frame_equal(expr, res1r)
assert_frame_equal(expr, res2l)
res1l, res1r = df1.align(df2, join='right')
res2l, res2r = df2.align(df1, join='left')
exp_idx = pd.MultiIndex.from_product([range(2), range(2), range(2)],
names=('a', 'b', 'c'))
expl = pd.DataFrame([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx)
assert_frame_equal(expl, res1l)
assert_frame_equal(expl, res2r)
expr = pd.DataFrame([0, 0, 1, 1] * 2, index=exp_idx)
assert_frame_equal(expr, res1r)
assert_frame_equal(expr, res2l)
def test_align_series_combinations(self):
df = pd.DataFrame({'a': [1, 3, 5],
'b': [1, 3, 5]}, index=list('ACE'))
s = pd.Series([1, 2, 4], index=list('ABD'), name='x')
# frame + series
res1, res2 = df.align(s, axis=0)
exp1 = pd.DataFrame({'a': [1, np.nan, 3, np.nan, 5],
'b': [1, np.nan, 3, np.nan, 5]},
index=list('ABCDE'))
exp2 = pd.Series([1, 2, np.nan, 4, np.nan],
index=list('ABCDE'), name='x')
tm.assert_frame_equal(res1, exp1)
tm.assert_series_equal(res2, exp2)
# series + frame
res1, res2 = s.align(df)
tm.assert_series_equal(res1, exp2)
tm.assert_frame_equal(res2, exp1)
def test_filter(self):
# Items
filtered = self.frame.filter(['A', 'B', 'E'])
assert len(filtered.columns) == 2
assert 'E' not in filtered
filtered = self.frame.filter(['A', 'B', 'E'], axis='columns')
assert len(filtered.columns) == 2
assert 'E' not in filtered
# Other axis
idx = self.frame.index[0:4]
filtered = self.frame.filter(idx, axis='index')
expected = self.frame.reindex(index=idx)
tm.assert_frame_equal(filtered, expected)
# like
fcopy = self.frame.copy()
fcopy['AA'] = 1
filtered = fcopy.filter(like='A')
assert len(filtered.columns) == 2
assert 'AA' in filtered
# like with ints in column names
df = DataFrame(0., index=[0, 1, 2], columns=[0, 1, '_A', '_B'])
filtered = df.filter(like='_')
assert len(filtered.columns) == 2
# regex with ints in column names
# from PR #10384
df = DataFrame(0., index=[0, 1, 2], columns=['A1', 1, 'B', 2, 'C'])
expected = DataFrame(
0., index=[0, 1, 2], columns=pd.Index([1, 2], dtype=object))
filtered = df.filter(regex='^[0-9]+$')
tm.assert_frame_equal(filtered, expected)
expected = DataFrame(0., index=[0, 1, 2], columns=[0, '0', 1, '1'])
# shouldn't remove anything
filtered = expected.filter(regex='^[0-9]+$')
tm.assert_frame_equal(filtered, expected)
# pass in None
with pytest.raises(TypeError, match='Must pass'):
self.frame.filter()
with pytest.raises(TypeError, match='Must pass'):
self.frame.filter(items=None)
with pytest.raises(TypeError, match='Must pass'):
self.frame.filter(axis=1)
# test mutually exclusive arguments
with pytest.raises(TypeError, match='mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$', like='bbi')
with pytest.raises(TypeError, match='mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$', axis=1)
with pytest.raises(TypeError, match='mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$')
with pytest.raises(TypeError, match='mutually exclusive'):
self.frame.filter(items=['one', 'three'], like='bbi', axis=0)
with pytest.raises(TypeError, match='mutually exclusive'):
self.frame.filter(items=['one', 'three'], like='bbi')
# objects
filtered = self.mixed_frame.filter(like='foo')
assert 'foo' in filtered
# unicode columns, won't ascii-encode
df = self.frame.rename(columns={'B': '\u2202'})
filtered = df.filter(like='C')
assert 'C' in filtered
def test_filter_regex_search(self):
fcopy = self.frame.copy()
fcopy['AA'] = 1
# regex
filtered = fcopy.filter(regex='[A]+')
assert len(filtered.columns) == 2
assert 'AA' in filtered
# doesn't have to be at beginning
df = DataFrame({'aBBa': [1, 2],
'BBaBB': [1, 2],
'aCCa': [1, 2],
'aCCaBB': [1, 2]})
result = df.filter(regex='BB')
exp = df[[x for x in df.columns if 'BB' in x]]
assert_frame_equal(result, exp)
@pytest.mark.parametrize('name,expected', [
('a', DataFrame({'a': [1, 2]})),
('a', DataFrame({'a': [1, 2]})),
('ใ', DataFrame({'ใ': [3, 4]}))
])
def test_filter_unicode(self, name, expected):
# GH13101
df = DataFrame({'a': [1, 2], 'ใ': [3, 4]})
assert_frame_equal(df.filter(like=name), expected)
assert_frame_equal(df.filter(regex=name), expected)
@pytest.mark.parametrize('name', ['a', 'a'])
def test_filter_bytestring(self, name):
# GH13101
df = DataFrame({b'a': [1, 2], b'b': [3, 4]})
expected = DataFrame({b'a': [1, 2]})
assert_frame_equal(df.filter(like=name), expected)
assert_frame_equal(df.filter(regex=name), expected)
def test_filter_corner(self):
empty = DataFrame()
result = empty.filter([])
assert_frame_equal(result, empty)
result = empty.filter(like='foo')
assert_frame_equal(result, empty)
def test_take(self):
# homogeneous
order = [3, 1, 2, 0]
for df in [self.frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['D', 'B', 'C', 'A']]
assert_frame_equal(result, expected, check_names=False)
# negative indices
order = [2, 1, -1]
for df in [self.frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
with tm.assert_produces_warning(FutureWarning):
result = df.take(order, convert=True, axis=0)
assert_frame_equal(result, expected)
with tm.assert_produces_warning(FutureWarning):
result = df.take(order, convert=False, axis=0)
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['C', 'B', 'D']]
assert_frame_equal(result, expected, check_names=False)
# illegal indices
msg = "indices are out-of-bounds"
with pytest.raises(IndexError, match=msg):
df.take([3, 1, 2, 30], axis=0)
with pytest.raises(IndexError, match=msg):
df.take([3, 1, 2, -31], axis=0)
with pytest.raises(IndexError, match=msg):
df.take([3, 1, 2, 5], axis=1)
with pytest.raises(IndexError, match=msg):
df.take([3, 1, 2, -5], axis=1)
# mixed-dtype
order = [4, 1, 2, 0, 3]
for df in [self.mixed_frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['foo', 'B', 'C', 'A', 'D']]
assert_frame_equal(result, expected)
# negative indices
order = [4, 1, -2]
for df in [self.mixed_frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['foo', 'B', 'D']]
assert_frame_equal(result, expected)
# by dtype
order = [1, 2, 0, 3]
for df in [self.mixed_float, self.mixed_int]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['B', 'C', 'A', 'D']]
assert_frame_equal(result, expected)
def test_reindex_boolean(self):
frame = DataFrame(np.ones((10, 2), dtype=bool),
index=np.arange(0, 20, 2),
columns=[0, 2])
reindexed = frame.reindex(np.arange(10))
assert reindexed.values.dtype == np.object_
assert isna(reindexed[0][1])
reindexed = frame.reindex(columns=range(3))
assert reindexed.values.dtype == np.object_
assert isna(reindexed[1]).all()
def test_reindex_objects(self):
reindexed = self.mixed_frame.reindex(columns=['foo', 'A', 'B'])
assert 'foo' in reindexed
reindexed = self.mixed_frame.reindex(columns=['A', 'B'])
assert 'foo' not in reindexed
def test_reindex_corner(self):
index = Index(['a', 'b', 'c'])
dm = self.empty.reindex(index=[1, 2, 3])
reindexed = dm.reindex(columns=index)
tm.assert_index_equal(reindexed.columns, index)
# ints are weird
smaller = self.intframe.reindex(columns=['A', 'B', 'E'])
assert smaller['E'].dtype == np.float64
def test_reindex_axis(self):
cols = ['A', 'B', 'E']
with tm.assert_produces_warning(FutureWarning) as m:
reindexed1 = self.intframe.reindex_axis(cols, axis=1)
assert 'reindex' in str(m[0].message)
reindexed2 = self.intframe.reindex(columns=cols)
assert_frame_equal(reindexed1, reindexed2)
rows = self.intframe.index[0:5]
with tm.assert_produces_warning(FutureWarning) as m:
reindexed1 = self.intframe.reindex_axis(rows, axis=0)
assert 'reindex' in str(m[0].message)
reindexed2 = self.intframe.reindex(index=rows)
assert_frame_equal(reindexed1, reindexed2)
msg = ("No axis named 2 for object type"
" <class 'pandas.core.frame.DataFrame'>")
with pytest.raises(ValueError, match=msg):
self.intframe.reindex_axis(rows, axis=2)
# no-op case
cols = self.frame.columns.copy()
with tm.assert_produces_warning(FutureWarning) as m:
newFrame = self.frame.reindex_axis(cols, axis=1)
assert 'reindex' in str(m[0].message)
assert_frame_equal(newFrame, self.frame)
def test_reindex_with_nans(self):
df = DataFrame([[1, 2], [3, 4], [np.nan, np.nan], [7, 8], [9, 10]],
columns=['a', 'b'],
index=[100.0, 101.0, np.nan, 102.0, 103.0])
result = df.reindex(index=[101.0, 102.0, 103.0])
expected = df.iloc[[1, 3, 4]]
assert_frame_equal(result, expected)
result = df.reindex(index=[103.0])
expected = df.iloc[[4]]
assert_frame_equal(result, expected)
result = df.reindex(index=[101.0])
expected = df.iloc[[1]]
assert_frame_equal(result, expected)
def test_reindex_multi(self):
df = DataFrame(np.random.randn(3, 3))
result = df.reindex(index=range(4), columns=range(4))
expected = df.reindex(list(range(4))).reindex(columns=range(4))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randint(0, 10, (3, 3)))
result = df.reindex(index=range(4), columns=range(4))
expected = df.reindex(list(range(4))).reindex(columns=range(4))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randint(0, 10, (3, 3)))
result = df.reindex(index=range(2), columns=range(2))
expected = df.reindex(range(2)).reindex(columns=range(2))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randn(5, 3) + 1j, columns=['a', 'b', 'c'])
result = df.reindex(index=[0, 1], columns=['a', 'b'])
expected = df.reindex([0, 1]).reindex(columns=['a', 'b'])
assert_frame_equal(result, expected)
def test_reindex_multi_categorical_time(self):
# https://github.com/pandas-dev/pandas/issues/21390
midx = pd.MultiIndex.from_product(
[Categorical(['a', 'b', 'c']),
Categorical(date_range("2012-01-01", periods=3, freq='H'))])
df = pd.DataFrame({'a': range(len(midx))}, index=midx)
df2 = df.iloc[[0, 1, 2, 3, 4, 5, 6, 8]]
result = df2.reindex(midx)
expected = pd.DataFrame(
{'a': [0, 1, 2, 3, 4, 5, 6, np.nan, 8]}, index=midx)
assert_frame_equal(result, expected)
data = [[1, 2, 3], [1, 2, 3]]
@pytest.mark.parametrize('actual', [
DataFrame(data=data, index=['a', 'a']),
DataFrame(data=data, index=['a', 'b']),
DataFrame(data=data, index=['a', 'b']).set_index([0, 1]),
DataFrame(data=data, index=['a', 'a']).set_index([0, 1])
])
def test_raise_on_drop_duplicate_index(self, actual):
# issue 19186
level = 0 if isinstance(actual.index, MultiIndex) else None
with pytest.raises(KeyError):
actual.drop('c', level=level, axis=0)
with pytest.raises(KeyError):
actual.T.drop('c', level=level, axis=1)
expected_no_err = actual.drop('c', axis=0, level=level,
errors='ignore')
assert_frame_equal(expected_no_err, actual)
expected_no_err = actual.T.drop('c', axis=1, level=level,
errors='ignore')
assert_frame_equal(expected_no_err.T, actual)
@pytest.mark.parametrize('index', [[1, 2, 3], [1, 1, 2]])
@pytest.mark.parametrize('drop_labels', [[], [1], [2]])
def test_drop_empty_list(self, index, drop_labels):
# GH 21494
expected_index = [i for i in index if i not in drop_labels]
frame = pd.DataFrame(index=index).drop(drop_labels)
tm.assert_frame_equal(frame, pd.DataFrame(index=expected_index))
@pytest.mark.parametrize('index', [[1, 2, 3], [1, 2, 2]])
@pytest.mark.parametrize('drop_labels', [[1, 4], [4, 5]])
def test_drop_non_empty_list(self, index, drop_labels):
# GH 21494
with pytest.raises(KeyError, match='not found in axis'):
pd.DataFrame(index=index).drop(drop_labels)
| bsd-3-clause | 3,177,258,851,787,552,000 | 37.787665 | 79 | 0.53857 | false |
lfz/Guided-Denoise | Attackset/Iter4_ensv3_resv2_inresv2_random/nets/inception_v4.py | 45 | 15643 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception V4 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import inception_utils
slim = tf.contrib.slim
def block_inception_a(inputs, scope=None, reuse=None):
"""Builds Inception-A block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionA', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 96, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 96, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 96, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
def block_reduction_a(inputs, scope=None, reuse=None):
"""Builds Reduction-A block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockReductionA', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 384, [3, 3], stride=2, padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 224, [3, 3], scope='Conv2d_0b_3x3')
branch_1 = slim.conv2d(branch_1, 256, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(inputs, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
def block_inception_b(inputs, scope=None, reuse=None):
"""Builds Inception-B block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionB', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 224, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 256, [7, 1], scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 192, [7, 1], scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, 224, [1, 7], scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, 224, [7, 1], scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, 256, [1, 7], scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 128, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
def block_reduction_b(inputs, scope=None, reuse=None):
"""Builds Reduction-B block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockReductionB', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, 192, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 256, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 256, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 320, [7, 1], scope='Conv2d_0c_7x1')
branch_1 = slim.conv2d(branch_1, 320, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(inputs, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
def block_inception_c(inputs, scope=None, reuse=None):
"""Builds Inception-C block for Inception v4 network."""
# By default use stride=1 and SAME padding
with slim.arg_scope([slim.conv2d, slim.avg_pool2d, slim.max_pool2d],
stride=1, padding='SAME'):
with tf.variable_scope(scope, 'BlockInceptionC', [inputs], reuse=reuse):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(inputs, 256, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = tf.concat(axis=3, values=[
slim.conv2d(branch_1, 256, [1, 3], scope='Conv2d_0b_1x3'),
slim.conv2d(branch_1, 256, [3, 1], scope='Conv2d_0c_3x1')])
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(inputs, 384, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 448, [3, 1], scope='Conv2d_0b_3x1')
branch_2 = slim.conv2d(branch_2, 512, [1, 3], scope='Conv2d_0c_1x3')
branch_2 = tf.concat(axis=3, values=[
slim.conv2d(branch_2, 256, [1, 3], scope='Conv2d_0d_1x3'),
slim.conv2d(branch_2, 256, [3, 1], scope='Conv2d_0e_3x1')])
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(inputs, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 256, [1, 1], scope='Conv2d_0b_1x1')
return tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
def inception_v4_base(inputs, final_endpoint='Mixed_7d', scope=None):
"""Creates the Inception V4 network up to the given final endpoint.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
final_endpoint: specifies the endpoint to construct the network up to.
It can be one of [ 'Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'Mixed_3a', 'Mixed_4a', 'Mixed_5a', 'Mixed_5b', 'Mixed_5c', 'Mixed_5d',
'Mixed_5e', 'Mixed_6a', 'Mixed_6b', 'Mixed_6c', 'Mixed_6d', 'Mixed_6e',
'Mixed_6f', 'Mixed_6g', 'Mixed_6h', 'Mixed_7a', 'Mixed_7b', 'Mixed_7c',
'Mixed_7d']
scope: Optional variable_scope.
Returns:
logits: the logits outputs of the model.
end_points: the set of end_points from the inception model.
Raises:
    ValueError: if final_endpoint is not set to one of the predefined values.
"""
end_points = {}
def add_and_check_final(name, net):
end_points[name] = net
return name == final_endpoint
with tf.variable_scope(scope, 'InceptionV4', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# 299 x 299 x 3
net = slim.conv2d(inputs, 32, [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points
# 149 x 149 x 32
net = slim.conv2d(net, 32, [3, 3], padding='VALID',
scope='Conv2d_2a_3x3')
if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
# 147 x 147 x 32
net = slim.conv2d(net, 64, [3, 3], scope='Conv2d_2b_3x3')
if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
# 147 x 147 x 64
with tf.variable_scope('Mixed_3a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_0a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 96, [3, 3], stride=2, padding='VALID',
scope='Conv2d_0a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_3a', net): return net, end_points
# 73 x 73 x 160
with tf.variable_scope('Mixed_4a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, 96, [3, 3], padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 64, [1, 7], scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, 64, [7, 1], scope='Conv2d_0c_7x1')
branch_1 = slim.conv2d(branch_1, 96, [3, 3], padding='VALID',
scope='Conv2d_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_4a', net): return net, end_points
# 71 x 71 x 192
with tf.variable_scope('Mixed_5a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 192, [3, 3], stride=2, padding='VALID',
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1])
if add_and_check_final('Mixed_5a', net): return net, end_points
# 35 x 35 x 384
# 4 x Inception-A blocks
for idx in range(4):
block_scope = 'Mixed_5' + chr(ord('b') + idx)
net = block_inception_a(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
# 35 x 35 x 384
# Reduction-A block
net = block_reduction_a(net, 'Mixed_6a')
if add_and_check_final('Mixed_6a', net): return net, end_points
# 17 x 17 x 1024
# 7 x Inception-B blocks
for idx in range(7):
block_scope = 'Mixed_6' + chr(ord('b') + idx)
net = block_inception_b(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
# 17 x 17 x 1024
# Reduction-B block
net = block_reduction_b(net, 'Mixed_7a')
if add_and_check_final('Mixed_7a', net): return net, end_points
# 8 x 8 x 1536
# 3 x Inception-C blocks
for idx in range(3):
block_scope = 'Mixed_7' + chr(ord('b') + idx)
net = block_inception_c(net, block_scope)
if add_and_check_final(block_scope, net): return net, end_points
raise ValueError('Unknown final endpoint %s' % final_endpoint)
def inception_v4(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionV4',
create_aux_logits=True):
"""Creates the Inception V4 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
num_classes: number of predicted classes.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxiliary logits.
Returns:
logits: the logits outputs of the model.
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionV4', [inputs], reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_v4_base(inputs, scope=scope)
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# Auxiliary Head logits
if create_aux_logits:
with tf.variable_scope('AuxLogits'):
# 17 x 17 x 1024
aux_logits = end_points['Mixed_6h']
aux_logits = slim.avg_pool2d(aux_logits, [5, 5], stride=3,
padding='VALID',
scope='AvgPool_1a_5x5')
aux_logits = slim.conv2d(aux_logits, 128, [1, 1],
scope='Conv2d_1b_1x1')
aux_logits = slim.conv2d(aux_logits, 768,
aux_logits.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a')
aux_logits = slim.flatten(aux_logits)
aux_logits = slim.fully_connected(aux_logits, num_classes,
activation_fn=None,
scope='Aux_logits')
end_points['AuxLogits'] = aux_logits
# Final pooling and prediction
with tf.variable_scope('Logits'):
# 8 x 8 x 1536
net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID',
scope='AvgPool_1a')
# 1 x 1 x 1536
net = slim.dropout(net, dropout_keep_prob, scope='Dropout_1b')
net = slim.flatten(net, scope='PreLogitsFlatten')
end_points['PreLogitsFlatten'] = net
# 1536
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
inception_v4.default_image_size = 299
inception_v4_arg_scope = inception_utils.inception_arg_scope
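# --- Illustrative usage (added sketch, not part of the original file) ---
# Assumes TF 1.x with `tf` and `slim` (tf.contrib.slim) imported at the top
# of this module, as the code above expects; the input shape follows
# inception_v4.default_image_size.
if __name__ == '__main__':
    images = tf.placeholder(tf.float32, [None, 299, 299, 3])
    with slim.arg_scope(inception_v4_arg_scope()):
        logits, end_points = inception_v4(images, num_classes=1001,
                                          is_training=False)
    print(logits.get_shape())  # expected: (?, 1001)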
| apache-2.0 | 2,829,554,062,622,032,000 | 47.430341 | 80 | 0.59036 | false |
mupif/mupif | mupif/EnsightReader2.py | 1 | 13109 | #
# MuPIF: Multi-Physics Integration Framework
# Copyright (C) 2010-2015 Borek Patzak
#
# Czech Technical University, Faculty of Civil Engineering,
# Department of Structural Mechanics, 166 29 Prague, Czech Republic
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
#
from builtins import range
import re
from . import Mesh
from . import Vertex
from . import Cell
from . import BBox
from . import Field
from . import FieldID
from . import ValueType
debug = 0
def readEnsightGeo(name, partFilter, partRec):
"""
Reads Ensight geometry file (Ensight6 format) and returns corresponding Mesh object instance. Supports only unstructured meshes.
:param str name: Path to Ensight geometry file (\*.geo)
:param tuple partFilter: Only parts with id contained in partFilter will be imported
:param list partRec: A list containing info about individual parts (number of elements). Needed by readEnsightField
:return: mesh
:rtype: Mesh.Mesh
"""
vertexMapping = {}
vertices = []
cells = []
vnum = 0
enum = 0
# open the geo file
f = open(name, 'r')
if debug:
print("Importing geometry from %s" % name)
mesh = Mesh.UnstructuredMesh()
# process header (6 lines)
desc1 = f.readline()
desc2 = f.readline()
nodeidrec = f.readline()
# check if nodal ids given -> required
if not re.match(r'node\s+id\s+given', nodeidrec):
print("Given node ids required")
return
elemidrec = f.readline()
# check if element ids given -> required
if not re.match(r'element\s+id\s+given', elemidrec):
print("Given element ids required")
return
coordkwdrec = f.readline()
numberOfUnstructuredNodes = int(f.readline())
# check
# read unstructured coordinates
for i in range(numberOfUnstructuredNodes):
line = f.readline()
match = re.match('(.{8})(.{12})(.{12})(.{12})', line)
# print line
if match:
id = int(match.group(1))
x = float(match.group(2))
y = float(match.group(3))
z = float(match.group(4))
# print id, x, y, z
vertexMapping[id] = vnum # remember mapping id -> vertex number
vertices.append(Vertex.Vertex(vnum, id, (x, y, z)))
# increment vertex counter
vnum = vnum+1
# read parts in sequential order
line = f.readline()
while line:
match = re.search(r'\s*part\s+(\d+)', line)
if match:
partnum = int(match.group(1))
partRec.append({})  # add empty dict for each part containing the number of elements for each element type
if partnum in partFilter:
if debug:
print("Importing part %d" % partnum)
partdesc = f.readline().rstrip('\r\n')
# process part
# get element type
line = f.readline()
(line, enum) = readEnsightGeo_Part(f, line, mesh, enum, cells, vertexMapping, partnum, partdesc, partRec)
else:
line = f.readline()
else:
line = f.readline()
if debug:
print("Setting up mesh: %d vertices, %d cells" % (vnum, enum))
print(len(vertices), len(cells))
mesh.setup(vertices, cells)
return mesh
def readEnsightGeo_Part(f, line, mesh, enum, cells, vertexMapping, partnum, partdesc, partRec):
"""
Reads single cell part geometry from an Ensight file.
:param File f: File object
:param str line: Current line to process (should contain element type)
:param Mesh.Mesh mesh: Mupif mesh object to accommodate new cells
:param int enum: Accumulated cell number
:param list cells: List of individual Cells
:param dict vertexMapping: Map from vertex label (as given in Ensight file) to local number
:param int partnum: Part number
:param list partdesc: Partition description record
:param list partRec: Output argument (list) containing info about individual parts (number of elements). Needed by readEnsightField
:return: tuple (line, cell number)
:rtype: tuple (line, enum)
"""
# if the next line is not next part record, then should be element section
while not re.search(r'\s*part\s+(\d+)', line):
if line == '':
break
# ok no "part" keyword, parse element section
eltype = line.rstrip('\r\n')
if debug:
print("(", eltype, ")")
line = f.readline()
nelem = int(line.rstrip('\r\n'))
# remember info to partRec
partRec[partnum-1][eltype] = nelem
if debug:
print("part %s nelem %d" % (partdesc, nelem))
# read individual elements
for i in range(nelem):
elemRec = f.readline()
if eltype == "hexa8":
match = re.match('(.{8})(.{8})(.{8})(.{8})(.{8})(.{8})(.{8})(.{8})(.{8})', elemRec)
if match:
elnum = int(match.group(1))
elnodes = (int(match.group(2)), int(match.group(3)), int(match.group(4)), int(match.group(5)),
int(match.group(6)), int(match.group(7)), int(match.group(8)), int(match.group(9)))
# print ("Brick: %d (%d %d %d %d %d %d %d %d)"%(elnum, elnodes[0],elnodes[1],elnodes[2],elnodes[3],elnodes[4],elnodes[5],elnodes[6],elnodes[7]))
_vert = [vertexMapping[i] for i in elnodes]
cells.append(Cell.Brick_3d_lin(mesh, enum, enum, tuple(_vert)))
enum = enum+1
elif eltype == "quad4":
match = re.match('(.{8})(.{8})(.{8})(.{8})(.{8})', elemRec)
if match:
elnum = int(match.group(1))
elnodes = (int(match.group(2)), int(match.group(3)), int(match.group(4)), int(match.group(5)))
if debug:
print("Quad: %d (%d %d %d %d)" % (elnum, elnodes[0], elnodes[1], elnodes[2], elnodes[3]))
_vert = [vertexMapping[i] for i in elnodes]
cells.append(Cell.Quad_2d_lin(mesh, enum, enum, tuple(_vert)))
enum = enum+1
else:
    print("Element type %s not supported" % eltype)
# finished parsing part for specific element type
line = f.readline()
# next part record found
return line, enum
def readEnsightField(name, parts, partRec, type, fieldID, mesh, units, time):
"""
Reads either Per-node or Per-element variable file and returns corresponding Field representation.
:param str name: Input field name with variable data
:param tuple parts: Only parts with id contained in partFiler will be imported
:param list partRec: A list containing info about individual parts (number of elements per each element type).
:param int type: Determines type of field values: type = 1 scalar, type = 3 vector, type = 6 tensor
:param FieldID fieldID: Field type (displacement, strain, temperature ...)
:param Mesh.Mesh mesh: Corresponding mesh
:param PhysicalUnit units: field units
:param PhysicalQuantity time: time
:return: Field of unknowns
:rtype: Field
"""
vertexVals = []
cellVals = []
indx = list(range(6))
values = []
if type == 1:
ftype = ValueType.Scalar
elif type == 3:
ftype = ValueType.Vector
else:
ftype = ValueType.Tensor
# open the geo file
f = open(name, 'r')
# get variable name (1st line)
varname = f.readline().rstrip('\r\n')
if debug:
print("Importing %s from %s" % (varname, name))
# now check if nodal records available or part (cell records)
line = f.readline()
match = re.match(r'part\s+(\d+)', line)
if not match:
# nodal (vertex based specification)
size = mesh.getNumberOfVertices() * type
print("Expecting ", mesh.getNumberOfVertices(), " nodal records in ", size//6, " lines")
# read nodal variables
for i in range(size//6): # six values per row in fixed format 12.5e
for j in indx:
try:
vertexVals.append(float(line[j*12:(j+1)*12]))
except ValueError:
print("exception....", j, line, ">", line[j*12:(j+1)*12])
line = f.readline()
# parse remaining values
# line = f.readline()
for j in range(size % 6):
vertexVals.append(float(line[j*12:(j+1)*12]))
if size % 6 > 0:
line = f.readline()
# done parsing nodal record(s)
# so this should be per-vertex variable file -> vertex based field
# convert vertexVals into form required by field
for i in range(mesh.getNumberOfVertices()):
if type == 1: # scalar
values.append((vertexVals[i],))
elif type == 3: # vector
values.append((vertexVals[i*3], vertexVals[i*3+1], vertexVals[i*3+2]))
elif type == 6: # tensor
values.append((vertexVals[i*6], vertexVals[i*6+1],
vertexVals[i*6+2], vertexVals[i*6+3],
                               vertexVals[i*6+4], vertexVals[i*6+5]))
field = Field.Field(mesh, fieldID, ftype, units, time, values, Field.FieldType.FT_vertexBased)
return field
else:
# ok nodal section missing, parts should provide per-cell variables
while line:
match = re.search(r'\s*part\s+(\d+)', line)
if match:
partnum = int(match.group(1))
if partnum in parts:
if debug:
print("Importing part %d" % partnum)
# get element type
line = f.readline()
# if the next line is not next part record, then should be element section
while not re.search(r'\s*part\s+(\d+)', line):
# ok no "part" keyword, parse element section
eltype = line.rstrip('\r\n')
if debug:
print("eltype:", eltype)
nelem = partRec[partnum-1][eltype] # get number of elements in part
if debug:
print("(", eltype, nelem, ")")
size = nelem * type
cellVals = [] # empty values for each element group
for i in range(size//6): # six values per row in fixed format 12.5e
line = f.readline()
# print ".",
for j in indx:
cellVals.append(float(line[j*12:(j+1)*12]))
# parse remaining values
line = f.readline()
for j in range(size % 6):
cellVals.append(float(line[j*12:(j+1)*12]))
if size % 6 > 0:
line = f.readline()
# print "%"
# now convert that into the format required by field
for i in range(nelem):
if type == 1: # scalar
values.append((cellVals[i],))
elif type == 3: # vector
values.append((cellVals[i*3], cellVals[i*3+1], cellVals[i*3+2]))
elif type == 6: # tensor
values.append((cellVals[i*6], cellVals[i*6+1],
cellVals[i*6+2], cellVals[i*6+3],
                                       cellVals[i*6+4], cellVals[i*6+5]))
if debug:
print("done importing element section")
# done parsing cell record(s) in part
else: # if (partnum in parts): proceed to next part
line = f.readline()
else:
line = f.readline()
# so this should be per-cell variable file -> cell based field
field = Field.Field(mesh, fieldID, ftype, units, time, values, Field.FieldType.FT_cellBased)
return field
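# --- Illustrative usage (added sketch, not part of the original module) ---
# File names, part ids and the field id below are hypothetical placeholders;
# units and time are passed through to the Field constructor unchanged.
if __name__ == '__main__':
    partRec = []
    mesh = readEnsightGeo('case.geo', partFilter=(1, 2), partRec=partRec)
    field = readEnsightField('case.scl', (1, 2), partRec, 1,
                             FieldID.FID_Temperature, mesh, None, None)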
| lgpl-3.0 | -3,159,519,565,527,876,600 | 41.287097 | 164 | 0.5468 | false |
ClearCorp-dev/odoo | addons/account/report/account_invoice_report.py | 60 | 12934 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
import openerp.addons.decimal_precision as dp
from openerp.osv import fields,osv
class account_invoice_report(osv.osv):
_name = "account.invoice.report"
_description = "Invoices Statistics"
_auto = False
_rec_name = 'date'
def _compute_amounts_in_user_currency(self, cr, uid, ids, field_names, args, context=None):
"""Compute the amounts in the currency of the user
"""
if context is None:
context={}
currency_obj = self.pool.get('res.currency')
currency_rate_obj = self.pool.get('res.currency.rate')
user_currency_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
currency_rate_id = currency_rate_obj.search(cr, uid, [('rate', '=', 1)], limit=1, context=context)[0]
base_currency_id = currency_rate_obj.browse(cr, uid, currency_rate_id, context=context).currency_id.id
res = {}
ctx = context.copy()
for item in self.browse(cr, uid, ids, context=context):
ctx['date'] = item.date
price_total = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_total, context=ctx)
price_average = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_average, context=ctx)
residual = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.residual, context=ctx)
res[item.id] = {
'user_currency_price_total': price_total,
'user_currency_price_average': price_average,
'user_currency_residual': residual,
}
return res
_columns = {
'date': fields.date('Date', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_qty':fields.float('Product Quantity', readonly=True),
'uom_name': fields.char('Reference Unit of Measure', size=128, readonly=True),
'payment_term': fields.many2one('account.payment.term', 'Payment Term', readonly=True),
'period_id': fields.many2one('account.period', 'Force Period', domain=[('state','<>','done')], readonly=True),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position', readonly=True),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
'journal_id': fields.many2one('account.journal', 'Journal', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'commercial_partner_id': fields.many2one('res.partner', 'Partner Company', help="Commercial Entity"),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Without Tax', readonly=True),
'user_currency_price_total': fields.function(_compute_amounts_in_user_currency, string="Total Without Tax", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'price_average': fields.float('Average Price', readonly=True, group_operator="avg"),
'user_currency_price_average': fields.function(_compute_amounts_in_user_currency, string="Average Price", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'currency_rate': fields.float('Currency Rate', readonly=True),
'nbr': fields.integer('# of Invoices', readonly=True), # TDE FIXME master: rename into nbr_lines
'type': fields.selection([
('out_invoice','Customer Invoice'),
('in_invoice','Supplier Invoice'),
('out_refund','Customer Refund'),
('in_refund','Supplier Refund'),
],'Type', readonly=True),
'state': fields.selection([
('draft','Draft'),
('proforma','Pro-forma'),
('proforma2','Pro-forma'),
('open','Open'),
('paid','Done'),
('cancel','Cancelled')
], 'Invoice Status', readonly=True),
'date_due': fields.date('Due Date', readonly=True),
'account_id': fields.many2one('account.account', 'Account',readonly=True),
'account_line_id': fields.many2one('account.account', 'Account Line',readonly=True),
'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account',readonly=True),
'residual': fields.float('Total Residual', readonly=True),
'user_currency_residual': fields.function(_compute_amounts_in_user_currency, string="Total Residual", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'country_id': fields.many2one('res.country', 'Country of the Partner Company'),
}
_order = 'date desc'
_depends = {
'account.invoice': [
'account_id', 'amount_total', 'commercial_partner_id', 'company_id',
'currency_id', 'date_due', 'date_invoice', 'fiscal_position',
'journal_id', 'partner_bank_id', 'partner_id', 'payment_term',
'period_id', 'residual', 'state', 'type', 'user_id',
],
'account.invoice.line': [
'account_id', 'invoice_id', 'price_subtotal', 'product_id',
'quantity', 'uos_id',
],
'product.product': ['product_tmpl_id'],
'product.template': ['categ_id'],
'product.uom': ['category_id', 'factor', 'name', 'uom_type'],
'res.currency.rate': ['currency_id', 'name'],
'res.partner': ['country_id'],
}
def _select(self):
select_str = """
SELECT sub.id, sub.date, sub.product_id, sub.partner_id, sub.country_id,
sub.payment_term, sub.period_id, sub.uom_name, sub.currency_id, sub.journal_id,
sub.fiscal_position, sub.user_id, sub.company_id, sub.nbr, sub.type, sub.state,
sub.categ_id, sub.date_due, sub.account_id, sub.account_line_id, sub.partner_bank_id,
sub.product_qty, sub.price_total / cr.rate as price_total, sub.price_average /cr.rate as price_average,
cr.rate as currency_rate, sub.residual / cr.rate as residual, sub.commercial_partner_id as commercial_partner_id
"""
return select_str
def _sub_select(self):
select_str = """
SELECT min(ail.id) AS id,
ai.date_invoice AS date,
ail.product_id, ai.partner_id, ai.payment_term, ai.period_id,
CASE
WHEN u.uom_type::text <> 'reference'::text
THEN ( SELECT product_uom.name
FROM product_uom
WHERE product_uom.uom_type::text = 'reference'::text
AND product_uom.active
AND product_uom.category_id = u.category_id LIMIT 1)
ELSE u.name
END AS uom_name,
ai.currency_id, ai.journal_id, ai.fiscal_position, ai.user_id, ai.company_id,
count(ail.*) AS nbr,
ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id AS account_line_id,
ai.partner_bank_id,
SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN (- ail.quantity) / u.factor
ELSE ail.quantity / u.factor
END) AS product_qty,
SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN - ail.price_subtotal
ELSE ail.price_subtotal
END) AS price_total,
CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN SUM(- ail.price_subtotal)
ELSE SUM(ail.price_subtotal)
END / CASE
WHEN SUM(ail.quantity / u.factor) <> 0::numeric
THEN CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN SUM((- ail.quantity) / u.factor)
ELSE SUM(ail.quantity / u.factor)
END
ELSE 1::numeric
END AS price_average,
CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN - ai.residual
ELSE ai.residual
END / CASE
WHEN (( SELECT count(l.id) AS count
FROM account_invoice_line l
LEFT JOIN account_invoice a ON a.id = l.invoice_id
WHERE a.id = ai.id)) <> 0
THEN ( SELECT count(l.id) AS count
FROM account_invoice_line l
LEFT JOIN account_invoice a ON a.id = l.invoice_id
WHERE a.id = ai.id)
ELSE 1::bigint
END::numeric AS residual,
ai.commercial_partner_id as commercial_partner_id,
partner.country_id
"""
return select_str
def _from(self):
from_str = """
FROM account_invoice_line ail
JOIN account_invoice ai ON ai.id = ail.invoice_id
JOIN res_partner partner ON ai.commercial_partner_id = partner.id
LEFT JOIN product_product pr ON pr.id = ail.product_id
left JOIN product_template pt ON pt.id = pr.product_tmpl_id
LEFT JOIN product_uom u ON u.id = ail.uos_id
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY ail.product_id, ai.date_invoice, ai.id,
ai.partner_id, ai.payment_term, ai.period_id, u.name, ai.currency_id, ai.journal_id,
ai.fiscal_position, ai.user_id, ai.company_id, ai.type, ai.state, pt.categ_id,
ai.date_due, ai.account_id, ail.account_id, ai.partner_bank_id, ai.residual,
ai.amount_total, u.uom_type, u.category_id, ai.commercial_partner_id, partner.country_id
"""
return group_by_str
def init(self, cr):
# self._table = account_invoice_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM (
%s %s %s
) AS sub
JOIN res_currency_rate cr ON (cr.currency_id = sub.currency_id)
WHERE
cr.id IN (SELECT id
FROM res_currency_rate cr2
WHERE (cr2.currency_id = sub.currency_id)
AND ((sub.date IS NOT NULL AND cr2.name <= sub.date)
OR (sub.date IS NULL AND cr2.name <= NOW()))
ORDER BY name DESC LIMIT 1)
)""" % (
self._table,
self._select(), self._sub_select(), self._from(), self._group_by()))
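# --- Illustrative usage (added sketch, not part of the original module) ---
# The SQL view behaves like a read-only model and is queried through the
# ORM like any other; `cr`, `uid` and the domain below are hypothetical:
#
# report_obj = self.pool.get('account.invoice.report')
# ids = report_obj.search(cr, uid, [('state', '=', 'open')])
# rows = report_obj.read(cr, uid, ids, ['partner_id', 'price_total'])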
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,653,493,608,334,182,100 | 54.51073 | 200 | 0.546621 | false |
stadelmanma/OpenPNM | test/unit/Network/GenericNetworkTest.py | 1 | 14373 | import OpenPNM
import scipy as sp
class GenericNetworkTest:
def setup_class(self):
self.net = OpenPNM.Network.Cubic(shape=[10, 10, 10])
def teardown_class(self):
mgr = OpenPNM.Base.Workspace()
mgr.clear()
def test_find_connected_pores_numeric_not_flattend(self):
a = self.net.find_connected_pores(throats=[0, 1])
assert sp.all(a.flatten() == [0, 1, 1, 2])
def test_find_connected_pores_numeric_flattend(self):
a = self.net.find_connected_pores(throats=[0, 1], flatten=True)
assert sp.all(a == [0, 1, 2])
def test_find_connected_pores_boolean_flattend(self):
Tind = sp.zeros((self.net.Nt,), dtype=bool)
Tind[[0, 1]] = True
a = self.net.find_connected_pores(throats=Tind, flatten=True)
assert sp.all(a == [0, 1, 2])
def test_find_connected_pores_empty_flattend(self):
a = self.net.find_connected_pores(throats=[], flatten=True)
assert sp.shape(a) == (0, )
def test_find_neighbor_pores_numeric(self):
a = self.net.find_neighbor_pores(pores=[])
assert sp.size(a) == 0
def test_find_neighbor_pores_boolean(self):
Pind = sp.zeros((self.net.Np,), dtype=bool)
Pind[[0, 1]] = True
a = self.net.find_neighbor_pores(pores=Pind)
assert sp.all(a == [2, 10, 11, 100, 101])
def test_find_neighbor_pores_numeric_union(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='union')
assert sp.all(a == [1, 3, 10, 12, 100, 102])
def test_find_neighbor_pores_numeric_intersection(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='intersection')
assert sp.all(a == [1])
def test_find_neighbor_pores_numeric_notintersection(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='not_intersection')
assert sp.all(a == [3, 10, 12, 100, 102])
def test_find_neighbor_pores_numeric_union_incl_self(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='union',
excl_self=False)
assert sp.all(a == [0, 1, 2, 3, 10, 12, 100, 102])
def test_find_neighbor_pores_numeric_intersection_incl_self(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='intersection',
excl_self=False)
assert sp.all(a == [1])
def test_find_neighbor_pores_numeric_notintersection_incl_self(self):
a = self.net.find_neighbor_pores(pores=[0, 2],
mode='not_intersection',
excl_self=False)
assert sp.all(a == [0, 2, 3, 10, 12, 100, 102])
def test_find_neighbor_throats_empty(self):
a = self.net.find_neighbor_throats(pores=[])
assert sp.size(a) == 0
def test_find_neighbor_throats_boolean(self):
Pind = sp.zeros((self.net.Np,), dtype=bool)
Pind[[0, 1]] = True
a = self.net.find_neighbor_throats(pores=Pind)
assert sp.all(a == [0, 1, 900, 901, 1800, 1801])
def test_find_neighbor_throats_numeric_union(self):
a = self.net.find_neighbor_throats(pores=[0, 2], mode='union')
assert sp.all(a == [0, 1, 2, 900, 902, 1800, 1802])
def test_find_neighbor_throats_numeric_intersection(self):
a = self.net.find_neighbor_throats(pores=[0, 2], mode='intersection')
assert sp.size(a) == 0
def test_find_neighbor_throats_numeric_not_intersection(self):
a = self.net.find_neighbor_throats(pores=[0, 2],
mode='not_intersection')
assert sp.all(a == [0, 1, 2, 900, 902, 1800, 1802])
def test_num_neighbors_empty(self):
a = self.net.num_neighbors(pores=[], element='pores')
assert sp.size(a) == 0
a = self.net.num_neighbors(pores=[], element='throats')
assert sp.size(a) == 0
def test_num_neighbors_pores_flattened(self):
a = self.net.num_neighbors(pores=0, element='pores', flatten=True)
assert a == 3
assert isinstance(a, int)
a = self.net.num_neighbors(pores=[0, 2], element='pores', flatten=True)
assert a == 6
assert isinstance(a, int)
def test_num_neighbors_pores_with_modes(self):
a = self.net.num_neighbors(pores=[0, 2], element='pores', mode='union',
flatten=True)
assert a == 6
a = self.net.num_neighbors(pores=[0, 2], element='pores',
mode='intersection', flatten=True)
assert a == 1
a = self.net.num_neighbors(pores=[0, 2], element='pores',
mode='not_intersection', flatten=True)
assert a == 5
def test_num_neighbors_pores_notflattened(self):
a = self.net.num_neighbors(pores=[0, 2], flatten=False)
assert sp.all(a == [3, 4])
a = self.net.num_neighbors(pores=0, flatten=False)
assert sp.all(a == [3])
assert isinstance(a, sp.ndarray)
def test_num_neighbors_throats_flattened(self):
a = self.net.num_neighbors(pores=0, element='throats', flatten=True)
assert a == 3
a = self.net.num_neighbors(pores=[0, 1], element='throats',
flatten=True)
assert a == 6
self.net.extend(throat_conns=[[0, 1], [0, 2]])
a = self.net.num_neighbors(pores=0, element='throats', flatten=True)
assert a == 5
a = self.net.num_neighbors(pores=[0, 1], element='throats',
flatten=True)
assert a == 8
self.net.trim(throats=self.net.Ts[-2:])
def test_num_neighbors_throats_with_modes(self):
a = self.net.num_neighbors(pores=[0, 1], element='throats',
mode='union', flatten=True)
assert a == 6
self.net.extend(throat_conns=[[0, 1], [0, 2]])
a = self.net.num_neighbors(pores=[0, 1], element='throats',
mode='union', flatten=True)
assert a == 8
a = self.net.num_neighbors(pores=[0, 1], element='throats',
mode='intersection', flatten=True)
assert a == 2
a = self.net.num_neighbors(pores=[0, 1], element='throats',
mode='not_intersection', flatten=True)
assert a == 6
self.net.trim(throats=self.net.Ts[-2:])
def test_num_neighbors_throats_not_flattened(self):
a = self.net.num_neighbors(pores=0, element='throats', flatten=False)
assert sp.all(a == [3])
a = self.net.num_neighbors(pores=[0, 1, 2, 3], element='throats',
flatten=False)
assert sp.all(a == [3, 4, 4, 4])
self.net.extend(throat_conns=[[0, 1], [0, 2]])
a = self.net.num_neighbors(pores=0, element='throats', flatten=False)
assert sp.all(a == [5])
a = self.net.num_neighbors(pores=[0, 1, 2, 3], element='throats',
flatten=False)
assert sp.all(a == [5, 5, 5, 4])
self.net.trim(throats=self.net.Ts[-2:])
def test_find_interface_throats(self):
self.net['pore.domain1'] = False
self.net['pore.domain2'] = False
self.net['pore.domain3'] = False
self.net['pore.domain1'][[0, 1, 2]] = True
self.net['pore.domain2'][[100, 101, 102]] = True
self.net['pore.domain3'][900:] = True
a = self.net.find_interface_throats(labels=['domain1', 'domain2'])
assert sp.all(a == [1800, 1801, 1802])
a = self.net.find_interface_throats(labels=['domain1', 'domain3'])
assert sp.size(a) == 0
def test_check_network_health_healthy(self):
a = self.net.check_network_health()
items = set(['disconnected_clusters',
'isolated_pores',
'trim_pores',
'duplicate_throats',
'bidirectional_throats',
'headless_throats',
'looped_throats'])
assert items == a.keys()
assert sp.size(list(a.values())) == 0
def test_check_network_isolated_pores(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
Ts = net.find_neighbor_throats(pores=0)
net.trim(throats=Ts)
a = net.check_network_health()
assert a['isolated_pores'] == 0
net.trim(a['trim_pores'])
a = net.check_network_health()
assert sp.size(list(a.values())) == 0
def test_check_network_health_duplicate_throat(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
P12 = net['throat.conns'][0]
net.extend(throat_conns=[P12])
a = net.check_network_health()
assert len(a['duplicate_throats']) == 1
assert len(a['duplicate_throats'][0]) == 2
def test_check_network_health_triplicate_throats(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
P12 = net['throat.conns'][0]
net.extend(throat_conns=[P12])
net.extend(throat_conns=[P12])
a = net.check_network_health()
assert len(a['duplicate_throats']) == 1
assert len(a['duplicate_throats'][0]) == 3
def test_check_network_health_multiple_duplicate_throats(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
P12 = net['throat.conns'][0]
net.extend(throat_conns=[P12])
P12 = net['throat.conns'][1]
net.extend(throat_conns=[P12])
a = net.check_network_health()
assert len(a['duplicate_throats']) == 2
assert len(a['duplicate_throats'][1]) == 2
def test_check_network_health_bidirectional_throats(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
P12 = net['throat.conns'][0]
net['throat.conns'][0] = [P12[1], P12[0]]
a = net.check_network_health()
assert sp.size(a['bidirectional_throats']) == 1
assert sp.size(a['duplicate_throats']) == 0
def test_check_network_health_headless_throats(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
net.extend(throat_conns=[[5, 5555]])
a = net.check_network_health()
assert a['headless_throats'] == sp.array([300])
def test_check_network_health_looped_throats(self):
net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
net.extend(throat_conns=[[5, 5]])
a = net.check_network_health()
assert a['looped_throats'] == sp.array([300])
def test_find_nearby_pores_distance_1(self):
a = self.net.find_nearby_pores(pores=[0, 1], distance=1)
b = self.net.find_neighbor_pores(pores=[0, 1], flatten=False)
assert sp.all([sp.all(a[i] == b[i]) for i in range(0, len(a))])
def test_find_nearby_pores_distance_2(self):
a = self.net.find_nearby_pores(pores=[0, 1], distance=2)
assert sp.all([sp.size(a[i]) for i in [0, 1]] == [10, 14])
def test_find_nearby_pores_distance_0(self):
a = self.net.find_nearby_pores(pores=[0, 1], distance=0)
assert sp.shape(a) == (2, 0)
def test_find_nearby_pores_distance_1_flattened(self):
a = self.net.find_nearby_pores(pores=[0, 1], distance=1, flatten=True)
b = self.net.find_neighbor_pores(pores=[0, 1])
assert sp.all(a == b)
def test_find_nearby_pores_distance_2_flattened(self):
a = self.net.find_nearby_pores(pores=[0, 1], distance=2, flatten=True)
assert sp.size(a) == 15
def test_find_nearby_pores_distance_2_flattened_inclself(self):
a = self.net.find_nearby_pores(pores=[0, 1],
distance=2,
flatten=True,
excl_self=False)
assert sp.size(a) == 17
assert sp.all(sp.in1d([0, 1], a))
def test_add_boundary_pores_cubic(self):
net = OpenPNM.Network.Cubic(shape=[3, 3, 3], spacing=1)
net.add_boundary_pores(pores=net.pores('top'), offset=[0, 0, 1])
assert net.Np == 36
assert net.Nt == 63
def test_add_boundary_pores_cubic_2D(self):
net = OpenPNM.Network.Cubic(shape=[3, 3, 1], spacing=1)
Ps = net.Ps
net.add_boundary_pores(pores=Ps, offset=[0, 0, 1])
assert net.Np == 18
assert net.Nt == 21
net.add_boundary_pores(pores=Ps, offset=[0, 0, -1])
assert net.Np == 27
assert net.Nt == 30
def test_add_boundary_pores_cubic_custom_label(self):
net = OpenPNM.Network.Cubic(shape=[3, 3, 3], spacing=1)
Ps = net.pores('top')
net.add_boundary_pores(pores=Ps,
offset=[0, 0, 1],
apply_label='pore.test')
assert 'pore.test' in net.labels()
Ps = net.pores('bottom')
net.add_boundary_pores(pores=Ps,
offset=[0, 0, -1],
apply_label='test2')
assert 'pore.test2' in net.labels()
def test_add_boundary_pores_cubicdual(self):
net = OpenPNM.Network.CubicDual(shape=[5, 5, 5],
label_1='primary',
label_2='secondary')
Ps = net.pores(labels=['surface', 'bottom'], mode='intersection')
net.add_boundary_pores(pores=Ps, offset=[0, 0, -0.5])
Ps2 = net.pores(labels=['boundary'], mode='intersection')
assert Ps.size == Ps2.size
assert ~sp.any(sp.in1d(Ps, Ps2))
def test_add_boundary_pores_delaunay(self):
net = OpenPNM.Network.Delaunay(num_pores=30, domain_size=[1, 1, 1])
throats = net.Nt
pores = sp.random.randint(30, size=5)
net.add_boundary_pores(pores=pores, offset=[0, 0, 1])
assert net.Np == 35
assert net.Nt == throats + 5
def test_add_boundary_pores_delaunaycubic(self):
net = OpenPNM.Network.DelaunayCubic(shape=[3, 3, 3], spacing=1)
throats = net.Nt
pores = sp.random.randint(27, size=5)
net.add_boundary_pores(pores=pores, offset=[0, 0, 1])
assert net.Np == 32
assert net.Nt == throats + 5
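# --- Illustrative driver (added sketch, not part of the original file) ---
# These tests are normally collected by pytest; a direct run could look
# like this (the chosen test method is arbitrary):
if __name__ == '__main__':
    t = GenericNetworkTest()
    t.setup_class()
    t.test_find_connected_pores_numeric_flattend()
    t.teardown_class()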
| mit | -4,780,750,674,859,090,000 | 41.649852 | 79 | 0.549016 | false |
godfather1103/WeiboRobot | python27/1.0/lib/ctypes/test/test_unicode.py | 35 | 5126 | # coding: latin-1
import unittest
import ctypes
from ctypes.test import need_symbol
import _ctypes_test
@need_symbol('c_wchar')
class UnicodeTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
dll = ctypes.CDLL(_ctypes_test.__file__)
cls.wcslen = dll.my_wcslen
cls.wcslen.argtypes = [ctypes.c_wchar_p]
def setUp(self):
self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")
def tearDown(self):
ctypes.set_conversion_mode(*self.prev_conv_mode)
def test_ascii_strict(self):
wcslen = self.wcslen
ctypes.set_conversion_mode("ascii", "strict")
# no conversions take place with unicode arguments
self.assertEqual(wcslen(u"abc"), 3)
self.assertEqual(wcslen(u"ab\u2070"), 3)
# string args are converted
self.assertEqual(wcslen("abc"), 3)
self.assertRaises(ctypes.ArgumentError, wcslen, "abä")
def test_ascii_replace(self):
wcslen = self.wcslen
ctypes.set_conversion_mode("ascii", "replace")
self.assertEqual(wcslen(u"abc"), 3)
self.assertEqual(wcslen(u"ab\u2070"), 3)
self.assertEqual(wcslen("abc"), 3)
self.assertEqual(wcslen("abä"), 3)
def test_ascii_ignore(self):
wcslen = self.wcslen
ctypes.set_conversion_mode("ascii", "ignore")
self.assertEqual(wcslen(u"abc"), 3)
self.assertEqual(wcslen(u"ab\u2070"), 3)
# ignore error mode skips non-ascii characters
self.assertEqual(wcslen("abc"), 3)
self.assertEqual(wcslen("äöüß"), 0)
def test_latin1_strict(self):
wcslen = self.wcslen
ctypes.set_conversion_mode("latin-1", "strict")
self.assertEqual(wcslen(u"abc"), 3)
self.assertEqual(wcslen(u"ab\u2070"), 3)
self.assertEqual(wcslen("abc"), 3)
self.assertEqual(wcslen("äöüß"), 4)
def test_buffers(self):
ctypes.set_conversion_mode("ascii", "strict")
buf = ctypes.create_unicode_buffer("abc")
self.assertEqual(len(buf), 3+1)
ctypes.set_conversion_mode("ascii", "replace")
buf = ctypes.create_unicode_buffer("abäöü")
self.assertEqual(buf[:], u"ab\uFFFD\uFFFD\uFFFD\0")
self.assertEqual(buf[::], u"ab\uFFFD\uFFFD\uFFFD\0")
self.assertEqual(buf[::-1], u"\0\uFFFD\uFFFD\uFFFDba")
self.assertEqual(buf[::2], u"a\uFFFD\uFFFD")
self.assertEqual(buf[6:5:-1], u"")
ctypes.set_conversion_mode("ascii", "ignore")
buf = ctypes.create_unicode_buffer("abäöü")
# is that correct? not sure. But with 'ignore', you get what you pay for..
self.assertEqual(buf[:], u"ab\0\0\0\0")
self.assertEqual(buf[::], u"ab\0\0\0\0")
self.assertEqual(buf[::-1], u"\0\0\0\0ba")
self.assertEqual(buf[::2], u"a\0\0")
self.assertEqual(buf[6:5:-1], u"")
@need_symbol('c_wchar')
class StringTestCase(UnicodeTestCase):
@classmethod
def setUpClass(cls):
super(StringTestCase, cls).setUpClass()
cls.func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p
def setUp(self):
func = self.func
self.prev_conv_mode = ctypes.set_conversion_mode("ascii", "strict")
func.argtypes = [ctypes.c_char_p]
func.restype = ctypes.c_char_p
def tearDown(self):
func = self.func
ctypes.set_conversion_mode(*self.prev_conv_mode)
func.argtypes = None
func.restype = ctypes.c_int
def test_ascii_strict(self):
func = self.func
ctypes.set_conversion_mode("ascii", "strict")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertRaises(ctypes.ArgumentError, func, u"abä")
def test_ascii_ignore(self):
func = self.func
ctypes.set_conversion_mode("ascii", "ignore")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertEqual(func(u"äöüß"), "")
def test_ascii_replace(self):
func = self.func
ctypes.set_conversion_mode("ascii", "replace")
self.assertEqual(func("abc"), "abc")
self.assertEqual(func(u"abc"), "abc")
self.assertEqual(func(u"äöüß"), "????")
def test_buffers(self):
ctypes.set_conversion_mode("ascii", "strict")
buf = ctypes.create_string_buffer(u"abc")
self.assertEqual(len(buf), 3+1)
ctypes.set_conversion_mode("ascii", "replace")
buf = ctypes.create_string_buffer(u"abäöü")
self.assertEqual(buf[:], "ab???\0")
self.assertEqual(buf[::], "ab???\0")
self.assertEqual(buf[::-1], "\0???ba")
self.assertEqual(buf[::2], "a??")
self.assertEqual(buf[6:5:-1], "")
ctypes.set_conversion_mode("ascii", "ignore")
buf = ctypes.create_string_buffer(u"abäöü")
# is that correct? not sure. But with 'ignore', you get what you pay for..
self.assertEqual(buf[:], "ab\0\0\0\0")
self.assertEqual(buf[::], "ab\0\0\0\0")
self.assertEqual(buf[::-1], "\0\0\0\0ba")
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -5,835,137,217,168,437,000 | 36.144928 | 83 | 0.603199 | false |
chaosim/dao | samples/hello.py | 1 | 1301 | from dao import word
from samplevars import x
def parse(grammar_element, text):
x = Var()
code = grammar_element(x)+x
return eval([code, text])
def match(grammar_element, text):
x = Var()
code = grammar_element(x)
return eval([code, text])
print parse(word, 'hello')
print match(word, 'hello')
def hello(x):
return word('hello')+some(space)+word(x)
#[sequence, [word, 'hello'], [some, [space]], [word, x]]
print parse(hello, 'hello world')
print match(hello, 'hello world')
# The functions below illustrate SSA (static single assignment) form:
# each assignment gets a fresh name and phi() merges the values arriving
# from different control-flow paths.
def f():
global a
a1 = 1
if a1:
a2 = 2
else:
pass
a3 = phi(a1, a2)
use(a3)
def f():
global a
a1 = 1
if a1:
a2 = 2
else:
a3 = 3
a4 = phi(a2, a3)
print a4
use(a4)
a = 3
def f():
global a
a = phi(a, a)
a = a - 1
f()
use(a)
a1 = 3
def f():
global a
a3 = phi(a1, a2)
a2 = a3-1
f()
use(a2)
i1 = 0
j1 = 0
def f():
i3 = phi(i1, i2) #i3 = phi(0, j3)
j3 = phi(j1, j2) #j3 = phi(0, j3+1)
i2 = j3
j2 = i2+1 #j2 = j3+1
g()
use(i2, j2)
i1 = 0
j1 = 0
def f():
i3 = phi(i1, i2) #i3 = phi(0, 0)
j3 = phi(j1, j2) #j3 = phi(0, j3+1)
i2 = 0
j2 = j2+1 #j2 = j3+1
g()
i5 = phi(i3, i4(g()))
i6 = i5+1
use(i6, j2) | gpl-3.0 | -1,657,078,229,050,500,600 | 12.977011 | 62 | 0.484243 | false |
nbp/git-repo | editor.py | 85 | 2660 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import sys
import subprocess
import tempfile
from error import EditorError
class Editor(object):
"""Manages the user's preferred text editor."""
_editor = None
globalConfig = None
@classmethod
def _GetEditor(cls):
if cls._editor is None:
cls._editor = cls._SelectEditor()
return cls._editor
@classmethod
def _SelectEditor(cls):
e = os.getenv('GIT_EDITOR')
if e:
return e
if cls.globalConfig:
e = cls.globalConfig.GetString('core.editor')
if e:
return e
e = os.getenv('VISUAL')
if e:
return e
e = os.getenv('EDITOR')
if e:
return e
if os.getenv('TERM') == 'dumb':
print(
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
Tried to fall back to vi but terminal is dumb. Please configure at
least one of these before using this command.""", file=sys.stderr)
sys.exit(1)
return 'vi'
@classmethod
def EditString(cls, data):
"""Opens an editor to edit the given content.
Args:
data : the text to edit
Returns:
new value of edited text; None if editing did not succeed
"""
editor = cls._GetEditor()
if editor == ':':
return data
fd, path = tempfile.mkstemp()
try:
os.write(fd, data)
os.close(fd)
fd = None
if re.compile("^.*[$ \t'].*$").match(editor):
args = [editor + ' "$@"', 'sh']
shell = True
else:
args = [editor]
shell = False
args.append(path)
try:
rc = subprocess.Popen(args, shell=shell).wait()
except OSError as e:
raise EditorError('editor failed, %s: %s %s'
% (str(e), editor, path))
if rc != 0:
raise EditorError('editor failed with exit status %d: %s %s'
% (rc, editor, path))
fd2 = open(path)
try:
return fd2.read()
finally:
fd2.close()
finally:
if fd:
os.close(fd)
os.remove(path)
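# --- Illustrative usage (added sketch, not part of the original file) ---
# Opens the user's configured editor on the given text; run interactively.
if __name__ == '__main__':
    print(Editor.EditString('Edit me\n'))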
| apache-2.0 | 382,766,275,043,493,800 | 23.181818 | 74 | 0.61391 | false |
jsheperd/rotate_backup | rotate_backup.py | 1 | 2651 | #!/usr/bin/env python
import sys
import os
import glob
import time
class archive:
# The archive class represent an archive media with its age related parameters
def __init__(self, path):
self.path = path
self.time = time.gmtime(os.path.getmtime(path))
self.year = time.strftime("%Y", self.time)
self.month = time.strftime("%Y%m", self.time)
self.week = time.strftime("%Y%W", self.time)
self.day = time.strftime("%Y%m%d", self.time)
self.hour = time.strftime("%Y%m%d%H", self.time)
self.min = time.strftime("%Y%m%d%H%M", self.time)
self.sec = time.strftime("%Y%m%d%H%M%S", self.time)
def rm(self):
# remove the archive from the filesystem
print "rm %s" % self.path
os.remove(self.path)
class binStoreNewest:
# class to store binNum binStores in younger to older order
# each binstore represent an archive, that is the youngest one of its group
def __init__(self, binNum):
self.bins = {}
self.binNum = binNum
def add(self, id, item):
# add a new archive to the clustering
if id in self.bins: # there is an archive from this group already
storedItem = self.bins[id]
if storedItem.time < item.time:  # current item is newer than the stored one,
self.bins[id] = item # replace that
else:
self.bins[id] = item # there wasn't archive for this group till now
keys = self.bins.keys()
keys.sort()
for id in keys[:-self.binNum]: # keep the binNum newest ones
del self.bins[id]
def getPaths(self):
return [item.path for item in self.bins.values()]
def getBinTops(sourceArray, binNum, clusterFunction):
# Create groups from the archives by the clusterFunction
# Return the newest archive from each group, for the newest binNum groups
binStore = binStoreNewest(binNum)
for item in sourceArray:
binStore.add(clusterFunction(item), item)
return binStore.getPaths()
if __name__ == '__main__':
# Example usage
if len(sys.argv) >= 2:
files = sys.argv[1:]
else:
files = glob.glob("./data/*")
archives = [archive(filename) for filename in files]
daily = getBinTops(archives, 7, lambda item: item.day)
weekly = getBinTops(archives, 4, lambda item: item.week)
monthly = getBinTops(archives, 12, lambda item: item.month)
yearly = getBinTops(archives, 10, lambda item: item.year)
keepPaths = daily + weekly + monthly + yearly
for item in archives:
if item.path not in keepPaths:
item.rm()
| unlicense | -853,748,002,442,862,600 | 34.346667 | 85 | 0.62467 | false |
ehirt/odoo | addons/board/__init__.py | 439 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import board
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 9,056,403,539,778,147,000 | 43 | 78 | 0.618881 | false |
rghe/ansible | test/units/modules/network/netscaler/test_netscaler_cs_policy.py | 18 | 12568 |
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.compat.tests.mock import patch, Mock, MagicMock, call
import sys
if sys.version_info[:2] != (2, 6):
import requests
from units.modules.utils import set_module_args
from .netscaler_module import TestModule, nitro_base_patcher
class TestNetscalerCSPolicyModule(TestModule):
@classmethod
def setUpClass(cls):
class MockException(Exception):
pass
cls.MockException = MockException
m = MagicMock()
nssrc_modules_mock = {
'nssrc.com.citrix.netscaler.nitro.resource.config.cs': m,
'nssrc.com.citrix.netscaler.nitro.resource.config.cs.cspolicy': m,
}
cls.nitro_specific_patcher = patch.dict(sys.modules, nssrc_modules_mock)
cls.nitro_base_patcher = nitro_base_patcher
@classmethod
def tearDownClass(cls):
cls.nitro_base_patcher.stop()
cls.nitro_specific_patcher.stop()
def set_module_state(self, state):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state=state,
))
def setUp(self):
super(TestNetscalerCSPolicyModule, self).setUp()
self.nitro_base_patcher.start()
self.nitro_specific_patcher.start()
def tearDown(self):
super(TestNetscalerCSPolicyModule, self).tearDown()
self.nitro_base_patcher.stop()
self.nitro_specific_patcher.stop()
def test_graceful_nitro_api_import_error(self):
# Stop nitro api patching to cause ImportError
self.set_module_state('present')
self.nitro_base_patcher.stop()
self.nitro_specific_patcher.stop()
from ansible.modules.network.netscaler import netscaler_cs_policy
self.module = netscaler_cs_policy
result = self.failed()
self.assertEqual(result['msg'], 'Could not load nitro python sdk')
def test_graceful_nitro_error_on_login(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
class MockException(Exception):
def __init__(self, *args, **kwargs):
self.errorcode = 0
self.message = ''
client_mock = Mock()
client_mock.login = Mock(side_effect=MockException)
m = Mock(return_value=client_mock)
with patch('ansible.modules.network.netscaler.netscaler_cs_policy.get_nitro_client', m):
with patch('ansible.modules.network.netscaler.netscaler_cs_policy.nitro_exception', MockException):
self.module = netscaler_cs_policy
result = self.failed()
self.assertTrue(result['msg'].startswith('nitro exception'), msg='nitro exception during login not handled properly')
def test_graceful_no_connection_error(self):
if sys.version_info[:2] == (2, 6):
self.skipTest('requests library not available under python2.6')
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
client_mock = Mock()
attrs = {'login.side_effect': requests.exceptions.ConnectionError}
client_mock.configure_mock(**attrs)
m = Mock(return_value=client_mock)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
get_nitro_client=m,
nitro_exception=self.MockException,
):
self.module = netscaler_cs_policy
result = self.failed()
self.assertTrue(result['msg'].startswith('Connection error'), msg='Connection error was not handled gracefully')
def test_graceful_login_error(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
if sys.version_info[:2] == (2, 6):
self.skipTest('requests library not available under python2.6')
class MockException(Exception):
pass
client_mock = Mock()
attrs = {'login.side_effect': requests.exceptions.SSLError}
client_mock.configure_mock(**attrs)
m = Mock(return_value=client_mock)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
get_nitro_client=m,
nitro_exception=MockException,
):
self.module = netscaler_cs_policy
result = self.failed()
self.assertTrue(result['msg'].startswith('SSL Error'), msg='SSL Error was not handled gracefully')
def test_create_non_existing_cs_policy(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
cs_policy_mock = MagicMock()
attrs = {
'diff_object.return_value': {},
}
cs_policy_mock.configure_mock(**attrs)
m = MagicMock(return_value=cs_policy_mock)
policy_exists_mock = Mock(side_effect=[False, True])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
ConfigProxy=m,
policy_exists=policy_exists_mock,
nitro_exception=self.MockException,
ensure_feature_is_enabled=Mock(),
):
self.module = netscaler_cs_policy
result = self.exited()
cs_policy_mock.assert_has_calls([call.add()])
self.assertTrue(result['changed'], msg='Change not recorded')
def test_update_cs_policy_when_cs_policy_differs(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
cs_policy_mock = MagicMock()
attrs = {
'diff_object.return_value': {},
}
cs_policy_mock.configure_mock(**attrs)
m = MagicMock(return_value=cs_policy_mock)
policy_exists_mock = Mock(side_effect=[True, True])
policy_identical_mock = Mock(side_effect=[False, True])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
ConfigProxy=m,
policy_exists=policy_exists_mock,
policy_identical=policy_identical_mock,
ensure_feature_is_enabled=Mock(),
nitro_exception=self.MockException,
):
self.module = netscaler_cs_policy
result = self.exited()
cs_policy_mock.assert_has_calls([call.update()])
self.assertTrue(result['changed'], msg='Change not recorded')
def test_no_change_to_module_when_all_identical(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
cs_policy_mock = MagicMock()
attrs = {
'diff_object.return_value': {},
}
cs_policy_mock.configure_mock(**attrs)
m = MagicMock(return_value=cs_policy_mock)
policy_exists_mock = Mock(side_effect=[True, True])
policy_identical_mock = Mock(side_effect=[True, True])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
ConfigProxy=m,
policy_exists=policy_exists_mock,
policy_identical=policy_identical_mock,
ensure_feature_is_enabled=Mock(),
nitro_exception=self.MockException,
):
self.module = netscaler_cs_policy
result = self.exited()
self.assertFalse(result['changed'], msg='Erroneous changed status update')
def test_absent_operation(self):
self.set_module_state('absent')
from ansible.modules.network.netscaler import netscaler_cs_policy
cs_policy_mock = MagicMock()
attrs = {
'diff_object.return_value': {},
}
cs_policy_mock.configure_mock(**attrs)
m = MagicMock(return_value=cs_policy_mock)
policy_exists_mock = Mock(side_effect=[True, False])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
ConfigProxy=m,
policy_exists=policy_exists_mock,
nitro_exception=self.MockException,
ensure_feature_is_enabled=Mock(),
):
self.module = netscaler_cs_policy
result = self.exited()
cs_policy_mock.assert_has_calls([call.delete()])
self.assertTrue(result['changed'], msg='Changed status not set correctly')
def test_absent_operation_no_change(self):
self.set_module_state('absent')
from ansible.modules.network.netscaler import netscaler_cs_policy
cs_policy_mock = MagicMock()
attrs = {
'diff_object.return_value': {},
}
cs_policy_mock.configure_mock(**attrs)
m = MagicMock(return_value=cs_policy_mock)
policy_exists_mock = Mock(side_effect=[False, False])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
ConfigProxy=m,
policy_exists=policy_exists_mock,
nitro_exception=self.MockException,
ensure_feature_is_enabled=Mock(),
):
self.module = netscaler_cs_policy
result = self.exited()
cs_policy_mock.assert_not_called()
self.assertFalse(result['changed'], msg='Changed status not set correctly')
def test_graceful_nitro_exception_operation_present(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
class MockException(Exception):
def __init__(self, *args, **kwargs):
self.errorcode = 0
self.message = ''
m = Mock(side_effect=MockException)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
policy_exists=m,
ensure_feature_is_enabled=Mock(),
nitro_exception=MockException
):
self.module = netscaler_cs_policy
result = self.failed()
self.assertTrue(
result['msg'].startswith('nitro exception'),
msg='Nitro exception not caught on operation present'
)
def test_graceful_nitro_exception_operation_absent(self):
self.set_module_state('absent')
from ansible.modules.network.netscaler import netscaler_cs_policy
class MockException(Exception):
def __init__(self, *args, **kwargs):
self.errorcode = 0
self.message = ''
m = Mock(side_effect=MockException)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
policy_exists=m,
nitro_exception=MockException,
ensure_feature_is_enabled=Mock(),
):
self.module = netscaler_cs_policy
result = self.failed()
self.assertTrue(
result['msg'].startswith('nitro exception'),
msg='Nitro exception not caught on operation absent'
)
def test_ensure_feature_is_enabled_called(self):
self.set_module_state('present')
from ansible.modules.network.netscaler import netscaler_cs_policy
client_mock = Mock()
ensure_feature_is_enabled_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_cs_policy',
get_nitro_client=Mock(return_value=client_mock),
policy_exists=Mock(side_effect=[True, True]),
nitro_exception=self.MockException,
ensure_feature_is_enabled=ensure_feature_is_enabled_mock,
):
self.module = netscaler_cs_policy
result = self.exited()
ensure_feature_is_enabled_mock.assert_has_calls([call(client_mock, 'CS')])
| gpl-3.0 | 7,039,932,926,364,580,000 | 37.200608 | 133 | 0.619828 | false |
cloudera/ibis | ibis/tests/expr/test_pipe.py | 3 | 1763 | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import ibis
@pytest.fixture
def pipe_table():
return ibis.table(
[
('key1', 'string'),
('key2', 'string'),
('key3', 'string'),
('value', 'double'),
],
'foo_table',
)
def test_pipe_positional_args(pipe_table):
def my_func(data, foo, bar):
return data[bar] + foo
result = pipe_table.pipe(my_func, 4, 'value')
expected = pipe_table['value'] + 4
assert result.equals(expected)
def test_pipe_keyword_args(pipe_table):
def my_func(data, foo=None, bar=None):
return data[bar] + foo
result = pipe_table.pipe(my_func, foo=4, bar='value')
expected = pipe_table['value'] + 4
assert result.equals(expected)
def test_pipe_pass_to_keyword(pipe_table):
def my_func(x, y, data=None):
return data[x] + y
result = pipe_table.pipe((my_func, 'data'), 'value', 4)
expected = pipe_table['value'] + 4
assert result.equals(expected)
def test_call_pipe_equivalence(pipe_table):
result = pipe_table(lambda x: x['key1'].cast('double').sum())
expected = pipe_table.key1.cast('double').sum()
assert result.equals(expected)
| apache-2.0 | -947,330,347,682,574,000 | 25.712121 | 74 | 0.648327 | false |
r2t2sdr/r2t2 | u-boot/tools/buildman/toolchain.py | 5 | 8510 | # Copyright (c) 2012 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import re
import glob
import os
import bsettings
import command
class Toolchain:
"""A single toolchain
Public members:
gcc: Full path to C compiler
path: Directory path containing C compiler
cross: Cross compile string, e.g. 'arm-linux-'
arch: Architecture of toolchain as determined from the first
component of the filename. E.g. arm-linux-gcc becomes arm
"""
def __init__(self, fname, test, verbose=False):
"""Create a new toolchain object.
Args:
fname: Filename of the gcc component
test: True to run the toolchain to test it
"""
self.gcc = fname
self.path = os.path.dirname(fname)
self.cross = os.path.basename(fname)[:-3]
pos = self.cross.find('-')
self.arch = self.cross[:pos] if pos != -1 else 'sandbox'
env = self.MakeEnvironment()
# As a basic sanity check, run the C compiler with --version
cmd = [fname, '--version']
if test:
result = command.RunPipe([cmd], capture=True, env=env,
raise_on_error=False)
self.ok = result.return_code == 0
if verbose:
print 'Tool chain test: ',
if self.ok:
print 'OK'
else:
print 'BAD'
print 'Command: ', cmd
print result.stdout
print result.stderr
else:
self.ok = True
self.priority = self.GetPriority(fname)
def GetPriority(self, fname):
"""Return the priority of the toolchain.
Toolchains are ranked according to their suitability by their
filename prefix.
Args:
fname: Filename of toolchain
Returns:
            Priority of toolchain, 0 = highest; unmatched toolchains get the lowest rank.
"""
priority_list = ['-elf', '-unknown-linux-gnu', '-linux',
'-none-linux-gnueabi', '-uclinux', '-none-eabi',
'-gentoo-linux-gnu', '-linux-gnueabi', '-le-linux', '-uclinux']
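        # Earlier suffixes outrank later ones (first match wins); the trailing
        # duplicate '-uclinux' entry is therefore never reached, and a
        # toolchain matching nothing falls through to the lowest rank.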
for prio in range(len(priority_list)):
if priority_list[prio] in fname:
return prio
return prio
def MakeEnvironment(self):
"""Returns an environment for using the toolchain.
        This takes the current environment, adds CROSS_COMPILE and
augments PATH so that the toolchain will operate correctly.
"""
env = dict(os.environ)
env['CROSS_COMPILE'] = self.cross
env['PATH'] += (':' + self.path)
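        # Illustrative example (hypothetical path): /opt/arm/bin/arm-linux-gcc
        # yields CROSS_COMPILE='arm-linux-' and appends ':/opt/arm/bin' to PATH.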
return env
class Toolchains:
"""Manage a list of toolchains for building U-Boot
We select one toolchain for each architecture type
Public members:
toolchains: Dict of Toolchain objects, keyed by architecture name
paths: List of paths to check for toolchains (may contain wildcards)
"""
def __init__(self):
self.toolchains = {}
self.paths = []
toolchains = bsettings.GetItems('toolchain')
if not toolchains:
print ("Warning: No tool chains - please add a [toolchain] section"
" to your buildman config file %s. See README for details" %
bsettings.config_fname)
for name, value in toolchains:
if '*' in value:
self.paths += glob.glob(value)
else:
self.paths.append(value)
self._make_flags = dict(bsettings.GetItems('make-flags'))
def Add(self, fname, test=True, verbose=False):
"""Add a toolchain to our list
We select the given toolchain as our preferred one for its
architecture if it is a higher priority than the others.
Args:
fname: Filename of toolchain's gcc driver
test: True to run the toolchain to test it
"""
toolchain = Toolchain(fname, test, verbose)
add_it = toolchain.ok
if toolchain.arch in self.toolchains:
add_it = (toolchain.priority <
self.toolchains[toolchain.arch].priority)
if add_it:
self.toolchains[toolchain.arch] = toolchain
def Scan(self, verbose):
"""Scan for available toolchains and select the best for each arch.
        We look for all the toolchains we can find, figure out the
architecture for each, and whether it works. Then we select the
highest priority toolchain for each arch.
Args:
verbose: True to print out progress information
"""
if verbose: print 'Scanning for tool chains'
for path in self.paths:
if verbose: print " - scanning path '%s'" % path
for subdir in ['.', 'bin', 'usr/bin']:
dirname = os.path.join(path, subdir)
if verbose: print " - looking in '%s'" % dirname
for fname in glob.glob(dirname + '/*gcc'):
if verbose: print " - found '%s'" % fname
self.Add(fname, True, verbose)
def List(self):
"""List out the selected toolchains for each architecture"""
print 'List of available toolchains (%d):' % len(self.toolchains)
if len(self.toolchains):
for key, value in sorted(self.toolchains.iteritems()):
print '%-10s: %s' % (key, value.gcc)
else:
print 'None'
def Select(self, arch):
"""Returns the toolchain for a given architecture
Args:
args: Name of architecture (e.g. 'arm', 'ppc_8xx')
returns:
toolchain object, or None if none found
"""
for name, value in bsettings.GetItems('toolchain-alias'):
if arch == name:
arch = value
if not arch in self.toolchains:
raise ValueError, ("No tool chain found for arch '%s'" % arch)
return self.toolchains[arch]
def ResolveReferences(self, var_dict, args):
"""Resolve variable references in a string
This converts ${blah} within the string to the value of blah.
This function works recursively.
Args:
var_dict: Dictionary containing variables and their values
args: String containing make arguments
Returns:
Resolved string
>>> bsettings.Setup()
>>> tcs = Toolchains()
>>> tcs.Add('fred', False)
>>> var_dict = {'oblique' : 'OBLIQUE', 'first' : 'fi${second}rst', \
'second' : '2nd'}
>>> tcs.ResolveReferences(var_dict, 'this=${oblique}_set')
'this=OBLIQUE_set'
>>> tcs.ResolveReferences(var_dict, 'this=${oblique}_set${first}nd')
'this=OBLIQUE_setfi2ndrstnd'
"""
        re_var = re.compile(r'(\$\{[a-z0-9A-Z]{1,}\})')
while True:
m = re_var.search(args)
if not m:
break
lookup = m.group(0)[2:-1]
value = var_dict.get(lookup, '')
args = args[:m.start(0)] + value + args[m.end(0):]
return args
def GetMakeArguments(self, board):
"""Returns 'make' arguments for a given board
The flags are in a section called 'make-flags'. Flags are named
after the target they represent, for example snapper9260=TESTING=1
will pass TESTING=1 to make when building the snapper9260 board.
References to other boards can be added in the string also. For
example:
[make-flags]
at91-boards=ENABLE_AT91_TEST=1
snapper9260=${at91-boards} BUILD_TAG=442
snapper9g45=${at91-boards} BUILD_TAG=443
This will return 'ENABLE_AT91_TEST=1 BUILD_TAG=442' for snapper9260
and 'ENABLE_AT91_TEST=1 BUILD_TAG=443' for snapper9g45.
A special 'target' variable is set to the board target.
Args:
board: Board object for the board to check.
Returns:
'make' flags for that board, or '' if none
"""
self._make_flags['target'] = board.target
arg_str = self.ResolveReferences(self._make_flags,
self._make_flags.get(board.target, ''))
args = arg_str.split(' ')
i = 0
while i < len(args):
if not args[i]:
del args[i]
else:
i += 1
return args
| gpl-3.0 | 826,594,270,267,984,000 | 33.453441 | 79 | 0.5604 | false |
borjam/exabgp | src/exabgp/bgp/message/update/nlri/vpls.py | 3 | 3647 | # encoding: utf-8
"""
vpls.py
Created by Nikita Shirokov on 2014-06-16.
Copyright (c) 2014-2017 Nikita Shirokov. All rights reserved.
Copyright (c) 2014-2017 Exa Networks. All rights reserved.
License: 3-clause BSD. (See the COPYRIGHT file)
"""
from struct import unpack
from struct import pack
from exabgp.protocol.family import AFI
from exabgp.protocol.family import SAFI
from exabgp.bgp.message.direction import OUT
from exabgp.bgp.message.notification import Notify
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.bgp.message.update.nlri.qualifier import RouteDistinguisher
def _unique():
value = 0
while True:
yield value
value += 1
unique = _unique()
@NLRI.register(AFI.l2vpn, SAFI.vpls)
class VPLS(NLRI):
# XXX: Should take AFI, SAFI and OUT.direction as parameter to match other NLRI
def __init__(self, rd, endpoint, base, offset, size):
NLRI.__init__(self, AFI.l2vpn, SAFI.vpls)
self.action = OUT.ANNOUNCE
self.nexthop = None
self.rd = rd
self.base = base
self.offset = offset
self.size = size
self.endpoint = endpoint
self.unique = next(unique)
def feedback(self, action):
if self.nexthop is None and action == OUT.ANNOUNCE:
return 'vpls nlri next-hop missing'
if self.endpoint is None:
return 'vpls nlri endpoint missing'
if self.base is None:
return 'vpls nlri base missing'
if self.offset is None:
return 'vpls nlri offset missing'
if self.size is None:
return 'vpls nlri size missing'
if self.rd is None:
return 'vpls nlri route-distinguisher missing'
if self.base > (0xFFFFF - self.size): # 20 bits, 3 bytes
return 'vpls nlri size inconsistency'
return ''
def assign(self, name, value):
setattr(self, name, value)
def pack_nlri(self, negotiated=None):
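        # Wire layout per RFC 4761: 2-byte length (17), 8-byte route
        # distinguisher, 2-byte VE ID, 2-byte block offset, 2-byte block size,
        # then the 20-bit label base shifted left by 4 (bottom-of-stack bit
        # set) truncated to 3 bytes.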
return (
b'\x00\x11' # pack('!H',17)
+ self.rd.pack()
+ pack('!HHH', self.endpoint, self.offset, self.size)
+ pack('!L', (self.base << 4) | 0x1)[1:] # setting the bottom of stack, should we ?
)
    # XXX: FIXME: we need a unique key here.
    # XXX: What can we use as a unique key?
def json(self, compact=None):
content = ', '.join(
[
self.rd.json(),
'"endpoint": %s' % self.endpoint,
'"base": %s' % self.base,
'"offset": %s' % self.offset,
'"size": %s' % self.size,
]
)
return '{ %s }' % (content)
def extensive(self):
return "vpls%s endpoint %s base %s offset %s size %s %s" % (
self.rd,
self.endpoint,
self.base,
self.offset,
self.size,
'' if self.nexthop is None else 'next-hop %s' % self.nexthop,
)
def __str__(self):
return self.extensive()
@classmethod
def unpack_nlri(cls, afi, safi, bgp, action, addpath):
# label is 20bits, stored using 3 bytes, 24 bits
(length,) = unpack('!H', bgp[0:2])
if len(bgp) != length + 2:
raise Notify(3, 10, 'l2vpn vpls message length is not consistent with encoded bgp')
rd = RouteDistinguisher(bgp[2:10])
endpoint, offset, size = unpack('!HHH', bgp[10:16])
base = unpack('!L', b'\x00' + bgp[16:19])[0] >> 4
nlri = cls(rd, endpoint, base, offset, size)
nlri.action = action
# nlri.nexthop = IP.unpack(nexthop)
return nlri, bgp[19:]
| bsd-3-clause | -3,046,368,619,695,330,300 | 31.274336 | 96 | 0.57609 | false |
mrchapp/meta-openembedded | meta-oe/lib/oeqa/selftest/cases/meta_oe_sources.py | 4 | 1206 | import os
import re
import glob as g
import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
class MetaOESourceMirroring(OESelftestTestCase):
# Can we download everything from the OpenEmbedded Sources Mirror over http only
def test_oe_source_mirror(self):
self.write_config("""
BB_ALLOWED_NETWORKS = " sources.openembedded.org"
MIRRORS = ""
DL_DIR = "${TMPDIR}/test_oe_downloads"
PREMIRRORS = "\\
bzr://.*/.* http://sources.openembedded.org/ \\n \\
cvs://.*/.* http://sources.openembedded.org/ \\n \\
git://.*/.* http://sources.openembedded.org/ \\n \\
gitsm://.*/.* http://sources.openembedded.org/ \\n \\
hg://.*/.* http://sources.openembedded.org/ \\n \\
osc://.*/.* http://sources.openembedded.org/ \\n \\
p4://.*/.* http://sources.openembedded.org/ \\n \\
svn://.*/.* http://sources.openembedded.org/ \\n \\
ftp://.*/.* http://sources.openembedded.org/ \\n \\
http://.*/.* http://sources.openembedded.org/ \\n \\
https://.*/.* http://sources.openembedded.org/ \\n"
""")
bitbake("world --runall fetch")
| mit | -4,816,193,112,612,254,000 | 39.2 | 84 | 0.617745 | false |
pepeportela/edx-platform | common/djangoapps/util/model_utils.py | 6 | 7263 | """
Utilities for django models.
"""
import re
import unicodedata
from django.conf import settings
from django.dispatch import Signal
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django_countries.fields import Country
from eventtracking import tracker
# The setting name used for events when "settings" (account settings, preferences, profile information) change.
USER_SETTINGS_CHANGED_EVENT_NAME = u'edx.user.settings.changed'
# Used to signal a field value change
USER_FIELD_CHANGED = Signal(providing_args=["user", "table", "setting", "old_value", "new_value"])
def get_changed_fields_dict(instance, model_class):
"""
Helper method for tracking field changes on a model.
Given a model instance and class, return a dict whose keys are that
instance's fields which differ from the last saved ones and whose values
are the old values of those fields. Related fields are not considered.
Args:
instance (Model instance): the model instance with changes that are
being tracked
model_class (Model class): the class of the model instance we are
tracking
Returns:
dict: a mapping of field names to current database values of those
fields, or an empty dict if the model is new
"""
try:
old_model = model_class.objects.get(pk=instance.pk)
except model_class.DoesNotExist:
# Object is new, so fields haven't technically changed. We'll return
# an empty dict as a default value.
return {}
else:
# We want to compare all of the scalar fields on the model, but none of
# the relations.
field_names = [f.name for f in model_class._meta.get_fields() if not f.is_relation] # pylint: disable=protected-access
changed_fields = {
field_name: getattr(old_model, field_name) for field_name in field_names
if getattr(old_model, field_name) != getattr(instance, field_name)
}
return changed_fields
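# Illustrative usage (hypothetical model, not part of this module):
#   instance._changed_fields = get_changed_fields_dict(instance, UserProfile)
#   instance.save()  # emit_field_changed_events() can then inspect the dict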
def emit_field_changed_events(instance, user, db_table, excluded_fields=None, hidden_fields=None):
"""Emits a settings changed event for each field that has changed.
Note that this function expects that a `_changed_fields` dict has been set
    as an attribute on `instance` (see `get_changed_fields_dict`).
Args:
instance (Model instance): the model instance that is being saved
user (User): the user that this instance is associated with
db_table (str): the name of the table that we're modifying
excluded_fields (list): a list of field names for which events should
not be emitted
hidden_fields (list): a list of field names specifying fields whose
values should not be included in the event (None will be used
instead)
Returns:
None
"""
def clean_field(field_name, value):
"""
Prepare a field to be emitted in a JSON serializable format. If
`field_name` is a hidden field, return None.
"""
if field_name in hidden_fields:
return None
# Country is not JSON serializable. Return the country code.
if isinstance(value, Country):
if value.code:
return value.code
else:
return None
return value
excluded_fields = excluded_fields or []
hidden_fields = hidden_fields or []
changed_fields = getattr(instance, '_changed_fields', {})
for field_name in changed_fields:
if field_name not in excluded_fields:
old_value = clean_field(field_name, changed_fields[field_name])
new_value = clean_field(field_name, getattr(instance, field_name))
emit_setting_changed_event(user, db_table, field_name, old_value, new_value)
# Remove the now inaccurate _changed_fields attribute.
if hasattr(instance, '_changed_fields'):
del instance._changed_fields
def truncate_fields(old_value, new_value):
"""
Truncates old_value and new_value for analytics event emission if necessary.
Args:
old_value(obj): the value before the change
new_value(obj): the new value being saved
Returns:
a dictionary with the following fields:
'old': the truncated old value
'new': the truncated new value
'truncated': the list of fields that have been truncated
"""
# Compute the maximum value length so that two copies can fit into the maximum event size
# in addition to all the other fields recorded.
max_value_length = settings.TRACK_MAX_EVENT / 4
serialized_old_value, old_was_truncated = _get_truncated_setting_value(old_value, max_length=max_value_length)
serialized_new_value, new_was_truncated = _get_truncated_setting_value(new_value, max_length=max_value_length)
truncated_values = []
if old_was_truncated:
truncated_values.append("old")
if new_was_truncated:
truncated_values.append("new")
return {'old': serialized_old_value, 'new': serialized_new_value, 'truncated': truncated_values}
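# Example (illustrative): with settings.TRACK_MAX_EVENT = 40 each value may be
# at most 10 characters long, so a 20-character old value is cut to 10 and
# 'old' is listed under 'truncated'.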
def emit_setting_changed_event(user, db_table, setting_name, old_value, new_value):
"""Emits an event for a change in a setting.
Args:
user (User): the user that this setting is associated with.
db_table (str): the name of the table that we're modifying.
setting_name (str): the name of the setting being changed.
old_value (object): the value before the change.
new_value (object): the new value being saved.
Returns:
None
"""
truncated_fields = truncate_fields(old_value, new_value)
truncated_fields['setting'] = setting_name
truncated_fields['user_id'] = user.id
truncated_fields['table'] = db_table
tracker.emit(
USER_SETTINGS_CHANGED_EVENT_NAME,
truncated_fields
)
# Announce field change
USER_FIELD_CHANGED.send(sender=None, user=user, table=db_table, setting=setting_name,
old_value=old_value, new_value=new_value)
def _get_truncated_setting_value(value, max_length=None):
"""
Returns the truncated form of a setting value.
Returns:
truncated_value (object): the possibly truncated version of the value.
was_truncated (bool): returns true if the serialized value was truncated.
"""
if isinstance(value, basestring) and max_length is not None and len(value) > max_length:
return value[0:max_length], True
else:
return value, False
# Taken from Django 1.8 source code because it's not supported in 1.4
def slugify(value):
"""Converts value into a string suitable for readable URLs.
Converts to ASCII. Converts spaces to hyphens. Removes characters that
aren't alphanumerics, underscores, or hyphens. Converts to lowercase.
Also strips leading and trailing whitespace.
Args:
value (string): String to slugify.
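    Example (illustrative): slugify(u'Héllo Wörld!') yields u'hello-world'.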
"""
value = force_unicode(value)
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
value = re.sub(r'[^\w\s-]', '', value).strip().lower()
return mark_safe(re.sub(r'[-\s]+', '-', value))
| agpl-3.0 | -1,455,315,741,796,901,000 | 37.226316 | 130 | 0.666667 | false |
wweiradio/django | tests/flatpages_tests/test_forms.py | 165 | 4569 | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.flatpages.forms import FlatpageForm
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from django.utils import translation
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.flatpages', ]})
@override_settings(SITE_ID=1)
class FlatpageAdminFormTests(TestCase):
@classmethod
def setUpTestData(cls):
# don't use the manager because we want to ensure the site exists
# with pk=1, regardless of whether or not it already exists.
cls.site1 = Site(pk=1, domain='example.com', name='example.com')
cls.site1.save()
def setUp(self):
# Site fields cache needs to be cleared after flatpages is added to
# INSTALLED_APPS
Site._meta._expire_cache()
self.form_data = {
'title': "A test page",
'content': "This is a test",
'sites': [settings.SITE_ID],
}
def test_flatpage_admin_form_url_validation(self):
"The flatpage admin form correctly validates urls"
self.assertTrue(FlatpageForm(data=dict(url='/new_flatpage/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.special~chars/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.very_special~chars-here/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a space/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a % char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ! char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a & char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ? char/', **self.form_data)).is_valid())
def test_flatpage_requires_leading_slash(self):
form = FlatpageForm(data=dict(url='no_leading_slash/', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a leading slash."])
@override_settings(APPEND_SLASH=True,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_requires_trailing_slash_with_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a trailing slash."])
@override_settings(APPEND_SLASH=False,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_doesnt_requires_trailing_slash_without_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
self.assertTrue(form.is_valid())
def test_flatpage_admin_form_url_uniqueness_validation(self):
"The flatpage admin form correctly enforces url uniqueness among flatpages of the same site"
data = dict(url='/myflatpage1/', **self.form_data)
FlatpageForm(data=data).save()
f = FlatpageForm(data=data)
with translation.override('en'):
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'__all__': ['Flatpage with url /myflatpage1/ already exists for site example.com']})
def test_flatpage_admin_form_edit(self):
"""
Existing flatpages can be edited in the admin form without triggering
the url-uniqueness validation.
"""
existing = FlatPage.objects.create(
url="/myflatpage1/", title="Some page", content="The content")
existing.sites.add(settings.SITE_ID)
data = dict(url='/myflatpage1/', **self.form_data)
f = FlatpageForm(data=data, instance=existing)
self.assertTrue(f.is_valid(), f.errors)
updated = f.save()
self.assertEqual(updated.title, "A test page")
def test_flatpage_nosites(self):
data = dict(url='/myflatpage1/', **self.form_data)
data.update({'sites': ''})
f = FlatpageForm(data=data)
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'sites': [translation.ugettext('This field is required.')]})
| bsd-3-clause | 323,132,750,961,510,200 | 40.917431 | 115 | 0.649376 | false |
gpetretto/monty | tests/test_os.py | 2 | 1196 | __author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = '[email protected]'
__date__ = '1/24/14'
import unittest
import os
from monty.os.path import which, zpath
from monty.os import cd
test_dir = os.path.join(os.path.dirname(__file__), 'test_files')
class PathTest(unittest.TestCase):
def test_which(self):
py = which("python")
self.assertEqual(os.path.basename(py), "python")
def test_zpath(self):
fullzpath = zpath(os.path.join(test_dir, "myfile_gz"))
self.assertEqual(os.path.join(test_dir, "myfile_gz.gz"), fullzpath)
class CdTest(unittest.TestCase):
def test_cd(self):
with cd(test_dir):
self.assertTrue(os.path.exists("empty_file.txt"))
self.assertFalse(os.path.exists("empty_file.txt"))
def test_cd_exception(self):
try:
with cd(test_dir):
self.assertTrue(os.path.exists("empty_file.txt"))
raise RuntimeError()
        except RuntimeError:
pass
self.assertFalse(os.path.exists("empty_file.txt"))
if __name__ == "__main__":
unittest.main()
| mit | 2,371,059,867,865,757,000 | 25 | 75 | 0.609532 | false |
double-y/django | tests/forms_tests/tests/test_validators.py | 261 | 1540 | from __future__ import unicode_literals
import re
from unittest import TestCase
from django import forms
from django.core import validators
from django.core.exceptions import ValidationError
class UserForm(forms.Form):
full_name = forms.CharField(
max_length=50,
validators=[
validators.validate_integer,
validators.validate_email,
]
)
string = forms.CharField(
max_length=50,
validators=[
validators.RegexValidator(
regex='^[a-zA-Z]*$',
message="Letters only.",
)
]
)
ignore_case_string = forms.CharField(
max_length=50,
validators=[
validators.RegexValidator(
regex='^[a-z]*$',
message="Letters only.",
flags=re.IGNORECASE,
)
]
)
class TestFieldWithValidators(TestCase):
def test_all_errors_get_reported(self):
form = UserForm({'full_name': 'not int nor mail', 'string': '2 is not correct', 'ignore_case_string': "IgnORE Case strIng"})
self.assertRaises(ValidationError, form.fields['full_name'].clean, 'not int nor mail')
try:
form.fields['full_name'].clean('not int nor mail')
except ValidationError as e:
self.assertEqual(2, len(e.messages))
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['string'], ["Letters only."])
self.assertEqual(form.errors['string'], ["Letters only."])
| bsd-3-clause | -57,411,264,725,189,270 | 28.056604 | 132 | 0.585714 | false |
eicher31/compassion-switzerland | partner_communication_switzerland/models/partner_communication.py | 3 | 28550 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import base64
import time
import logging
import re
from ..wizards.generate_communication_wizard import SMS_CHAR_LIMIT, SMS_COST
from math import ceil
from collections import OrderedDict
from datetime import date, datetime
from io import BytesIO
from dateutil.relativedelta import relativedelta
from odoo.addons.sponsorship_compassion.models.product import GIFT_REF
from odoo import api, models, _, fields
from odoo.exceptions import MissingError, UserError
_logger = logging.getLogger(__name__)
try:
from pyPdf import PdfFileWriter, PdfFileReader
from bs4 import BeautifulSoup
except ImportError:
_logger.warning("Please install pypdf and bs4 for using the module")
class PartnerCommunication(models.Model):
_inherit = 'partner.communication.job'
event_id = fields.Many2one('crm.event.compassion', 'Event')
ambassador_id = fields.Many2one('res.partner', 'Ambassador')
currency_id = fields.Many2one('res.currency', compute='_compute_currency')
utm_campaign_id = fields.Many2one('utm.campaign')
sms_cost = fields.Float()
sms_provider_id = fields.Many2one(
'sms.provider', 'SMS Provider',
default=lambda self: self.env.ref('sms_939.large_account_id', False),
readonly=False)
@api.model
def send_mode_select(self):
modes = super(PartnerCommunication, self).send_mode_select()
modes.append(('sms', _('SMS')))
return modes
@api.multi
def _compute_currency(self):
chf = self.env.ref('base.CHF')
for wizard in self:
wizard.currency_id = chf.id
def get_correspondence_attachments(self):
"""
Include PDF of letters if the send_mode is to print the letters.
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
attachments = dict()
# Report is used for print configuration
report = 'report_compassion.b2s_letter'
letters = self.get_objects()
if self.send_mode == 'physical':
for letter in self.get_objects():
try:
attachments[letter.file_name] = [
report, self._convert_pdf(letter.letter_image)]
except MissingError:
_logger.warn("Missing letter image", exc_info=True)
self.send_mode = False
self.auto_send = False
self.message_post(
_('The letter image is missing!'), _("Missing letter"))
continue
else:
# Attach directly a zip in the letters
letters.attach_zip()
return attachments
def get_birthday_bvr(self):
"""
Attach birthday gift slip with background for sending by e-mail
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
attachments = dict()
background = self.send_mode and 'physical' not in self.send_mode
sponsorships = self.get_objects().filtered(
lambda s: not s.birthday_paid)
gifts_to = sponsorships[:1].gift_partner_id
if sponsorships and gifts_to == self.partner_id:
birthday_gift = self.env['product.product'].search([
('default_code', '=', GIFT_REF[0])], limit=1)
attachments = sponsorships.get_bvr_gift_attachment(
birthday_gift, background)
return attachments
def get_graduation_bvr(self):
"""
Attach graduation gift slip with background for sending by e-mail
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
attachments = dict()
background = self.send_mode and 'physical' not in self.send_mode
sponsorships = self.get_objects()
graduation = self.env['product.product'].search([
('default_code', '=', GIFT_REF[4])], limit=1)
gifts_to = sponsorships[0].gift_partner_id
if sponsorships and gifts_to == self.partner_id:
attachments = sponsorships.get_bvr_gift_attachment(
graduation, background)
return attachments
def get_family_slip_attachment(self):
"""
Attach family gift slip with background for sending by e-mail
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
attachments = dict()
background = self.send_mode and 'physical' not in self.send_mode
sponsorships = self.get_objects()
family = self.env['product.product'].search([
('default_code', '=', GIFT_REF[2])], limit=1)
gifts_to = sponsorships[0].gift_partner_id
if sponsorships and gifts_to == self.partner_id:
attachments = sponsorships.get_bvr_gift_attachment(
family, background)
return attachments
def get_reminder_bvr(self):
"""
Attach sponsorship due payment slip with background for sending by
e-mail.
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
sponsorships = self.get_objects()
# Verify big due periods
if len(sponsorships.mapped('months_due')) > 3:
self.need_call = 'before_sending'
payment_mode = sponsorships.with_context(lang='en_US').mapped(
'payment_mode_id.name')[0]
# LSV-DD Waiting reminders special case
if 'Waiting Reminder' in self.config_id.name and (
'LSV' in payment_mode or 'Postfinance' in payment_mode):
if self.partner_id.bank_ids:
# We received the bank info but withdrawal didn't work.
# Mark to call in order to verify the situation.
self.need_call = 'before_sending'
else:
# Don't put payment slip if we just wait the authorization form
return dict()
        # Set the sponsorship product so the payment slip is printed when
        # sending physically.
if self.send_mode and 'physical' in self.send_mode:
self.product_id = self.env['product.product'].search([
('default_code', '=', 'sponsorship')], limit=1)
return dict()
# In other cases, attach the payment slip.
report_name = 'report_compassion.bvr_due'
return {
_('sponsorship due.pdf'): [
report_name,
base64.b64encode(self.env['report'].get_pdf(
sponsorships.ids, report_name,
data={'background': True, 'doc_ids': sponsorships.ids}
))
]
}
def get_label_from_sponsorship(self):
"""
Attach sponsorship labels. Used from communication linked to children.
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
sponsorships = self.get_objects()
return self.get_label_attachment(sponsorships)
def get_label_attachment(self, sponsorships=False):
"""
Attach sponsorship labels. Used from communication linked to children.
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
if not sponsorships:
sponsorships = self.env['recurring.contract']
children = self.get_objects()
for child in children:
sponsorships += child.sponsorship_ids[0]
attachments = dict()
label_print = self.env['label.print'].search([
('name', '=', 'Sponsorship Label')], limit=1)
label_brand = self.env['label.brand'].search([
('brand_name', '=', 'Herma A4')], limit=1)
label_format = self.env['label.config'].search([
('name', '=', '4455 SuperPrint WeiB')], limit=1)
label_wizard = self.env['label.print.wizard'].with_context({
'active_ids': sponsorships.ids,
'active_model': 'recurring.contract',
'label_print': label_print.id,
'must_skip_send_to_printer': True
}).create({
'brand_id': label_brand.id,
'config_id': label_format.id,
'number_of_labels': 33
})
label_data = label_wizard.get_report_data()
report_name = 'label.report_label'
attachments[_('sponsorship labels.pdf')] = [
report_name,
base64.b64encode(
label_wizard.env['report'].get_pdf(
label_wizard.ids, report_name, data=label_data))
]
return attachments
def get_child_picture_attachment(self):
"""
        Attach child pictures to the communication. It attaches them
        directly when sent by e-mail and therefore
        returns an empty dictionary.
:return: dict {}
"""
self.ensure_one()
res = dict()
if self.send_mode and 'physical' not in self.send_mode:
# Prepare attachments in case the communication is sent by e-mail
children = self.get_objects()
attachments = self.env['ir.attachment']
for child in children:
name = child.local_id + ' ' + child.last_photo_date + '.jpg'
attachments += attachments.create({
'name': name,
'datas_fname': name,
'res_model': self._name,
'res_id': self.id,
'datas': child.fullshot,
})
self.with_context(no_print=True).ir_attachment_ids = attachments
else:
self.ir_attachment_ids = False
return res
def get_yearly_payment_slips_2bvr(self):
return self.get_yearly_payment_slips(bv_number=2)
def get_yearly_payment_slips(self, bv_number=3):
"""
Attach payment slips
:param bv_number number of BV on a page (switch between 2BV/3BV page)
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
assert bv_number in (2, 3)
sponsorships = self.get_objects()
payment_mode_bvr = self.env.ref(
'sponsorship_switzerland.payment_mode_bvr')
attachments = dict()
# IF payment mode is BVR and partner is paying
# attach sponsorship payment slips
pay_bvr = sponsorships.filtered(
lambda s: s.payment_mode_id == payment_mode_bvr and
s.partner_id == self.partner_id)
report_obj = self.env['report']
if pay_bvr and pay_bvr.must_pay_next_year():
today = date.today()
date_start = today.replace(today.year + 1, 1, 1)
date_stop = date_start.replace(month=12, day=31)
report_name = 'report_compassion.{}bvr_sponsorship'.format(
bv_number)
attachments.update({
_('sponsorship payment slips.pdf'): [
report_name,
base64.b64encode(report_obj.get_pdf(
pay_bvr.ids, report_name,
data={
'doc_ids': pay_bvr.ids,
'date_start': fields.Date.to_string(date_start),
'date_stop': fields.Date.to_string(date_stop),
'background': self.send_mode != 'physical'
}
))
]
})
# Attach gifts for correspondents
pays_gift = self.env['recurring.contract']
for sponsorship in sponsorships:
if sponsorship.mapped(sponsorship.send_gifts_to) == \
self.partner_id:
pays_gift += sponsorship
if pays_gift:
report_name = 'report_compassion.{}bvr_gift_sponsorship'.format(
bv_number)
product_ids = self.env['product.product'].search([
('default_code', 'in', GIFT_REF[:3])
]).ids
attachments.update({
_('sponsorship gifts.pdf'): [
report_name,
base64.b64encode(report_obj.get_pdf(
pays_gift.ids, report_name,
data={
'doc_ids': pays_gift.ids,
'product_ids': product_ids
}
))
]
})
return attachments
def get_childpack_attachment(self):
self.ensure_one()
lang = self.partner_id.lang
sponsorships = self.get_objects()
exit_conf = self.env.ref(
'partner_communication_switzerland.lifecycle_child_planned_exit')
if self.config_id == exit_conf and sponsorships.mapped(
'sub_sponsorship_id'):
sponsorships = sponsorships.mapped('sub_sponsorship_id')
children = sponsorships.mapped('child_id')
# Always retrieve latest information before printing dossier
children.get_infos()
report_name = 'report_compassion.childpack_small'
return {
_('child dossier.pdf'): [
report_name,
base64.b64encode(self.env['report'].get_pdf(
children.ids, report_name, data={
'lang': lang,
'is_pdf': self.send_mode != 'physical',
'type': report_name,
}))
]
}
def get_tax_receipt(self):
self.ensure_one()
res = {}
if self.send_mode == 'digital':
report_name = 'report_compassion.tax_receipt'
data = {
'doc_ids': self.partner_id.ids,
'year': self.env.context.get('year', date.today().year - 1),
'lang': self.partner_id.lang,
}
res = {
_('tax receipt.pdf'): [
report_name,
base64.b64encode(
self.env['report'].with_context(
must_skip_send_to_printer=True).get_pdf(
self.partner_id.ids, report_name, data=data))
]
}
return res
@api.multi
def send(self):
"""
- Prevent sending communication when invoices are being reconciled
- Mark B2S correspondence as read when printed.
- Postpone no money holds when reminders sent.
- Update donor tag
- Sends SMS for sms send_mode
:return: True
"""
sms_jobs = self.filtered(lambda j: j.send_mode == 'sms')
sms_jobs.send_by_sms()
other_jobs = self - sms_jobs
for job in other_jobs.filtered(lambda j: j.model in (
'recurring.contract', 'account.invoice')):
queue_job = self.env['queue.job'].search([
('channel', '=', 'root.group_reconcile'),
('state', '!=', 'done'),
], limit=1)
if queue_job:
invoices = self.env['account.invoice'].browse(
queue_job.record_ids)
if job.partner_id in invoices.mapped('partner_id'):
retry = 0
state = queue_job.state
while state != 'done' and retry < 5:
if queue_job.state == 'failed':
raise UserError(_(
"A reconcile job has failed. Please call "
"an admin for help."
))
_logger.info("Reconcile job is processing! Going in "
"sleep for five seconds...")
time.sleep(5)
state = queue_job.read(['state'])[0]['state']
retry += 1
if queue_job.state != 'done':
raise UserError(_(
"Some invoices of the partner are just being "
"reconciled now. Please wait the process to finish"
" before printing the communication."
))
super(PartnerCommunication, other_jobs).send()
b2s_printed = other_jobs.filtered(
lambda c: c.config_id.model == 'correspondence' and
c.send_mode == 'physical' and c.state == 'done')
if b2s_printed:
letters = b2s_printed.get_objects()
if letters:
letters.write({
'letter_delivered': True,
})
# No money extension
no_money_1 = self.env.ref('partner_communication_switzerland.'
'sponsorship_waiting_reminder_1')
no_money_2 = self.env.ref('partner_communication_switzerland.'
'sponsorship_waiting_reminder_2')
no_money_3 = self.env.ref('partner_communication_switzerland.'
'sponsorship_waiting_reminder_3')
settings = self.env['availability.management.settings']
first_extension = settings.get_param('no_money_hold_duration')
second_extension = settings.get_param('no_money_hold_extension')
for communication in other_jobs:
extension = False
if communication.config_id == no_money_1:
extension = first_extension + 7
elif communication.config_id == no_money_2:
extension = second_extension + 7
elif communication.config_id == no_money_3:
extension = 10
if extension:
holds = communication.get_objects().mapped('child_id.hold_id')
for hold in holds:
expiration = datetime.now() + relativedelta(days=extension)
hold.expiration_date = fields.Datetime.to_string(
expiration)
donor = self.env.ref('partner_compassion.res_partner_category_donor')
partners = other_jobs.filtered(
lambda j: j.config_id.model == 'account.invoice.line' and
donor not in j.partner_id.category_id).mapped('partner_id')
partners.write({'category_id': [(4, donor.id)]})
return True
@api.multi
def send_by_sms(self):
"""
Sends communication jobs with SMS 939 service.
:return: list of sms_texts
"""
link_pattern = re.compile(r'<a href="(.*)">(.*)</a>', re.DOTALL)
sms_medium_id = self.env.ref('sms_sponsorship.utm_medium_sms').id
sms_texts = []
for job in self.filtered('partner_mobile'):
sms_text = job.convert_html_for_sms(link_pattern, sms_medium_id)
sms_texts.append(sms_text)
sms_wizard = self.env['sms.sender.wizard'].with_context(
partner_id=job.partner_id.id).create({
'subject': job.subject,
'text': sms_text,
'sms_provider_id': job.sms_provider_id.id
})
sms_wizard.send_sms_partner()
job.write({
'state': 'done',
'sent_date': fields.Datetime.now(),
'sms_cost': ceil(
float(len(sms_text)) / SMS_CHAR_LIMIT) * SMS_COST
})
return sms_texts
def convert_html_for_sms(self, link_pattern, sms_medium_id):
"""
Converts HTML into simple text for SMS.
First replace links with short links using Link Tracker.
Then clean HTML using BeautifulSoup library.
:param link_pattern: the regex pattern for replacing links
:param sms_medium_id: the associated utm.medium id for generated links
:return: Clean text with short links for SMS use.
"""
self.ensure_one()
source_id = self.config_id.source_id.id
def _replace_link(match):
full_link = match.group(1).replace('&', '&')
short_link = self.env['link.tracker'].create({
'url': full_link,
'campaign_id': self.utm_campaign_id.id or self.env.ref(
'partner_communication_switzerland.'
'utm_campaign_communication').id,
'medium_id': sms_medium_id,
'source_id': source_id
})
return short_link.short_url
links_converted_text = link_pattern.sub(_replace_link, self.body_html)
soup = BeautifulSoup(links_converted_text, "lxml")
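        # Illustrative example (hypothetical input): a body such as
        # '<p>Hi <a href="https://example.com?a=1&amp;b=2">donate</a></p>' has
        # its link replaced by a link-tracker short URL before BeautifulSoup
        # strips the remaining markup, leaving e.g. 'Hi <short-url>'.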
return soup.get_text().strip()
@api.multi
def open_related(self):
""" Select a better view for invoice lines. """
res = super(PartnerCommunication, self).open_related()
if self.config_id.model == 'account.invoice.line':
res['context'] = self.with_context(
tree_view_ref='sponsorship_compassion'
'.view_invoice_line_partner_tree',
group_by=False
).env.context
return res
def get_new_dossier_attachments(self):
"""
Returns pdfs for the New Dossier Communication, including:
- Sponsorship payment slips (if payment is True)
- Small Childpack
- Sponsorship labels (if correspondence is True)
- Child picture
:return: dict {attachment_name: [report_name, pdf_data]}
"""
self.ensure_one()
attachments = OrderedDict()
report_obj = self.env['report']
account_payment_mode_obj = self.env['account.payment.mode']\
.with_context(lang='en_US')
lsv_dd_modes = account_payment_mode_obj.search(
['|', ('name', 'like', 'Direct Debit'), ('name', 'like', 'LSV')])
permanent_order = self.env.ref(
'sponsorship_switzerland.payment_mode_permanent_order')
sponsorships = self.get_objects()
# Sponsorships included for payment slips
bv_sponsorships = sponsorships.filtered(
# 1. Needs to be payer
lambda s: s.partner_id == self.partner_id and
# 2. Permanent Order are always included
s.payment_mode_id == permanent_order or (
# 3. LSV/DD are never included
s.payment_mode_id not in lsv_dd_modes and
# 4. If already paid they are not included
not s.period_paid)
)
write_sponsorships = sponsorships.filtered(
lambda s: s.correspondent_id == self.partner_id)
# Include all active sponsorships for Permanent Order
bv_sponsorships |= bv_sponsorships\
.filtered(lambda s: s.payment_mode_id == permanent_order)\
.mapped('group_id.contract_ids').filtered(
lambda s: s.state in ('active', 'waiting'))
# Payment slips
if bv_sponsorships:
report_name = 'report_compassion.3bvr_sponsorship'
if bv_sponsorships.mapped('payment_mode_id') == permanent_order:
# One single slip is enough for permanent order.
report_name = 'report_compassion.bvr_sponsorship'
attachments.update({
_('sponsorship payment slips.pdf'): [
report_name,
base64.b64encode(report_obj.get_pdf(
bv_sponsorships.ids, report_name,
data={
'doc_ids': bv_sponsorships.ids,
'background': self.send_mode != 'physical'
}
))
]
})
# Childpack if not a SUB of planned exit.
lifecycle = sponsorships.mapped('parent_id.child_id.lifecycle_ids')
planned_exit = lifecycle and lifecycle[0].type == 'Planned Exit'
if not planned_exit:
attachments.update(self.get_childpack_attachment())
# Labels
if write_sponsorships:
attachments.update(self.get_label_attachment(write_sponsorships))
# Child picture
report_name = 'partner_communication_switzerland.child_picture'
child_ids = sponsorships.mapped('child_id').ids
attachments.update({
_('child picture.pdf'): [
report_name,
base64.b64encode(report_obj.get_pdf(
child_ids, report_name,
data={'doc_ids': child_ids}
))
]
})
# Country information
for field_office in self.get_objects().mapped(
'child_id.field_office_id'):
country_pdf = field_office.country_info_pdf
if country_pdf:
attachments.update({
field_office.name + ".pdf": [
'partner_communication_switzerland.field_office_info',
country_pdf
]
})
return attachments
def get_csp_attachment(self):
self.ensure_one()
attachments = OrderedDict()
report_obj = self.env['report']
account_payment_mode_obj = self.env['account.payment.mode']
csp = self.get_objects()
# Include all active csp for Permanent Order
if 'Permanent Order' in csp.with_context(
lang='en_US').mapped('payment_mode_id.name'):
csp += csp.mapped(
'group_id.contract_ids').filtered(
lambda s: s.state == 'active')
is_payer = self.partner_id in csp.mapped('partner_id')
make_payment_pdf = True
# LSV/DD don't need a payment slip
groups = csp.mapped('group_id')
lsv_dd_modes = account_payment_mode_obj.search(
['|', ('name', 'like', 'Direct Debit'), ('name', 'like', 'LSV')])
lsv_dd_groups = groups.filtered(
lambda r: r.payment_mode_id in lsv_dd_modes)
if len(lsv_dd_groups) == len(groups):
make_payment_pdf = False
# If partner already paid, avoid payment slip
if len(csp.filtered('period_paid')) == len(csp):
make_payment_pdf = False
# Payment slips
if is_payer and make_payment_pdf:
report_name = 'report_compassion.3bvr_sponsorship'
attachments.update({
                _('csp payment slips.pdf'): [
report_name,
base64.b64encode(report_obj.get_pdf(
csp.ids, report_name,
data={
'doc_ids': csp.ids,
'background': self.send_mode != 'physical'
}
))
]
})
return attachments
def _convert_pdf(self, pdf_data):
"""
        Converts all pages of the PDF to A4 format when the communication is
        printed.
:param pdf_data: binary data of original pdf
:return: binary data of converted pdf
"""
if self.send_mode != 'physical':
return pdf_data
pdf = PdfFileReader(BytesIO(base64.b64decode(pdf_data)))
convert = PdfFileWriter()
a4_width = 594.48
a4_height = 844.32 # A4 units in PyPDF
for i in xrange(0, pdf.numPages):
# translation coordinates
tx = 0
ty = 0
page = pdf.getPage(i)
corner = [float(x) for x in page.mediaBox.getUpperRight()]
if corner[0] > a4_width or corner[1] > a4_height:
page.scaleBy(max(a4_width / corner[0], a4_height / corner[1]))
elif corner[0] < a4_width or corner[1] < a4_height:
tx = (a4_width - corner[0]) / 2
ty = (a4_height - corner[1]) / 2
convert.addBlankPage(a4_width, a4_height)
convert.getPage(i).mergeTranslatedPage(page, tx, ty)
output_stream = BytesIO()
convert.write(output_stream)
output_stream.seek(0)
return base64.b64encode(output_stream.read())
| agpl-3.0 | -7,057,945,540,565,169,000 | 39.902579 | 79 | 0.536567 | false |
Page-David/wget-fast | configer.py | 1 | 2463 | #!/usr/bin/env python3
import urllib.parse
import requests
import queue
import os
import interface
class Download_Configer(object):
# Init download settings...
def __init__(self, url, saveto):
self.url = url
parse_result = urllib.parse.urlparse(self.url)
self.filename = self.url.split('/')[-1]
self.protocol = parse_result.scheme
self.domain = parse_result.netloc
self.saveto = saveto
self.path = os.path.join(self.saveto, self.filename)
self.max_thread = 10
self.min_block = 1000
self.down_queue = queue.Queue(self.max_thread)
self._get_url_header()
self._block_content()
self._touch_file()
    # Issue a ranged GET request to probe partial-content support and size
def _get_url_header(self):
interface.info_out('HTTP_REQUEST')
headers = {
'Range': 'bytes=0-1'
}
response = requests.get(self.url, stream = True, headers = headers)
if response.status_code == 206:
self.partital_content = True
interface.info_out('PARTITAL_SUPPORT')
            self.content_length = int(response.headers['Content-Range']\
.split('/')[1])
elif response.status_code // 100 == 4:
interface.info_out('CONNECTION_ERROR', response.status_code)
elif response.status_code // 100 == 2:
self.partital_content = False
interface.info_out('PARTITAL_NOT_SUPPORT')
self.content_length = int(response.headers['Content-Length'])
interface.info_out('CONTENT_LENGTH', self.content_length)
    # Break the download into partial-content block ranges
def _block_content(self):
if self.content_length // self.max_thread > self.min_block:
self.min_block = self.content_length // self.max_thread+1
self.x = 0
while self.x < self.content_length:
if self.x+self.min_block > self.content_length:
self.down_queue.put((self.x, self.content_length-1))
else:
self.down_queue.put((self.x, self.x+self.min_block-1))
self.x += self.min_block
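        # Example (illustrative): a 2500-byte file with min_block=1000 queues
        # the ranges (0, 999), (1000, 1999) and (2000, 2499).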
def _touch_file(self):
open(self.path, 'w').close()
if __name__ == '__main__':
d = Download_Configer('https://raw.githubusercontent.com/getlantern/lantern-binaries/master/lantern-installer-beta.exe',
'/home/lancaster')
while not d.down_queue.empty():
print(d.down_queue.get())
| gpl-3.0 | 4,602,840,414,384,887,300 | 35.761194 | 124 | 0.600487 | false |
lalithsuresh/QEMU-Device-State-Visualisations | scripts/simpletrace.py | 12 | 2522 | #!/usr/bin/env python
#
# Pretty-printer for simple trace backend binary trace files
#
# Copyright IBM, Corp. 2010
#
# This work is licensed under the terms of the GNU GPL, version 2. See
# the COPYING file in the top-level directory.
#
# For help see docs/tracing.txt
import sys
import struct
import re
header_event_id = 0xffffffffffffffff
header_magic = 0xf2b177cb0aa429b4
header_version = 0
trace_fmt = '=QQQQQQQQ'
trace_len = struct.calcsize(trace_fmt)
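# Each record is eight little-endian u64 words: event id, timestamp in ns and
# up to six event arguments; the first record of a file is a header made of
# (header_event_id, header_magic, header_version).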
event_re = re.compile(r'(disable\s+)?([a-zA-Z0-9_]+)\(([^)]*)\).*')
def err(msg):
sys.stderr.write(msg + '\n')
sys.exit(1)
def parse_events(fobj):
"""Parse a trace-events file."""
def get_argnames(args):
"""Extract argument names from a parameter list."""
return tuple(arg.split()[-1].lstrip('*') for arg in args.split(','))
events = {}
event_num = 0
for line in fobj:
m = event_re.match(line.strip())
if m is None:
continue
disable, name, args = m.groups()
events[event_num] = (name,) + get_argnames(args)
event_num += 1
return events
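# Example (illustrative): a trace-events line such as
#   qemu_malloc(size_t size) "size %zu"
# parses to events[n] = ('qemu_malloc', 'size').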
def read_record(fobj):
"""Deserialize a trace record from a file."""
s = fobj.read(trace_len)
if len(s) != trace_len:
return None
return struct.unpack(trace_fmt, s)
def read_trace_file(fobj):
"""Deserialize trace records from a file."""
header = read_record(fobj)
if header is None or \
header[0] != header_event_id or \
header[1] != header_magic or \
header[2] != header_version:
err('not a trace file or incompatible version')
while True:
rec = read_record(fobj)
if rec is None:
break
yield rec
class Formatter(object):
def __init__(self, events):
self.events = events
self.last_timestamp = None
def format_record(self, rec):
if self.last_timestamp is None:
self.last_timestamp = rec[1]
delta_ns = rec[1] - self.last_timestamp
self.last_timestamp = rec[1]
event = self.events[rec[0]]
fields = [event[0], '%0.3f' % (delta_ns / 1000.0)]
for i in xrange(1, len(event)):
fields.append('%s=0x%x' % (event[i], rec[i + 1]))
return ' '.join(fields)
if len(sys.argv) != 3:
err('usage: %s <trace-events> <trace-file>' % sys.argv[0])
events = parse_events(open(sys.argv[1], 'r'))
formatter = Formatter(events)
for rec in read_trace_file(open(sys.argv[2], 'rb')):
print formatter.format_record(rec)
| gpl-2.0 | -3,143,958,515,632,104,000 | 26.11828 | 76 | 0.601507 | false |
hobarrera/django | tests/responses/tests.py | 33 | 4881 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import io
from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse
from django.http.response import HttpResponseBase
from django.test import SimpleTestCase
UTF8 = 'utf-8'
ISO88591 = 'iso-8859-1'
class HttpResponseBaseTests(SimpleTestCase):
def test_closed(self):
r = HttpResponseBase()
self.assertIs(r.closed, False)
r.close()
self.assertIs(r.closed, True)
def test_write(self):
r = HttpResponseBase()
self.assertIs(r.writable(), False)
with self.assertRaisesMessage(IOError, 'This HttpResponseBase instance is not writable'):
r.write('asdf')
with self.assertRaisesMessage(IOError, 'This HttpResponseBase instance is not writable'):
r.writelines(['asdf\n', 'qwer\n'])
def test_tell(self):
r = HttpResponseBase()
with self.assertRaisesMessage(IOError, 'This HttpResponseBase instance cannot tell its position'):
r.tell()
def test_setdefault(self):
"""
HttpResponseBase.setdefault() should not change an existing header
and should be case insensitive.
"""
r = HttpResponseBase()
r['Header'] = 'Value'
r.setdefault('header', 'changed')
self.assertEqual(r['header'], 'Value')
r.setdefault('x-header', 'DefaultValue')
self.assertEqual(r['X-Header'], 'DefaultValue')
class HttpResponseTests(SimpleTestCase):
def test_status_code(self):
resp = HttpResponse(status=503)
self.assertEqual(resp.status_code, 503)
self.assertEqual(resp.reason_phrase, "Service Unavailable")
def test_change_status_code(self):
resp = HttpResponse()
resp.status_code = 503
self.assertEqual(resp.status_code, 503)
self.assertEqual(resp.reason_phrase, "Service Unavailable")
def test_reason_phrase(self):
reason = "I'm an anarchist coffee pot on crack."
resp = HttpResponse(status=814, reason=reason)
self.assertEqual(resp.status_code, 814)
self.assertEqual(resp.reason_phrase, reason)
def test_charset_detection(self):
""" HttpResponse should parse charset from content_type."""
response = HttpResponse('ok')
self.assertEqual(response.charset, settings.DEFAULT_CHARSET)
response = HttpResponse(charset=ISO88591)
self.assertEqual(response.charset, ISO88591)
self.assertEqual(response['Content-Type'], 'text/html; charset=%s' % ISO88591)
response = HttpResponse(content_type='text/plain; charset=%s' % UTF8, charset=ISO88591)
self.assertEqual(response.charset, ISO88591)
response = HttpResponse(content_type='text/plain; charset=%s' % ISO88591)
self.assertEqual(response.charset, ISO88591)
response = HttpResponse(content_type='text/plain; charset="%s"' % ISO88591)
self.assertEqual(response.charset, ISO88591)
response = HttpResponse(content_type='text/plain; charset=')
self.assertEqual(response.charset, settings.DEFAULT_CHARSET)
response = HttpResponse(content_type='text/plain')
self.assertEqual(response.charset, settings.DEFAULT_CHARSET)
def test_response_content_charset(self):
"""HttpResponse should encode based on charset."""
content = "Cafรฉ :)"
utf8_content = content.encode(UTF8)
iso_content = content.encode(ISO88591)
response = HttpResponse(utf8_content)
self.assertContains(response, utf8_content)
response = HttpResponse(iso_content, content_type='text/plain; charset=%s' % ISO88591)
self.assertContains(response, iso_content)
response = HttpResponse(iso_content)
self.assertContains(response, iso_content)
response = HttpResponse(iso_content, content_type='text/plain')
self.assertContains(response, iso_content)
def test_repr(self):
response = HttpResponse(content="Cafรฉ :)".encode(UTF8), status=201)
expected = '<HttpResponse status_code=201, "text/html; charset=utf-8">'
self.assertEqual(repr(response), expected)
def test_wrap_textiowrapper(self):
content = "Cafรฉ :)"
r = HttpResponse()
with io.TextIOWrapper(r, UTF8) as buf:
buf.write(content)
self.assertEqual(r.content, content.encode(UTF8))
def test_generator_cache(self):
generator = ("{}".format(i) for i in range(10))
response = HttpResponse(content=generator)
self.assertEqual(response.content, b'0123456789')
with self.assertRaises(StopIteration):
next(generator)
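        # The generator was consumed eagerly when the response was constructed,
        # so the response holds concrete bytes and can be pickled into the cache.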
cache.set('my-response-key', response)
response = cache.get('my-response-key')
self.assertEqual(response.content, b'0123456789')
| bsd-3-clause | -7,511,904,347,758,875,000 | 35.133333 | 106 | 0.662772 | false |
abhiatgithub/shogun-toolbox | examples/undocumented/python_modular/mathematics_logdet.py | 29 | 2923 | #!/usr/bin/env python
from numpy import *
from scipy.io import mmread
# Loading an example sparse matrix of dimension 479x479, real, unsymmetric
mtx=mmread('../../../data/logdet/west0479.mtx')
parameter_list=[[mtx,100,60,1]]
def mathematics_logdet (matrix=mtx,max_iter_eig=1000,max_iter_lin=1000,num_samples=1):
from scipy.sparse import eye
# Create a Hermitian sparse matrix
rows=matrix.shape[0]
cols=matrix.shape[1]
A=matrix.transpose()*matrix+eye(rows, cols)
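	# Note: A = M^T M + I is symmetric positive definite by construction, so
	# every eigenvalue is > 0 and log(det(A)) = trace(log(A)) is well defined.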
from scipy.sparse import csc_matrix
try:
from shogun.Mathematics import RealSparseMatrixOperator
from shogun.Mathematics import LanczosEigenSolver
from shogun.Mathematics import CGMShiftedFamilySolver
from shogun.Mathematics import LogRationalApproximationCGM
from shogun.Mathematics import ProbingSampler
from shogun.Mathematics import LogDetEstimator
from shogun.Mathematics import Statistics
from shogun.Library import SerialComputationEngine
# creating the linear operator, eigen-solver
op=RealSparseMatrixOperator(A.tocsc())
eig_solver=LanczosEigenSolver(op)
# we can set the iteration limit high for poorly conditioned matrices
eig_solver.set_max_iteration_limit(max_iter_eig)
# alternatively, if the matrix is small, we can compute eigenvalues externally
# and set min/max eigenvalues into the eigensolver
# from scipy.sparse.linalg import eigsh
# eigenvalues=eigsh(A, rows-1)
# eig_solver.set_min_eigenvalue(eigenvalues[0][0])
# eig_solver.set_max_eigenvalue(eigenvalues[0][-1])
# create the shifted-family linear solver which solves for all the shifts
# using as many matrix-vector products as one shift in CG iterations
lin_solver=CGMShiftedFamilySolver()
lin_solver.set_iteration_limit(max_iter_lin)
# computation engine
engine=SerialComputationEngine()
# set the desired accuracy tighter to obtain better results
# this determines the number of contour points in conformal mapping of
# the rational approximation of the Cauchy's integral of f(A)*s, f=log
desired_accuracy=1E-5
# creating the log-linear-operator function
op_func=LogRationalApproximationCGM(op, engine, eig_solver, lin_solver,\
desired_accuracy)
# set the trace sampler to be probing sampler, in which samples are obtained
# by greedy graph coloring of the power of sparse matrix (default is power=1,
# 2-distance coloring)
trace_sampler=ProbingSampler(op)
# estimating log-det
log_det_estimator=LogDetEstimator(trace_sampler, op_func, engine)
# set the number of samples as required
estimates=log_det_estimator.sample(num_samples)
estimated_logdet=sum(estimates)/len(estimates)
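		# Each sample is one stochastic probe of tr(log(A)) = log(det(A));
		# averaging over the samples reduces the variance of the estimate.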
actual_logdet=Statistics.log_det(A)
print(actual_logdet, estimated_logdet)
return estimates
except ImportError:
print('One or many of the dependencies (Eigen3/LaPack/ColPack) not found!')
if __name__=='__main__':
print('LogDetEstimator')
mathematics_logdet (*parameter_list[0])
| gpl-3.0 | 7,516,256,802,962,936,000 | 32.215909 | 86 | 0.770099 | false |
fcolamar/AliPhysics | PWGJE/EMCALJetTasks/Tracks/analysis/base/TriggerEfficiency.py | 41 | 2551 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
class TriggerEfficiency:
"""
Class calculating the trigger efficiency from a given min. bias container and a given triggered container
"""
def __init__(self, triggername, minbiascontainer, triggeredcontainer):
"""
Constructor
"""
self.__triggername = triggername
self.__minbiascontainer = minbiascontainer
self.__triggeredcontainer = triggeredcontainer
self.__triggerefficiency = None
self.__CalculateTriggerEfficiency()
def __MakeNormalisedSpectrum(self, container, name):
container.SetVertexRange(-10., 10.)
container.SetPileupRejection(True)
if container.__class__ == "TrackContainer":
container.SelectTrackCuts(1)
container.RequestSeenInMinBias()
return container.MakeProjection(0, "ptSpectrum%s" %(name), "p_{#rm{t}} (GeV/c)", "1/N_{event} 1/(#Delta p_{#rm t}) dN/dp_{#rm{t}} ((GeV/c)^{-2}", doNorm = False)
def __CalculateTriggerEfficiency(self):
minbiasspectrum = self.__MakeNormalisedSpectrum(self.__minbiascontainer, "minbias")
self.__triggerefficiency = self.__MakeNormalisedSpectrum(self.__triggeredcontainer, self.__triggername)
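        # Divide(..., "b") computes binomial errors, which is appropriate here
        # because the triggered spectrum is a subset of the min. bias spectrum.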
self.__triggerefficiency.Divide(self.__triggerefficiency, minbiasspectrum, 1., 1., "b")
self.__triggerefficiency.SetName("triggerEff%s" %(self.__triggername))
def GetEfficiencyCurve(self):
return self.__triggerefficiency
| bsd-3-clause | -3,924,809,552,928,824,000 | 53.297872 | 169 | 0.580949 | false |
kalev/anaconda | pyanaconda/iw/partition_gui.py | 2 | 72665 | #
# partition_gui.py: allows the user to choose how to partition their disks
#
# Copyright (C) 2001, 2002 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Matt Wilson <[email protected]>
# Michael Fulbright <[email protected]>
#
import os
import gobject
import gtk
import gtk.glade
try:
import gnomecanvas
except ImportError:
import gnome.canvas as gnomecanvas
import pango
from pyanaconda import gui
import parted
import string
import types
import copy
from decimal import Decimal
from pyanaconda import storage
from iw_gui import *
from pyanaconda.flags import flags
import datacombo
import lvm_dialog_gui as l_d_g
import raid_dialog_gui as r_d_g
import partition_dialog_gui as p_d_g
from pyanaconda.partIntfHelpers import *
from pyanaconda.constants import *
from partition_ui_helpers_gui import *
from pyanaconda.storage.partitioning import doPartitioning
from pyanaconda.storage.devicelibs import lvm
from pyanaconda.storage.devices import devicePathToName
from pyanaconda.storage.devices import PartitionDevice
from pyanaconda.storage.devices import BTRFSVolumeDevice
from pyanaconda.storage.devices import deviceNameToDiskByPath
from pyanaconda.storage.errors import DeviceNotFoundError
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
P_ = lambda x, y, z: gettext.ldngettext("anaconda", x, y, z)
import logging
log = logging.getLogger("anaconda")
STRIPE_HEIGHT = 35.0
LOGICAL_INSET = 3.0
TREE_SPACING = 2
# XXX hack but will work for now
if gtk.gdk.screen_width() > 640:
CANVAS_WIDTH = 490
else:
CANVAS_WIDTH = 390
CANVAS_HEIGHT = 200
MODE_ADD = 1
MODE_EDIT = 2
class Slice:
"""Class representing a slice of a stripe.
    parent -- the stripe that the slice belongs to.
text -- what will appear in the slice
type -- either SLICE or SUBSLICE
xoffset -- start percentage
xlength -- a length percentage
dcCB -- function that is called on a double click.
    cCB -- function that is called on a single click (selects the slice)
sel_col -- color when selected
unsel_col -- color when unselected
obj -- some python object that is related to this slice.
selected -- initial state of slice.
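
    A minimal usage sketch (hypothetical values, for illustration only):
        s = Slice(stripe, "sda1\n500 MB", Slice.SLICE, 0.0, 0.25)
        stripe.addSlice(s)
        s.putOnCanvas()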
"""
SLICE = 0
SUBSLICE = 1
CONTAINERSLICE = 2
def __init__(self, parent, text, type, xoffset, xlength, dcCB=lambda: None,
cCB=lambda x: None, sel_col="cornsilk1", unsel_col="white",
obj = None, selected = False):
self.text = text
self.type = type
self.xoffset = xoffset
self.xlength = xlength
self.parent = parent
self.dcCB = dcCB
self.cCB = cCB
self.sel_col = sel_col
self.unsel_col = unsel_col
self.obj = obj
self.selected = selected
def eventHandler(self, widget, event):
if event.type == gtk.gdk.BUTTON_PRESS:
if event.button == 1:
self.select()
self.cCB(self.obj)
elif event.type == gtk.gdk._2BUTTON_PRESS:
#self.select()
self.dcCB()
return True
def putOnCanvas(self):
pgroup = self.parent.getGroup()
self.group = pgroup.add(gnomecanvas.CanvasGroup)
self.box = self.group.add(gnomecanvas.CanvasRect)
self.group.connect("event", self.eventHandler)
canvas_text = self.group.add(gnomecanvas.CanvasText,
font="sans", size_points=8)
xoffset = self.xoffset * CANVAS_WIDTH
xlength = self.xlength * CANVAS_WIDTH
if self.type == Slice.SUBSLICE:
yoffset = 0.0 + LOGICAL_INSET
yheight = STRIPE_HEIGHT - (LOGICAL_INSET * 2)
texty = 0.0
else:
yoffset = 0.0
yheight = STRIPE_HEIGHT
texty = LOGICAL_INSET
if self.selected:
fill_color = self.sel_col
else:
fill_color = self.unsel_col
self.group.set(x=xoffset, y=yoffset)
self.box.set(x1=0.0, y1=0.0, x2=xlength,
y2=yheight, fill_color=fill_color,
outline_color='black', width_units=1.0)
canvas_text.set(x=2.0, y=texty + 2.0, text=self.text,
fill_color='black',
anchor=gtk.ANCHOR_NW, clip=True,
clip_width=xlength-1, clip_height=yheight-1)
def shutDown(self):
self.parent = None
if self.group:
self.group.destroy()
self.group = None
def select(self):
for slice in self.parent.slices:
slice.deselect()
self.selected = True
if self.group and self.box:
if self.type != Slice.CONTAINERSLICE:
self.group.raise_to_top()
self.box.set(outline_color="red")
self.box.set(fill_color=self.sel_col)
def deselect(self):
self.selected = False
if self.box:
self.box.set(outline_color="black", fill_color=self.unsel_col)
class Stripe(object):
"""
canvas -- the canvas where everything goes
text -- the text that will appear on top of the stripe
    yoff -- the position on the y axis where this stripe should be drawn
dcCB -- function that should be called on a double click
obj -- some python object that is related to this stripe
"""
def __init__(self, canvas, text, dcCB, obj = None):
self.canvas_text = None
self.canvas = canvas
self.text = text
self.group = None
self._slices = []
self.dcCB = dcCB
self.selected = None
self.obj = obj
def putOnCanvas(self, yoff):
"""
        Returns the y position after drawing this stripe.
"""
# We set the text for the stripe.
self.canvas_text = self.canvas.root().add(gnomecanvas.CanvasText,
x=0.0, y=yoff, font="sans", size_points=9)
self.canvas_text.set(text=self.text, fill_color='black',
anchor=gtk.ANCHOR_NW, weight=pango.WEIGHT_BOLD)
(xxx1, yyy1, xxx2, yyy2) = self.canvas_text.get_bounds()
textheight = yyy2 - yyy1 + 2
self.group = self.canvas.root().add(gnomecanvas.CanvasGroup,
x=0, y=yoff+textheight)
self.group.add(gnomecanvas.CanvasRect, x1=0.0, y1=0.0, x2=CANVAS_WIDTH,
y2=STRIPE_HEIGHT, fill_color='green',
outline_color='grey71', width_units=1.0)
self.group.lower_to_bottom()
# We paint all the container slices first. So the contained slices
# actually show up.
for slice in [s for s in self.slices if s.type == Slice.CONTAINERSLICE]:
slice.putOnCanvas()
# After painting the containers we paint the rest.
for slice in [s for s in self.slices if s.type != Slice.CONTAINERSLICE]:
slice.putOnCanvas()
# 10 is a separator space.
return yoff + STRIPE_HEIGHT+textheight+10
def shutDown(self):
for slice in self.slices:
slice.shutDown()
self._slices = []
if self.canvas_text:
self.canvas_text.destroy()
if self.group:
self.group.destroy()
self.group = None
def getGroup(self):
return self.group
@property
def slices(self):
return self._slices
def addSlice(self, new_slice):
# check to see if they overlap.
for slice in self.slices:
# Container slices and subslices can overlap.
if new_slice.type+slice.type == Slice.CONTAINERSLICE+Slice.SUBSLICE:
continue
if new_slice.xoffset > slice.xoffset \
and new_slice.xoffset < slice.xoffset + slice.xlength:
                # There is a collision; silently drop the new slice.
return
self._slices.append(new_slice)
def getSelectedSlice(self):
for slice in self.slices:
if slice.selected:
return slice
return None
class StripeGraph:
""" This class will only handle one stripe."""
__canvas = None
def __init__(self):
self.stripe = None
self.next_ypos = 0.0
def __del__(self):
self.shutDown()
def shutDown(self):
if self.stripe:
self.stripe.shutDown()
self.stripe = None
self.next_ypos = 0.0
@classmethod
def getCanvas(cls):
if not StripeGraph.__canvas:
StripeGraph.__canvas = gnomecanvas.Canvas()
return StripeGraph.__canvas
def setDisplayed(self, obj):
# Check to see if we already have the correct obj displayed.
if self.getDisplayed() and self.getDisplayed().obj == obj:
return
if self.stripe:
self.stripe.shutDown()
self.stripe = self._createStripe(obj)
self.stripe.putOnCanvas(0)
# Trying to center the picture.
apply(self.getCanvas().set_scroll_region, self.getCanvas().root().get_bounds())
def getDisplayed(self):
return self.stripe
def selectSliceFromObj(self, obj):
"""Search for obj in the slices """
stripe = self.getDisplayed()
if not stripe:
return
for slice in stripe.slices:
# There is a part object in each slice.
if not slice.obj:
continue
if obj == slice.obj and not slice.selected:
slice.select()
break
def _createStripe(self, obj):
#This method needs to be overridden
pass
def getSelectedSlice(self):
return self.stripe.getSelectedSlice()
class DiskStripeGraph(StripeGraph):
"""Handles the creation of a bar view for the 'normal' devies.
storage -- the storage object
cCB -- call back function used when the user clicks on a slice. This function
is passed a device object when its executed.
dcCB -- call back function used when the user double clicks on a slice.
drive -- drive to display
"""
def __init__(self, storage, drive=None, cCB=lambda x:None, dcCB=lambda:None):
StripeGraph.__init__(self)
self.storage = storage
self.cCB = cCB
self.dcCB = dcCB
# Define the default colors per partition type.
self.part_type_colors = \
{"sel_logical": "cornsilk1", "unsel_logical": "white",
"sel_extended": "cornsilk1", "unsel_extended": "white",
"sel_normal": "cornsilk1", "unsel_normal": "white",
"sel_freespace": "grey88", "unsel_freespace": "grey88"}
if drive:
self.setDisplayed(drive)
def _createStripe(self, drive):
# Create the stripe
drivetext = _("Drive %(drive)s (%(size)-0.f MB) (Model: %(model)s)") \
% {'drive': drive.path,
'size': drive.size,
'model': drive.model}
stripe = Stripe(self.getCanvas(), drivetext, self.dcCB, obj = drive)
# Create the slices.
# These offsets are where the partition/slices end. 0<offset<1
for part in drive.format.partedDisk.getFreeSpacePartitions() \
+ [d for d in drive.format.partitions]:
if part.getSize(unit="MB") <= 1.0 or \
part.type & parted.PARTITION_METADATA:
continue
# Create the start and length for the slice.
xoffset = (Decimal(str(part.geometry.start))
/ Decimal(str(drive.partedDevice.length)))
xlength = (Decimal(str(part.geometry.length))
/ Decimal(str(drive.partedDevice.length)))
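            # Decimal(str(...)) keeps these sector-count ratios exact enough
            # that adjacent slices line up instead of drifting from float rounding.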
if part.type & parted.PARTITION_LOGICAL:
if part.type & parted.PARTITION_FREESPACE:
name = _("Free")
unsel_col = self.part_type_colors["unsel_freespace"]
sel_col = self.part_type_colors["sel_freespace"]
else:
name = part.path
unsel_col = self.part_type_colors["unsel_logical"]
sel_col = self.part_type_colors["sel_logical"]
partstr = "%s\n%.0f MB" % (name, float(part.getSize()))
stype = Slice.SUBSLICE
elif part.type & parted.PARTITION_FREESPACE:
partstr = "%s\n%.0f MB" % (_("Free"), float(part.getSize()))
stype = Slice.SLICE
unsel_col = self.part_type_colors["unsel_freespace"]
sel_col = self.part_type_colors["sel_freespace"]
elif part.type & parted.PARTITION_EXTENDED:
partstr = ""
stype = Slice.CONTAINERSLICE
unsel_col = self.part_type_colors["unsel_extended"]
sel_col = self.part_type_colors["sel_extended"]
else:
partstr = "%s\n%.0f MB" % (part.path, float(part.getSize()))
stype = Slice.SLICE
unsel_col = self.part_type_colors["unsel_normal"]
sel_col = self.part_type_colors["sel_normal"]
            # We need to use the self.storage objects, not the partedDisk ones.
            # The free space has no storage object.
if part.type != parted.PARTITION_FREESPACE:
partName = devicePathToName(part.getDeviceNodeName())
o_part = self.storage.devicetree.getDeviceByName(partName)
else:
o_part = None
slice = Slice(stripe, partstr, stype, xoffset, xlength,
dcCB = self.dcCB, cCB = self.cCB, sel_col = sel_col,
unsel_col = unsel_col, obj = o_part)
stripe.addSlice(slice)
return stripe
class LVMStripeGraph(StripeGraph):
"""
storage -- the storage object
    cCB -- callback function used when the user clicks on a slice. This
           function is passed a device object when it is executed.
dcCB -- call back function used when the user double clicks on a slice.
vg -- volume group to display
"""
def __init__(self, storage, vg=None, cCB=lambda x:None, dcCB=lambda:None):
StripeGraph.__init__(self)
self.storage = storage
self.cCB = cCB
self.dcCB = dcCB
# Define the default colors per partition type.
self.part_type_colors = \
{"sel_lv": "cornsilk1", "unsel_lv": "white",
"sel_freespace": "grey88", "unsel_freespace": "grey88"}
if vg:
self.setDisplayed(vg)
def _createStripe(self, vg):
# Create the stripe
vgtext = _("LVM Volume Group %(vgName)s (%(vgSize)-0.f MB)") % {"vgName": vg.name, "vgSize": vg.size}
stripe = Stripe(self.getCanvas(), vgtext, self.dcCB, obj = vg)
# Create the slices.
        # Since we don't have a start and length like in the partitions, we
# put all the LVs next to each other and put the free space at the end.
curr_offset = Decimal(0)
for lv in vg.lvs:
lvstr = "%s\n%.0f MB" % (lv.name, float(lv.size))
stype = Slice.SLICE
sel_col = self.part_type_colors["sel_lv"]
unsel_col = self.part_type_colors["unsel_lv"]
#xoffset = float(curr_offset) / float(vg.size)
xoffset = curr_offset
xlength = Decimal(str(lv.size)) / Decimal(str(vg.size))
slice = Slice(stripe, lvstr, stype, xoffset, xlength,
dcCB = self.dcCB, cCB = self.cCB, sel_col = sel_col,
unsel_col = unsel_col, obj = lv)
stripe.addSlice(slice)
curr_offset += xlength
# We add the free space if there is any space left.
if curr_offset < 1:
#freestr = _("Free")
stype = Slice.SLICE
sel_col = self.part_type_colors["sel_freespace"]
unsel_col = self.part_type_colors["unsel_freespace"]
xoffset = curr_offset
xlength = Decimal(1 - curr_offset)
# with the xlength we give an approximate size
freestr = "%s\n%.0f MB" % (_("Free"), Decimal(str(vg.size)) * xlength)
# We append no object.
slice = Slice(stripe, freestr, stype, xoffset, xlength,
dcCB = self.dcCB, cCB = self.cCB, sel_col = sel_col,
unsel_col = unsel_col)
stripe.addSlice(slice)
return stripe
class MDStripeGraph(StripeGraph):
desc = "MD"
"""
storage -- the storage object
    cCB -- callback function used when the user clicks on a slice. This
           function is passed a device object when it is executed.
dcCB -- call back function used when the user double clicks on a slice.
md -- md device to display.
"""
def __init__(self, storage, device=None, cCB=lambda x:None, dcCB=lambda:None):
StripeGraph.__init__(self)
self.storage = storage
self.cCB = cCB
self.dcCB = dcCB
self.part_type_colors = \
{"sel_md": "cornsilk1", "unsel_md": "white"}
if device:
self.setDisplayed(device)
def _get_text(self, md):
return (_("%(desc)s %(mdPath)s (%(mdSize)-0.f MB)")
% {"mdPath": md.path, "mdSize": md.size, "desc": self.desc})
def _createStripe(self, md):
mdtext = self._get_text(md)
stripe = Stripe(self.getCanvas(), mdtext, self.dcCB, obj = md)
# Since we can't really create subslices with md devices we will only
# show the md device size in the bar.
mdstr = "%s\n%.0f MB" % (md.path, float(md.size))
stype = Slice.SLICE
sel_col = self.part_type_colors["sel_md"]
unsel_col = self.part_type_colors["unsel_md"]
xoffset = 0
xlength = 1
slice = Slice(stripe, mdstr, stype, xoffset, xlength,
dcCB = self.dcCB, cCB = self.cCB, sel_col = sel_col,
unsel_col = unsel_col, obj = md)
stripe.addSlice(slice)
return stripe
class MDRaidArrayStripeGraph(MDStripeGraph):
desc = "MD RAID Array"
class BTRFSStripeGraph(MDStripeGraph):
desc = "BTRFS Pool"
def _get_text(self, md):
return (_("%(desc)s %(mdUUID)s (%(mdSize)-0.f MB)")
% {"mdUUID": md.uuid, "mdSize": md.size, "desc": self.desc})
class MessageGraph:
def __init__(self, canvas, message):
self.canvas = canvas
self.message = message
self.canvas_text = None
def display(self):
if self.canvas_text != None:
# This means that its already displayed.
return
self.canvas_text = self.canvas.root().add(gnomecanvas.CanvasText,
x=0.0, y=20, font="sans", size_points=16)
self.canvas_text.set(text=self.message, fill_color='black',
anchor=gtk.ANCHOR_CENTER, weight=pango.WEIGHT_BOLD)
# Trying to center the picture.
apply(self.canvas.set_scroll_region, self.canvas.root().get_bounds())
def destroy(self):
if self.canvas_text:
self.canvas_text.destroy()
self.canvas_text = None
class DiskTreeModelHelper:
def __init__(self, model, columns, iter):
self.model = model
self.iter = iter
self.columns = columns
def __getitem__(self, key):
if type(key) == types.StringType:
key = self.columns[key]
try:
return self.model.get_value(self.iter, key)
except Exception:
# FIXME: what exceptions might actually get raised here?
return None
def __setitem__(self, key, value):
if type(key) == types.StringType:
key = self.columns[key]
self.model.set_value(self.iter, key, value)
class DiskTreeModel(gtk.TreeStore):
isLeaf = -3
isFormattable = -2
# format: column header, type, x alignment, hide?, visibleKey
titles = ((N_("Device"), gobject.TYPE_STRING, 0.0, 0, 0),
(N_("Label"), gobject.TYPE_STRING, 0.0, 1, 0),
(N_("Size (MB)"), gobject.TYPE_STRING, 1.0, 0, 0),
(N_("Mount Point"), gobject.TYPE_STRING, 0.0, 0, isLeaf),
(N_("Type"), gobject.TYPE_STRING, 0.0, 0, 0),
(N_("Format"), gobject.TYPE_OBJECT, 0.5, 0, isFormattable),
("", gobject.TYPE_STRING, 0.0, 0, 0),
# the following must be the last two
("IsLeaf", gobject.TYPE_BOOLEAN, 0.0, 1, 0),
("IsFormattable", gobject.TYPE_BOOLEAN, 0.0, 1, 0),
("PyObject", gobject.TYPE_PYOBJECT, 0.0, 1, 0))
def __init__(self):
self.hiddenPartitions = []
self.titleSlot = {}
i = 0
types = [self]
self.columns = []
for title, kind, alignment, hide, key in self.titles:
self.titleSlot[title] = i
types.append(kind)
if hide:
i += 1
continue
elif kind == gobject.TYPE_OBJECT:
renderer = gtk.CellRendererPixbuf()
propertyMapping = {'pixbuf': i}
elif kind == gobject.TYPE_BOOLEAN:
renderer = gtk.CellRendererToggle()
propertyMapping = {'active': i}
elif (kind == gobject.TYPE_STRING or
kind == gobject.TYPE_INT):
renderer = gtk.CellRendererText()
propertyMapping = {'markup': i}
# wire in the cells that we want only visible on leaf nodes to
# the special leaf node column.
if key < 0:
propertyMapping['visible'] = len(self.titles) + key
renderer.set_property('xalign', alignment)
if title == "Mount Point":
title = _("Mount Point/\nRAID/Volume")
elif title == "Size (MB)":
title = _("Size\n(MB)")
elif title != "":
title = _(title)
col = apply(gtk.TreeViewColumn, (title, renderer),
propertyMapping)
col.set_alignment(0.5)
if kind == gobject.TYPE_STRING or kind == gobject.TYPE_INT:
col.set_property('sizing', gtk.TREE_VIEW_COLUMN_AUTOSIZE)
self.columns.append(col)
i += 1
apply(gtk.TreeStore.__init__, types)
self.view = gtk.TreeView(self)
# append all of the columns
map(self.view.append_column, self.columns)
def getTreeView(self):
return self.view
def selectRowFromObj(self, obj, iter=None):
"""Find the row in the tree containing obj and select it.
obj -- the object that we are searching
iter -- an iter from the tree. If None, get the first one.
Returns the iter where obj was found. None otherwise.
"""
retval = None
r_obj = None
#FIXME: watch out for hidden rows.
if not iter:
iter = self.get_iter_first()
while iter:
# r_obj -> (row object)
r_obj = self[iter]["PyObject"]
if obj and r_obj == obj:
                # We have found our object; select this row and break.
selection = self.view.get_selection()
if selection is not None:
selection.unselect_all()
selection.select_iter(iter)
# Make sure the tree view shows what we have selected.
path = self.get_path(iter)
col = self.view.get_column(0)
self.view.set_cursor(path, col, False)
self.view.scroll_to_cell(path, col, True, 0.5, 0.5)
retval = iter
break
if self.iter_has_child(iter):
# Call recursively if row has children.
rv = self.selectRowFromObj(obj, iter=self.iter_children(iter))
if rv != None:
retval = rv
break
iter = self.iter_next(iter)
        return retval
def getCurrentDevice(self):
""" Return the device representing the current selection,
None otherwise.
"""
selection = self.view.get_selection()
model, iter = selection.get_selected()
if not iter:
return None
return model[iter]['PyObject']
def getCurrentDeviceParent(self):
""" Return the parent of the selected row. Returns an iter.
None if there is no parent.
"""
selection = self.view.get_selection()
model, iter = selection.get_selected()
if not iter:
return None
return model.iter_parent(iter)
def resetSelection(self):
pass
def clear(self):
selection = self.view.get_selection()
if selection is not None:
selection.unselect_all()
gtk.TreeStore.clear(self)
def __getitem__(self, iter):
if type(iter) == gtk.TreeIter:
return DiskTreeModelHelper(self, self.titleSlot, iter)
raise KeyError, iter
class PartitionWindow(InstallWindow):
def __init__(self, ics):
InstallWindow.__init__(self, ics)
ics.setTitle(_("Partitioning"))
ics.setNextEnabled(True)
self.parent = ics.getICW().window
def quit(self):
pass
    def presentPartitioningComments(self, title, labelstr1, labelstr2, comments,
type="ok", custom_buttons=None):
if flags.autostep:
return 1
win = gtk.Dialog(title)
gui.addFrame(win)
if type == "ok":
win.add_button('gtk-ok', 1)
defaultchoice = 0
elif type == "yesno":
win.add_button('gtk-no', 2)
win.add_button('gtk-yes', 1)
defaultchoice = 1
elif type == "continue":
win.add_button('gtk-cancel', 0)
win.add_button(_("Continue"), 1)
defaultchoice = 1
elif type == "custom":
rid=0
for button in custom_buttons:
widget = win.add_button(button, rid)
rid = rid + 1
defaultchoice = rid - 1
image = gtk.Image()
image.set_from_stock('gtk-dialog-warning', gtk.ICON_SIZE_DIALOG)
hbox = gtk.HBox(False, 9)
al=gtk.Alignment(0.0, 0.0)
al.add(image)
hbox.pack_start(al, False)
buffer = gtk.TextBuffer(None)
buffer.set_text(comments)
text = gtk.TextView()
text.set_buffer(buffer)
text.set_property("editable", False)
text.set_property("cursor_visible", False)
text.set_wrap_mode(gtk.WRAP_WORD)
sw = gtk.ScrolledWindow()
sw.add(text)
sw.set_size_request(400, 200)
sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
sw.set_shadow_type(gtk.SHADOW_IN)
info1 = gtk.Label(labelstr1)
info1.set_line_wrap(True)
info1.set_size_request(400, -1)
info2 = gtk.Label(labelstr2)
info2.set_line_wrap(True)
info2.set_size_request(400, -1)
vbox = gtk.VBox(False, 9)
al=gtk.Alignment(0.0, 0.0)
al.add(info1)
vbox.pack_start(al, False)
vbox.pack_start(sw, True, True)
al=gtk.Alignment(0.0, 0.0)
al.add(info2)
vbox.pack_start(al, True)
hbox.pack_start(vbox, True, True)
win.vbox.pack_start(hbox)
win.set_position(gtk.WIN_POS_CENTER)
win.set_default_response(defaultchoice)
win.show_all()
rc = win.run()
win.destroy()
return rc
def getNext(self):
(errors, warnings) = self.storage.sanityCheck()
if errors:
labelstr1 = _("The partitioning scheme you requested "
"caused the following critical errors.")
labelstr2 = _("You must correct these errors before "
"you continue your installation of "
"%s.") % (productName,)
commentstr = string.join(errors, "\n\n")
self.presentPartitioningComments(_("Partitioning Errors"),
labelstr1, labelstr2,
commentstr, type="ok")
raise gui.StayOnScreen
if warnings:
# "storage configuration"
labelstr1 = _("The partitioning scheme you requested "
"generated the following warnings.")
labelstr2 = _("Would you like to continue with "
"your requested partitioning "
"scheme?")
commentstr = string.join(warnings, "\n\n")
rc = self.presentPartitioningComments(_("Partitioning Warnings"),
labelstr1, labelstr2,
commentstr,
type="yesno")
if rc != 1:
raise gui.StayOnScreen
formatWarnings = getPreExistFormatWarnings(self.storage)
if formatWarnings:
labelstr1 = _("The following pre-existing devices have been "
"selected to be formatted, destroying all data.")
# labelstr2 = _("Select 'Yes' to continue and format these "
# "partitions, or 'No' to go back and change these "
# "settings.")
labelstr2 = ""
commentstr = ""
for (dev, type, mntpt) in formatWarnings:
commentstr = commentstr + \
"%s %s %s\n" % (dev,type,mntpt)
rc = self.presentPartitioningComments(_("Format Warnings"),
labelstr1, labelstr2,
commentstr,
type="custom",
custom_buttons=["gtk-cancel",
_("_Format")])
if rc != 1:
raise gui.StayOnScreen
self.stripeGraph.shutDown()
self.tree.clear()
del self.parent
return None
def getPrev(self):
self.stripeGraph.shutDown()
self.tree.clear()
del self.parent
return None
def addDevice(self, device, treeiter):
if device.format.hidden:
return
if device.format.type == "luks":
# we'll want to grab format info from the mapped
# device, not the encrypted one
try:
dm_dev = self.storage.devicetree.getChildren(device)[0]
except IndexError:
format = device.format
else:
format = dm_dev.format
else:
format = device.format
# icon for the format column
if device.format.type == "luks" and not device.format.exists:
# we're creating the LUKS header
format_icon = self.lock_pixbuf
elif not format.exists:
# we're creating a format on the device
format_icon = self.checkmark_pixbuf
else:
format_icon = None
# mount point string
if format.type == "lvmpv":
vg = None
for _vg in self.storage.vgs:
if _vg.dependsOn(device):
vg = _vg
break
mnt_str = getattr(vg, "name", "")
elif format.type == "mdmember":
array = None
for _array in self.storage.mdarrays:
if _array.dependsOn(device):
array = _array
break
mnt_str = getattr(array, "name", "")
elif format.type == "btrfs" and not isinstance(device, BTRFSVolumeDevice):
btrfs_dev = self.storage.devicetree.getChildren(device)[0]
mnt_str = btrfs_dev.name
else:
mnt_str = getattr(format, "mountpoint", "")
if mnt_str is None:
mnt_str = ""
isleaf = True
# device name
name_str = getattr(device, "lvname", device.name)
# label
label_str = getattr(format, "label", "")
if label_str is None:
label_str = ""
self.tree[treeiter]['Device'] = name_str
self.tree[treeiter]['Size (MB)'] = "%Ld" % device.size
self.tree[treeiter]['PyObject'] = device
self.tree[treeiter]['IsFormattable'] = format.formattable
self.tree[treeiter]['Format'] = format_icon
self.tree[treeiter]['Mount Point'] = mnt_str
self.tree[treeiter]['IsLeaf'] = isleaf
self.tree[treeiter]['Type'] = format.name
self.tree[treeiter]['Label'] = label_str
# XXX can this move up one level?
if isinstance(device, BTRFSVolumeDevice):
# list subvolumes as children of the main volume
for s in device.subvolumes:
log.debug("%r" % s.format)
isleaf = False
if s.format.exists:
sub_format_icon = None
else:
sub_format_icon = self.checkmark_pixbuf
subvol_iter = self.tree.append(treeiter)
self.tree[subvol_iter]['Device'] = s.name
self.tree[subvol_iter]['PyObject'] = s
self.tree[subvol_iter]['IsFormattable'] = True
self.tree[subvol_iter]['Format'] = sub_format_icon
self.tree[subvol_iter]['Mount Point'] = s.format.mountpoint
self.tree[subvol_iter]['Type'] = s.type
self.tree[subvol_iter]['IsLeaf'] = True
def populate(self, initial = 0):
self.tree.resetSelection()
# first do LVM
vgs = self.storage.vgs
if vgs:
lvmparent = self.tree.append(None)
self.tree[lvmparent]['Device'] = _("LVM Volume Groups")
for vg in vgs:
vgparent = self.tree.append(lvmparent)
self.addDevice(vg, vgparent)
self.tree[vgparent]['Type'] = ""
for lv in vg.lvs:
iter = self.tree.append(vgparent)
self.addDevice(lv, iter)
# We add a row for the VG free space.
if vg.freeSpace > 0:
iter = self.tree.append(vgparent)
self.tree[iter]['Device'] = _("Free")
self.tree[iter]['Size (MB)'] = str(vg.freeSpace)
self.tree[iter]['PyObject'] = None
self.tree[iter]['Mount Point'] = ""
self.tree[iter]['IsLeaf'] = True
# handle RAID next
mdarrays = self.storage.mdarrays
if mdarrays:
raidparent = self.tree.append(None)
self.tree[raidparent]['Device'] = _("RAID Devices")
for array in mdarrays:
iter = self.tree.append(raidparent)
self.addDevice(array, iter)
name = "%s <span size=\"small\" color=\"gray\">(%s)</span>" % \
(array.name, array.path)
self.tree[iter]['Device'] = name
# BTRFS volumes
btrfs_devs = self.storage.btrfsVolumes
if btrfs_devs:
btrfsparent = self.tree.append(None)
self.tree[btrfsparent]['Device'] = _("BTRFS Volumes")
for dev in btrfs_devs:
iter = self.tree.append(btrfsparent)
self.addDevice(dev, iter)
# now normal partitions
disks = self.storage.partitioned
# also include unpartitioned disks that aren't mpath or biosraid
whole = filter(lambda d: not d.partitioned and not d.format.hidden,
self.storage.disks)
disks.extend(whole)
disks.sort(key=lambda d: d.name)
drvparent = self.tree.append(None)
self.tree[drvparent]['Device'] = _("Hard Drives")
for disk in disks:
# add a parent node to the tree
parent = self.tree.append(drvparent)
self.tree[parent]['PyObject'] = disk
if disk.partitioned:
part = disk.format.firstPartition
extendedParent = None
while part:
if part.type & parted.PARTITION_METADATA:
part = part.nextPartition()
continue
partName = devicePathToName(part.getDeviceNodeName())
device = self.storage.devicetree.getDeviceByName(partName)
if not device and not part.type & parted.PARTITION_FREESPACE:
log.debug("can't find partition %s in device"
" tree" % partName)
# ignore any free space region that is less than the
# grain size of the disklabel alignment we are using
if part.type & parted.PARTITION_FREESPACE:
min_length = disk.format.alignment.grainSize
if part.type & parted.PARTITION_LOGICAL:
# ignored free regions in the extended can be up
# to twice the alignment grain size, to account
# for logical partition metadata
min_length *= 2
if part.geometry.length < min_length:
part = part.nextPartition()
continue
if device and device.isExtended:
if extendedParent:
raise RuntimeError, ("can't handle more than "
"one extended partition per disk")
extendedParent = self.tree.append(parent)
iter = extendedParent
elif part.type & parted.PARTITION_LOGICAL:
if not extendedParent:
raise RuntimeError, ("crossed logical partition "
"before extended")
iter = self.tree.append(extendedParent)
else:
iter = self.tree.append(parent)
if device and not device.isExtended:
self.addDevice(device, iter)
else:
# either extended or freespace
if part.type & parted.PARTITION_FREESPACE:
devstring = _("Free")
ptype = ""
else:
devstring = partName
ptype = _("Extended")
self.tree[iter]['Device'] = devstring
self.tree[iter]['Type'] = ptype
size = part.getSize(unit="MB")
if size < 1.0:
sizestr = "< 1"
else:
sizestr = "%Ld" % (size)
self.tree[iter]['Size (MB)'] = sizestr
self.tree[iter]['PyObject'] = device
part = part.nextPartition()
else:
# whole-disk formatting
self.addDevice(disk, parent)
ident = None
try:
if disk.type == "dasd" or disk.type == "zfcp":
ident = deviceNameToDiskByPath(disk.name)
if ident.startswith("/dev/disk/by-path/"):
ident = os.path.basename(ident)
elif disk.type == "dm-multipath":
ident = disk.wwid
except DeviceNotFoundError:
ident = None
if not ident:
ident = disk.path
# Insert a '\n' when device string is too long. Usually when it
# contains '/dev/mapper'. First column should be around 20 chars.
if len(disk.name) + len(ident) > 20:
separator = "\n"
else:
separator= " "
self.tree[parent]['Device'] = \
"%s%s<span size=\"small\" color=\"gray\">(%s)</span>" \
% (disk.name, separator, ident)
self.treeView.expand_all()
self.messageGraph.display()
def barviewActivateCB(self):
""" Should be called when we double click on a slice"""
# This is a bit of a hack to make the double click on free space work.
# This function is useful when the selected slice is a free space,
# in any other case it calls self.treeActiveCB.
# We first see if the double click was from a free space or from another
# slice.
sel_slice = self.stripeGraph.getSelectedSlice()
if sel_slice == None:
# This really should not happen. Do nothing.
return
# The selected slice is a free slice if the object contained in it is
# None.
if sel_slice.obj != None:
# This is not a free slice, we should call treeActivateCB
return self.treeActivateCB()
else:
# Display a create window according to the stripe object.
# Get the device from the stripe.obj
disp_stripe = self.stripeGraph.getDisplayed()
if disp_stripe == None:
# this should not happen
return
# Display a create dialog.
stripe_dev = disp_stripe.obj
if stripe_dev.partitioned:
tempformat = self.storage.defaultFSType
device = self.storage.newPartition(fmt_type=tempformat)
self.editPartition(device, isNew = True)
elif isinstance(stripe_dev, storage.LVMVolumeGroupDevice):
self.editLVMLogicalVolume(vg = stripe_dev)
return
def treeActivateCB(self, *args):
curr_dev = self.tree.getCurrentDevice()
if isinstance(curr_dev, storage.PartitionDevice) and \
not curr_dev.isExtended:
self.editCB()
elif isinstance(curr_dev, storage.LVMLogicalVolumeDevice) \
or isinstance(curr_dev, storage.LVMVolumeGroupDevice) \
or isinstance(curr_dev, storage.MDRaidArrayDevice):
self.editCB()
elif curr_dev == None:
# Its probably a free space
iparent = self.tree.getCurrentDeviceParent()
if iparent == None:
# it was not free space, it is a root row.
return
# We execute a create function given the type of parent that was
# found.
            # FIXME: This code repeats logic from barviewActivateCB; it might
            # be a good idea to factor it into a shared function.
curr_parent = self.tree[iparent]["PyObject"]
if curr_parent.partitioned:
tempformat = self.storage.defaultFSType
device = self.storage.newPartition(fmt_type=tempformat)
self.editPartition(device, isNew = True)
elif isinstance(curr_parent, storage.LVMVolumeGroupDevice):
self.editLVMLogicalVolume(vg = curr_parent)
return
def treeSelectCB(self, selection, *args):
# The edit and create buttons will be enabled if the user has chosen
# something editable and/or deletable.
self.deleteButton.set_sensitive(False)
self.editButton.set_sensitive(False)
        # I have no idea why this iter might be None. It's best to return
        # without any action.
model, iter = selection.get_selected()
if not iter:
return
# If we return because there is no parent, make sure we show the user
# the infoGraph and no stripeGraph. The 'create' and 'delete' buttons
# will be deactivated.
iparent = model.iter_parent(iter)
if not iparent:
self.stripeGraph.shutDown()
self.messageGraph.display()
return # This is a root row.
# We destroy the message first. We will make sure to repaint it later
# if no stipe is displayed. Can't destroy it at the end of this func
# because it uncenters the created stripe, if any.
self.messageGraph.destroy()
device = model[iter]['PyObject']
# See if we need to change what is in the canvas. In all possibilities
# we must make sure we have the correct StripeGraph class.
if not device:
# This is free space.
parent = self.tree[iparent]["PyObject"]
if parent.partitioned:
if not isinstance(self.stripeGraph, DiskStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = DiskStripeGraph(self.storage,
drive = parent, cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(parent)
elif isinstance(parent, storage.LVMVolumeGroupDevice):
if not isinstance(self.stripeGraph, LVMStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = LVMStripeGraph(self.storage,
vg = parent, cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(parent)
elif device.partitioned:
if not isinstance(self.stripeGraph, DiskStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = DiskStripeGraph(self.storage,
drive = device,
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device)
# this is deletable but not editable.
self.deleteButton.set_sensitive(True)
elif isinstance(device, storage.PartitionDevice):
if not isinstance(self.stripeGraph, DiskStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = DiskStripeGraph(self.storage,
drive = device.parents[0],
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device.parents[0])
self.stripeGraph.selectSliceFromObj(device)
self.deleteButton.set_sensitive(True)
if not device.isExtended:
self.editButton.set_sensitive(True)
elif isinstance(device, storage.LVMVolumeGroupDevice):
if not isinstance(self.stripeGraph, LVMStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = LVMStripeGraph(self.storage, vg = device,
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device)
self.deleteButton.set_sensitive(True)
self.editButton.set_sensitive(True)
elif isinstance(device, storage.LVMLogicalVolumeDevice):
if not isinstance(self.stripeGraph, LVMStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = LVMStripeGraph(self.storage, vg = device.vg,
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device.vg)
self.stripeGraph.selectSliceFromObj(device)
self.deleteButton.set_sensitive(True)
self.editButton.set_sensitive(True)
elif isinstance(device, storage.MDRaidArrayDevice):
if not isinstance(self.stripeGraph, MDRaidArrayStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = MDRaidArrayStripeGraph(self.storage,
device = device,
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device)
self.deleteButton.set_sensitive(True)
self.editButton.set_sensitive(True)
elif isinstance(device, storage.BTRFSDevice):
# BTRFSDevice can be edited but not explicitly deleted. It is
# deleted when its last member device is removed.
if not isinstance(self.stripeGraph, BTRFSStripeGraph):
self.stripeGraph.shutDown()
self.stripeGraph = BTRFSStripeGraph(self.storage,
device = device,
cCB = self.tree.selectRowFromObj,
dcCB = self.barviewActivateCB)
self.stripeGraph.setDisplayed(device)
self.deleteButton.set_sensitive(False)
self.editButton.set_sensitive(True)
else:
# This means that the user selected something that is not showable
# in the bar view. Just show the information message.
self.stripeGraph.shutDown()
self.messageGraph.display()
self.deleteButton.set_sensitive(False)
self.editButton.set_sensitive(False)
def deleteCB(self, widget):
""" Right now we can say that if the device is partitioned we
want to delete all of the devices it contains. At some point
we will want to support creation and removal of partitionable
devices. This will need some work when that time comes.
"""
device = self.tree.getCurrentDevice()
if device.partitioned:
if doClearPartitionedDevice(self.intf,
self.storage,
device):
self.refresh()
elif doDeleteDevice(self.intf,
self.storage,
device):
if isinstance(device, storage.devices.PartitionDevice):
justRedraw = False
else:
justRedraw = True
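            # Deleting a partition changes the on-disk layout, so refresh() must
            # re-run doPartitioning(); for other device types a redraw is enough.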
if device.type == "lvmlv" and device in device.vg.lvs:
device.vg._removeLogVol(device)
self.refresh(justRedraw=justRedraw)
def createCB(self, *args):
# First we must decide what parts of the create_storage_dialog
# we will activate.
activate_create_partition = True
# We activate the create Volume Group radio button if there is a free
# partition with a Physical Volume format.
activate_create_vg = False
availpvs = len(self.storage.unusedPVs())
if (lvm.has_lvm()
and getFormat("lvmpv").supported
and availpvs > 0):
activate_create_vg = True
# We activate the create RAID dev if there are partitions that have
# raid format and are not related to any raid dev.
activate_create_raid_dev = False
availraidparts = len(self.storage.unusedMDMembers())
availminors = self.storage.unusedMDMinors
if (len(availminors) > 0
and getFormat("software RAID").supported
and availraidparts > 1):
activate_create_raid_dev = True
        # Check whether all the possibilities are False. In that case tell the
        # user that nothing can be created and explain the reasons.
if (not activate_create_partition
and not activate_create_vg
and not activate_create_raid_dev):
self.intf.messageWindow(_("Cannot perform any creation action"),
_("Note that the creation action requires one of the "
"following:\n\n"
"* Free space in one of the Hard Drives.\n"
"* At least two free Software RAID partitions.\n"
"* At least one free physical volume (LVM) partition.\n"
"* At least one Volume Group with free space."),
custom_icon="warning")
return
# We will activate the create lv button when we have a VG to put the
# LVs on.
activate_create_lv = False
vgs_with_free_space = []
for vg in self.storage.vgs:
if vg.freeSpace > 0:
vgs_with_free_space.append(vg)
if len(vgs_with_free_space) > 0:
activate_create_lv = True
# GTK crap starts here.
create_storage_xml = gtk.glade.XML(
gui.findGladeFile("create-storage.glade"), domain="anaconda")
self.dialog = create_storage_xml.get_widget("create_storage_dialog")
# Activate the partition radio buttons if needed.
# sp_rb -> standard partition
sp_rb = create_storage_xml.get_widget("create_storage_rb_standard_part")
# lp_rb -> lvm partition (physical volume)
lp_rb = create_storage_xml.get_widget("create_storage_rb_lvm_part")
# rp_rb -> RAID partition
rp_rb = create_storage_xml.get_widget("create_storage_rb_raid_part")
if activate_create_partition:
sp_rb.set_sensitive(True)
lp_rb.set_sensitive(True)
rp_rb.set_sensitive(True)
# Activate the Volume Group radio buttons if needed.
# vg_rb -> Volume Group
vg_rb = create_storage_xml.get_widget("create_storage_rb_lvm_vg")
if activate_create_vg:
vg_rb.set_sensitive(True)
# Activate the Logical Volume radio button if needed.
# We must also take care to control the combo box.
lv_rb = create_storage_xml.get_widget("create_storage_rb_lvm_lv")
if activate_create_lv:
# The combobox will be visible if the radio button is active.
# The combobox will be sensitive when the radio button is active.
def toggle_vg_cb_CB(button, vg_cb, selected_dev):
if button.get_active():
vg_cb.set_sensitive(True)
# We set the VG to whatever the user has chosen in the tree
# view. We will fall back on the first item on the list if
# there is no chosen VG.
if selected_dev and selected_dev.name \
and vg_cb.set_active_text(selected_dev.name):
# if set_active is True, we don't need to do anything else
pass
else:
vg_cb.set_active_text(vgs_with_free_space[0].name)
else:
vg_cb.set_sensitive(False)
vg_cb_st = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_PYOBJECT)
vg_cb = datacombo.DataComboBox(store = vg_cb_st)
vg_cb.set_sensitive(False)
for vg in vgs_with_free_space:
# FIXME: the name length might be a problem.
vg_cb.append(vg.name, vg)
lv_hb = create_storage_xml.get_widget("create_storage_hb_lvm_lv")
lv_hb.pack_start(vg_cb)
lv_rb.set_sensitive(True)
selected_dev = self.tree.getCurrentDevice()
lv_rb.connect("toggled", toggle_vg_cb_CB, vg_cb, selected_dev)
# Activate the RAID dev if needed.
# rd_rb -> RAID device
rd_rb = create_storage_xml.get_widget("create_storage_rb_raid_dev")
if activate_create_raid_dev:
rd_rb.set_sensitive(True)
        # Before drawing let's select the first radio button that is sensitive:
# How can I get sensitivity from gtk.radiobutton?
if activate_create_partition:
sp_rb.set_active(True)
sp_rb.grab_focus()
elif activate_create_vg:
vg_rb.set_active(True)
vg_rb.grab_focus()
elif activate_create_raid_dev:
rd_rb.set_active(True)
rd_rb.grab_focus()
gui.addFrame(self.dialog)
self.dialog.show_all()
# Lets work the information messages with CB
# The RAID info message
rinfo_button = create_storage_xml.get_widget("create_storage_info_raid")
whatis_r = _("Software RAID allows you to combine several disks into "
"a larger RAID device. A RAID device can be configured "
"to provide additional speed and reliability compared "
"to using an individual drive. For more information on "
"using RAID devices please consult the %s "
"documentation.\n") % (productName,)
whatneed_r = _("To use RAID you must first create at least two "
"partitions of type 'software RAID'. Then you can create a "
"RAID device that can be formatted and mounted.\n\n")
whathave_r = P_(
"You currently have %d software RAID partition free to use.",
"You currently have %d software RAID partitions free to use.",
availraidparts) % (availraidparts,)
rinfo_message = "%s\n%s%s" % (whatis_r, whatneed_r, whathave_r)
rinfo_cb = lambda x : self.intf.messageWindow(_("About RAID"),
rinfo_message, custom_icon="information")
rinfo_button.connect("clicked", rinfo_cb)
# The LVM info message
lvminfo_button = create_storage_xml.get_widget("create_storage_info_lvm")
whatis_lvm = _("Logical Volume Manager (LVM) is a 3 level construct. "
"The first level is made up of disks or partitions formatted with "
"LVM metadata called Physical Volumes (PV). A Volume Group "
"(VG) sits on top of one or more PVs. The VG, in turn, is the "
"base to create one or more Logical Volumes (LV). Note that a "
"VG can be an aggregate of PVs from multiple physical disks. For "
"more information on using LVM please consult the %s "
"documentation\n") % (productName, )
whatneed_lvm = _("To create a PV you need a partition with "
"free space. To create a VG you need a PV that is not "
"part of any existing VG. To create an LV you need a VG with "
"free space.\n\n")
whathave_lvm = P_("You currently have %d available PV free to use.\n",
"You currently have %d available PVs free to use.\n",
availpvs) % (availpvs, )
lvminfo_message = "%s\n%s%s" % (whatis_lvm, whatneed_lvm, whathave_lvm)
lvminfo_cb = lambda x : self.intf.messageWindow(_("About LVM"),
lvminfo_message, custom_icon="information")
lvminfo_button.connect("clicked", lvminfo_cb)
dialog_rc = self.dialog.run()
# If Cancel was pressed
if dialog_rc == 0:
self.dialog.destroy()
return
# If Create was pressed Make sure we do a dialog.destroy before
# calling any other screen. We don't want the create dialog to show
# in the back when we pop up other screens.
if dialog_rc != 1:
log.error("I received a dialog_rc != 1 (%d) witch should not "
"happen" % dialog_rc)
self.dialog.destroy()
return
self.dialog.destroy()
if rp_rb.get_active():
member = self.storage.newPartition(fmt_type="mdmember")
self.editPartition(member, isNew = True, restrictfs=["mdmember"])
return
elif rd_rb.get_active():
array = self.storage.newMDArray(fmt_type=self.storage.defaultFSType)
self.editRaidArray(array, isNew = True)
return
elif lp_rb.get_active():
member = self.storage.newPartition(fmt_type="lvmpv")
self.editPartition(member, isNew = True, restrictfs=["lvmpv"])
return
elif vg_rb.get_active():
tempvg = self.storage.newVG()
self.editLVMVolumeGroup(tempvg, isNew = True)
return
elif lv_rb.get_active():
selected_vg = vg_cb.get_active_value()
self.editLVMLogicalVolume(vg = selected_vg)
return
elif sp_rb.get_active():
tempformat = self.storage.defaultFSType
device = self.storage.newPartition(fmt_type=tempformat)
self.editPartition(device, isNew = True)
return
def resetCB(self, *args):
if not confirmResetPartitionState(self.intf):
return
self.stripeGraph.shutDown()
# temporarily unset storage.config.clearPartType so that all devices
# will be found during storage reset
clearPartType = self.storage.config.clearPartType
self.storage.config.clearPartType = None
self.storage.reset()
self.storage.config.clearPartType = clearPartType
self.tree.clear()
self.populate()
def refresh(self, justRedraw=None):
log.debug("refresh: justRedraw=%s" % justRedraw)
self.stripeGraph.shutDown()
self.tree.clear()
if justRedraw:
rc = 0
else:
try:
doPartitioning(self.storage)
rc = 0
except PartitioningError as msg:
self.intf.messageWindow(_("Error Partitioning"),
_("Could not allocate requested partitions: %s.") % (msg),
custom_icon="error")
rc = -1
except PartitioningWarning as msg:
# XXX somebody other than me should make this look better
# XXX this doesn't handle the 'delete /boot partition spec' case
# (it says 'add anyway')
dialog = gtk.MessageDialog(self.parent, 0, gtk.MESSAGE_WARNING,
gtk.BUTTONS_NONE,
_("Warning: %s.") % (msg))
gui.addFrame(dialog)
button = gtk.Button(_("_Modify Partition"))
dialog.add_action_widget(button, 1)
button = gtk.Button(_("_Continue"))
dialog.add_action_widget(button, 2)
dialog.set_position(gtk.WIN_POS_CENTER)
dialog.show_all()
rc = dialog.run()
dialog.destroy()
if rc == 1:
rc = -1
else:
rc = 0
all_devices = self.storage.devicetree.devices
bootDevs = [d for d in all_devices if d.bootable]
#if reqs:
# for req in reqs:
# req.ignoreBootConstraints = 1
if not rc == -1:
self.populate()
return rc
def editCB(self, *args):
device = self.tree.getCurrentDevice()
reason = self.storage.deviceImmutable(device, ignoreProtected=True)
if reason:
self.intf.messageWindow(_("Unable To Edit"),
_("You cannot edit this device:\n\n%s")
% reason,
custom_icon="error")
return
if device.type == "mdarray":
self.editRaidArray(device)
elif device.type == "lvmvg":
self.editLVMVolumeGroup(device)
elif device.type == "lvmlv":
self.editLVMLogicalVolume(lv = device)
elif isinstance(device, storage.devices.PartitionDevice):
self.editPartition(device)
# isNew implies that this request has never been successfully used before
def editRaidArray(self, raiddev, isNew = False):
# r_d_g -> raid_dialog_gui
raideditor = r_d_g.RaidEditor(self.storage, self.intf, self.parent,
raiddev, isNew)
while True:
actions = raideditor.run()
for action in actions:
# FIXME: this needs to handle exceptions
self.storage.devicetree.registerAction(action)
if self.refresh(justRedraw=True):
actions.reverse()
for action in actions:
self.storage.devicetree.cancelAction(action)
if self.refresh():
raise RuntimeError, ("Returning partitions to state "
"prior to RAID edit failed")
continue
else:
break
raideditor.destroy()
def editPartition(self, device, isNew = False, restrictfs = None):
# p_d_g -> partition_dialog_gui
parteditor = p_d_g.PartitionEditor(self.anaconda, self.parent, device,
isNew = isNew, restrictfs = restrictfs)
while True:
orig_device = copy.copy(device)
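            # A shallow copy suffices here: only the req_* attributes need to be
            # preserved so they can be restored below if the edit is rolled back.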
actions = parteditor.run()
for action in actions:
# XXX we should handle exceptions here
self.anaconda.storage.devicetree.registerAction(action)
if self.refresh(justRedraw=not actions):
# autopart failed -- cancel the actions and try to get
# back to previous state
actions.reverse()
for action in actions:
self.anaconda.storage.devicetree.cancelAction(action)
# FIXME: proper action/device management would be better
if not isNew:
device.req_size = orig_device.req_size
device.req_base_size = orig_device.req_base_size
device.req_grow = orig_device.req_grow
device.req_max_size = orig_device.req_max_size
device.req_primary = orig_device.req_primary
device.req_disks = orig_device.req_disks
if self.refresh():
# this worked before and doesn't now...
raise RuntimeError, ("Returning partitions to state "
"prior to edit failed")
else:
break
parteditor.destroy()
return 1
def editLVMVolumeGroup(self, device, isNew = False):
# l_d_g -> lvm_dialog_gui
vgeditor = l_d_g.VolumeGroupEditor(self.anaconda, self.intf, self.parent,
device, isNew)
while True:
actions = vgeditor.run()
for action in actions:
# FIXME: handle exceptions
self.storage.devicetree.registerAction(action)
if self.refresh(justRedraw=True):
actions.reverse()
for action in actions:
self.storage.devicetree.cancelAction(action)
if self.refresh():
raise RuntimeError, ("Returning partitions to state "
"prior to edit failed")
continue
else:
break
vgeditor.destroy()
def editLVMLogicalVolume (self, lv = None, vg = None):
"""Will be consistent with the state of things and use this funciton
for creating and editing LVs.
lv -- the logical volume to edit. If this is set there is no need
for the other two arguments.
vg -- the volume group where the new lv is going to be created. This
will only be relevant when we are createing an LV.
"""
if lv != None:
# l_d_g -> lvm_dialog_gui
vgeditor = l_d_g.VolumeGroupEditor(self.anaconda, self.intf, self.parent,
lv.vg, isNew = False)
lv = vgeditor.lvs[lv.lvname]
isNew = False
elif vg != None:
# l_d_g -> lvm_dialog_gui
vgeditor = l_d_g.VolumeGroupEditor(self.anaconda, self.intf, self.parent,
vg, isNew = False)
tempvg = vgeditor.getTempVG()
name = self.storage.suggestDeviceName(parent=tempvg, prefix="lv")
format = getFormat(self.storage.defaultFSType)
vgeditor.lvs[name] = {'name': name,
'size': vg.freeSpace,
'format': format,
'originalFormat': format,
'stripes': 1,
'logSize': 0,
'snapshotSpace': 0,
'exists': False}
lv = vgeditor.lvs[name]
isNew = True
else:
            # Neither lv nor vg was provided; nothing to edit or create.
return
while True:
vgeditor.editLogicalVolume(lv, isNew = isNew)
actions = vgeditor.convertToActions()
for action in actions:
# FIXME: handle exceptions
self.storage.devicetree.registerAction(action)
if self.refresh(justRedraw=True):
actions.reverse()
for action in actions:
self.storage.devicetree.cancelAction(action)
if self.refresh():
raise RuntimeError, ("Returning partitions to state "
"prior to edit failed")
continue
else:
break
vgeditor.destroy()
def getScreen(self, anaconda):
self.anaconda = anaconda
self.storage = anaconda.storage
self.intf = anaconda.intf
self.checkmark_pixbuf = gui.getPixbuf("checkMark.png")
self.lock_pixbuf = gui.getPixbuf("gnome-lock.png")
checkForSwapNoMatch(anaconda)
# Beginning of the GTK stuff.
# create the operational buttons
buttonBox = gtk.HButtonBox()
buttonBox.set_spacing(6)
buttonBox.set_layout(gtk.BUTTONBOX_END)
ops = ((_("_Create"), self.createCB),
(_("_Edit"), self.editCB),
(_("_Delete"), self.deleteCB),
(_("Re_set"), self.resetCB))
for label, cb in ops:
button = gtk.Button(label)
buttonBox.add (button)
button.connect ("clicked", cb)
# We need these to control their sensitivity.
if label == _("_Edit"):
self.editButton = button
self.editButton.set_sensitive(False)
elif label == _("_Delete"):
self.deleteButton = button
self.deleteButton.set_sensitive(False)
# Create the disk tree (Fills the tree and the Bar View)
self.tree = DiskTreeModel()
self.treeView = self.tree.getTreeView()
self.treeView.connect('row-activated', self.treeActivateCB)
self.treeViewSelection = self.treeView.get_selection()
self.treeViewSelection.connect("changed", self.treeSelectCB)
self.stripeGraph = StripeGraph()
self.messageGraph = MessageGraph(self.stripeGraph.getCanvas(),
_("Please Select A Device"))
self.populate(initial = 1)
# Create the top scroll window
        # We don't actually need a *scroll* window but nothing else worked.
hadj = gtk.Adjustment(step_incr = 5.0)
vadj = gtk.Adjustment(step_incr = 5.0)
swt = gtk.ScrolledWindow(hadjustment = hadj, vadjustment = vadj)
swt.add(self.stripeGraph.getCanvas())
swt.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
swt.set_shadow_type(gtk.SHADOW_IN)
# Create the bottom scroll window
swb = gtk.ScrolledWindow()
swb.add(self.treeView)
swb.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
swb.set_shadow_type(gtk.SHADOW_IN)
# Create main vertical box and add everything.
MVbox = gtk.VBox(False, 5)
MVbox.pack_start(swt, False, False)
MVbox.pack_start(swb, True)
MVbox.pack_start(buttonBox, False, False)
MVbox.pack_start(gtk.HSeparator(), False)
return MVbox
| gpl-2.0 | -5,637,757,379,904,662,000 | 37.406448 | 109 | 0.551572 | false |
alikins/ansible | lib/ansible/modules/cloud/amazon/efs.py | 14 | 24211 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: efs
short_description: create and maintain EFS file systems
description:
    - Module allows creating, searching and destroying Amazon EFS file systems
version_added: "2.2"
requirements: [ boto3 ]
author:
- "Ryan Sydnor (@ryansydnor)"
- "Artem Kazakov (@akazakov)"
options:
encrypt:
description:
            - A boolean value that, if true, creates an encrypted file system. This cannot be modified after the file
              system is created.
required: false
default: false
choices: ['yes', 'no']
version_added: 2.5
kms_key_id:
description:
- The id of the AWS KMS CMK that will be used to protect the encrypted file system. This parameter is only
required if you want to use a non-default CMK. If this parameter is not specified, the default CMK for
Amazon EFS is used. The key id can be Key ID, Key ID ARN, Key Alias or Key Alias ARN.
required: false
version_added: 2.5
purge_tags:
description:
- If yes, existing tags will be purged from the resource to match exactly what is defined by I(tags) parameter. If the I(tags) parameter
is not set then tags will not be modified.
required: false
default: yes
choices: [ 'yes', 'no' ]
version_added: 2.5
state:
description:
            - Allows creating, searching and destroying an Amazon EFS file system
required: false
default: 'present'
choices: ['present', 'absent']
name:
description:
- Creation Token of Amazon EFS file system. Required for create. Either name or ID required for delete.
required: false
default: None
id:
description:
- ID of Amazon EFS. Either name or ID required for delete.
required: false
default: None
performance_mode:
description:
- File system's performance mode to use. Only takes effect during creation.
required: false
default: 'general_purpose'
choices: ['general_purpose', 'max_io']
tags:
description:
- "List of tags of Amazon EFS. Should be defined as dictionary
In case of 'present' state with list of tags and existing EFS (matched by 'name'), tags of EFS will be replaced with provided data."
required: false
default: None
targets:
description:
- "List of mounted targets. It should be a list of dictionaries, every dictionary should include next attributes:
- subnet_id - Mandatory. The ID of the subnet to add the mount target in.
- ip_address - Optional. A valid IPv4 address within the address range of the specified subnet.
- security_groups - Optional. List of security group IDs, of the form 'sg-xxxxxxxx'. These must be for the same VPC as subnet specified
This data may be modified for existing EFS using state 'present' and new list of mount targets."
required: false
default: None
wait:
description:
- "In case of 'present' state should wait for EFS 'available' life cycle state (of course, if current state not 'deleting' or 'deleted')
In case of 'absent' state should wait for EFS 'deleted' life cycle state"
required: false
default: "no"
choices: ["yes", "no"]
wait_timeout:
description:
- How long the module should wait (in seconds) for desired state before returning. Zero means wait as long as necessary.
required: false
default: 0
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# EFS provisioning
- efs:
state: present
name: myTestEFS
tags:
name: myTestNameTag
purpose: file-storage
targets:
- subnet_id: subnet-748c5d03
security_groups: [ "sg-1a2b3c4d" ]
# Modifying EFS data
- efs:
state: present
name: myTestEFS
tags:
name: myAnotherTestTag
targets:
- subnet_id: subnet-7654fdca
security_groups: [ "sg-4c5d6f7a" ]
# Deleting EFS
- efs:
state: absent
name: myTestEFS
'''
RETURN = '''
creation_time:
description: timestamp of creation date
returned: always
type: string
sample: "2015-11-16 07:30:57-05:00"
creation_token:
description: EFS creation token
returned: always
type: string
sample: "console-88609e04-9a0e-4a2e-912c-feaa99509961"
file_system_id:
description: ID of the file system
returned: always
type: string
sample: "fs-xxxxxxxx"
life_cycle_state:
description: state of the EFS file system
returned: always
type: string
sample: "creating, available, deleting, deleted"
mount_point:
description: url of file system
returned: always
type: string
sample: ".fs-xxxxxxxx.efs.us-west-2.amazonaws.com:/"
mount_targets:
description: list of mount targets
returned: always
type: list
sample:
[
{
"file_system_id": "fs-a7ad440e",
"ip_address": "172.31.17.173",
"life_cycle_state": "available",
"mount_target_id": "fsmt-d8907871",
"network_interface_id": "eni-6e387e26",
"owner_id": "740748460359",
"security_groups": [
"sg-a30b22c6"
],
"subnet_id": "subnet-e265c895"
},
...
]
name:
description: name of the file system
returned: always
type: string
sample: "my-efs"
number_of_mount_targets:
description: the number of targets mounted
returned: always
type: int
sample: 3
owner_id:
description: AWS account ID of EFS owner
returned: always
type: string
sample: "XXXXXXXXXXXX"
size_in_bytes:
description: size of the file system in bytes as of a timestamp
returned: always
type: dict
sample:
{
"timestamp": "2015-12-21 13:59:59-05:00",
"value": 12288
}
performance_mode:
description: performance mode of the file system
returned: always
type: string
sample: "generalPurpose"
tags:
description: tags on the efs instance
returned: always
type: dict
sample:
{
"name": "my-efs",
"key": "Value"
}
'''
from time import sleep
from time import time as timestamp
import traceback
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError as e:
pass # Taken care of by ec2.HAS_BOTO3
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (HAS_BOTO3, boto3_conn, camel_dict_to_snake_dict,
ec2_argument_spec, get_aws_connection_info, ansible_dict_to_boto3_tag_list,
compare_aws_tags, boto3_tag_list_to_ansible_dict)
def _index_by_key(key, items):
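    # Illustrative example (hypothetical subnet id):
    #   _index_by_key('SubnetId', targets) ->
    #       {'subnet-748c5d03': {'SubnetId': 'subnet-748c5d03', ...}, ...}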
return dict((item[key], item) for item in items)
class EFSConnection(object):
DEFAULT_WAIT_TIMEOUT_SECONDS = 0
STATE_CREATING = 'creating'
STATE_AVAILABLE = 'available'
STATE_DELETING = 'deleting'
STATE_DELETED = 'deleted'
def __init__(self, module, region, **aws_connect_params):
self.connection = boto3_conn(module, conn_type='client',
resource='efs', region=region,
**aws_connect_params)
self.module = module
self.region = region
self.wait = module.params.get('wait')
self.wait_timeout = module.params.get('wait_timeout')
def get_file_systems(self, **kwargs):
"""
Returns generator of file systems including all attributes of FS
"""
items = iterate_all(
'FileSystems',
self.connection.describe_file_systems,
**kwargs
)
for item in items:
item['Name'] = item['CreationToken']
item['CreationTime'] = str(item['CreationTime'])
"""
Suffix of network path to be used as NFS device for mount. More detail here:
http://docs.aws.amazon.com/efs/latest/ug/gs-step-three-connect-to-ec2-instance.html
"""
item['MountPoint'] = '.%s.efs.%s.amazonaws.com:/' % (item['FileSystemId'], self.region)
if 'Timestamp' in item['SizeInBytes']:
item['SizeInBytes']['Timestamp'] = str(item['SizeInBytes']['Timestamp'])
if item['LifeCycleState'] == self.STATE_AVAILABLE:
item['Tags'] = self.get_tags(FileSystemId=item['FileSystemId'])
item['MountTargets'] = list(self.get_mount_targets(FileSystemId=item['FileSystemId']))
else:
item['Tags'] = {}
item['MountTargets'] = []
yield item
def get_tags(self, **kwargs):
"""
Returns tag list for selected instance of EFS
"""
tags = self.connection.describe_tags(**kwargs)['Tags']
return tags
def get_mount_targets(self, **kwargs):
"""
Returns mount targets for selected instance of EFS
"""
targets = iterate_all(
'MountTargets',
self.connection.describe_mount_targets,
**kwargs
)
for target in targets:
if target['LifeCycleState'] == self.STATE_AVAILABLE:
target['SecurityGroups'] = list(self.get_security_groups(
MountTargetId=target['MountTargetId']
))
else:
target['SecurityGroups'] = []
yield target
def get_security_groups(self, **kwargs):
"""
Returns security groups for selected instance of EFS
"""
return iterate_all(
'SecurityGroups',
self.connection.describe_mount_target_security_groups,
**kwargs
)
def get_file_system_id(self, name):
"""
Returns ID of instance by instance name
"""
info = first_or_default(iterate_all(
'FileSystems',
self.connection.describe_file_systems,
CreationToken=name
))
return info and info['FileSystemId'] or None
def get_file_system_state(self, name, file_system_id=None):
"""
Returns state of filesystem by EFS id/name
"""
info = first_or_default(iterate_all(
'FileSystems',
self.connection.describe_file_systems,
CreationToken=name,
FileSystemId=file_system_id
))
return info and info['LifeCycleState'] or self.STATE_DELETED
def get_mount_targets_in_state(self, file_system_id, states=None):
"""
Returns states of mount targets of selected EFS with selected state(s) (optional)
"""
targets = iterate_all(
'MountTargets',
self.connection.describe_mount_targets,
FileSystemId=file_system_id
)
if states:
if not isinstance(states, list):
states = [states]
targets = filter(lambda target: target['LifeCycleState'] in states, targets)
return list(targets)
def create_file_system(self, name, performance_mode, encrypt, kms_key_id):
"""
Creates new filesystem with selected name
"""
changed = False
state = self.get_file_system_state(name)
params = {}
params['CreationToken'] = name
params['PerformanceMode'] = performance_mode
if encrypt:
params['Encrypted'] = encrypt
if kms_key_id is not None:
params['KmsKeyId'] = kms_key_id
if state in [self.STATE_DELETING, self.STATE_DELETED]:
wait_for(
lambda: self.get_file_system_state(name),
self.STATE_DELETED
)
try:
self.connection.create_file_system(**params)
changed = True
except ClientError as e:
self.module.fail_json(msg="Unable to create file system: {0}".format(to_native(e)),
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except BotoCoreError as e:
self.module.fail_json(msg="Unable to create file system: {0}".format(to_native(e)),
exception=traceback.format_exc())
# we always wait for the state to be available when creating.
# if we try to take any actions on the file system before it's available
# we'll throw errors
wait_for(
lambda: self.get_file_system_state(name),
self.STATE_AVAILABLE,
self.wait_timeout
)
return changed
def converge_file_system(self, name, tags, purge_tags, targets):
"""
Change attributes (mount targets and tags) of filesystem by name
"""
result = False
fs_id = self.get_file_system_id(name)
if tags is not None:
tags_need_modify, tags_to_delete = compare_aws_tags(boto3_tag_list_to_ansible_dict(self.get_tags(FileSystemId=fs_id)), tags, purge_tags)
if tags_to_delete:
try:
self.connection.delete_tags(
FileSystemId=fs_id,
TagKeys=tags_to_delete
)
except ClientError as e:
self.module.fail_json(msg="Unable to delete tags: {0}".format(to_native(e)),
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except BotoCoreError as e:
self.module.fail_json(msg="Unable to delete tags: {0}".format(to_native(e)),
exception=traceback.format_exc())
result = True
if tags_need_modify:
try:
self.connection.create_tags(
FileSystemId=fs_id,
Tags=ansible_dict_to_boto3_tag_list(tags_need_modify)
)
except ClientError as e:
self.module.fail_json(msg="Unable to create tags: {0}".format(to_native(e)),
exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except BotoCoreError as e:
self.module.fail_json(msg="Unable to create tags: {0}".format(to_native(e)),
exception=traceback.format_exc())
result = True
if targets is not None:
incomplete_states = [self.STATE_CREATING, self.STATE_DELETING]
wait_for(
lambda: len(self.get_mount_targets_in_state(fs_id, incomplete_states)),
0
)
current_targets = _index_by_key('SubnetId', self.get_mount_targets(FileSystemId=fs_id))
targets = _index_by_key('SubnetId', targets)
targets_to_create, intersection, targets_to_delete = dict_diff(current_targets,
targets, True)
            # To modify a mount target it must be deleted and created again
changed = [sid for sid in intersection if not targets_equal(['SubnetId', 'IpAddress', 'NetworkInterfaceId'],
current_targets[sid], targets[sid])]
targets_to_delete = list(targets_to_delete) + changed
targets_to_create = list(targets_to_create) + changed
if targets_to_delete:
for sid in targets_to_delete:
self.connection.delete_mount_target(
MountTargetId=current_targets[sid]['MountTargetId']
)
wait_for(
lambda: len(self.get_mount_targets_in_state(fs_id, incomplete_states)),
0
)
result = True
if targets_to_create:
for sid in targets_to_create:
self.connection.create_mount_target(
FileSystemId=fs_id,
**targets[sid]
)
wait_for(
lambda: len(self.get_mount_targets_in_state(fs_id, incomplete_states)),
0,
self.wait_timeout
)
result = True
# If no security groups were passed into the module, then do not change it.
security_groups_to_update = [sid for sid in intersection if
'SecurityGroups' in targets[sid] and
current_targets[sid]['SecurityGroups'] != targets[sid]['SecurityGroups']]
if security_groups_to_update:
for sid in security_groups_to_update:
self.connection.modify_mount_target_security_groups(
MountTargetId=current_targets[sid]['MountTargetId'],
SecurityGroups=targets[sid].get('SecurityGroups', None)
)
result = True
return result
def delete_file_system(self, name, file_system_id=None):
"""
Removes EFS instance by id/name
"""
result = False
state = self.get_file_system_state(name, file_system_id)
if state in [self.STATE_CREATING, self.STATE_AVAILABLE]:
wait_for(
lambda: self.get_file_system_state(name),
self.STATE_AVAILABLE
)
if not file_system_id:
file_system_id = self.get_file_system_id(name)
self.delete_mount_targets(file_system_id)
self.connection.delete_file_system(FileSystemId=file_system_id)
result = True
if self.wait:
wait_for(
lambda: self.get_file_system_state(name),
self.STATE_DELETED,
self.wait_timeout
)
return result
def delete_mount_targets(self, file_system_id):
"""
Removes mount targets by EFS id
"""
wait_for(
lambda: len(self.get_mount_targets_in_state(file_system_id, self.STATE_CREATING)),
0
)
targets = self.get_mount_targets_in_state(file_system_id, self.STATE_AVAILABLE)
for target in targets:
self.connection.delete_mount_target(MountTargetId=target['MountTargetId'])
wait_for(
lambda: len(self.get_mount_targets_in_state(file_system_id, self.STATE_DELETING)),
0
)
return len(targets) > 0
def iterate_all(attr, map_method, **kwargs):
"""
Method creates iterator from result set
"""
args = dict((key, value) for (key, value) in kwargs.items() if value is not None)
wait = 1
while True:
try:
data = map_method(**args)
for elm in data[attr]:
yield elm
if 'NextMarker' in data:
                args['Marker'] = data['NextMarker']
continue
break
except ClientError as e:
if e.response['Error']['Code'] == "ThrottlingException" and wait < 600:
sleep(wait)
wait = wait * 2
continue
else:
raise
def targets_equal(keys, a, b):
"""
    Method compares two mount targets by the specified attributes
"""
for key in keys:
if key in b and a[key] != b[key]:
return False
return True
def dict_diff(dict1, dict2, by_key=False):
"""
Helper method to calculate difference of two dictionaries
"""
keys1 = set(dict1.keys() if by_key else dict1.items())
keys2 = set(dict2.keys() if by_key else dict2.items())
intersection = keys1 & keys2
return keys2 ^ intersection, intersection, keys1 ^ intersection
def first_or_default(items, default=None):
"""
Helper method to fetch first element of list (if exists)
"""
for item in items:
return item
return default
def wait_for(callback, value, timeout=EFSConnection.DEFAULT_WAIT_TIMEOUT_SECONDS):
"""
Helper method to wait for desired value returned by callback method
"""
wait_start = timestamp()
while True:
if callback() != value:
if timeout != 0 and (timestamp() - wait_start > timeout):
raise RuntimeError('Wait timeout exceeded (' + str(timeout) + ' sec)')
else:
sleep(5)
continue
break
def main():
"""
Module action handler
"""
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
encrypt=dict(required=False, type="bool", default=False),
state=dict(required=False, type='str', choices=["present", "absent"], default="present"),
kms_key_id=dict(required=False, type='str', default=None),
purge_tags=dict(default=True, type='bool'),
id=dict(required=False, type='str', default=None),
name=dict(required=False, type='str', default=None),
tags=dict(required=False, type="dict", default={}),
targets=dict(required=False, type="list", default=[]),
performance_mode=dict(required=False, type='str', choices=["general_purpose", "max_io"], default="general_purpose"),
wait=dict(required=False, type="bool", default=False),
wait_timeout=dict(required=False, type="int", default=0)
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, _, aws_connect_params = get_aws_connection_info(module, boto3=True)
connection = EFSConnection(module, region, **aws_connect_params)
name = module.params.get('name')
fs_id = module.params.get('id')
tags = module.params.get('tags')
target_translations = {
'ip_address': 'IpAddress',
'security_groups': 'SecurityGroups',
'subnet_id': 'SubnetId'
}
targets = [dict((target_translations[key], value) for (key, value) in x.items()) for x in module.params.get('targets')]
performance_mode_translations = {
'general_purpose': 'generalPurpose',
'max_io': 'maxIO'
}
encrypt = module.params.get('encrypt')
kms_key_id = module.params.get('kms_key_id')
performance_mode = performance_mode_translations[module.params.get('performance_mode')]
purge_tags = module.params.get('purge_tags')
changed = False
state = str(module.params.get('state')).lower()
if state == 'present':
if not name:
module.fail_json(msg='Name parameter is required for create')
changed = connection.create_file_system(name, performance_mode, encrypt, kms_key_id)
changed = connection.converge_file_system(name=name, tags=tags, purge_tags=purge_tags, targets=targets) or changed
result = first_or_default(connection.get_file_systems(CreationToken=name))
elif state == 'absent':
if not name and not fs_id:
module.fail_json(msg='Either name or id parameter is required for delete')
changed = connection.delete_file_system(name, fs_id)
result = None
if result:
result = camel_dict_to_snake_dict(result)
module.exit_json(changed=changed, efs=result)
if __name__ == '__main__':
main()
| gpl-3.0 | 128,512,197,075,159,620 | 34.5 | 154 | 0.571517 | false |
Runscope/pysaml2 | src/saml2/mdie.py | 34 | 4757 | #!/usr/bin/env python
from saml2 import element_to_extension_element
from saml2 import extension_elements_to_elements
from saml2 import SamlBase
from saml2 import md
__author__ = 'rolandh'
"""
Functions used to import metadata from and export it to a pysaml2 format
"""
IMP_SKIP = ["_certs", "e_e_", "_extatt"]
EXP_SKIP = ["__class__"]
# From pysaml2 SAML2 metadata format to Python dictionary
def _eval(val, onts, mdb_safe):
"""
Convert a value to a basic dict format
:param val: The value
:param onts: Schemas to be used in the conversion
:return: The basic dictionary
"""
if isinstance(val, basestring):
val = val.strip()
if not val:
return None
else:
return val
elif isinstance(val, dict) or isinstance(val, SamlBase):
return to_dict(val, onts, mdb_safe)
elif isinstance(val, list):
lv = []
for v in val:
if isinstance(v, dict) or isinstance(v, SamlBase):
lv.append(to_dict(v, onts, mdb_safe))
else:
lv.append(v)
return lv
return val
def to_dict(_dict, onts, mdb_safe=False):
"""
Convert a pysaml2 SAML2 message class instance into a basic dictionary
format.
The export interface.
:param _dict: The pysaml2 metadata instance
:param onts: List of schemas to use for the conversion
:return: The converted information
"""
res = {}
if isinstance(_dict, SamlBase):
res["__class__"] = "%s&%s" % (_dict.c_namespace, _dict.c_tag)
for key in _dict.keyswv():
if key in IMP_SKIP:
continue
val = getattr(_dict, key)
if key == "extension_elements":
_eel = extension_elements_to_elements(val, onts)
_val = [_eval(_v, onts, mdb_safe) for _v in _eel]
elif key == "extension_attributes":
if mdb_safe:
_val = dict([(k.replace(".", "__"), v) for k, v in
val.items()])
#_val = {k.replace(".", "__"): v for k, v in val.items()}
else:
_val = val
else:
_val = _eval(val, onts, mdb_safe)
if _val:
if mdb_safe:
key = key.replace(".", "__")
res[key] = _val
else:
for key, val in _dict.items():
_val = _eval(val, onts, mdb_safe)
if _val:
if mdb_safe and "." in key:
key = key.replace(".", "__")
res[key] = _val
return res
# From Python dictionary to pysaml2 SAML2 metadata format
def _kwa(val, onts, mdb_safe=False):
"""
Key word argument conversion
:param val: A dictionary
:param onts: dictionary with schemas to use in the conversion
        schema namespace is the key in the dictionary
:return: A converted dictionary
"""
if not mdb_safe:
return dict([(k, from_dict(v, onts)) for k, v in val.items()
if k not in EXP_SKIP])
else:
_skip = ["_id"]
_skip.extend(EXP_SKIP)
return dict([(k.replace("__", "."), from_dict(v, onts)) for k, v in
val.items() if k not in _skip])
def from_dict(val, onts, mdb_safe=False):
"""
Converts a dictionary into a pysaml2 object
:param val: A dictionary
:param onts: Dictionary of schemas to use in the conversion
:return: The pysaml2 object instance
"""
if isinstance(val, dict):
if "__class__" in val:
ns, typ = val["__class__"].split("&")
cls = getattr(onts[ns], typ)
if cls is md.Extensions:
lv = []
for key, ditems in val.items():
if key in EXP_SKIP:
continue
for item in ditems:
ns, typ = item["__class__"].split("&")
cls = getattr(onts[ns], typ)
kwargs = _kwa(item, onts, mdb_safe)
inst = cls(**kwargs)
lv.append(element_to_extension_element(inst))
return lv
else:
kwargs = _kwa(val, onts, mdb_safe)
inst = cls(**kwargs)
return inst
else:
res = {}
for key, v in val.items():
if mdb_safe:
key = key.replace("__", ".")
res[key] = from_dict(v, onts)
return res
elif isinstance(val, basestring):
return val
elif isinstance(val, list):
return [from_dict(v, onts) for v in val]
else:
return val
| bsd-2-clause | 8,209,176,597,055,246,000 | 31.141892 | 77 | 0.505571 | false |
BlindHunter/django | tests/model_formsets_regress/tests.py | 173 | 20725 | from __future__ import unicode_literals
from django import forms
from django.forms.formsets import DELETION_FIELD_NAME, BaseFormSet
from django.forms.models import (
BaseModelFormSet, inlineformset_factory, modelform_factory,
modelformset_factory,
)
from django.forms.utils import ErrorDict, ErrorList
from django.test import TestCase
from django.utils import six
from .models import (
Host, Manager, Network, ProfileNetwork, Restaurant, User, UserProfile,
UserSite,
)
class InlineFormsetTests(TestCase):
def test_formset_over_to_field(self):
"A formset over a ForeignKey with a to_field can be saved. Regression for #10243"
Form = modelform_factory(User, fields="__all__")
FormSet = inlineformset_factory(User, UserSite, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a form with no data
form = Form()
form_set = FormSet(instance=User())
# Now create a new User and UserSite instance
data = {
'serial': '1',
'username': 'apollo13',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '0',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-data': '10',
'usersite_set-0-user': 'apollo13'
}
user = User()
form = Form(data)
if form.is_valid():
user = form.save()
else:
self.fail('Errors found on form:%s' % form_set)
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values()
self.assertEqual(usersite[0]['data'], 10)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now update the UserSite instance
data = {
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-id': six.text_type(usersite[0]['id']),
'usersite_set-0-data': '11',
'usersite_set-0-user': 'apollo13'
}
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values()
self.assertEqual(usersite[0]['data'], 11)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now add a new UserSite instance
data = {
'usersite_set-TOTAL_FORMS': '2',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '0',
'usersite_set-0-id': six.text_type(usersite[0]['id']),
'usersite_set-0-data': '11',
'usersite_set-0-user': 'apollo13',
'usersite_set-1-data': '42',
'usersite_set-1-user': 'apollo13'
}
form_set = FormSet(data, instance=user)
if form_set.is_valid():
form_set.save()
usersite = UserSite.objects.all().values().order_by('data')
self.assertEqual(usersite[0]['data'], 11)
self.assertEqual(usersite[0]['user_id'], 'apollo13')
self.assertEqual(usersite[1]['data'], 42)
self.assertEqual(usersite[1]['user_id'], 'apollo13')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
def test_formset_over_inherited_model(self):
"A formset over a ForeignKey with a to_field can be saved. Regression for #11120"
Form = modelform_factory(Restaurant, fields="__all__")
FormSet = inlineformset_factory(Restaurant, Manager, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a form with no data
form = Form()
form_set = FormSet(instance=Restaurant())
# Now create a new Restaurant and Manager instance
data = {
'name': "Guido's House of Pasta",
'manager_set-TOTAL_FORMS': '1',
'manager_set-INITIAL_FORMS': '0',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-name': 'Guido Van Rossum'
}
restaurant = User()
form = Form(data)
if form.is_valid():
restaurant = form.save()
else:
self.fail('Errors found on form:%s' % form_set)
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values()
self.assertEqual(manager[0]['name'], 'Guido Van Rossum')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now update the Manager instance
data = {
'manager_set-TOTAL_FORMS': '1',
'manager_set-INITIAL_FORMS': '1',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-id': six.text_type(manager[0]['id']),
'manager_set-0-name': 'Terry Gilliam'
}
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values()
self.assertEqual(manager[0]['name'], 'Terry Gilliam')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
# Now add a new Manager instance
data = {
'manager_set-TOTAL_FORMS': '2',
'manager_set-INITIAL_FORMS': '1',
'manager_set-MAX_NUM_FORMS': '0',
'manager_set-0-id': six.text_type(manager[0]['id']),
'manager_set-0-name': 'Terry Gilliam',
'manager_set-1-name': 'John Cleese'
}
form_set = FormSet(data, instance=restaurant)
if form_set.is_valid():
form_set.save()
manager = Manager.objects.all().values().order_by('name')
self.assertEqual(manager[0]['name'], 'John Cleese')
self.assertEqual(manager[1]['name'], 'Terry Gilliam')
else:
self.fail('Errors found on formset:%s' % form_set.errors)
def test_inline_model_with_to_field(self):
"""
#13794 --- An inline model with a to_field of a formset with instance
has working relations.
"""
FormSet = inlineformset_factory(User, UserSite, exclude=('is_superuser',))
user = User.objects.create(username="guido", serial=1337)
UserSite.objects.create(user=user, data=10)
formset = FormSet(instance=user)
# Testing the inline model's relation
self.assertEqual(formset[0].instance.user_id, "guido")
def test_inline_model_with_to_field_to_rel(self):
"""
#13794 --- An inline model with a to_field to a related field of a
formset with instance has working relations.
"""
FormSet = inlineformset_factory(UserProfile, ProfileNetwork, exclude=[])
user = User.objects.create(username="guido", serial=1337, pk=1)
self.assertEqual(user.pk, 1)
profile = UserProfile.objects.create(user=user, about="about", pk=2)
self.assertEqual(profile.pk, 2)
ProfileNetwork.objects.create(profile=profile, network=10, identifier=10)
formset = FormSet(instance=profile)
# Testing the inline model's relation
self.assertEqual(formset[0].instance.profile_id, 1)
def test_formset_with_none_instance(self):
"A formset with instance=None can be created. Regression for #11872"
Form = modelform_factory(User, fields="__all__")
FormSet = inlineformset_factory(User, UserSite, fields="__all__")
# Instantiate the Form and FormSet to prove
# you can create a formset with an instance of None
Form(instance=None)
FormSet(instance=None)
def test_empty_fields_on_modelformset(self):
"""
No fields passed to modelformset_factory() should result in no fields
on returned forms except for the id (#14119).
"""
UserFormSet = modelformset_factory(User, fields=())
formset = UserFormSet()
for form in formset.forms:
self.assertIn('id', form.fields)
self.assertEqual(len(form.fields), 1)
def test_save_as_new_with_new_inlines(self):
"""
Existing and new inlines are saved with save_as_new.
Regression for #14938.
"""
efnet = Network.objects.create(name="EFNet")
host1 = Host.objects.create(hostname="irc.he.net", network=efnet)
HostFormSet = inlineformset_factory(Network, Host, fields="__all__")
# Add a new host, modify previous host, and save-as-new
data = {
'host_set-TOTAL_FORMS': '2',
'host_set-INITIAL_FORMS': '1',
'host_set-MAX_NUM_FORMS': '0',
'host_set-0-id': six.text_type(host1.id),
'host_set-0-hostname': 'tranquility.hub.dal.net',
'host_set-1-hostname': 'matrix.de.eu.dal.net'
}
# To save a formset as new, it needs a new hub instance
dalnet = Network.objects.create(name="DALnet")
formset = HostFormSet(data, instance=dalnet, save_as_new=True)
self.assertTrue(formset.is_valid())
formset.save()
self.assertQuerysetEqual(
dalnet.host_set.order_by("hostname"),
["<Host: matrix.de.eu.dal.net>", "<Host: tranquility.hub.dal.net>"]
)
def test_initial_data(self):
user = User.objects.create(username="bibi", serial=1)
UserSite.objects.create(user=user, data=7)
FormSet = inlineformset_factory(User, UserSite, extra=2, fields="__all__")
formset = FormSet(instance=user, initial=[{'data': 41}, {'data': 42}])
self.assertEqual(formset.forms[0].initial['data'], 7)
self.assertEqual(formset.extra_forms[0].initial['data'], 41)
self.assertIn('value="42"', formset.extra_forms[1].as_p())
class FormsetTests(TestCase):
def test_error_class(self):
'''
Test the type of Formset and Form error attributes
'''
Formset = modelformset_factory(User, fields="__all__")
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '0',
'form-0-id': '',
'form-0-username': 'apollo13',
'form-0-serial': '1',
'form-1-id': '',
'form-1-username': 'apollo13',
'form-1-serial': '2',
}
formset = Formset(data)
# check if the returned error classes are correct
# note: formset.errors returns a list as documented
self.assertIsInstance(formset.errors, list)
self.assertIsInstance(formset.non_form_errors(), ErrorList)
for form in formset.forms:
self.assertIsInstance(form.errors, ErrorDict)
self.assertIsInstance(form.non_field_errors(), ErrorList)
def test_initial_data(self):
User.objects.create(username="bibi", serial=1)
Formset = modelformset_factory(User, fields="__all__", extra=2)
formset = Formset(initial=[{'username': 'apollo11'}, {'username': 'apollo12'}])
self.assertEqual(formset.forms[0].initial['username'], "bibi")
self.assertEqual(formset.extra_forms[0].initial['username'], "apollo11")
self.assertIn('value="apollo12"', formset.extra_forms[1].as_p())
def test_extraneous_query_is_not_run(self):
Formset = modelformset_factory(Network, fields="__all__")
data = {'test-TOTAL_FORMS': '1',
'test-INITIAL_FORMS': '0',
'test-MAX_NUM_FORMS': '',
'test-0-name': 'Random Place', }
with self.assertNumQueries(1):
formset = Formset(data, prefix="test")
formset.save()
class CustomWidget(forms.widgets.TextInput):
pass
class UserSiteForm(forms.ModelForm):
class Meta:
model = UserSite
fields = "__all__"
widgets = {
'id': CustomWidget,
'data': CustomWidget,
}
localized_fields = ('data',)
class Callback(object):
def __init__(self):
self.log = []
def __call__(self, db_field, **kwargs):
self.log.append((db_field, kwargs))
return db_field.formfield(**kwargs)
class FormfieldCallbackTests(TestCase):
"""
Regression for #13095 and #17683: Using base forms with widgets
defined in Meta should not raise errors and BaseModelForm should respect
the specified pk widget.
"""
def test_inlineformset_factory_default(self):
Formset = inlineformset_factory(User, UserSite, form=UserSiteForm, fields="__all__")
form = Formset().forms[0]
self.assertIsInstance(form['id'].field.widget, CustomWidget)
self.assertIsInstance(form['data'].field.widget, CustomWidget)
self.assertFalse(form.fields['id'].localize)
self.assertTrue(form.fields['data'].localize)
def test_modelformset_factory_default(self):
Formset = modelformset_factory(UserSite, form=UserSiteForm)
form = Formset().forms[0]
self.assertIsInstance(form['id'].field.widget, CustomWidget)
self.assertIsInstance(form['data'].field.widget, CustomWidget)
self.assertFalse(form.fields['id'].localize)
self.assertTrue(form.fields['data'].localize)
def assertCallbackCalled(self, callback):
id_field, user_field, data_field = UserSite._meta.fields
expected_log = [
(id_field, {'widget': CustomWidget}),
(user_field, {}),
(data_field, {'widget': CustomWidget, 'localize': True}),
]
self.assertEqual(callback.log, expected_log)
def test_inlineformset_custom_callback(self):
callback = Callback()
inlineformset_factory(User, UserSite, form=UserSiteForm,
formfield_callback=callback, fields="__all__")
self.assertCallbackCalled(callback)
def test_modelformset_custom_callback(self):
callback = Callback()
modelformset_factory(UserSite, form=UserSiteForm,
formfield_callback=callback)
self.assertCallbackCalled(callback)
class BaseCustomDeleteFormSet(BaseFormSet):
"""
A formset mix-in that lets a form decide if it's to be deleted.
Works for BaseFormSets. Also works for ModelFormSets with #14099 fixed.
form.should_delete() is called. The formset delete field is also suppressed.
"""
def add_fields(self, form, index):
super(BaseCustomDeleteFormSet, self).add_fields(form, index)
self.can_delete = True
if DELETION_FIELD_NAME in form.fields:
del form.fields[DELETION_FIELD_NAME]
def _should_delete_form(self, form):
return hasattr(form, 'should_delete') and form.should_delete()
class FormfieldShouldDeleteFormTests(TestCase):
"""
Regression for #14099: BaseModelFormSet should use ModelFormSet method _should_delete_form
"""
class BaseCustomDeleteModelFormSet(BaseModelFormSet, BaseCustomDeleteFormSet):
""" Model FormSet with CustomDelete MixIn """
class CustomDeleteUserForm(forms.ModelForm):
""" A model form with a 'should_delete' method """
class Meta:
model = User
fields = "__all__"
def should_delete(self):
""" delete form if odd PK """
return self.instance.pk % 2 != 0
NormalFormset = modelformset_factory(User, form=CustomDeleteUserForm, can_delete=True)
DeleteFormset = modelformset_factory(User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet)
data = {
'form-TOTAL_FORMS': '4',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '4',
'form-0-username': 'John',
'form-0-serial': '1',
'form-1-username': 'Paul',
'form-1-serial': '2',
'form-2-username': 'George',
'form-2-serial': '3',
'form-3-username': 'Ringo',
'form-3-serial': '5',
}
delete_all_ids = {
'form-0-DELETE': '1',
'form-1-DELETE': '1',
'form-2-DELETE': '1',
'form-3-DELETE': '1',
}
def test_init_database(self):
""" Add test data to database via formset """
formset = self.NormalFormset(self.data)
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 4)
def test_no_delete(self):
""" Verify base formset doesn't modify database """
# reload database
self.test_init_database()
# pass standard data dict & see none updated
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
formset = self.NormalFormset(data, queryset=User.objects.all())
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 4)
def test_all_delete(self):
""" Verify base formset honors DELETE field """
# reload database
self.test_init_database()
# create data dict with all fields marked for deletion
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
data.update(self.delete_all_ids)
formset = self.NormalFormset(data, queryset=User.objects.all())
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 0)
def test_custom_delete(self):
""" Verify DeleteFormset ignores DELETE field and uses form method """
# reload database
self.test_init_database()
# Create formset with custom Delete function
# create data dict with all fields marked for deletion
data = dict(self.data)
data['form-INITIAL_FORMS'] = 4
data.update({
'form-%d-id' % i: user.pk
for i, user in enumerate(User.objects.all())
})
data.update(self.delete_all_ids)
formset = self.DeleteFormset(data, queryset=User.objects.all())
# verify two were deleted
self.assertTrue(formset.is_valid())
self.assertEqual(len(formset.save()), 0)
self.assertEqual(len(User.objects.all()), 2)
# verify no "odd" PKs left
odd_ids = [user.pk for user in User.objects.all() if user.pk % 2]
self.assertEqual(len(odd_ids), 0)
class RedeleteTests(TestCase):
def test_resubmit(self):
u = User.objects.create(username='foo', serial=1)
us = UserSite.objects.create(user=u, data=7)
formset_cls = inlineformset_factory(User, UserSite, fields="__all__")
data = {
'serial': '1',
'username': 'foo',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '1',
'usersite_set-0-id': six.text_type(us.pk),
'usersite_set-0-data': '7',
'usersite_set-0-user': 'foo',
'usersite_set-0-DELETE': '1'
}
formset = formset_cls(data, instance=u)
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
formset = formset_cls(data, instance=u)
# Even if the "us" object isn't in the DB any more, the form
# validates.
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
def test_delete_already_deleted(self):
u = User.objects.create(username='foo', serial=1)
us = UserSite.objects.create(user=u, data=7)
formset_cls = inlineformset_factory(User, UserSite, fields="__all__")
data = {
'serial': '1',
'username': 'foo',
'usersite_set-TOTAL_FORMS': '1',
'usersite_set-INITIAL_FORMS': '1',
'usersite_set-MAX_NUM_FORMS': '1',
'usersite_set-0-id': six.text_type(us.pk),
'usersite_set-0-data': '7',
'usersite_set-0-user': 'foo',
'usersite_set-0-DELETE': '1'
}
formset = formset_cls(data, instance=u)
us.delete()
self.assertTrue(formset.is_valid())
formset.save()
self.assertEqual(UserSite.objects.count(), 0)
| bsd-3-clause | 4,951,472,987,322,792,000 | 37.027523 | 111 | 0.587696 | false |
terbolous/CouchPotatoServer | libs/git/files.py | 122 | 1831 | # Copyright (c) 2009, Rotem Yaari <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Rotem Yaari ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Rotem Yaari BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class ModifiedFile(object):
def __init__(self, filename):
super(ModifiedFile, self).__init__()
self.filename = filename
def __repr__(self):
return self.filename
def __eq__(self, other):
return isinstance(other, ModifiedFile) and other.filename == self.filename
| gpl-3.0 | 211,344,630,695,542,900 | 56.21875 | 82 | 0.743856 | false |
moas/carbooking | booking/courses/models.py | 1 | 3802 | from __future__ import unicode_literals
import datetime
from django.core.exceptions import ValidationError
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext as _
from django.db.models import signals
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from cities_light.models import Country, City
from model_utils import FieldTracker
from ..companies.models import Cars
from ..utils.common import CommonFields
# Create your models here.
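# Helper for Journey.departure_dt below: a module-level callable keeps the
# default serializable for migrations and evaluates it per instance instead
# of once at import time (assumed fix for the original inline default).
def default_departure_time():
    return timezone.now() + timezone.timedelta(minutes=15)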
@python_2_unicode_compatible
class Journey(CommonFields):
customer = models.ForeignKey(
User,
limit_choices_to={
'groups__name': settings.GROUP_CUSTOMER_LABEL,
'is_active': True,
},
on_delete=models.CASCADE,
verbose_name=_('Customer'),
)
country = models.ForeignKey(
Country,
on_delete=models.CASCADE,
verbose_name=_("Country")
)
departure_city = models.ForeignKey(
City,
verbose_name=_('Departure city'),
related_name='departure_point',
help_text=_('Departure must be related to country selected'),
)
departure_address = models.CharField(
_("Departure address"),
max_length=150
)
    departure_dt = models.DateTimeField(
        _('Start time'),
        default=default_departure_time,
    )
arrival_city = models.ForeignKey(
City,
verbose_name=_('Arrival city'),
related_name='arrival_point',
help_text=_('Arrival must be related to country selected')
)
arrival_address = models.CharField(
_('Arrival address'),
max_length=150,
)
car = models.ForeignKey(
Cars,
limit_choices_to={'is_active': True, },
verbose_name=_('Car'),
)
is_active = models.BooleanField(
default=True
)
car_tracker = FieldTracker(['car'])
def __str__(self):
return "Journey {}: {}".format(
self.id,
self.customer.get_full_name(),
)
def customer_full_name(self):
return self.customer.get_full_name()
def to(self):
return '{} ({})'.format(
self.departure_city,
self.departure_address,
)
def destination(self):
return '{} ({})'.format(
self.arrival_city,
self.arrival_address,
)
    destination.short_description = 'to'
class Meta:
verbose_name = _("journey")
verbose_name_plural = _("List of journey")
def get_absolute_url(self):
from django.core.urlresolvers import reverse
        return reverse('courses:detail-course', args=[self.id])
@classmethod
def reserved_flag(cls, sender, instance, created, **kwargs):
if created is True:
instance.car.is_reserved = True
else:
if instance.car_tracker.has_changed('car') is True:
previous_car = instance.car_tracker.previous('car')
previous_car.is_reserved = False
previous_car.save()
instance.car.is_reserved = instance.is_active
instance.car.save()
@classmethod
def post_delete_handler(cls, sender, instance, **kwargs):
car = instance.car
car.is_reserved = False
car.save()
def clean(self):
if self.car_tracker.has_changed('car') is True:
if self.car.is_reserved is True:
raise ValidationError(
{'car': _('Car selected is already reserved')}
)
signals.post_save.connect(Journey.reserved_flag, sender=Journey)
signals.post_delete.connect(Journey.post_delete_handler, sender=Journey)
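# The post_save handler keeps Cars.is_reserved in sync with its journey;
# post_delete frees the car again once the journey row is removed.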
| mit | 5,173,369,820,820,748,000 | 28.472868 | 72 | 0.61152 | false |
thomasvincent/utilities | NagiosPlugins/check_procs/pexpect/examples/chess2.py | 17 | 4026 | #!/usr/bin/env python
'''This demonstrates controlling a screen oriented application (curses).
It starts two instances of gnuchess and then pits them against each other.
'''
import pexpect
import string
import ANSI
import sys, os, time
class Chess:
def __init__(self, engine = "/usr/local/bin/gnuchess -a -h 1"):
self.child = pexpect.spawn (engine)
self.term = ANSI.ANSI ()
#self.child.expect ('Chess')
#if self.child.after != 'Chess':
# raise IOError, 'incompatible chess program'
#self.term.process_list (self.child.before)
#self.term.process_list (self.child.after)
self.last_computer_move = ''
def read_until_cursor (self, r,c, e=0):
'''Eventually something like this should move into the screen class or
a subclass. Maybe a combination of pexpect and screen...
'''
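        # Blocks until the child's cursor reaches row r, column c, feeding
        # every byte read through the ANSI terminal emulator; pass e=1 to
        # echo the stream to stdout while waiting.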
fout = open ('log','a')
while self.term.cur_r != r or self.term.cur_c != c:
try:
k = self.child.read(1, 10)
except Exception, e:
print 'EXCEPTION, (r,c):(%d,%d)\n' %(self.term.cur_r, self.term.cur_c)
sys.stdout.flush()
self.term.process (k)
fout.write ('(r,c):(%d,%d)\n' %(self.term.cur_r, self.term.cur_c))
fout.flush()
if e:
sys.stdout.write (k)
sys.stdout.flush()
if self.term.cur_r == r and self.term.cur_c == c:
fout.close()
return 1
print 'DIDNT EVEN HIT.'
fout.close()
return 1
def expect_region (self):
'''This is another method that would be moved into the
screen class.
'''
pass
def do_scan (self):
fout = open ('log','a')
while 1:
c = self.child.read(1,10)
self.term.process (c)
fout.write ('(r,c):(%d,%d)\n' %(self.term.cur_r, self.term.cur_c))
fout.flush()
sys.stdout.write (c)
sys.stdout.flush()
def do_move (self, move, e = 0):
time.sleep(1)
self.read_until_cursor (19,60, e)
self.child.sendline (move)
def wait (self, color):
while 1:
r = self.term.get_region (14,50,14,60)[0]
r = r.strip()
if r == color:
return
time.sleep (1)
def parse_computer_move (self, s):
i = s.find ('is: ')
cm = s[i+3:i+9]
return cm
def get_computer_move (self, e = 0):
time.sleep(1)
self.read_until_cursor (19,60, e)
time.sleep(1)
r = self.term.get_region (17,50,17,62)[0]
cm = self.parse_computer_move (r)
return cm
def switch (self):
print 'switching'
self.child.sendline ('switch')
def set_depth (self, depth):
self.child.sendline ('depth')
self.child.expect ('depth=')
self.child.sendline ('%d' % depth)
def quit(self):
self.child.sendline ('quit')
def LOG (s):
print s
sys.stdout.flush ()
fout = open ('moves.log', 'a')
fout.write (s + '\n')
fout.close()
print 'Starting...'
black = Chess()
white = Chess()
white.read_until_cursor (19,60,1)
white.switch()
done = 0
while not done:
white.wait ('Black')
move_white = white.get_computer_move(1)
LOG ( 'move white:'+ move_white )
black.do_move (move_white)
black.wait ('White')
move_black = black.get_computer_move()
LOG ( 'move black:'+ move_black )
white.do_move (move_black, 1)
white.quit()
black.quit()
| apache-2.0 | -4,886,961,951,130,979,000 | 29.732824 | 90 | 0.476403 | false |
hdinsight/hue | desktop/core/ext-py/Django-1.6.10/tests/reverse_single_related/tests.py | 150 | 1491 | from __future__ import absolute_import
from django.test import TestCase
from .models import Source, Item
class ReverseSingleRelatedTests(TestCase):
"""
Regression tests for an object that cannot access a single related
object due to a restrictive default manager.
"""
def test_reverse_single_related(self):
public_source = Source.objects.create(is_public=True)
public_item = Item.objects.create(source=public_source)
private_source = Source.objects.create(is_public=False)
private_item = Item.objects.create(source=private_source)
# Only one source is available via all() due to the custom default manager.
self.assertQuerysetEqual(
Source.objects.all(),
["<Source: Source object>"]
)
self.assertEqual(public_item.source, public_source)
# Make sure that an item can still access its related source even if the default
# manager doesn't normally allow it.
self.assertEqual(private_item.source, private_source)
# If the manager is marked "use_for_related_fields", it'll get used instead
# of the "bare" queryset. Usually you'd define this as a property on the class,
# but this approximates that in a way that's easier in tests.
Source.objects.use_for_related_fields = True
private_item = Item.objects.get(pk=private_item.pk)
self.assertRaises(Source.DoesNotExist, lambda: private_item.source)
| apache-2.0 | 8,916,072,218,406,376,000 | 37.230769 | 88 | 0.682093 | false |
DDelon/youtube-dl | youtube_dl/extractor/extremetube.py | 31 | 3146 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
sanitized_Request,
str_to_int,
)
class ExtremeTubeIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?extremetube\.com/(?:[^/]+/)?video/(?P<id>[^/#?&]+)'
_TESTS = [{
'url': 'http://www.extremetube.com/video/music-video-14-british-euro-brit-european-cumshots-swallow-652431',
'md5': '344d0c6d50e2f16b06e49ca011d8ac69',
'info_dict': {
'id': 'music-video-14-british-euro-brit-european-cumshots-swallow-652431',
'ext': 'mp4',
'title': 'Music Video 14 british euro brit european cumshots swallow',
'uploader': 'unknown',
'view_count': int,
'age_limit': 18,
}
}, {
'url': 'http://www.extremetube.com/gay/video/abcde-1234',
'only_matching': True,
}, {
'url': 'http://www.extremetube.com/video/latina-slut-fucked-by-fat-black-dick',
'only_matching': True,
}, {
'url': 'http://www.extremetube.com/video/652431',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
req = sanitized_Request(url)
req.add_header('Cookie', 'age_verified=1')
webpage = self._download_webpage(req, video_id)
video_title = self._html_search_regex(
r'<h1 [^>]*?title="([^"]+)"[^>]*>', webpage, 'title')
uploader = self._html_search_regex(
r'Uploaded by:\s*</strong>\s*(.+?)\s*</div>',
webpage, 'uploader', fatal=False)
view_count = str_to_int(self._html_search_regex(
r'Views:\s*</strong>\s*<span>([\d,\.]+)</span>',
webpage, 'view count', fatal=False))
flash_vars = self._parse_json(
self._search_regex(
r'var\s+flashvars\s*=\s*({.+?});', webpage, 'flash vars'),
video_id)
formats = []
for quality_key, video_url in flash_vars.items():
height = int_or_none(self._search_regex(
r'quality_(\d+)[pP]$', quality_key, 'height', default=None))
if not height:
continue
f = {
'url': video_url,
}
mobj = re.search(
r'/(?P<height>\d{3,4})[pP]_(?P<bitrate>\d+)[kK]_\d+', video_url)
if mobj:
height = int(mobj.group('height'))
bitrate = int(mobj.group('bitrate'))
f.update({
'format_id': '%dp-%dk' % (height, bitrate),
'height': height,
'tbr': bitrate,
})
else:
f.update({
'format_id': '%dp' % height,
'height': height,
})
formats.append(f)
self._sort_formats(formats)
return {
'id': video_id,
'title': video_title,
'formats': formats,
'uploader': uploader,
'view_count': view_count,
'age_limit': 18,
}
| unlicense | 482,804,688,309,103,100 | 33.195652 | 116 | 0.487603 | false |
vigov5/pvp-game | build_question.py | 1 | 1744 | #!venv/bin/python
# -*- coding: utf-8 -*-
import tornado
import tornado.websocket
import tornado.wsgi
import logging
import time
import json
import random
from app import app, db
from app.models import User, Game, Fact, Deck, ROLE_USER, ROLE_ADMIN, get_object_or_404
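# Hiragana -> romaji seed data: the standard gojuon chart plus voiced
# (dakuten), semi-voiced (handakuten) and contracted (yoon) syllables.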
a = {'あ':'a',
'い':'i',
'う':'u',
'え':'e',
'お':'o',
'か':'ka',
'き':'ki',
'く':'ku',
'け':'ke',
'こ':'ko',
'さ':'sa',
'し':'shi',
'す':'su',
'せ':'se',
'そ':'so',
'た':'ta',
'ち':'chi',
'つ':'tsu',
'て':'te',
'と':'to',
'な':'na',
'に':'ni',
'ぬ':'nu',
'ね':'ne',
'の':'no',
'は':'ha',
'ひ':'hi',
'ふ':'fu',
'へ':'he',
'ほ':'ho',
'ま':'ma',
'み':'mi',
'む':'mu',
'め':'me',
'も':'mo',
'や':'ya',
'ゆ':'yu',
'よ':'yo',
'ら':'ra',
'り':'ri',
'る':'ru',
'れ':'re',
'ろ':'ro',
'わ':'wa',
'を':'o',
'ん':'n',
'が':'ga',
'ぎ':'gi',
'ぐ':'gu',
'げ':'ge',
'ご':'go',
'ざ':'za',
'じ':'ji',
'ず':'zu',
'ぜ':'ze',
'ぞ':'zo',
'だ':'da',
'で':'de',
'ど':'do',
'ば':'ba',
'び':'bi',
'ぶ':'bu',
'べ':'be',
'ぼ':'bo',
'ぱ':'pa',
'ぴ':'pi',
'ぷ':'pu',
'ぺ':'pe',
'ぽ':'po',
'きゃ':'kya',
'きゅ':'kyu',
'きょ':'kyo',
'しゃ':'sha',
'しゅ':'shu',
'しょ':'sho',
'ちゃ':'cha',
'ちゅ':'chu',
'ちょ':'cho',
'にゃ':'nya',
'にゅ':'nyu',
'にょ':'nyo',
'ひゃ':'hya',
'ひゅ':'hyu',
'ひょ':'hyo',
'みゃ':'mya',
'みゅ':'myu',
'みょ':'myo',
'りゃ':'rya',
'りゅ':'ryu',
'りょ':'ryo',
'ぎゃ':'gya',
'ぎゅ':'gyu',
'ぎょ':'gyo',
'じゃ':'ja',
'じゅ':'ju',
'じょ':'jo',
'びゃ':'bya',
'びゅ':'byu',
'びょ':'byo',
'ぴゃ':'pya',
'ぴゅ':'pyu',
'ぴょ':'pyo'}
d = Deck.query.get(1)
for k,v in a.items():
z = Fact(front=k, back=v, deck=d)
db.session.add(z)
db.session.commit()
| mit | -1,658,984,893,185,239,600 | 11.081967 | 87 | 0.438263 | false |
the-c0d3r/CapTipper | CTMagic.py | 11 | 6620 | #
# CapTipper is a malicious HTTP traffic explorer tool
# By Omri Herscovici <omriher AT gmail.com>
# http://omriher.com
# @omriher
#
#
# This file is part of CapTipper, and part of the Whatype library
# Whatype is an independent file type identification python library
# https://github.com/omriher/whatype
#
# CapTipper is a free software under the GPLv3 License
#
import os
class WhatypeErr(Exception):
def __init__(self, when, error):
self.when = when
self.error = error
def __str__(self):
return repr("Whatype Error on " + self.when + " : " + self.error)
class MagicNode(object):
def __init__(self, byte):
self.byte = byte
self.filetype = ""
self.ext = ""
self.strings = ""
self.children = []
def add_child(self, obj):
n = MagicNode(obj)
self.children.append(n)
return n
def has_child(self, data):
for child in self.children:
if child.byte.lower() == data.lower():
return child
return None
def get_childrens_by_byte(self, data):
childrens = []
for child in self.children:
if child.byte.lower() == data.lower():
#return child
childrens.append(child)
return childrens
class Whatype(object):
WTver = "0.1"
WTrev = "01"
MAGICLIST_NAME = "magics.csv"
def __init__(self,magic_file=""):
if magic_file:
if os.path.isfile(magic_file):
self.magic_list_file = magic_file
else:
raise WhatypeErr("magics list load", "Couldn't find " + magic_file)
else:
default_mgc = os.path.join(os.path.dirname(os.path.realpath(__file__)),Whatype.MAGICLIST_NAME)
if os.path.isfile(default_mgc):
self.magic_list_file = default_mgc
else:
raise WhatypeErr("loading default magics list","Couldn't find default magics list. " \
"Please provide a magics CSV file")
# Create main prefix tree graph (Trie)
self.Tree = MagicNode("all_magics")
with open(self.magic_list_file, "r") as ins:
for line in ins:
parts = line.split(",")
# parts[0] = File Type
# parts[1] = Magic bytes
# parts[2] = File Ext
# parts[3] = File Strings
self.create_branch(0, self.Tree, parts[0], parts[1], parts[2],parts[3])
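    # Recursively walk/extend the trie one magic byte per level; the node for
    # the final byte of a signature is marked with its file-type metadata.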
def create_branch(self, node_level, father, filetype, magic, ext, strings):
magic_bytes = magic.split(" ")
byte = magic_bytes[node_level]
son = father.has_child(byte)
node_level += 1
if (node_level < len(magic_bytes)):
if son is None:
son = father.add_child(byte)
self.create_branch(node_level, son, filetype, magic, ext,strings)
else:
if (node_level == len(magic_bytes)):
son = father.add_child(byte)
son.filetype = filetype
son.ext = ext
son.strings = strings
def print_tree(self,Node, index):
for nd in Node.children:
print "--" * index + nd.byte
if (len(nd.children) > 0):
self.print_tree(nd, index + 1)
    def strings_search(self, strings_list, content):
        # Every ';'-separated marker string must appear in the content.
        for marker in strings_list.split(";"):
            if content.lower().find(marker.lower().rstrip()) == -1:
                return False
        return True
def return_magic(self,cont,Name,Ext):
if not Name:
Name = "Inconclusive. "
if self.istext(cont):
Name += "Probably text"
Ext = "TEXT"
else:
Name += "Probably binary"
Ext = "BINARY"
return Name,Ext
def istext(self,cont):
# Based on http://code.activestate.com/recipes/173220/
import string
text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
_null_trans = string.maketrans("", "")
if not cont:
# Empty files are considered text
return True
if "\0" in cont:
# Files with null bytes are likely binary
return False
# Get the non-text characters (maps a character to itself then
# use the 'remove' option to get rid of the text characters.)
t = cont.translate(_null_trans, text_characters)
# If more than 30% non-text characters, then
# this is considered a binary file
if float(len(t))/float(len(cont)) > 0.30:
return False
return True
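    # Follow the content bytes down the trie. If the walk overshoots the
    # deepest matching signature, "Rollback" pops magic_history back to the
    # last node with a file type whose confirming strings also match.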
def find(self, cont, Node, index=0, magic_history=[]):
if cont == "" or cont is None:
return "",""
curr_byte = cont[index].encode('hex')
NextNode = Node.get_childrens_by_byte(curr_byte)
if NextNode:
magic_history.extend(NextNode)
Name, Ext = self.find(cont, NextNode[0], index+1, magic_history)
if Ext == "Rollback":
for i in range(len(magic_history)):
Node = magic_history.pop()
if Node.filetype != "":
if self.strings_search(Node.strings, cont):
return Node.filetype, Node.ext
else:
return Name, Ext
return self.return_magic(cont,"","")
#return ""
else:
# last hex node found
if Node.filetype != "":
if self.strings_search(Node.strings, cont):
return Node.filetype, Node.ext
if len(magic_history) == 0:
#return "",""
return self.return_magic(cont,"","")
return "", "Rollback" # Magic search went too far, rollbacking
def identify_file(self,filepath):
try:
file_content = open(filepath).read()
return self.find(file_content, self.Tree)
except Exception, e:
raise WhatypeErr("file identification", str(e))
def identify_buffer(self,file_content):
try:
return self.find(file_content, self.Tree,0,[])
except Exception, e:
raise WhatypeErr("buffer identification", str(e)) | gpl-3.0 | -4,080,978,668,031,667,700 | 32.852632 | 106 | 0.514955 | false |
DavidNorman/tensorflow | tensorflow/python/kernel_tests/reduction_ops_test_big.py | 30 | 8764 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for reduction ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class BaseReductionTest(test.TestCase):
def _tf_reduce(self, x, reduction_axes, keepdims):
raise NotImplementedError()
class BigReductionTest(BaseReductionTest):
"""Test reductions for sum and boolean all over a wide range of shapes."""
def _tf_reduce_max(self, x, reduction_axes, keepdims):
return math_ops.reduce_max(x, reduction_axes, keepdims)
def _tf_reduce_all(self, x, reduction_axes, keepdims):
return math_ops.reduce_all(x, reduction_axes, keepdims)
def _tf_reduce_mean(self, x, reduction_axes, keepdims):
return math_ops.reduce_mean(x, reduction_axes, keepdims)
def _tf_reduce_sum(self, x, reduction_axes, keepdims):
return math_ops.reduce_sum(x, reduction_axes, keepdims)
@test_util.run_deprecated_v1
def testFloat32Sum(self):
# make sure we test all possible kernel invocations
# logic is the same for all ops, test just float32 for brevity
arr_ = np.ones([4097, 4097], dtype=np.float32)
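    # Sizes deliberately include powers of two and their off-by-one
    # neighbours to cross the boundaries between reduction kernel variants.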
for size_x in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
for size_y in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
arr = arr_[0:size_x, 0:size_y]
col_sum = np.ones([size_y], dtype=np.float32) * size_x
row_sum = np.ones([size_x], dtype=np.float32) * size_y
full_sum = np.ones([], dtype=np.float32) * size_x * size_y
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(dtype=np.float32,
shape=(size_x, size_y))
tf_row_sum = self._tf_reduce_sum(arr_placeholder, 1, False)
tf_col_sum = self._tf_reduce_sum(arr_placeholder, 0, False)
tf_full_sum = self._tf_reduce_sum(arr_placeholder, [0, 1], False)
tf_out_row, tf_out_col, tf_out_full = sess.run(
[tf_row_sum, tf_col_sum, tf_full_sum], {arr_placeholder: arr})
self.assertAllClose(col_sum, tf_out_col)
self.assertAllClose(row_sum, tf_out_row)
self.assertAllClose(full_sum, tf_out_full)
arr_ = np.ones([130, 130, 130], dtype=np.float32)
for size_x in range(1, 130, 13):
for size_y in range(1, 130, 13):
for size_z in range(1, 130, 13):
arr = arr_[0:size_x, 0:size_y, 0:size_z]
sum_y = np.ones([size_x, size_z], dtype=np.float32)
sum_xz = np.ones([size_y], dtype=np.float32)
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(
dtype=np.float32, shape=(size_x, size_y, size_z))
tf_sum_xz = self._tf_reduce_mean(arr_placeholder, [0, 2], False)
tf_sum_y = self._tf_reduce_mean(arr_placeholder, 1, False)
tf_out_sum_xz, tf_out_sum_y = sess.run([tf_sum_xz, tf_sum_y],
{arr_placeholder: arr})
self.assertAllClose(sum_y, tf_out_sum_y)
self.assertAllClose(sum_xz, tf_out_sum_xz)
@test_util.run_deprecated_v1
def testFloat32Max(self):
# make sure we test all possible kernel invocations
# logic is the same for all ops, test just float32 for brevity
arr_ = np.random.uniform(
low=-3, high=-1, size=[4105, 4105]).astype(np.float32)
for size_x in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
for size_y in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
arr = arr_[0:size_x, 0:size_y]
col_max = np.max(arr, axis=0)
row_max = np.max(arr, axis=1)
full_max = np.max(col_max)
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(dtype=np.float32,
shape=(size_x, size_y))
tf_row_max = self._tf_reduce_max(arr_placeholder, 1, False)
tf_col_max = self._tf_reduce_max(arr_placeholder, 0, False)
tf_full_max = self._tf_reduce_max(arr_placeholder, [0, 1], False)
tf_out_row, tf_out_col, tf_out_full = sess.run(
[tf_row_max, tf_col_max, tf_full_max], {arr_placeholder: arr})
self.assertAllClose(col_max, tf_out_col)
self.assertAllClose(row_max, tf_out_row)
self.assertAllClose(full_max, tf_out_full)
arr_ = np.random.uniform(
low=-3, high=-1, size=[130, 130, 130]).astype(np.float32)
for size_x in range(1, 130, 13):
for size_y in range(1, 130, 13):
for size_z in range(1, 130, 13):
arr = arr_[0:size_x, 0:size_y, 0:size_z]
sum_y = np.max(arr, axis=1)
sum_xz = np.max(arr, axis=(0, 2))
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(
dtype=np.float32, shape=(size_x, size_y, size_z))
tf_sum_xz = self._tf_reduce_max(arr_placeholder, [0, 2], False)
tf_sum_y = self._tf_reduce_max(arr_placeholder, 1, False)
tf_out_sum_xz, tf_out_sum_y = sess.run(
[tf_sum_xz, tf_sum_y], {arr_placeholder: arr})
self.assertAllClose(sum_y, tf_out_sum_y)
self.assertAllClose(sum_xz, tf_out_sum_xz)
@test_util.run_deprecated_v1
def testBooleanAll(self):
# make sure we test all possible kernel invocations
# test operation where T(0) is not the identity
arr_ = np.ones([4097, 4097], dtype=np.bool)
for size_x in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
for size_y in [
1, 2, 3, 4, 16, 17, 32, 33, 64, 65, 128, 131, 256, 263, 1024, 1025,
4096, 4097
]:
arr = arr_[0:size_x, 0:size_y]
col_sum = np.ones([size_y], dtype=np.bool)
row_sum = np.ones([size_x], dtype=np.bool)
full_sum = np.ones([1], dtype=np.bool).reshape([])
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(dtype=np.bool,
shape=(size_x, size_y))
tf_row_sum = self._tf_reduce_all(arr_placeholder, 1, False)
tf_col_sum = self._tf_reduce_all(arr_placeholder, 0, False)
tf_full_sum = self._tf_reduce_all(arr_placeholder, [0, 1], False)
tf_out_row, tf_out_col, tf_out_full = sess.run(
[tf_row_sum, tf_col_sum, tf_full_sum], {arr_placeholder: arr})
self.assertAllClose(col_sum, tf_out_col)
self.assertAllClose(row_sum, tf_out_row)
self.assertAllClose(full_sum, tf_out_full)
arr_ = np.ones([130, 130, 130], dtype=np.bool)
for size_x in range(1, 130, 13):
for size_y in range(1, 130, 13):
for size_z in range(1, 130, 13):
arr = arr_[0:size_x, 0:size_y, 0:size_z]
sum_y = np.ones([size_x, size_z], dtype=np.bool)
sum_xz = np.ones([size_y], dtype=np.bool)
with self.session(graph=ops.Graph(), use_gpu=True) as sess:
arr_placeholder = array_ops.placeholder(
dtype=np.bool, shape=(size_x, size_y, size_z))
tf_sum_xz = self._tf_reduce_all(arr_placeholder, [0, 2], False)
tf_sum_y = self._tf_reduce_all(arr_placeholder, 1, False)
tf_out_sum_xz, tf_out_sum_y = sess.run(
[tf_sum_xz, tf_sum_y], {arr_placeholder: arr})
self.assertAllClose(sum_y, tf_out_sum_y)
self.assertAllClose(sum_xz, tf_out_sum_xz)
if __name__ == "__main__":
test.main()
| apache-2.0 | 2,155,660,840,395,140,000 | 43.040201 | 80 | 0.589571 | false |
nikolas/lettuce | tests/integration/lib/Django-1.3/django/contrib/localflavor/pe/forms.py | 309 | 2272 | # -*- coding: utf-8 -*-
"""
PE-specific Form helpers.
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, CharField, Select
from django.utils.translation import ugettext_lazy as _
class PERegionSelect(Select):
"""
A Select widget that uses a list of Peruvian Regions as its choices.
"""
def __init__(self, attrs=None):
from pe_region import REGION_CHOICES
super(PERegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class PEDNIField(CharField):
"""
    A field that validates `Documento Nacional de Identidad` (DNI) numbers.
"""
default_error_messages = {
'invalid': _("This field requires only numbers."),
'max_digits': _("This field requires 8 digits."),
}
def __init__(self, *args, **kwargs):
super(PEDNIField, self).__init__(max_length=8, min_length=8, *args,
**kwargs)
def clean(self, value):
"""
        Value must be a string in the XXXXXXXX format.
"""
value = super(PEDNIField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not value.isdigit():
raise ValidationError(self.error_messages['invalid'])
if len(value) != 8:
raise ValidationError(self.error_messages['max_digits'])
return value
class PERUCField(RegexField):
"""
This field validates a RUC (Registro Unico de Contribuyentes). A RUC is of
the form XXXXXXXXXXX.
"""
default_error_messages = {
'invalid': _("This field requires only numbers."),
'max_digits': _("This field requires 11 digits."),
}
def __init__(self, *args, **kwargs):
super(PERUCField, self).__init__(max_length=11, min_length=11, *args,
**kwargs)
def clean(self, value):
"""
Value must be an 11-digit number.
"""
value = super(PERUCField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not value.isdigit():
raise ValidationError(self.error_messages['invalid'])
if len(value) != 11:
raise ValidationError(self.error_messages['max_digits'])
return value
| gpl-3.0 | -402,604,362,268,588,160 | 30.541667 | 78 | 0.607221 | false |