# Source file: libcloud/compute/drivers/openstack.py (repo: ByteInternet/libcloud)
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenStack driver
"""
from libcloud.common.exceptions import BaseHTTPError
from libcloud.utils.iso8601 import parse_date
try:
import simplejson as json
except ImportError:
import json
import warnings
import base64
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
from libcloud.utils.py3 import next
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qs
from libcloud.common.openstack import OpenStackBaseConnection
from libcloud.common.openstack import OpenStackDriverMixin
from libcloud.common.openstack import OpenStackException
from libcloud.common.openstack import OpenStackResponse
from libcloud.utils.networking import is_public_subnet
from libcloud.compute.base import NodeSize, NodeImage, NodeImageMember, \
UuidMixin
from libcloud.compute.base import (NodeDriver, Node, NodeLocation,
StorageVolume, VolumeSnapshot)
from libcloud.compute.base import KeyPair
from libcloud.compute.types import NodeState, StorageVolumeState, Provider, \
VolumeSnapshotState, Type
from libcloud.pricing import get_size_price
from libcloud.utils.xml import findall
from libcloud.utils.py3 import ET
__all__ = [
'OpenStack_1_0_Response',
'OpenStack_1_0_Connection',
'OpenStack_1_0_NodeDriver',
'OpenStack_1_0_SharedIpGroup',
'OpenStack_1_0_NodeIpAddresses',
'OpenStack_1_1_Response',
'OpenStack_1_1_Connection',
'OpenStack_1_1_NodeDriver',
'OpenStack_1_1_FloatingIpPool',
'OpenStack_2_FloatingIpPool',
'OpenStack_1_1_FloatingIpAddress',
'OpenStack_2_PortInterfaceState',
'OpenStack_2_PortInterface',
'OpenStackNodeDriver'
]
ATOM_NAMESPACE = "http://www.w3.org/2005/Atom"
DEFAULT_API_VERSION = '1.1'
PAGINATION_LIMIT = 1000
class OpenStackComputeConnection(OpenStackBaseConnection):
# default config for http://devstack.org/
service_type = 'compute'
service_name = 'nova'
service_region = 'RegionOne'
class OpenStackImageConnection(OpenStackBaseConnection):
service_type = 'image'
service_name = 'glance'
service_region = 'RegionOne'
class OpenStackNetworkConnection(OpenStackBaseConnection):
service_type = 'network'
service_name = 'neutron'
service_region = 'RegionOne'
class OpenStackVolumeV2Connection(OpenStackBaseConnection):
service_type = 'volumev2'
service_name = 'cinderv2'
service_region = 'RegionOne'
class OpenStackNodeDriver(NodeDriver, OpenStackDriverMixin):
"""
Base OpenStack node driver. Should not be used directly.
"""
api_name = 'openstack'
name = 'OpenStack'
website = 'http://openstack.org/'
NODE_STATE_MAP = {
'BUILD': NodeState.PENDING,
'REBUILD': NodeState.PENDING,
'ACTIVE': NodeState.RUNNING,
'SUSPENDED': NodeState.SUSPENDED,
'SHUTOFF': NodeState.STOPPED,
'DELETED': NodeState.TERMINATED,
'QUEUE_RESIZE': NodeState.PENDING,
'PREP_RESIZE': NodeState.PENDING,
'VERIFY_RESIZE': NodeState.RUNNING,
'PASSWORD': NodeState.PENDING,
'RESCUE': NodeState.PENDING,
'REBOOT': NodeState.REBOOTING,
'HARD_REBOOT': NodeState.REBOOTING,
'SHARE_IP': NodeState.PENDING,
'SHARE_IP_NO_CONFIG': NodeState.PENDING,
'DELETE_IP': NodeState.PENDING,
'ERROR': NodeState.ERROR,
'UNKNOWN': NodeState.UNKNOWN
}
# http://developer.openstack.org/api-ref-blockstorage-v2.html#volumes-v2
VOLUME_STATE_MAP = {
'creating': StorageVolumeState.CREATING,
'available': StorageVolumeState.AVAILABLE,
'attaching': StorageVolumeState.ATTACHING,
'in-use': StorageVolumeState.INUSE,
'deleting': StorageVolumeState.DELETING,
'error': StorageVolumeState.ERROR,
'error_deleting': StorageVolumeState.ERROR,
'backing-up': StorageVolumeState.BACKUP,
'restoring-backup': StorageVolumeState.BACKUP,
'error_restoring': StorageVolumeState.ERROR,
'error_extending': StorageVolumeState.ERROR,
}
# http://developer.openstack.org/api-ref-blockstorage-v2.html#ext-backups-v2
SNAPSHOT_STATE_MAP = {
'creating': VolumeSnapshotState.CREATING,
'available': VolumeSnapshotState.AVAILABLE,
'deleting': VolumeSnapshotState.DELETING,
'error': VolumeSnapshotState.ERROR,
'restoring': VolumeSnapshotState.RESTORING,
'error_restoring': VolumeSnapshotState.ERROR
}
def __new__(cls, key, secret=None, secure=True, host=None, port=None,
api_version=DEFAULT_API_VERSION, **kwargs):
if cls is OpenStackNodeDriver:
if api_version == '1.0':
cls = OpenStack_1_0_NodeDriver
elif api_version == '1.1':
cls = OpenStack_1_1_NodeDriver
elif api_version in ['2.0', '2.1', '2.2']:
cls = OpenStack_2_NodeDriver
else:
raise NotImplementedError(
"No OpenStackNodeDriver found for API version %s" %
(api_version))
return super(OpenStackNodeDriver, cls).__new__(cls)
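    # Illustrative sketch (not part of the upstream driver): how the
    # api_version dispatch above picks a concrete driver class. Credentials
    # are placeholders and authentication kwargs are omitted for brevity.
    #
    #   driver = OpenStackNodeDriver('my-user', 'my-api-key',
    #                                api_version='2.2')
    #   # type(driver) is OpenStack_2_NodeDriver, per the dispatch above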
def __init__(self, *args, **kwargs):
OpenStackDriverMixin.__init__(self, **kwargs)
super(OpenStackNodeDriver, self).__init__(*args, **kwargs)
@staticmethod
def _paginated_request(url, obj, connection, params=None):
"""
Perform multiple calls in order to have a full list of elements when
the API responses are paginated.
:param url: API endpoint
:type url: ``str``
:param obj: Result object key
:type obj: ``str``
:param connection: The API connection to use to perform the request
:type connection: ``obj``
:param params: Any request parameters
:type params: ``dict``
:return: ``list`` of API response objects
:rtype: ``list``
"""
params = params or {}
objects = list()
loop_count = 0
while True:
data = connection.request(url, params=params)
values = data.object.get(obj, list())
objects.extend(values)
links = data.object.get('%s_links' % obj, list())
next_links = [n for n in links if n['rel'] == 'next']
if next_links:
next_link = next_links[0]
query = urlparse.urlparse(next_link['href'])
# The query[4] references the query parameters from the url
params.update(parse_qs(query[4]))
else:
break
# Prevent the pagination from looping indefinitely in case
# the API returns a loop for some reason.
loop_count += 1
if loop_count > PAGINATION_LIMIT:
raise OpenStackException(
'Pagination limit reached for %s, the limit is %d. '
'This might indicate that your API is returning a '
'looping next target for pagination!' % (
url, PAGINATION_LIMIT
), None
)
return {obj: objects}
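    # Illustrative sketch (hypothetical call, assuming an already
    # authenticated driver instance): fetching every page of the server
    # listing with the pagination helper above. '/servers/detail' and the
    # 'servers' key mirror what list_nodes() uses.
    #
    #   result = OpenStackNodeDriver._paginated_request(
    #       '/servers/detail', 'servers', driver.connection)
    #   all_servers = result['servers']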
def destroy_node(self, node):
uri = '/servers/%s' % (node.id)
resp = self.connection.request(uri, method='DELETE')
# The OpenStack and Rackspace documentation both say this API will
        # return a 204, but in fact, everyone everywhere agrees it actually
# returns a 202, so we are going to accept either, and someday,
# someone will fix either the implementation or the documentation to
# agree.
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def reboot_node(self, node):
# pylint: disable=no-member
return self._reboot_node(node, reboot_type='HARD')
def list_nodes(self, ex_all_tenants=False):
"""
List the nodes in a tenant
:param ex_all_tenants: List nodes for all the tenants. Note: Your user
must have admin privileges for this
functionality to work.
:type ex_all_tenants: ``bool``
"""
params = {}
if ex_all_tenants:
params = {'all_tenants': 1}
# pylint: disable=no-member
return self._to_nodes(
self.connection.request('/servers/detail', params=params).object)
def create_volume(self, size, name, location=None, snapshot=None,
ex_volume_type=None):
"""
Create a new volume.
:param size: Size of volume in gigabytes (required)
:type size: ``int``
:param name: Name of the volume to be created
:type name: ``str``
:param location: Which data center to create a volume in. If
empty, undefined behavior will be selected.
(optional)
:type location: :class:`.NodeLocation`
:param snapshot: Snapshot from which to create the new
volume. (optional)
:type snapshot: :class:`.VolumeSnapshot`
:param ex_volume_type: What kind of volume to create.
(optional)
:type ex_volume_type: ``str``
:return: The newly created volume.
:rtype: :class:`StorageVolume`
"""
volume = {
'display_name': name,
'display_description': name,
'size': size,
'metadata': {
'contents': name,
},
}
if ex_volume_type:
volume['volume_type'] = ex_volume_type
if location:
volume['availability_zone'] = location
if snapshot:
volume['snapshot_id'] = snapshot.id
resp = self.connection.request('/os-volumes',
method='POST',
data={'volume': volume})
# pylint: disable=no-member
return self._to_volume(resp.object)
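    # Illustrative sketch (hypothetical values): creating a 10 GB volume and
    # selecting a volume type; 'ssd' is a placeholder backend-specific type.
    #
    #   volume = driver.create_volume(size=10, name='data-volume',
    #                                 ex_volume_type='ssd')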
def destroy_volume(self, volume):
return self.connection.request('/os-volumes/%s' % volume.id,
method='DELETE').success()
def attach_volume(self, node, volume, device="auto"):
# when "auto" or None is provided for device, openstack will let
        # the guest OS pick the next available device (e.g. /dev/vdb)
return self.connection.request(
'/servers/%s/os-volume_attachments' % node.id,
method='POST',
data={
'volumeAttachment': {
'volumeId': volume.id,
'device': device,
}
}).success()
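    # Illustrative sketch: attaching the volume and letting the guest OS pick
    # the device name, as described in the comment in attach_volume above.
    #
    #   driver.attach_volume(node, volume)              # device defaults to "auto"
    #   driver.attach_volume(node, volume, '/dev/vdc')  # or request a device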
def detach_volume(self, volume, ex_node=None):
# when ex_node is not provided, volume is detached from all nodes
failed_nodes = []
for attachment in volume.extra['attachments']:
if not ex_node or ex_node.id in filter(None, (attachment.get(
'serverId'
), attachment.get('server_id'))):
response = self.connection.request(
'/servers/%s/os-volume_attachments/%s' %
(attachment.get('serverId') or attachment['server_id'],
attachment['id']),
method='DELETE')
if not response.success():
failed_nodes.append(
attachment.get('serverId') or attachment['server_id']
)
if failed_nodes:
raise OpenStackException(
'detach_volume failed for nodes with id: %s' %
', '.join(failed_nodes), 500, self
)
return True
def list_volumes(self):
# pylint: disable=no-member
return self._to_volumes(
self.connection.request('/os-volumes').object)
def ex_get_volume(self, volumeId):
# pylint: disable=no-member
return self._to_volume(
self.connection.request('/os-volumes/%s' % volumeId).object)
def list_images(self, location=None, ex_only_active=True):
"""
Lists all active images
@inherits: :class:`NodeDriver.list_images`
        :param ex_only_active: True if only active images should be listed (optional)
:type ex_only_active: ``bool``
"""
# pylint: disable=no-member
return self._to_images(
self.connection.request('/images/detail').object, ex_only_active)
def get_image(self, image_id):
"""
Get an image based on an image_id
@inherits: :class:`NodeDriver.get_image`
:param image_id: Image identifier
:type image_id: ``str``
:return: A NodeImage object
:rtype: :class:`NodeImage`
"""
# pylint: disable=no-member
return self._to_image(self.connection.request(
'/images/%s' % (image_id,)).object['image'])
def list_sizes(self, location=None):
# pylint: disable=no-member
return self._to_sizes(
self.connection.request('/flavors/detail').object)
def list_locations(self):
return [NodeLocation(0, '', '', self)]
def _ex_connection_class_kwargs(self):
return self.openstack_connection_kwargs()
def ex_get_node_details(self, node_id):
"""
Lists details of the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: :class:`Node`
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s' % (node_id)
try:
resp = self.connection.request(uri, method='GET')
except BaseHTTPError as e:
if e.code == httplib.NOT_FOUND:
return None
raise
# pylint: disable=no-member
return self._to_node_from_obj(resp.object)
def ex_soft_reboot_node(self, node):
"""
Soft reboots the specified server
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
# pylint: disable=no-member
return self._reboot_node(node, reboot_type='SOFT')
def ex_hard_reboot_node(self, node):
"""
Hard reboots the specified server
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
# pylint: disable=no-member
return self._reboot_node(node, reboot_type='HARD')
class OpenStackNodeSize(NodeSize):
"""
NodeSize class for the OpenStack.org driver.
Following the example of OpenNebula.org driver
and following guidelines:
https://issues.apache.org/jira/browse/LIBCLOUD-119
"""
def __init__(self, id, name, ram, disk, bandwidth, price, driver,
vcpus=None, ephemeral_disk=None, swap=None, extra=None):
super(OpenStackNodeSize, self).__init__(id=id, name=name, ram=ram,
disk=disk,
bandwidth=bandwidth,
price=price, driver=driver)
self.vcpus = vcpus
self.ephemeral_disk = ephemeral_disk
self.swap = swap
self.extra = extra
def __repr__(self):
return (('<OpenStackNodeSize: id=%s, name=%s, ram=%s, disk=%s, '
'bandwidth=%s, price=%s, driver=%s, vcpus=%s, ...>')
% (self.id, self.name, self.ram, self.disk, self.bandwidth,
self.price, self.driver.name, self.vcpus))
class OpenStack_1_0_Response(OpenStackResponse):
def __init__(self, *args, **kwargs):
# done because of a circular reference from
# NodeDriver -> Connection -> Response
self.node_driver = OpenStack_1_0_NodeDriver
super(OpenStack_1_0_Response, self).__init__(*args, **kwargs)
class OpenStack_1_0_Connection(OpenStackComputeConnection):
responseCls = OpenStack_1_0_Response
default_content_type = 'application/xml; charset=UTF-8'
accept_format = 'application/xml'
XML_NAMESPACE = 'http://docs.rackspacecloud.com/servers/api/v1.0'
class OpenStack_1_0_NodeDriver(OpenStackNodeDriver):
"""
OpenStack node driver.
Extra node attributes:
- password: root password, available after create.
- hostId: represents the host your cloud server runs on
- imageId: id of image
- flavorId: id of flavor
"""
connectionCls = OpenStack_1_0_Connection
type = Provider.OPENSTACK
features = {'create_node': ['generates_password']}
def __init__(self, *args, **kwargs):
self._ex_force_api_version = str(kwargs.pop('ex_force_api_version',
None))
self.XML_NAMESPACE = self.connectionCls.XML_NAMESPACE
super(OpenStack_1_0_NodeDriver, self).__init__(*args, **kwargs)
def _to_images(self, object, ex_only_active):
images = []
for image in findall(object, 'image', self.XML_NAMESPACE):
if ex_only_active and image.get('status') != 'ACTIVE':
continue
images.append(self._to_image(image))
return images
def _to_image(self, element):
return NodeImage(id=element.get('id'),
name=element.get('name'),
driver=self.connection.driver,
extra={'updated': element.get('updated'),
'created': element.get('created'),
'status': element.get('status'),
'serverId': element.get('serverId'),
'progress': element.get('progress'),
'minDisk': element.get('minDisk'),
'minRam': element.get('minRam')
}
)
def _change_password_or_name(self, node, name=None, password=None):
uri = '/servers/%s' % (node.id)
if not name:
name = node.name
body = {'xmlns': self.XML_NAMESPACE,
'name': name}
if password is not None:
body['adminPass'] = password
server_elm = ET.Element('server', body)
resp = self.connection.request(
uri, method='PUT', data=ET.tostring(server_elm))
if resp.status == httplib.NO_CONTENT and password is not None:
node.extra['password'] = password
return resp.status == httplib.NO_CONTENT
def create_node(self, **kwargs):
"""
Create a new node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
:keyword ex_files: File Path => File contents to create on
the node
:type ex_files: ``dict``
:keyword ex_shared_ip_group_id: The server is launched into
that shared IP group
:type ex_shared_ip_group_id: ``str``
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
attributes = {'xmlns': self.XML_NAMESPACE,
'name': name,
'imageId': str(image.id),
'flavorId': str(size.id)}
if 'ex_shared_ip_group' in kwargs:
# Deprecate this. Be explicit and call the variable
# ex_shared_ip_group_id since user needs to pass in the id, not the
# name.
warnings.warn('ex_shared_ip_group argument is deprecated.'
' Please use ex_shared_ip_group_id')
if 'ex_shared_ip_group_id' in kwargs:
shared_ip_group_id = kwargs['ex_shared_ip_group_id']
attributes['sharedIpGroupId'] = shared_ip_group_id
server_elm = ET.Element('server', attributes)
metadata_elm = self._metadata_to_xml(kwargs.get("ex_metadata", {}))
if metadata_elm:
server_elm.append(metadata_elm)
files_elm = self._files_to_xml(kwargs.get("ex_files", {}))
if files_elm:
server_elm.append(files_elm)
resp = self.connection.request("/servers",
method='POST',
data=ET.tostring(server_elm))
return self._to_node(resp.object)
def ex_set_password(self, node, password):
"""
Sets the Node's root password.
This will reboot the instance to complete the operation.
:class:`Node.extra['password']` will be set to the new value if the
operation was successful.
:param node: node to set password
:type node: :class:`Node`
:param password: new password.
:type password: ``str``
:rtype: ``bool``
"""
return self._change_password_or_name(node, password=password)
def ex_set_server_name(self, node, name):
"""
Sets the Node's name.
This will reboot the instance to complete the operation.
:param node: node to set name
:type node: :class:`Node`
:param name: new name
:type name: ``str``
:rtype: ``bool``
"""
return self._change_password_or_name(node, name=name)
def ex_resize_node(self, node, size):
"""
Change an existing server flavor / scale the server up or down.
:param node: node to resize.
:type node: :class:`Node`
:param size: new size.
:type size: :class:`NodeSize`
:rtype: ``bool``
"""
elm = ET.Element(
'resize',
{'xmlns': self.XML_NAMESPACE,
'flavorId': str(size.id)}
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_resize(self, node, size):
"""
NOTE: This method is here for backward compatibility reasons.
You should use ``ex_resize_node`` instead.
"""
return self.ex_resize_node(node=node, size=size)
def ex_confirm_resize(self, node):
"""
Confirm a resize request which is currently in progress. If a resize
request is not explicitly confirmed or reverted it's automatically
confirmed after 24 hours.
For more info refer to the API documentation: http://goo.gl/zjFI1
:param node: node for which the resize request will be confirmed.
:type node: :class:`Node`
:rtype: ``bool``
"""
elm = ET.Element(
'confirmResize',
{'xmlns': self.XML_NAMESPACE},
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.NO_CONTENT
def ex_revert_resize(self, node):
"""
Revert a resize request which is currently in progress.
All resizes are automatically confirmed after 24 hours if they have
not already been confirmed explicitly or reverted.
For more info refer to the API documentation: http://goo.gl/AizBu
:param node: node for which the resize request will be reverted.
:type node: :class:`Node`
:rtype: ``bool``
"""
elm = ET.Element(
'revertResize',
{'xmlns': self.XML_NAMESPACE}
)
resp = self.connection.request("/servers/%s/action" % (node.id),
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.NO_CONTENT
def ex_rebuild(self, node_id, image_id):
"""
Rebuilds the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param image_id: ID of the image which should be used
:type image_id: ``str``
:rtype: ``bool``
"""
# @TODO: Remove those ifs in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
if isinstance(image_id, NodeImage):
image_id = image_id.id
elm = ET.Element(
'rebuild',
{'xmlns': self.XML_NAMESPACE,
'imageId': image_id}
)
resp = self.connection.request("/servers/%s/action" % node_id,
method='POST',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_create_ip_group(self, group_name, node_id=None):
"""
Creates a shared IP group.
:param group_name: group name which should be used
:type group_name: ``str``
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
group_elm = ET.Element(
'sharedIpGroup',
{'xmlns': self.XML_NAMESPACE,
'name': group_name}
)
if node_id:
ET.SubElement(
group_elm,
'server',
{'id': node_id}
)
resp = self.connection.request('/shared_ip_groups',
method='POST',
data=ET.tostring(group_elm))
return self._to_shared_ip_group(resp.object)
def ex_list_ip_groups(self, details=False):
"""
Lists IDs and names for shared IP groups.
        If ``details`` is True, all attributes of the shared IP groups are listed.
:param details: True if details is required
:type details: ``bool``
:rtype: ``list`` of :class:`OpenStack_1_0_SharedIpGroup`
"""
uri = '/shared_ip_groups/detail' if details else '/shared_ip_groups'
resp = self.connection.request(uri,
method='GET')
groups = findall(resp.object, 'sharedIpGroup',
self.XML_NAMESPACE)
return [self._to_shared_ip_group(el) for el in groups]
def ex_delete_ip_group(self, group_id):
"""
Deletes the specified shared IP group.
:param group_id: group id which should be used
:type group_id: ``str``
:rtype: ``bool``
"""
uri = '/shared_ip_groups/%s' % group_id
resp = self.connection.request(uri, method='DELETE')
return resp.status == httplib.NO_CONTENT
def ex_share_ip(self, group_id, node_id, ip, configure_node=True):
"""
Shares an IP address to the specified server.
:param group_id: group id which should be used
:type group_id: ``str``
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param ip: ip which should be used
:type ip: ``str``
:param configure_node: configure node
:type configure_node: ``bool``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
if configure_node:
str_configure = 'true'
else:
str_configure = 'false'
elm = ET.Element(
'shareIp',
{'xmlns': self.XML_NAMESPACE,
'sharedIpGroupId': group_id,
'configureServer': str_configure},
)
uri = '/servers/%s/ips/public/%s' % (node_id, ip)
resp = self.connection.request(uri,
method='PUT',
data=ET.tostring(elm))
return resp.status == httplib.ACCEPTED
def ex_unshare_ip(self, node_id, ip):
"""
Removes a shared IP address from the specified server.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:param ip: ip which should be used
:type ip: ``str``
:rtype: ``bool``
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s/ips/public/%s' % (node_id, ip)
resp = self.connection.request(uri,
method='DELETE')
return resp.status == httplib.ACCEPTED
def ex_list_ip_addresses(self, node_id):
"""
List all server addresses.
:param node_id: ID of the node which should be used
:type node_id: ``str``
:rtype: :class:`OpenStack_1_0_NodeIpAddresses`
"""
# @TODO: Remove this if in 0.6
if isinstance(node_id, Node):
node_id = node_id.id
uri = '/servers/%s/ips' % node_id
resp = self.connection.request(uri,
method='GET')
return self._to_ip_addresses(resp.object)
def _metadata_to_xml(self, metadata):
if len(metadata) == 0:
return None
metadata_elm = ET.Element('metadata')
for k, v in list(metadata.items()):
meta_elm = ET.SubElement(metadata_elm, 'meta', {'key': str(k)})
meta_elm.text = str(v)
return metadata_elm
def _files_to_xml(self, files):
if len(files) == 0:
return None
personality_elm = ET.Element('personality')
for k, v in list(files.items()):
file_elm = ET.SubElement(personality_elm,
'file',
{'path': str(k)})
file_elm.text = base64.b64encode(b(v))
return personality_elm
def _reboot_node(self, node, reboot_type='SOFT'):
resp = self._node_action(node, ['reboot', ('type', reboot_type)])
return resp.status == httplib.ACCEPTED
def _node_action(self, node, body):
if isinstance(body, list):
attr = ' '.join(['%s="%s"' % (item[0], item[1])
for item in body[1:]])
body = '<%s xmlns="%s" %s/>' % (body[0], self.XML_NAMESPACE, attr)
uri = '/servers/%s/action' % (node.id)
resp = self.connection.request(uri, method='POST', data=body)
return resp
def _to_nodes(self, object):
node_elements = findall(object, 'server', self.XML_NAMESPACE)
return [self._to_node(el) for el in node_elements]
def _to_node_from_obj(self, obj):
return self._to_node(findall(obj, 'server', self.XML_NAMESPACE)[0])
def _to_node(self, el):
def get_ips(el):
return [ip.get('addr') for ip in el]
def get_meta_dict(el):
d = {}
for meta in el:
d[meta.get('key')] = meta.text
return d
public_ip = get_ips(findall(el, 'addresses/public/ip',
self.XML_NAMESPACE))
private_ip = get_ips(findall(el, 'addresses/private/ip',
self.XML_NAMESPACE))
metadata = get_meta_dict(findall(el, 'metadata/meta',
self.XML_NAMESPACE))
n = Node(id=el.get('id'),
name=el.get('name'),
state=self.NODE_STATE_MAP.get(
el.get('status'), NodeState.UNKNOWN),
public_ips=public_ip,
private_ips=private_ip,
driver=self.connection.driver,
# pylint: disable=no-member
extra={
'password': el.get('adminPass'),
'hostId': el.get('hostId'),
'imageId': el.get('imageId'),
'flavorId': el.get('flavorId'),
'uri': "https://%s%s/servers/%s" % (
self.connection.host,
self.connection.request_path, el.get('id')),
'service_name': self.connection.get_service_name(),
'metadata': metadata})
return n
def _to_sizes(self, object):
elements = findall(object, 'flavor', self.XML_NAMESPACE)
return [self._to_size(el) for el in elements]
def _to_size(self, el):
vcpus = int(el.get('vcpus')) if el.get('vcpus', None) else None
return OpenStackNodeSize(id=el.get('id'),
name=el.get('name'),
ram=int(el.get('ram')),
disk=int(el.get('disk')),
# XXX: needs hardcode
vcpus=vcpus,
bandwidth=None,
extra=el.get('extra_specs'),
# Hardcoded
price=self._get_size_price(el.get('id')),
driver=self.connection.driver)
def ex_limits(self):
"""
Extra call to get account's limits, such as
rates (for example amount of POST requests per day)
and absolute limits like total amount of available
RAM to be used by servers.
:return: dict with keys 'rate' and 'absolute'
:rtype: ``dict``
"""
def _to_rate(el):
rate = {}
for item in list(el.items()):
rate[item[0]] = item[1]
return rate
def _to_absolute(el):
return {el.get('name'): el.get('value')}
limits = self.connection.request("/limits").object
rate = [_to_rate(el) for el in findall(limits, 'rate/limit',
self.XML_NAMESPACE)]
absolute = {}
for item in findall(limits, 'absolute/limit',
self.XML_NAMESPACE):
absolute.update(_to_absolute(item))
return {"rate": rate, "absolute": absolute}
def create_image(self, node, name, description=None, reboot=True):
"""Create an image for node.
@inherits: :class:`NodeDriver.create_image`
:param node: node to use as a base for image
:type node: :class:`Node`
:param name: name for new image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
image_elm = ET.Element(
'image',
{'xmlns': self.XML_NAMESPACE,
'name': name,
'serverId': node.id}
)
return self._to_image(
self.connection.request("/images", method="POST",
data=ET.tostring(image_elm)).object)
def delete_image(self, image):
"""Delete an image for node.
@inherits: :class:`NodeDriver.delete_image`
:param image: the image to be deleted
:type image: :class:`NodeImage`
:rtype: ``bool``
"""
uri = '/images/%s' % image.id
resp = self.connection.request(uri, method='DELETE')
return resp.status == httplib.NO_CONTENT
def _to_shared_ip_group(self, el):
servers_el = findall(el, 'servers', self.XML_NAMESPACE)
if servers_el:
servers = [s.get('id')
for s in findall(servers_el[0], 'server',
self.XML_NAMESPACE)]
else:
servers = None
return OpenStack_1_0_SharedIpGroup(id=el.get('id'),
name=el.get('name'),
servers=servers)
def _to_ip_addresses(self, el):
public_ips = [ip.get('addr') for ip in findall(
findall(el, 'public', self.XML_NAMESPACE)[0],
'ip', self.XML_NAMESPACE)]
private_ips = [ip.get('addr') for ip in findall(
findall(el, 'private', self.XML_NAMESPACE)[0],
'ip', self.XML_NAMESPACE)]
return OpenStack_1_0_NodeIpAddresses(public_ips, private_ips)
def _get_size_price(self, size_id):
try:
return get_size_price(driver_type='compute',
driver_name=self.api_name,
size_id=size_id)
except KeyError:
return 0.0
class OpenStack_1_0_SharedIpGroup(object):
"""
Shared IP group info.
"""
def __init__(self, id, name, servers=None):
self.id = str(id)
self.name = name
self.servers = servers
class OpenStack_1_0_NodeIpAddresses(object):
"""
List of public and private IP addresses of a Node.
"""
def __init__(self, public_addresses, private_addresses):
self.public_addresses = public_addresses
self.private_addresses = private_addresses
class OpenStack_1_1_Response(OpenStackResponse):
def __init__(self, *args, **kwargs):
# done because of a circular reference from
# NodeDriver -> Connection -> Response
self.node_driver = OpenStack_1_1_NodeDriver
super(OpenStack_1_1_Response, self).__init__(*args, **kwargs)
class OpenStackNetwork(object):
"""
A Virtual Network.
"""
def __init__(self, id, name, cidr, driver, extra=None):
self.id = str(id)
self.name = name
self.cidr = cidr
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return '<OpenStackNetwork id="%s" name="%s" cidr="%s">' % (self.id,
self.name,
self.cidr,)
class OpenStackSecurityGroup(object):
"""
A Security Group.
"""
def __init__(self, id, tenant_id, name, description, driver, rules=None,
extra=None):
"""
Constructor.
:keyword id: Group id.
:type id: ``str``
:keyword tenant_id: Owner of the security group.
:type tenant_id: ``str``
:keyword name: Human-readable name for the security group. Might
not be unique.
:type name: ``str``
:keyword description: Human-readable description of a security
group.
:type description: ``str``
:keyword rules: Rules associated with this group.
:type rules: ``list`` of
:class:`OpenStackSecurityGroupRule`
:keyword extra: Extra attributes associated with this group.
:type extra: ``dict``
"""
self.id = id
self.tenant_id = tenant_id
self.name = name
self.description = description
self.driver = driver
self.rules = rules or []
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackSecurityGroup id=%s tenant_id=%s name=%s \
description=%s>' % (self.id, self.tenant_id, self.name,
self.description))
class OpenStackSecurityGroupRule(object):
"""
A Rule of a Security Group.
"""
def __init__(self, id, parent_group_id, ip_protocol, from_port, to_port,
driver, ip_range=None, group=None, tenant_id=None,
direction=None, extra=None):
"""
Constructor.
:keyword id: Rule id.
:type id: ``str``
:keyword parent_group_id: ID of the parent security group.
:type parent_group_id: ``str``
:keyword ip_protocol: IP Protocol (icmp, tcp, udp, etc).
:type ip_protocol: ``str``
:keyword from_port: Port at start of range.
:type from_port: ``int``
:keyword to_port: Port at end of range.
:type to_port: ``int``
:keyword ip_range: CIDR for address range.
:type ip_range: ``str``
:keyword group: Name of a source security group to apply to rule.
:type group: ``str``
:keyword tenant_id: Owner of the security group.
:type tenant_id: ``str``
:keyword direction: Security group Direction (ingress or egress).
:type direction: ``str``
:keyword extra: Extra attributes associated with this rule.
:type extra: ``dict``
"""
self.id = id
self.parent_group_id = parent_group_id
self.ip_protocol = ip_protocol
self.from_port = from_port
self.to_port = to_port
self.driver = driver
self.ip_range = ''
self.group = {}
self.direction = 'ingress'
if group is None:
self.ip_range = ip_range
else:
self.group = {'name': group, 'tenant_id': tenant_id}
# by default in old versions only ingress was used
if direction is not None:
if direction in ['ingress', 'egress']:
self.direction = direction
else:
raise OpenStackException("Security group direction incorrect "
"value: ingress or egress.", 500,
driver)
self.tenant_id = tenant_id
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackSecurityGroupRule id=%s parent_group_id=%s \
ip_protocol=%s from_port=%s to_port=%s>' % (self.id,
self.parent_group_id, self.ip_protocol, self.from_port,
self.to_port))
class OpenStackKeyPair(object):
"""
A KeyPair.
"""
def __init__(self, name, fingerprint, public_key, driver, private_key=None,
extra=None):
"""
Constructor.
:keyword name: Name of the KeyPair.
:type name: ``str``
:keyword fingerprint: Fingerprint of the KeyPair
:type fingerprint: ``str``
:keyword public_key: Public key in OpenSSH format.
:type public_key: ``str``
:keyword private_key: Private key in PEM format.
:type private_key: ``str``
:keyword extra: Extra attributes associated with this KeyPair.
:type extra: ``dict``
"""
self.name = name
self.fingerprint = fingerprint
self.public_key = public_key
self.private_key = private_key
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return ('<OpenStackKeyPair name=%s fingerprint=%s public_key=%s ...>'
% (self.name, self.fingerprint, self.public_key))
class OpenStack_1_1_Connection(OpenStackComputeConnection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_1_1_NodeDriver(OpenStackNodeDriver):
"""
OpenStack node driver.
"""
connectionCls = OpenStack_1_1_Connection
type = Provider.OPENSTACK
features = {"create_node": ["generates_password"]}
_networks_url_prefix = '/os-networks'
def __init__(self, *args, **kwargs):
self._ex_force_api_version = str(kwargs.pop('ex_force_api_version',
None))
super(OpenStack_1_1_NodeDriver, self).__init__(*args, **kwargs)
def create_node(self, **kwargs):
"""Create a new node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_keyname: The name of the key pair
:type ex_keyname: ``str``
:keyword ex_userdata: String containing user data
see
https://help.ubuntu.com/community/CloudInit
:type ex_userdata: ``str``
:keyword ex_config_drive: Enable config drive
see
http://docs.openstack.org/grizzly/openstack-compute/admin/content/config-drive.html
:type ex_config_drive: ``bool``
:keyword ex_security_groups: List of security groups to assign to
the node
:type ex_security_groups: ``list`` of
:class:`OpenStackSecurityGroup`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
:keyword ex_files: File Path => File contents to create on
the node
:type ex_files: ``dict``
:keyword networks: The server is launched into a set of Networks.
:type networks: ``list`` of :class:`OpenStackNetwork`
:keyword ex_disk_config: Name of the disk configuration.
Can be either ``AUTO`` or ``MANUAL``.
:type ex_disk_config: ``str``
:keyword ex_config_drive: If True enables metadata injection in a
server through a configuration drive.
:type ex_config_drive: ``bool``
:keyword ex_admin_pass: The root password for the node
:type ex_admin_pass: ``str``
:keyword ex_availability_zone: Nova availability zone for the node
:type ex_availability_zone: ``str``
"""
server_params = self._create_args_to_params(None, **kwargs)
resp = self.connection.request("/servers",
method='POST',
data={'server': server_params})
create_response = resp.object['server']
server_resp = self.connection.request(
'/servers/%s' % create_response['id'])
server_object = server_resp.object['server']
# adminPass is not always present
# http://docs.openstack.org/essex/openstack-compute/admin/
# content/configuring-compute-API.html#d6e1833
server_object['adminPass'] = create_response.get('adminPass', None)
return self._to_node(server_object)
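    # Illustrative sketch (hypothetical names): booting a node with a key
    # pair, cloud-init user data and an explicit network, using the keyword
    # arguments documented above.
    #
    #   node = driver.create_node(name='web-1', image=image, size=size,
    #                             ex_keyname='deploy-key',
    #                             ex_userdata='#cloud-config\nhostname: web-1\n',
    #                             networks=[network])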
def _to_images(self, obj, ex_only_active):
images = []
for image in obj['images']:
if ex_only_active and image.get('status') != 'ACTIVE':
continue
images.append(self._to_image(image))
return images
def _to_image(self, api_image):
server = api_image.get('server', {})
updated = api_image.get('updated_at') or api_image['updated']
created = api_image.get('created_at') or api_image['created']
min_ram = api_image.get('min_ram')
if min_ram is None:
min_ram = api_image.get('minRam')
min_disk = api_image.get('min_disk')
if min_disk is None:
min_disk = api_image.get('minDisk')
return NodeImage(
id=api_image['id'],
name=api_image['name'],
driver=self,
extra=dict(
visibility=api_image.get('visibility'),
updated=updated,
created=created,
status=api_image['status'],
progress=api_image.get('progress'),
metadata=api_image.get('metadata'),
os_type=api_image.get('os_type'),
serverId=server.get('id'),
minDisk=min_disk,
minRam=min_ram,
)
)
def _to_image_member(self, api_image_member):
created = api_image_member['created_at']
updated = api_image_member.get('updated_at')
return NodeImageMember(
id=api_image_member['member_id'],
image_id=api_image_member['image_id'],
state=api_image_member['status'],
created=created,
driver=self,
extra=dict(
schema=api_image_member.get('schema'),
updated=updated,
)
)
def _to_nodes(self, obj):
servers = obj['servers']
return [self._to_node(server) for server in servers]
def _to_volumes(self, obj):
volumes = obj['volumes']
return [self._to_volume(volume) for volume in volumes]
def _to_snapshots(self, obj):
snapshots = obj['snapshots']
return [self._to_snapshot(snapshot) for snapshot in snapshots]
def _to_sizes(self, obj):
flavors = obj['flavors']
return [self._to_size(flavor) for flavor in flavors]
def _create_args_to_params(self, node, **kwargs):
server_params = {
'name': kwargs.get('name'),
'metadata': kwargs.get('ex_metadata', {}),
'personality': self._files_to_personality(kwargs.get("ex_files",
{}))
}
if 'ex_availability_zone' in kwargs:
server_params['availability_zone'] = kwargs['ex_availability_zone']
if 'ex_keyname' in kwargs:
server_params['key_name'] = kwargs['ex_keyname']
if 'ex_userdata' in kwargs:
server_params['user_data'] = base64.b64encode(
b(kwargs['ex_userdata'])).decode('ascii')
if 'ex_config_drive' in kwargs:
server_params['config_drive'] = kwargs['ex_config_drive']
if 'ex_disk_config' in kwargs:
server_params['OS-DCF:diskConfig'] = kwargs['ex_disk_config']
if 'ex_config_drive' in kwargs:
server_params['config_drive'] = str(kwargs['ex_config_drive'])
if 'ex_admin_pass' in kwargs:
server_params['adminPass'] = kwargs['ex_admin_pass']
if 'networks' in kwargs:
networks = kwargs['networks']
networks = [{'uuid': network.id} for network in networks]
server_params['networks'] = networks
if 'ex_security_groups' in kwargs:
server_params['security_groups'] = []
for security_group in kwargs['ex_security_groups']:
name = security_group.name
server_params['security_groups'].append({'name': name})
if 'ex_blockdevicemappings' in kwargs:
server_params['block_device_mapping_v2'] = \
kwargs['ex_blockdevicemappings']
if 'name' in kwargs:
server_params['name'] = kwargs.get('name')
else:
server_params['name'] = node.name
if 'image' in kwargs:
server_params['imageRef'] = kwargs.get('image').id
else:
server_params['imageRef'] = node.extra.get(
'imageId', ''
) if node else ''
if 'size' in kwargs:
server_params['flavorRef'] = kwargs.get('size').id
else:
server_params['flavorRef'] = node.extra.get('flavorId')
return server_params
def _files_to_personality(self, files):
rv = []
for k, v in list(files.items()):
rv.append({'path': k, 'contents': base64.b64encode(b(v))})
return rv
def _reboot_node(self, node, reboot_type='SOFT'):
resp = self._node_action(node, 'reboot', type=reboot_type)
return resp.status == httplib.ACCEPTED
def ex_set_password(self, node, password):
"""
Changes the administrator password for a specified server.
:param node: Node to rebuild.
:type node: :class:`Node`
:param password: The administrator password.
:type password: ``str``
:rtype: ``bool``
"""
resp = self._node_action(node, 'changePassword', adminPass=password)
node.extra['password'] = password
return resp.status == httplib.ACCEPTED
def ex_rebuild(self, node, image, **kwargs):
"""
Rebuild a Node.
:param node: Node to rebuild.
:type node: :class:`Node`
:param image: New image to use.
:type image: :class:`NodeImage`
:keyword ex_metadata: Key/Value metadata to associate with a node
:type ex_metadata: ``dict``
:keyword ex_files: File Path => File contents to create on
the node
:type ex_files: ``dict``
:keyword ex_keyname: Name of existing public key to inject into
instance
:type ex_keyname: ``str``
:keyword ex_userdata: String containing user data
see
https://help.ubuntu.com/community/CloudInit
:type ex_userdata: ``str``
:keyword ex_security_groups: List of security groups to assign to
the node
:type ex_security_groups: ``list`` of
:class:`OpenStackSecurityGroup`
:keyword ex_disk_config: Name of the disk configuration.
Can be either ``AUTO`` or ``MANUAL``.
:type ex_disk_config: ``str``
:keyword ex_config_drive: If True enables metadata injection in a
server through a configuration drive.
:type ex_config_drive: ``bool``
:rtype: ``bool``
"""
server_params = self._create_args_to_params(node, image=image,
**kwargs)
resp = self._node_action(node, 'rebuild', **server_params)
return resp.status == httplib.ACCEPTED
def ex_resize(self, node, size):
"""
Change a node size.
:param node: Node to resize.
:type node: :class:`Node`
:type size: :class:`NodeSize`
:param size: New size to use.
:rtype: ``bool``
"""
server_params = {'flavorRef': size.id}
resp = self._node_action(node, 'resize', **server_params)
return resp.status == httplib.ACCEPTED
def ex_confirm_resize(self, node):
"""
Confirms a pending resize action.
:param node: Node to resize.
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'confirmResize')
return resp.status == httplib.NO_CONTENT
def ex_revert_resize(self, node):
"""
Cancels and reverts a pending resize action.
:param node: Node to resize.
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'revertResize')
return resp.status == httplib.ACCEPTED
def create_image(self, node, name, metadata=None):
"""
Creates a new image.
:param node: Node
:type node: :class:`Node`
:param name: The name for the new image.
:type name: ``str``
:param metadata: Key and value pairs for metadata.
:type metadata: ``dict``
:rtype: :class:`NodeImage`
"""
optional_params = {}
if metadata:
optional_params['metadata'] = metadata
resp = self._node_action(node, 'createImage', name=name,
**optional_params)
image_id = self._extract_image_id_from_url(resp.headers['location'])
return self.get_image(image_id=image_id)
def ex_set_server_name(self, node, name):
"""
Sets the Node's name.
:param node: Node
:type node: :class:`Node`
:param name: The name of the server.
:type name: ``str``
:rtype: :class:`Node`
"""
return self._update_node(node, name=name)
def ex_get_metadata(self, node):
"""
Get a Node's metadata.
:param node: Node
:type node: :class:`Node`
:return: Key/Value metadata associated with node.
:rtype: ``dict``
"""
return self.connection.request(
'/servers/%s/metadata' % (node.id,),
method='GET',).object['metadata']
def ex_set_metadata(self, node, metadata):
"""
Sets the Node's metadata.
:param node: Node
:type node: :class:`Node`
:param metadata: Key/Value metadata to associate with a node
:type metadata: ``dict``
:rtype: ``dict``
"""
return self.connection.request(
'/servers/%s/metadata' % (node.id,), method='PUT',
data={'metadata': metadata}
).object['metadata']
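    # Illustrative sketch: round-tripping node metadata with the two calls
    # above; the key/value pair is a placeholder.
    #
    #   driver.ex_set_metadata(node, {'role': 'web'})
    #   assert driver.ex_get_metadata(node)['role'] == 'web'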
def ex_update_node(self, node, **node_updates):
"""
Update the Node's editable attributes. The OpenStack API currently
supports editing name and IPv4/IPv6 access addresses.
The driver currently only supports updating the node name.
:param node: Node
:type node: :class:`Node`
:keyword name: New name for the server
:type name: ``str``
:rtype: :class:`Node`
"""
potential_data = self._create_args_to_params(node, **node_updates)
updates = {'name': potential_data['name']}
return self._update_node(node, **updates)
def _to_networks(self, obj):
networks = obj['networks']
return [self._to_network(network) for network in networks]
def _to_network(self, obj):
return OpenStackNetwork(id=obj['id'],
name=obj['label'],
cidr=obj.get('cidr', None),
driver=self)
def ex_list_networks(self):
"""
Get a list of Networks that are available.
:rtype: ``list`` of :class:`OpenStackNetwork`
"""
response = self.connection.request(self._networks_url_prefix).object
return self._to_networks(response)
def ex_get_network(self, network_id):
"""
Retrieve the Network with the given ID
        :param network_id: ID of the network
        :type network_id: ``str``
        :rtype: :class:`OpenStackNetwork`
"""
request_url = "{networks_url_prefix}/{network_id}".format(
networks_url_prefix=self._networks_url_prefix,
network_id=network_id
)
response = self.connection.request(request_url).object
return self._to_network(response['network'])
def ex_create_network(self, name, cidr):
"""
Create a new Network
:param name: Name of network which should be used
:type name: ``str``
:param cidr: cidr of network which should be used
:type cidr: ``str``
:rtype: :class:`OpenStackNetwork`
"""
data = {'network': {'cidr': cidr, 'label': name}}
response = self.connection.request(self._networks_url_prefix,
method='POST', data=data).object
return self._to_network(response['network'])
def ex_delete_network(self, network):
"""
Delete a Network
:param network: Network which should be used
:type network: :class:`OpenStackNetwork`
:rtype: ``bool``
"""
resp = self.connection.request('%s/%s' % (self._networks_url_prefix,
network.id),
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_get_console_output(self, node, length=None):
"""
Get console output
:param node: node
:type node: :class:`Node`
:param length: Optional number of lines to fetch from the
console log
:type length: ``int``
:return: Dictionary with the output
:rtype: ``dict``
"""
data = {
"os-getConsoleOutput": {
"length": length
}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data).object
return resp
def ex_list_snapshots(self):
return self._to_snapshots(
self.connection.request('/os-snapshots').object)
def ex_get_snapshot(self, snapshotId):
return self._to_snapshot(
self.connection.request('/os-snapshots/%s' % snapshotId).object)
def list_volume_snapshots(self, volume):
return [snapshot for snapshot in self.ex_list_snapshots()
if snapshot.extra['volume_id'] == volume.id]
def create_volume_snapshot(self, volume, name=None, ex_description=None,
ex_force=True):
"""
Create snapshot from volume
:param volume: Instance of `StorageVolume`
:type volume: `StorageVolume`
:param name: Name of snapshot (optional)
:type name: `str` | `NoneType`
:param ex_description: Description of the snapshot (optional)
:type ex_description: `str` | `NoneType`
:param ex_force: Specifies if we create a snapshot that is not in
state `available`. For example `in-use`. Defaults
to True. (optional)
:type ex_force: `bool`
:rtype: :class:`VolumeSnapshot`
"""
data = {'snapshot': {'volume_id': volume.id, 'force': ex_force}}
if name is not None:
data['snapshot']['display_name'] = name
if ex_description is not None:
data['snapshot']['display_description'] = ex_description
return self._to_snapshot(self.connection.request('/os-snapshots',
method='POST',
data=data).object)
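    # Illustrative sketch (hypothetical names): snapshotting an in-use volume;
    # ex_force defaults to True, so the volume does not have to be detached.
    #
    #   snapshot = driver.create_volume_snapshot(volume, name='nightly',
    #                                            ex_description='pre-upgrade')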
def destroy_volume_snapshot(self, snapshot):
resp = self.connection.request('/os-snapshots/%s' % snapshot.id,
method='DELETE')
return resp.status == httplib.NO_CONTENT
def ex_create_snapshot(self, volume, name, description=None, force=False):
"""
Create a snapshot based off of a volume.
:param volume: volume
:type volume: :class:`StorageVolume`
:keyword name: New name for the volume snapshot
:type name: ``str``
:keyword description: Description of the snapshot (optional)
:type description: ``str``
:keyword force: Whether to force creation (optional)
:type force: ``bool``
:rtype: :class:`VolumeSnapshot`
"""
warnings.warn('This method has been deprecated in favor of the '
'create_volume_snapshot method')
return self.create_volume_snapshot(volume, name,
ex_description=description,
ex_force=force)
def ex_delete_snapshot(self, snapshot):
"""
Delete a VolumeSnapshot
:param snapshot: snapshot
:type snapshot: :class:`VolumeSnapshot`
:rtype: ``bool``
"""
warnings.warn('This method has been deprecated in favor of the '
'destroy_volume_snapshot method')
return self.destroy_volume_snapshot(snapshot)
def _to_security_group_rules(self, obj):
return [self._to_security_group_rule(security_group_rule) for
security_group_rule in obj]
def _to_security_group_rule(self, obj):
ip_range = group = tenant_id = None
if obj['group'] == {}:
ip_range = obj['ip_range'].get('cidr', None)
else:
group = obj['group'].get('name', None)
tenant_id = obj['group'].get('tenant_id', None)
return OpenStackSecurityGroupRule(
id=obj['id'], parent_group_id=obj['parent_group_id'],
ip_protocol=obj['ip_protocol'], from_port=obj['from_port'],
to_port=obj['to_port'], driver=self, ip_range=ip_range,
group=group, tenant_id=tenant_id)
def _to_security_groups(self, obj):
security_groups = obj['security_groups']
return [self._to_security_group(security_group) for security_group in
security_groups]
def _to_security_group(self, obj):
rules = self._to_security_group_rules(obj.get('security_group_rules',
obj.get('rules', [])))
return OpenStackSecurityGroup(id=obj['id'],
tenant_id=obj['tenant_id'],
name=obj['name'],
description=obj.get('description', ''),
rules=rules,
driver=self)
def ex_list_security_groups(self):
"""
Get a list of Security Groups that are available.
:rtype: ``list`` of :class:`OpenStackSecurityGroup`
"""
return self._to_security_groups(
self.connection.request('/os-security-groups').object)
def ex_get_node_security_groups(self, node):
"""
Get Security Groups of the specified server.
:rtype: ``list`` of :class:`OpenStackSecurityGroup`
"""
return self._to_security_groups(
self.connection.request('/servers/%s/os-security-groups' %
(node.id)).object)
def ex_create_security_group(self, name, description):
"""
Create a new Security Group
:param name: Name of the new Security Group
:type name: ``str``
:param description: Description of the new Security Group
:type description: ``str``
:rtype: :class:`OpenStackSecurityGroup`
"""
return self._to_security_group(self.connection.request(
'/os-security-groups', method='POST',
data={'security_group': {'name': name, 'description': description}}
).object['security_group'])
def ex_delete_security_group(self, security_group):
"""
Delete a Security Group.
        :param security_group: Security Group which should be deleted
:type security_group: :class:`OpenStackSecurityGroup`
:rtype: ``bool``
"""
resp = self.connection.request('/os-security-groups/%s' %
(security_group.id),
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_create_security_group_rule(self, security_group, ip_protocol,
from_port, to_port, cidr=None,
source_security_group=None):
"""
Create a new Rule in a Security Group
:param security_group: Security Group in which to add the rule
:type security_group: :class:`OpenStackSecurityGroup`
:param ip_protocol: Protocol to which this rule applies
Examples: tcp, udp, ...
:type ip_protocol: ``str``
:param from_port: First port of the port range
:type from_port: ``int``
:param to_port: Last port of the port range
:type to_port: ``int``
:param cidr: CIDR notation of the source IP range for this rule
:type cidr: ``str``
:param source_security_group: Existing Security Group to use as the
source (instead of CIDR)
        :type source_security_group: :class:`OpenStackSecurityGroup`
:rtype: :class:`OpenStackSecurityGroupRule`
"""
source_security_group_id = None
if type(source_security_group) == OpenStackSecurityGroup:
source_security_group_id = source_security_group.id
return self._to_security_group_rule(self.connection.request(
'/os-security-group-rules', method='POST',
data={'security_group_rule': {
'ip_protocol': ip_protocol,
'from_port': from_port,
'to_port': to_port,
'cidr': cidr,
'group_id': source_security_group_id,
'parent_group_id': security_group.id}}
).object['security_group_rule'])
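    # Illustrative sketch: opening SSH from anywhere in an existing security
    # group; the group object and CIDR are placeholders.
    #
    #   driver.ex_create_security_group_rule(group, 'tcp', 22, 22,
    #                                        cidr='0.0.0.0/0')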
def ex_delete_security_group_rule(self, rule):
"""
Delete a Rule from a Security Group.
        :param rule: Rule which should be deleted
:type rule: :class:`OpenStackSecurityGroupRule`
:rtype: ``bool``
"""
resp = self.connection.request('/os-security-group-rules/%s' %
(rule.id), method='DELETE')
return resp.status == httplib.NO_CONTENT
def _to_key_pairs(self, obj):
key_pairs = obj['keypairs']
key_pairs = [self._to_key_pair(key_pair['keypair']) for key_pair in
key_pairs]
return key_pairs
def _to_key_pair(self, obj):
key_pair = KeyPair(name=obj['name'],
fingerprint=obj['fingerprint'],
public_key=obj['public_key'],
private_key=obj.get('private_key', None),
driver=self)
return key_pair
def list_key_pairs(self):
response = self.connection.request('/os-keypairs')
key_pairs = self._to_key_pairs(response.object)
return key_pairs
def get_key_pair(self, name):
self.connection.set_context({'key_pair_name': name})
response = self.connection.request('/os-keypairs/%s' % (name))
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
def create_key_pair(self, name):
data = {'keypair': {'name': name}}
response = self.connection.request('/os-keypairs', method='POST',
data=data)
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
def import_key_pair_from_string(self, name, key_material):
data = {'keypair': {'name': name, 'public_key': key_material}}
response = self.connection.request('/os-keypairs', method='POST',
data=data)
key_pair = self._to_key_pair(response.object['keypair'])
return key_pair
def delete_key_pair(self, key_pair):
"""
Delete a KeyPair.
:param keypair: KeyPair to delete
:type keypair: :class:`OpenStackKeyPair`
:rtype: ``bool``
"""
response = self.connection.request('/os-keypairs/%s' % (key_pair.name),
method='DELETE')
return response.status == httplib.ACCEPTED
def ex_list_keypairs(self):
"""
Get a list of KeyPairs that are available.
:rtype: ``list`` of :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'list_key_pairs method')
return self.list_key_pairs()
def ex_create_keypair(self, name):
"""
Create a new KeyPair
:param name: Name of the new KeyPair
:type name: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'create_key_pair method')
return self.create_key_pair(name=name)
def ex_import_keypair(self, name, keyfile):
"""
Import a KeyPair from a file
:param name: Name of the new KeyPair
:type name: ``str``
:param keyfile: Path to the public key file (in OpenSSH format)
:type keyfile: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'import_key_pair_from_file method')
return self.import_key_pair_from_file(name=name, key_file_path=keyfile)
def ex_import_keypair_from_string(self, name, key_material):
"""
Import a KeyPair from a string
:param name: Name of the new KeyPair
:type name: ``str``
:param key_material: Public key (in OpenSSH format)
:type key_material: ``str``
:rtype: :class:`OpenStackKeyPair`
"""
warnings.warn('This method has been deprecated in favor of '
'import_key_pair_from_string method')
return self.import_key_pair_from_string(name=name,
key_material=key_material)
def ex_delete_keypair(self, keypair):
"""
Delete a KeyPair.
:param keypair: KeyPair to delete
:type keypair: :class:`OpenStackKeyPair`
:rtype: ``bool``
"""
warnings.warn('This method has been deprecated in favor of '
'delete_key_pair method')
return self.delete_key_pair(key_pair=keypair)
def ex_get_size(self, size_id):
"""
Get a NodeSize
:param size_id: ID of the size which should be used
:type size_id: ``str``
:rtype: :class:`NodeSize`
"""
return self._to_size(self.connection.request(
            '/flavors/%s' % (size_id,)).object['flavor'])
def get_image(self, image_id):
"""
Get a NodeImage
@inherits: :class:`NodeDriver.get_image`
:param image_id: ID of the image which should be used
:type image_id: ``str``
:rtype: :class:`NodeImage`
"""
return self._to_image(self.connection.request(
'/images/%s' % (image_id,)).object['image'])
def delete_image(self, image):
"""
Delete a NodeImage
@inherits: :class:`NodeDriver.delete_image`
        :param image: image which should be used
:type image: :class:`NodeImage`
:rtype: ``bool``
"""
resp = self.connection.request('/images/%s' % (image.id,),
method='DELETE')
return resp.status == httplib.NO_CONTENT
def _node_action(self, node, action, **params):
params = params or None
return self.connection.request('/servers/%s/action' % (node.id,),
method='POST', data={action: params})
def _update_node(self, node, **node_updates):
"""
Updates the editable attributes of a server, which currently include
its name and IPv4/IPv6 access addresses.
"""
return self._to_node(
self.connection.request(
'/servers/%s' % (node.id,), method='PUT',
data={'server': node_updates}
).object['server']
)
def _to_node_from_obj(self, obj):
return self._to_node(obj['server'])
def _to_node(self, api_node):
public_networks_labels = ['public', 'internet']
public_ips, private_ips = [], []
for label, values in api_node['addresses'].items():
for value in values:
ip = value['addr']
is_public_ip = False
try:
is_public_ip = is_public_subnet(ip)
except Exception:
# IPv6
# Openstack Icehouse sets 'OS-EXT-IPS:type' to 'floating'
# for public and 'fixed' for private
explicit_ip_type = value.get('OS-EXT-IPS:type', None)
if label in public_networks_labels:
is_public_ip = True
elif explicit_ip_type == 'floating':
is_public_ip = True
elif explicit_ip_type == 'fixed':
is_public_ip = False
if is_public_ip:
public_ips.append(ip)
else:
private_ips.append(ip)
# Sometimes 'image' attribute is not present if the node is in an error
# state
image = api_node.get('image', None)
image_id = image.get('id', None) if image else None
config_drive = api_node.get("config_drive", False)
volumes_attached = api_node.get('os-extended-volumes:volumes_attached')
created = parse_date(api_node["created"])
return Node(
id=api_node['id'],
name=api_node['name'],
state=self.NODE_STATE_MAP.get(api_node['status'],
NodeState.UNKNOWN),
public_ips=public_ips,
private_ips=private_ips,
created_at=created,
driver=self,
extra=dict(
addresses=api_node['addresses'],
hostId=api_node['hostId'],
access_ip=api_node.get('accessIPv4'),
access_ipv6=api_node.get('accessIPv6', None),
# Docs says "tenantId", but actual is "tenant_id". *sigh*
# Best handle both.
tenantId=api_node.get('tenant_id') or api_node['tenantId'],
userId=api_node.get('user_id', None),
imageId=image_id,
flavorId=api_node['flavor']['id'],
uri=next(link['href'] for link in api_node['links'] if
link['rel'] == 'self'),
# pylint: disable=no-member
service_name=self.connection.get_service_name(),
metadata=api_node['metadata'],
password=api_node.get('adminPass', None),
created=api_node['created'],
updated=api_node['updated'],
key_name=api_node.get('key_name', None),
disk_config=api_node.get('OS-DCF:diskConfig', None),
config_drive=config_drive,
availability_zone=api_node.get('OS-EXT-AZ:availability_zone'),
volumes_attached=volumes_attached,
task_state=api_node.get("OS-EXT-STS:task_state", None),
vm_state=api_node.get("OS-EXT-STS:vm_state", None),
power_state=api_node.get("OS-EXT-STS:power_state", None),
progress=api_node.get("progress", None),
fault=api_node.get('fault')
),
)
def _to_volume(self, api_node):
if 'volume' in api_node:
api_node = api_node['volume']
state = self.VOLUME_STATE_MAP.get(api_node['status'],
StorageVolumeState.UNKNOWN)
return StorageVolume(
id=api_node['id'],
name=api_node.get('displayName', api_node.get('name')),
size=api_node['size'],
state=state,
driver=self,
extra={
'description': api_node.get('displayDescription',
api_node.get('description')),
'attachments': [att for att in api_node['attachments'] if att],
# TODO: remove in 1.18.0
'state': api_node.get('status', None),
'snapshot_id': api_node.get('snapshot_id',
api_node.get('snapshotId')),
'location': api_node.get('availability_zone',
api_node.get('availabilityZone')),
'volume_type': api_node.get('volume_type',
api_node.get('volumeType')),
'metadata': api_node.get('metadata', None),
'created_at': api_node.get('created_at',
api_node.get('createdAt'))
}
)
def _to_snapshot(self, data):
if 'snapshot' in data:
data = data['snapshot']
volume_id = data.get('volume_id', data.get('volumeId', None))
display_name = data.get('name',
data.get('display_name',
data.get('displayName', None)))
created_at = data.get('created_at', data.get('createdAt', None))
description = data.get('description',
data.get('display_description',
data.get('displayDescription', None)))
status = data.get('status', None)
extra = {'volume_id': volume_id,
'name': display_name,
'created': created_at,
'description': description,
'status': status}
state = self.SNAPSHOT_STATE_MAP.get(
status,
VolumeSnapshotState.UNKNOWN
)
try:
created_dt = parse_date(created_at)
except ValueError:
created_dt = None
snapshot = VolumeSnapshot(id=data['id'], driver=self,
size=data['size'], extra=extra,
created=created_dt, state=state,
name=display_name)
return snapshot
def _to_size(self, api_flavor, price=None, bandwidth=None):
# if provider-specific subclasses can get better values for
# price/bandwidth, then can pass them in when they super().
if not price:
price = self._get_size_price(str(api_flavor['id']))
extra = api_flavor.get('OS-FLV-WITH-EXT-SPECS:extra_specs', {})
return OpenStackNodeSize(
id=api_flavor['id'],
name=api_flavor['name'],
ram=api_flavor['ram'],
disk=api_flavor['disk'],
vcpus=api_flavor['vcpus'],
ephemeral_disk=api_flavor.get('OS-FLV-EXT-DATA:ephemeral', None),
swap=api_flavor['swap'],
extra=extra,
bandwidth=bandwidth,
price=price,
driver=self,
)
def _get_size_price(self, size_id):
try:
return get_size_price(
driver_type='compute',
driver_name=self.api_name,
size_id=size_id,
)
except KeyError:
return(0.0)
def _extract_image_id_from_url(self, location_header):
path = urlparse.urlparse(location_header).path
image_id = path.split('/')[-1]
return image_id
def ex_rescue(self, node, password=None):
# Requires Rescue Mode extension
"""
Rescue a node
:param node: node
:type node: :class:`Node`
:param password: password
:type password: ``str``
:rtype: :class:`Node`
"""
if password:
resp = self._node_action(node, 'rescue', adminPass=password)
else:
resp = self._node_action(node, 'rescue')
password = json.loads(resp.body)['adminPass']
node.extra['password'] = password
return node
def ex_unrescue(self, node):
"""
Unrescue a node
:param node: node
:type node: :class:`Node`
:rtype: ``bool``
"""
resp = self._node_action(node, 'unrescue')
return resp.status == httplib.ACCEPTED
def _to_floating_ip_pools(self, obj):
pool_elements = obj['floating_ip_pools']
return [self._to_floating_ip_pool(pool) for pool in pool_elements]
def _to_floating_ip_pool(self, obj):
return OpenStack_1_1_FloatingIpPool(obj['name'], self.connection)
def ex_list_floating_ip_pools(self):
"""
List available floating IP pools
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpPool`
"""
return self._to_floating_ip_pools(
self.connection.request('/os-floating-ip-pools').object)
def _to_floating_ips(self, obj):
ip_elements = obj['floating_ips']
return [self._to_floating_ip(ip) for ip in ip_elements]
def _to_floating_ip(self, obj):
return OpenStack_1_1_FloatingIpAddress(id=obj['id'],
ip_address=obj['ip'],
pool=None,
node_id=obj['instance_id'],
driver=self)
def ex_list_floating_ips(self):
"""
List floating IPs
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpAddress`
"""
return self._to_floating_ips(
self.connection.request('/os-floating-ips').object)
def ex_get_floating_ip(self, ip):
"""
Get specified floating IP
:param ip: floating IP to get
:type ip: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
floating_ips = self.ex_list_floating_ips()
ip_obj, = [x for x in floating_ips if x.ip_address == ip]
return ip_obj
def ex_create_floating_ip(self, ip_pool=None):
"""
Create new floating IP. The ip_pool attribute is optional only if your
infrastructure has only one IP pool available.
:param ip_pool: name of the floating IP pool
:type ip_pool: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
data = {'pool': ip_pool} if ip_pool is not None else {}
resp = self.connection.request('/os-floating-ips',
method='POST',
data=data)
data = resp.object['floating_ip']
id = data['id']
ip_address = data['ip']
return OpenStack_1_1_FloatingIpAddress(id=id,
ip_address=ip_address,
pool=None,
node_id=None,
driver=self)
def ex_delete_floating_ip(self, ip):
"""
Delete specified floating IP
:param ip: floating IP to remove
:type ip: :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
resp = self.connection.request('/os-floating-ips/%s' % ip.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_attach_floating_ip_to_node(self, node, ip):
"""
Attach the floating IP to the node
:param node: node
:type node: :class:`Node`
:param ip: floating IP to attach
:type ip: ``str`` or :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
address = ip.ip_address if hasattr(ip, 'ip_address') else ip
data = {
'addFloatingIp': {'address': address}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data)
return resp.status == httplib.ACCEPTED
def ex_detach_floating_ip_from_node(self, node, ip):
"""
Detach the floating IP from the node
:param node: node
:type node: :class:`Node`
:param ip: floating IP to remove
:type ip: ``str`` or :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
address = ip.ip_address if hasattr(ip, 'ip_address') else ip
data = {
'removeFloatingIp': {'address': address}
}
resp = self.connection.request('/servers/%s/action' % node.id,
method='POST', data=data)
return resp.status == httplib.ACCEPTED
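    # Hedged floating-IP workflow built from the methods above (`driver`,
    # `node` and the pool name are assumptions for illustration):
    #   ip = driver.ex_create_floating_ip(ip_pool='public')
    #   driver.ex_attach_floating_ip_to_node(node, ip)
    #   driver.ex_detach_floating_ip_from_node(node, ip)
    #   driver.ex_delete_floating_ip(ip)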
def ex_get_metadata_for_node(self, node):
"""
Return the metadata associated with the node.
:param node: Node instance
:type node: :class:`Node`
:return: A dictionary or other mapping of strings to strings,
associating tag names with tag values.
        :rtype: ``dict``
"""
return node.extra['metadata']
def ex_pause_node(self, node):
return self._post_simple_node_action(node, 'pause')
def ex_unpause_node(self, node):
return self._post_simple_node_action(node, 'unpause')
def ex_stop_node(self, node):
return self._post_simple_node_action(node, 'os-stop')
def ex_start_node(self, node):
return self._post_simple_node_action(node, 'os-start')
def ex_suspend_node(self, node):
return self._post_simple_node_action(node, 'suspend')
def ex_resume_node(self, node):
return self._post_simple_node_action(node, 'resume')
def _post_simple_node_action(self, node, action):
""" Post a simple, data-less action to the OS node action endpoint
:param `Node` node:
:param str action: the action to call
:return `bool`: a boolean that indicates success
"""
uri = '/servers/{node_id}/action'.format(node_id=node.id)
resp = self.connection.request(uri, method='POST', data={action: None})
return resp.status == httplib.ACCEPTED
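# Hedged sketch of the simple node actions defined above (assumes `driver` is
# an OpenStack_1_1_NodeDriver instance and `node` one of its nodes):
#   driver.ex_stop_node(node)    # POSTs {'os-stop': None} to /servers/<id>/action
#   driver.ex_start_node(node)   # POSTs {'os-start': None}
#   driver.ex_pause_node(node)   # POSTs {'pause': None}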
class OpenStack_2_Connection(OpenStackComputeConnection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_2_ImageConnection(OpenStackImageConnection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_2_NetworkConnection(OpenStackNetworkConnection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_2_VolumeV2Connection(OpenStackVolumeV2Connection):
responseCls = OpenStack_1_1_Response
accept_format = 'application/json'
default_content_type = 'application/json; charset=UTF-8'
def encode_data(self, data):
return json.dumps(data)
class OpenStack_2_PortInterfaceState(Type):
"""
Standard states of OpenStack_2_PortInterfaceState
"""
BUILD = 'build'
ACTIVE = 'active'
DOWN = 'down'
UNKNOWN = 'unknown'
class OpenStack_2_NodeDriver(OpenStack_1_1_NodeDriver):
"""
OpenStack node driver.
"""
connectionCls = OpenStack_2_Connection
# Previously all image functionality was available through the
# compute API. This deprecated proxied API does not offer all
# functionality that the Glance Image service API offers.
# See https://developer.openstack.org/api-ref/compute/
#
# > These APIs are proxy calls to the Image service. Nova has deprecated
# > all the proxy APIs and users should use the native APIs instead. These
# > will fail with a 404 starting from microversion 2.36. See: Relevant
# > Image APIs.
#
# For example, managing image visibility and sharing machine
# images across tenants can not be done using the proxied image API in the
# compute endpoint, but it can be done with the Glance Image API.
# See https://developer.openstack.org/api-ref/
# image/v2/index.html#list-image-members
image_connectionCls = OpenStack_2_ImageConnection
image_connection = None
# Similarly not all node-related operations are exposed through the
# compute API
# See https://developer.openstack.org/api-ref/compute/
# For example, creating a new node in an OpenStack that is configured to
# create a new port for every new instance will make it so that if that
# port is detached it disappears. But if the port is manually created
# beforehand using the neutron network API and node is booted with that
# port pre-specified, then detaching that port later will result in that
    # becoming a re-attachable resource, much like a floating IP. So even
    # though this is the compute driver, we also connect to the networking
    # API here, because some operations relevant to compute can only be
    # accessed from there.
network_connectionCls = OpenStack_2_NetworkConnection
network_connection = None
    # Similarly, volume operations are exposed through the block-storage
    # API of the Cinder service:
# https://developer.openstack.org/api-ref/block-storage/
volumev2_connectionCls = OpenStack_2_VolumeV2Connection
volumev2_connection = None
type = Provider.OPENSTACK
features = {"create_node": ["generates_password"]}
_networks_url_prefix = '/v2.0/networks'
_subnets_url_prefix = '/v2.0/subnets'
PORT_INTERFACE_MAP = {
'BUILD': OpenStack_2_PortInterfaceState.BUILD,
'ACTIVE': OpenStack_2_PortInterfaceState.ACTIVE,
'DOWN': OpenStack_2_PortInterfaceState.DOWN,
'UNKNOWN': OpenStack_2_PortInterfaceState.UNKNOWN
}
def __init__(self, *args, **kwargs):
original_connectionCls = self.connectionCls
self._ex_force_api_version = str(kwargs.pop('ex_force_api_version',
None))
if 'ex_force_auth_version' not in kwargs:
kwargs['ex_force_auth_version'] = '3.x_password'
original_ex_force_base_url = kwargs.get('ex_force_base_url')
# We run the init once to get the Glance V2 API connection
# and put that on the object under self.image_connection.
if original_ex_force_base_url or kwargs.get('ex_force_image_url'):
kwargs['ex_force_base_url'] = \
str(kwargs.pop('ex_force_image_url',
original_ex_force_base_url))
self.connectionCls = self.image_connectionCls
super(OpenStack_2_NodeDriver, self).__init__(*args, **kwargs)
self.image_connection = self.connection
# We run the init once to get the Cinder V2 API connection
# and put that on the object under self.volumev2_connection.
if original_ex_force_base_url or kwargs.get('ex_force_volume_url'):
kwargs['ex_force_base_url'] = \
str(kwargs.pop('ex_force_volume_url',
original_ex_force_base_url))
self.connectionCls = self.volumev2_connectionCls
super(OpenStack_2_NodeDriver, self).__init__(*args, **kwargs)
self.volumev2_connection = self.connection
# We run the init once to get the Neutron V2 API connection
# and put that on the object under self.network_connection.
if original_ex_force_base_url or kwargs.get('ex_force_network_url'):
kwargs['ex_force_base_url'] = \
str(kwargs.pop('ex_force_network_url',
original_ex_force_base_url))
self.connectionCls = self.network_connectionCls
super(OpenStack_2_NodeDriver, self).__init__(*args, **kwargs)
self.network_connection = self.connection
# We run the init once again to get the compute API connection
# and that's put under self.connection as normal.
self._ex_force_base_url = original_ex_force_base_url
if original_ex_force_base_url:
kwargs['ex_force_base_url'] = self._ex_force_base_url
self.connectionCls = original_connectionCls
super(OpenStack_2_NodeDriver, self).__init__(*args, **kwargs)
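    # Hedged construction sketch (endpoint URLs are placeholders, other auth
    # arguments omitted); each ex_force_*_url is consumed by the matching
    # connection built in __init__ above:
    #   driver = OpenStack_2_NodeDriver(
    #       ...,
    #       ex_force_base_url='https://nova.example:8774/v2.1',
    #       ex_force_image_url='https://glance.example:9292',
    #       ex_force_volume_url='https://cinder.example:8776/v2',
    #       ex_force_network_url='https://neutron.example:9696')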
def _to_port(self, element):
created = element.get('created_at')
updated = element.get('updated_at')
return OpenStack_2_PortInterface(
id=element['id'],
state=self.PORT_INTERFACE_MAP.get(
element.get('status'), OpenStack_2_PortInterfaceState.UNKNOWN
),
created=created,
driver=self,
extra=dict(
admin_state_up=element['admin_state_up'],
allowed_address_pairs=element['allowed_address_pairs'],
binding_vnic_type=element['binding:vnic_type'],
device_id=element['device_id'],
description=element['description'],
device_owner=element['device_owner'],
fixed_ips=element['fixed_ips'],
mac_address=element['mac_address'],
name=element['name'],
network_id=element['network_id'],
project_id=element.get('project_id', None),
port_security_enabled=element.get('port_security_enabled',
None),
revision_number=element.get('revision_number', None),
security_groups=element['security_groups'],
tags=element.get('tags', None),
tenant_id=element['tenant_id'],
updated=updated,
)
)
def list_nodes(self, ex_all_tenants=False):
"""
List the nodes in a tenant
:param ex_all_tenants: List nodes for all the tenants. Note: Your user
must have admin privileges for this
functionality to work.
:type ex_all_tenants: ``bool``
"""
params = {}
if ex_all_tenants:
params = {'all_tenants': 1}
return self._to_nodes(self._paginated_request(
'/servers/detail', 'servers', self.connection, params=params))
def get_image(self, image_id):
"""
Get a NodeImage using the V2 Glance API
@inherits: :class:`OpenStack_1_1_NodeDriver.get_image`
:param image_id: ID of the image which should be used
:type image_id: ``str``
:rtype: :class:`NodeImage`
"""
return self._to_image(self.image_connection.request(
'/v2/images/%s' % (image_id,)).object)
def list_images(self, location=None, ex_only_active=True):
"""
Lists all active images using the V2 Glance API
@inherits: :class:`NodeDriver.list_images`
:param location: Which data center to list the images in. If
empty, undefined behavior will be selected.
(optional)
:type location: :class:`.NodeLocation`
:param ex_only_active: True if list only active (optional)
:type ex_only_active: ``bool``
"""
if location is not None:
raise NotImplementedError(
"location in list_images is not implemented "
"in the OpenStack_2_NodeDriver")
if not ex_only_active:
raise NotImplementedError(
"ex_only_active in list_images is not implemented "
"in the OpenStack_2_NodeDriver")
response = self.image_connection.request('/v2/images')
images = []
for image in response.object['images']:
images.append(self._to_image(image))
return images
def ex_update_image(self, image_id, data):
"""
Patch a NodeImage. Can be used to set visibility
:param image_id: ID of the image which should be used
:type image_id: ``str``
:param data: The data to PATCH, either a dict or a list
for example: [
{'op': 'replace', 'path': '/visibility', 'value': 'shared'}
]
:type data: ``dict|list``
:rtype: :class:`NodeImage`
"""
response = self.image_connection.request(
'/v2/images/%s' % (image_id,),
headers={'Content-type': 'application/'
'openstack-images-'
'v2.1-json-patch'},
method='PATCH', data=data
)
return self._to_image(response.object)
def ex_list_image_members(self, image_id):
"""
List all members of an image. See
https://developer.openstack.org/api-ref/image/v2/index.html#sharing
:param image_id: ID of the image of which the members should
be listed
:type image_id: ``str``
:rtype: ``list`` of :class:`NodeImageMember`
"""
response = self.image_connection.request(
'/v2/images/%s/members' % (image_id,)
)
image_members = []
for image_member in response.object['members']:
image_members.append(self._to_image_member(image_member))
return image_members
def ex_create_image_member(self, image_id, member_id):
"""
Give a project access to an image.
The image should have visibility status 'shared'.
Note that this is not an idempotent operation. If this action is
attempted using a tenant that is already in the image members
group the API will throw a Conflict (409).
See the 'create-image-member' section on
https://developer.openstack.org/api-ref/image/v2/index.html
        :param image_id: The ID of the image to share with the specified
                         tenant
        :type image_id: ``str``
        :param member_id: The ID of the project / tenant (the image member).
                          Note that this is the Keystone project ID and not
                          the project name, so something like
                          e2151b1fe02d4a8a2d1f5fc331522c0a
        :type member_id: ``str``
        :rtype: :class:`NodeImageMember`
"""
data = {'member': member_id}
response = self.image_connection.request(
'/v2/images/%s/members' % image_id,
method='POST', data=data
)
return self._to_image_member(response.object)
def ex_get_image_member(self, image_id, member_id):
"""
Get a member of an image by id
:param image_id: ID of the image of which the member should
be listed
:type image_id: ``str``
        :param member_id: ID of the member to list
        :type member_id: ``str``
        :rtype: :class:`NodeImageMember`
"""
response = self.image_connection.request(
'/v2/images/%s/members/%s' % (image_id, member_id)
)
return self._to_image_member(response.object)
def ex_accept_image_member(self, image_id, member_id):
"""
Accept a pending image as a member.
This call is idempotent unlike ex_create_image_member,
you can accept the same image many times.
:param image_id: ID of the image to accept
:type image_id: ``str``
        :param member_id: ID of the project (member) that accepts the image
        :type member_id: ``str``
        :rtype: :class:`NodeImageMember`
"""
data = {'status': 'accepted'}
response = self.image_connection.request(
'/v2/images/%s/members/%s' % (image_id, member_id),
method='PUT', data=data
)
return self._to_image_member(response.object)
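    # Hedged image-sharing walk-through using the Glance member calls above
    # (image and project IDs are made-up examples):
    #   driver.ex_update_image(image_id, [
    #       {'op': 'replace', 'path': '/visibility', 'value': 'shared'}])
    #   driver.ex_create_image_member(image_id, consumer_project_id)
    #   # ...then, authenticated as the consuming project:
    #   consumer_driver.ex_accept_image_member(image_id, consumer_project_id)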
def _to_networks(self, obj):
networks = obj['networks']
return [self._to_network(network) for network in networks]
def _to_network(self, obj):
extra = {}
if obj.get('router:external', None):
extra['router:external'] = obj.get('router:external')
if obj.get('subnets', None):
extra['subnets'] = obj.get('subnets')
return OpenStackNetwork(id=obj['id'],
name=obj['name'],
cidr=None,
driver=self,
extra=extra)
def ex_list_networks(self):
"""
Get a list of Networks that are available.
:rtype: ``list`` of :class:`OpenStackNetwork`
"""
response = self.network_connection.request(
self._networks_url_prefix).object
return self._to_networks(response)
def ex_get_network(self, network_id):
"""
Retrieve the Network with the given ID
        :param network_id: ID of the network
        :type network_id: ``str``
        :rtype: :class:`OpenStackNetwork`
"""
request_url = "{networks_url_prefix}/{network_id}".format(
networks_url_prefix=self._networks_url_prefix,
network_id=network_id
)
response = self.network_connection.request(request_url).object
return self._to_network(response['network'])
def ex_create_network(self, name, **kwargs):
"""
Create a new Network
:param name: Name of network which should be used
:type name: ``str``
:rtype: :class:`OpenStackNetwork`
"""
data = {'network': {'name': name}}
# Add optional values
for key, value in kwargs.items():
data['network'][key] = value
response = self.network_connection.request(self._networks_url_prefix,
method='POST',
data=data).object
return self._to_network(response['network'])
def ex_delete_network(self, network):
"""
Delete a Network
:param network: Network which should be used
:type network: :class:`OpenStackNetwork`
:rtype: ``bool``
"""
resp = self.network_connection.request(
'%s/%s' % (self._networks_url_prefix,
network.id), method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def _to_subnets(self, obj):
subnets = obj['subnets']
return [self._to_subnet(subnet) for subnet in subnets]
def _to_subnet(self, obj):
extra = {}
if obj.get('router:external', None):
extra['router:external'] = obj.get('router:external')
if obj.get('subnets', None):
extra['subnets'] = obj.get('subnets')
return OpenStack_2_SubNet(id=obj['id'],
name=obj['name'],
cidr=obj['cidr'],
network_id=obj['network_id'],
driver=self,
extra=extra)
def ex_list_subnets(self):
"""
Get a list of Subnet that are available.
:rtype: ``list`` of :class:`OpenStack_2_SubNet`
"""
response = self.network_connection.request(
self._subnets_url_prefix).object
return self._to_subnets(response)
def ex_create_subnet(self, name, network, cidr, ip_version=4,
description='', dns_nameservers=None,
host_routes=None):
"""
Create a new Subnet
:param name: Name of subnet which should be used
:type name: ``str``
:param network: Parent network of the subnet
:type network: ``OpenStackNetwork``
:param cidr: cidr of network which should be used
:type cidr: ``str``
:param ip_version: ip_version of subnet which should be used
:type ip_version: ``int``
:param description: Description for the resource.
:type description: ``str``
:param dns_nameservers: List of dns name servers.
:type dns_nameservers: ``list`` of ``str``
:param host_routes: Additional routes for the subnet.
:type host_routes: ``list`` of ``str``
:rtype: :class:`OpenStack_2_SubNet`
"""
data = {
'subnet':
{
'cidr': cidr,
'network_id': network.id,
'ip_version': ip_version,
'name': name or '',
'description': description or '',
'dns_nameservers': dns_nameservers or [],
'host_routes': host_routes or []
}
}
response = self.network_connection.request(
self._subnets_url_prefix, method='POST', data=data).object
return self._to_subnet(response['subnet'])
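    # Hedged example of the network/subnet helpers above (names and CIDR are
    # illustrative only):
    #   net = driver.ex_create_network('backend')
    #   subnet = driver.ex_create_subnet('backend-v4', net, '10.0.0.0/24',
    #                                    dns_nameservers=['8.8.8.8'])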
def ex_delete_subnet(self, subnet):
"""
Delete a Subnet
:param subnet: Subnet which should be deleted
:type subnet: :class:`OpenStack_2_SubNet`
:rtype: ``bool``
"""
resp = self.network_connection.request('%s/%s' % (
self._subnets_url_prefix, subnet.id), method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_update_subnet(self, subnet, name=None, description=None,
dns_nameservers=None, host_routes=None):
"""
Update data of an existing SubNet
:param subnet: Subnet which should be updated
:type subnet: :class:`OpenStack_2_SubNet`
:param name: Name of subnet which should be used
:type name: ``str``
:param description: Description for the resource.
:type description: ``str``
:param dns_nameservers: List of dns name servers.
:type dns_nameservers: ``list`` of ``str``
:param host_routes: Additional routes for the subnet.
:type host_routes: ``list`` of ``str``
:rtype: :class:`OpenStack_2_SubNet`
"""
data = {'subnet': {}}
if name is not None:
data['subnet']['name'] = name
if description is not None:
data['subnet']['description'] = description
if dns_nameservers is not None:
data['subnet']['dns_nameservers'] = dns_nameservers
if host_routes is not None:
data['subnet']['host_routes'] = host_routes
response = self.network_connection.request(
"%s/%s" % (self._subnets_url_prefix, subnet.id),
method='PUT', data=data).object
return self._to_subnet(response['subnet'])
def ex_list_ports(self):
"""
List all OpenStack_2_PortInterfaces
https://developer.openstack.org/api-ref/network/v2/#list-ports
:rtype: ``list`` of :class:`OpenStack_2_PortInterface`
"""
response = self._paginated_request(
'/v2.0/ports', 'ports', self.network_connection)
return [self._to_port(port) for port in response['ports']]
def ex_delete_port(self, port):
"""
Delete an OpenStack_2_PortInterface
https://developer.openstack.org/api-ref/network/v2/#delete-port
:param port: port interface to remove
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
response = self.network_connection.request(
'/v2.0/ports/%s' % port.id, method='DELETE'
)
return response.success()
def ex_detach_port_interface(self, node, port):
"""
Detaches an OpenStack_2_PortInterface interface from a Node.
:param node: node
:type node: :class:`Node`
:param port: port interface to detach
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
return self.connection.request(
'/servers/%s/os-interface/%s' % (node.id, port.id),
method='DELETE'
).success()
def ex_attach_port_interface(self, node, port):
"""
Attaches an OpenStack_2_PortInterface to a Node.
:param node: node
:type node: :class:`Node`
:param port: port interface to attach
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
data = {
'interfaceAttachment': {
'port_id': port.id
}
}
return self.connection.request(
'/servers/{}/os-interface'.format(node.id),
method='POST', data=data
).success()
def ex_create_port(self, network, description=None,
admin_state_up=True, name=None):
"""
Creates a new OpenStack_2_PortInterface
:param network: ID of the network where the newly created
port should be attached to
:type network: :class:`OpenStackNetwork`
:param description: Description of the port
:type description: str
:param admin_state_up: The administrative state of the
resource, which is up or down
:type admin_state_up: bool
:param name: Human-readable name of the resource
:type name: str
:rtype: :class:`OpenStack_2_PortInterface`
"""
data = {
'port':
{
'description': description or '',
'admin_state_up': admin_state_up,
'name': name or '',
'network_id': network.id,
}
}
response = self.network_connection.request(
'/v2.0/ports', method='POST', data=data
)
return self._to_port(response.object['port'])
def ex_get_port(self, port_interface_id):
"""
Retrieve the OpenStack_2_PortInterface with the given ID
:param port_interface_id: ID of the requested port
:type port_interface_id: str
:return: :class:`OpenStack_2_PortInterface`
"""
response = self.network_connection.request(
'/v2.0/ports/{}'.format(port_interface_id), method='GET'
)
return self._to_port(response.object['port'])
def ex_update_port(self, port, description=None,
admin_state_up=None, name=None,
port_security_enabled=None,
qos_policy_id=None, security_groups=None):
"""
Update a OpenStack_2_PortInterface
:param port: port interface to update
:type port: :class:`OpenStack_2_PortInterface`
:param description: Description of the port
:type description: ``str``
:param admin_state_up: The administrative state of the
resource, which is up or down
:type admin_state_up: ``bool``
:param name: Human-readable name of the resource
:type name: ``str``
:param port_security_enabled: The port security status
:type port_security_enabled: ``bool``
:param qos_policy_id: QoS policy associated with the port
:type qos_policy_id: ``str``
:param security_groups: The IDs of security groups applied
:type security_groups: ``list`` of ``str``
:rtype: :class:`OpenStack_2_PortInterface`
"""
data = {'port': {}}
if description is not None:
data['port']['description'] = description
if admin_state_up is not None:
data['port']['admin_state_up'] = admin_state_up
if name is not None:
data['port']['name'] = name
if port_security_enabled is not None:
data['port']['port_security_enabled'] = port_security_enabled
if qos_policy_id is not None:
data['port']['qos_policy_id'] = qos_policy_id
if security_groups is not None:
data['port']['security_groups'] = security_groups
response = self.network_connection.request(
'/v2.0/ports/{}'.format(port.id), method='PUT', data=data
)
return self._to_port(response.object['port'])
def list_volumes(self):
"""
Get a list of Volumes that are available.
:rtype: ``list`` of :class:`StorageVolume`
"""
return self._to_volumes(self._paginated_request(
'/volumes/detail', 'volumes', self.volumev2_connection))
def ex_get_volume(self, volumeId):
"""
Retrieve the StorageVolume with the given ID
:param volumeId: ID of the volume
:type volumeId: ``string``
:return: :class:`StorageVolume`
"""
return self._to_volume(
self.volumev2_connection.request('/volumes/%s' % volumeId).object)
def create_volume(self, size, name, location=None, snapshot=None,
ex_volume_type=None, ex_image_ref=None):
"""
Create a new volume.
:param size: Size of volume in gigabytes (required)
:type size: ``int``
:param name: Name of the volume to be created
:type name: ``str``
:param location: Which data center to create a volume in. If
empty, undefined behavior will be selected.
(optional)
:type location: :class:`.NodeLocation`
:param snapshot: Snapshot from which to create the new
volume. (optional)
:type snapshot: :class:`.VolumeSnapshot`
:param ex_volume_type: What kind of volume to create.
(optional)
:type ex_volume_type: ``str``
:param ex_image_ref: The image to create the volume from
when creating a bootable volume (optional)
:type ex_image_ref: ``str``
:return: The newly created volume.
:rtype: :class:`StorageVolume`
"""
volume = {
'name': name,
'description': name,
'size': size,
'metadata': {
'contents': name,
},
}
if ex_volume_type:
volume['volume_type'] = ex_volume_type
if ex_image_ref:
volume['imageRef'] = ex_image_ref
if location:
volume['availability_zone'] = location
if snapshot:
volume['snapshot_id'] = snapshot.id
resp = self.volumev2_connection.request('/volumes',
method='POST',
data={'volume': volume})
return self._to_volume(resp.object)
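    # Hedged example: a 10 GB bootable volume created from an image through
    # the Cinder v2 connection (the image ref UUID is a placeholder):
    #   vol = driver.create_volume(10, 'boot-vol',
    #                              ex_image_ref='4a5b...placeholder-uuid')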
def destroy_volume(self, volume):
"""
Delete a Volume.
:param volume: Volume to be deleted
:type volume: :class:`StorageVolume`
:rtype: ``bool``
"""
return self.volumev2_connection.request('/volumes/%s' % volume.id,
method='DELETE').success()
def ex_list_snapshots(self):
"""
Get a list of Snapshot that are available.
:rtype: ``list`` of :class:`VolumeSnapshot`
"""
return self._to_snapshots(self._paginated_request(
'/snapshots/detail', 'snapshots', self.volumev2_connection))
def create_volume_snapshot(self, volume, name=None, ex_description=None,
ex_force=True):
"""
Create snapshot from volume
:param volume: Instance of `StorageVolume`
:type volume: `StorageVolume`
:param name: Name of snapshot (optional)
:type name: `str` | `NoneType`
:param ex_description: Description of the snapshot (optional)
:type ex_description: `str` | `NoneType`
:param ex_force: Specifies if we create a snapshot that is not in
state `available`. For example `in-use`. Defaults
to True. (optional)
:type ex_force: `bool`
:rtype: :class:`VolumeSnapshot`
"""
data = {'snapshot': {'volume_id': volume.id, 'force': ex_force}}
if name is not None:
data['snapshot']['name'] = name
if ex_description is not None:
data['snapshot']['description'] = ex_description
return self._to_snapshot(
self.volumev2_connection.request('/snapshots', method='POST',
data=data).object)
def destroy_volume_snapshot(self, snapshot):
"""
Delete a Volume Snapshot.
:param snapshot: Snapshot to be deleted
:type snapshot: :class:`VolumeSnapshot`
:rtype: ``bool``
"""
resp = self.volumev2_connection.request('/snapshots/%s' % snapshot.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def ex_list_security_groups(self):
"""
Get a list of Security Groups that are available.
:rtype: ``list`` of :class:`OpenStackSecurityGroup`
"""
return self._to_security_groups(
self.network_connection.request('/v2.0/security-groups').object)
def ex_create_security_group(self, name, description):
"""
Create a new Security Group
:param name: Name of the new Security Group
:type name: ``str``
:param description: Description of the new Security Group
:type description: ``str``
:rtype: :class:`OpenStackSecurityGroup`
"""
        return self._to_security_group(self.network_connection.request(
'/v2.0/security-groups', method='POST',
data={'security_group': {'name': name, 'description': description}}
).object['security_group'])
def ex_delete_security_group(self, security_group):
"""
Delete a Security Group.
:param security_group: Security Group should be deleted
:type security_group: :class:`OpenStackSecurityGroup`
:rtype: ``bool``
"""
resp = self.network_connection.request('/v2.0/security-groups/%s' %
(security_group.id),
method='DELETE')
return resp.status == httplib.NO_CONTENT
def _to_security_group_rule(self, obj):
ip_range = group = tenant_id = parent_id = None
protocol = from_port = to_port = direction = None
if 'parent_group_id' in obj:
if obj['group'] == {}:
ip_range = obj['ip_range'].get('cidr', None)
else:
group = obj['group'].get('name', None)
tenant_id = obj['group'].get('tenant_id', None)
parent_id = obj['parent_group_id']
from_port = obj['from_port']
to_port = obj['to_port']
protocol = obj['ip_protocol']
else:
ip_range = obj.get('remote_ip_prefix', None)
group = obj.get('remote_group_id', None)
tenant_id = obj.get('tenant_id', None)
parent_id = obj['security_group_id']
from_port = obj['port_range_min']
to_port = obj['port_range_max']
protocol = obj['protocol']
return OpenStackSecurityGroupRule(
id=obj['id'], parent_group_id=parent_id,
ip_protocol=protocol, from_port=from_port,
to_port=to_port, driver=self, ip_range=ip_range,
group=group, tenant_id=tenant_id, direction=direction)
def ex_create_security_group_rule(self, security_group, ip_protocol,
from_port, to_port, cidr=None,
source_security_group=None):
"""
Create a new Rule in a Security Group
:param security_group: Security Group in which to add the rule
:type security_group: :class:`OpenStackSecurityGroup`
:param ip_protocol: Protocol to which this rule applies
Examples: tcp, udp, ...
:type ip_protocol: ``str``
:param from_port: First port of the port range
:type from_port: ``int``
:param to_port: Last port of the port range
:type to_port: ``int``
:param cidr: CIDR notation of the source IP range for this rule
:type cidr: ``str``
:param source_security_group: Existing Security Group to use as the
source (instead of CIDR)
        :type source_security_group: :class:`OpenStackSecurityGroup`
:rtype: :class:`OpenStackSecurityGroupRule`
"""
source_security_group_id = None
if type(source_security_group) == OpenStackSecurityGroup:
source_security_group_id = source_security_group.id
return self._to_security_group_rule(self.network_connection.request(
'/v2.0/security-group-rules', method='POST',
data={'security_group_rule': {
'direction': 'ingress',
'protocol': ip_protocol,
'port_range_min': from_port,
'port_range_max': to_port,
'remote_ip_prefix': cidr,
'remote_group_id': source_security_group_id,
'security_group_id': security_group.id}}
).object['security_group_rule'])
def ex_delete_security_group_rule(self, rule):
"""
Delete a Rule from a Security Group.
:param rule: Rule should be deleted
:type rule: :class:`OpenStackSecurityGroupRule`
:rtype: ``bool``
"""
resp = self.network_connection.request(
'/v2.0/security-group-rules/%s' % (rule.id), method='DELETE')
return resp.status == httplib.NO_CONTENT
def ex_remove_security_group_from_node(self, security_group, node):
"""
Remove a Security Group from a node.
:param security_group: Security Group to remove from node.
:type security_group: :class:`OpenStackSecurityGroup`
:param node: Node to remove the Security Group.
:type node: :class:`Node`
:rtype: ``bool``
"""
server_params = {'name': security_group.name}
resp = self._node_action(node, 'removeSecurityGroup', **server_params)
return resp.status == httplib.ACCEPTED
def _to_floating_ip_pool(self, obj):
return OpenStack_2_FloatingIpPool(obj['id'], obj['name'],
self.network_connection)
def _to_floating_ip_pools(self, obj):
pool_elements = obj['networks']
return [self._to_floating_ip_pool(pool) for pool in pool_elements]
def ex_list_floating_ip_pools(self):
"""
List available floating IP pools
:rtype: ``list`` of :class:`OpenStack_2_FloatingIpPool`
"""
return self._to_floating_ip_pools(
self.network_connection.request('/v2.0/networks?router:external'
'=True&fields=id&fields='
'name').object)
def _to_routers(self, obj):
routers = obj['routers']
return [self._to_router(router) for router in routers]
def _to_router(self, obj):
extra = {}
extra['external_gateway_info'] = obj['external_gateway_info']
extra['routes'] = obj['routes']
return OpenStack_2_Router(id=obj['id'],
name=obj['name'],
status=obj['status'],
driver=self,
extra=extra)
def ex_list_routers(self):
"""
Get a list of Routers that are available.
:rtype: ``list`` of :class:`OpenStack_2_Router`
"""
response = self.network_connection.request(
'/v2.0/routers').object
return self._to_routers(response)
def ex_create_router(self, name, description='', admin_state_up=True,
external_gateway_info=None):
"""
Create a new Router
:param name: Name of router which should be used
:type name: ``str``
:param description: Description of the port
:type description: ``str``
:param admin_state_up: The administrative state of the
resource, which is up or down
:type admin_state_up: ``bool``
:param external_gateway_info: The external gateway information
:type external_gateway_info: ``dict``
:rtype: :class:`OpenStack_2_Router`
"""
data = {
'router':
{
'name': name or '',
'description': description or '',
'admin_state_up': admin_state_up,
}
}
if external_gateway_info:
data['router']['external_gateway_info'] = external_gateway_info
response = self.network_connection.request(
'/v2.0/routers', method='POST', data=data).object
return self._to_router(response['router'])
def ex_delete_router(self, router):
"""
Delete a Router
:param router: Router which should be deleted
:type router: :class:`OpenStack_2_Router`
:rtype: ``bool``
"""
resp = self.network_connection.request('%s/%s' % (
'/v2.0/routers', router.id), method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def _manage_router_interface(self, router, op, subnet=None, port=None):
"""
Add/Remove interface to router
:param router: Router to add/remove the interface
:type router: :class:`OpenStack_2_Router`
:param op: Operation to perform: 'add' or 'remove'
:type op: ``str``
:param subnet: Subnet object to be added to the router
:type subnet: :class:`OpenStack_2_SubNet`
:param port: Port object to be added to the router
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
data = {}
if subnet:
data['subnet_id'] = subnet.id
elif port:
data['port_id'] = port.id
else:
raise OpenStackException("Error in router interface: "
"port or subnet are None.", 500,
self)
resp = self.network_connection.request('%s/%s/%s_router_interface' % (
'/v2.0/routers', router.id, op), method='PUT', data=data)
return resp.status == httplib.OK
def ex_add_router_port(self, router, port):
"""
Add port to a router
:param router: Router to add the port
:type router: :class:`OpenStack_2_Router`
:param port: Port object to be added to the router
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
return self._manage_router_interface(router, 'add', port=port)
def ex_del_router_port(self, router, port):
"""
Remove port from a router
:param router: Router to remove the port
:type router: :class:`OpenStack_2_Router`
        :param port: Port object to be removed from the router
:type port: :class:`OpenStack_2_PortInterface`
:rtype: ``bool``
"""
return self._manage_router_interface(router, 'remove', port=port)
def ex_add_router_subnet(self, router, subnet):
"""
Add subnet to a router
:param router: Router to add the subnet
:type router: :class:`OpenStack_2_Router`
:param subnet: Subnet object to be added to the router
:type subnet: :class:`OpenStack_2_SubNet`
:rtype: ``bool``
"""
return self._manage_router_interface(router, 'add', subnet=subnet)
def ex_del_router_subnet(self, router, subnet):
"""
        Remove subnet from a router
:param router: Router to remove the subnet
:type router: :class:`OpenStack_2_Router`
        :param subnet: Subnet object to be removed from the router
:type subnet: :class:`OpenStack_2_SubNet`
:rtype: ``bool``
"""
return self._manage_router_interface(router, 'remove', subnet=subnet)
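# Hedged router wiring sketch using the OpenStack_2_NodeDriver methods above
# (gateway network, subnet and names are illustrative):
#   router = driver.ex_create_router(
#       'edge', external_gateway_info={'network_id': external_net.id})
#   driver.ex_add_router_subnet(router, subnet)  # PUT .../add_router_interface
#   driver.ex_del_router_subnet(router, subnet)  # PUT .../remove_router_interface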
class OpenStack_1_1_FloatingIpPool(object):
"""
Floating IP Pool info.
"""
def __init__(self, name, connection):
self.name = name
self.connection = connection
def list_floating_ips(self):
"""
List floating IPs in the pool
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpAddress`
"""
return self._to_floating_ips(
self.connection.request('/os-floating-ips').object)
def _to_floating_ips(self, obj):
ip_elements = obj['floating_ips']
return [self._to_floating_ip(ip) for ip in ip_elements]
def _to_floating_ip(self, obj):
return OpenStack_1_1_FloatingIpAddress(id=obj['id'],
ip_address=obj['ip'],
pool=self,
node_id=obj['instance_id'],
driver=self.connection.driver)
def get_floating_ip(self, ip):
"""
Get specified floating IP from the pool
:param ip: floating IP to get
:type ip: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
ip_obj, = [x for x in self.list_floating_ips() if x.ip_address == ip]
return ip_obj
def create_floating_ip(self):
"""
Create new floating IP in the pool
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
resp = self.connection.request('/os-floating-ips',
method='POST',
data={'pool': self.name})
data = resp.object['floating_ip']
id = data['id']
ip_address = data['ip']
return OpenStack_1_1_FloatingIpAddress(id=id,
ip_address=ip_address,
pool=self,
node_id=None,
driver=self.connection.driver)
def delete_floating_ip(self, ip):
"""
Delete specified floating IP from the pool
:param ip: floating IP to remove
:type ip: :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
resp = self.connection.request('/os-floating-ips/%s' % ip.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def __repr__(self):
return ('<OpenStack_1_1_FloatingIpPool: name=%s>' % self.name)
class OpenStack_1_1_FloatingIpAddress(object):
"""
Floating IP info.
"""
def __init__(self, id, ip_address, pool, node_id=None, driver=None):
self.id = str(id)
self.ip_address = ip_address
self.pool = pool
self.node_id = node_id
self.driver = driver
def delete(self):
"""
Delete this floating IP
:rtype: ``bool``
"""
if self.pool is not None:
return self.pool.delete_floating_ip(self)
elif self.driver is not None:
return self.driver.ex_delete_floating_ip(self)
def __repr__(self):
return ('<OpenStack_1_1_FloatingIpAddress: id=%s, ip_addr=%s,'
' pool=%s, driver=%s>'
% (self.id, self.ip_address, self.pool, self.driver))
class OpenStack_2_FloatingIpPool(object):
"""
Floating IP Pool info.
"""
def __init__(self, id, name, connection):
self.id = id
self.name = name
self.connection = connection
def _to_floating_ips(self, obj):
ip_elements = obj['floatingips']
return [self._to_floating_ip(ip) for ip in ip_elements]
def _to_floating_ip(self, obj):
instance_id = None
# In neutron version prior to 13.0.0 port_details does not exists
if 'port_details' not in obj and 'port_id' in obj and obj['port_id']:
port = self.connection.driver.ex_get_port(obj['port_id'])
if port:
obj['port_details'] = {"device_id": port.extra["device_id"],
"device_owner":
port.extra["device_owner"],
"mac_address":
port.extra["mac_address"]}
if 'port_details' in obj and obj['port_details']:
if obj['port_details']['device_owner'] in ['compute:nova',
'compute:None']:
instance_id = obj['port_details']['device_id']
ip_address = obj['floating_ip_address']
return OpenStack_1_1_FloatingIpAddress(id=obj['id'],
ip_address=ip_address,
pool=self,
node_id=instance_id,
driver=self.connection.driver)
def list_floating_ips(self):
"""
List floating IPs in the pool
:rtype: ``list`` of :class:`OpenStack_1_1_FloatingIpAddress`
"""
return self._to_floating_ips(
self.connection.request('/v2.0/floatingips').object)
def get_floating_ip(self, ip):
"""
Get specified floating IP from the pool
:param ip: floating IP to get
:type ip: ``str``
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
floating_ips = self._to_floating_ips(
self.connection.request('/v2.0/floatingips?floating_ip_address'
'=%s' % ip).object)
return floating_ips[0]
def create_floating_ip(self):
"""
Create new floating IP in the pool
:rtype: :class:`OpenStack_1_1_FloatingIpAddress`
"""
resp = self.connection.request('/v2.0/floatingips',
method='POST',
data={'floatingip':
{'floating_network_id': self.id}}
)
data = resp.object['floatingip']
id = data['id']
ip_address = data['floating_ip_address']
return OpenStack_1_1_FloatingIpAddress(id=id,
ip_address=ip_address,
pool=self,
node_id=None,
driver=self.connection.driver)
def delete_floating_ip(self, ip):
"""
Delete specified floating IP from the pool
:param ip: floating IP to remove
:type ip: :class:`OpenStack_1_1_FloatingIpAddress`
:rtype: ``bool``
"""
resp = self.connection.request('/v2.0/floatingips/%s' % ip.id,
method='DELETE')
return resp.status in (httplib.NO_CONTENT, httplib.ACCEPTED)
def __repr__(self):
return ('<OpenStack_2_FloatingIpPool: name=%s>' % self.name)
class OpenStack_2_SubNet(object):
"""
A Virtual SubNet.
"""
def __init__(self, id, name, cidr, network_id, driver, extra=None):
self.id = str(id)
self.name = name
self.cidr = cidr
self.network_id = network_id
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return '<OpenStack_2_SubNet id="%s" name="%s" cidr="%s">' % (self.id,
self.name,
self.cidr)
class OpenStack_2_Router(object):
"""
A Virtual Router.
"""
def __init__(self, id, name, status, driver, extra=None):
self.id = str(id)
self.name = name
self.status = status
self.driver = driver
self.extra = extra or {}
def __repr__(self):
return '<OpenStack_2_Router id="%s" name="%s">' % (self.id,
self.name)
class OpenStack_2_PortInterface(UuidMixin):
"""
Port Interface info. Similar in functionality to a floating IP (can be
attached / detached from a compute instance) but implementation-wise a
bit different.
> A port is a connection point for attaching a single device, such as the
> NIC of a server, to a network. The port also describes the associated
> network configuration, such as the MAC and IP addresses to be used on
> that port.
https://docs.openstack.org/python-openstackclient/pike/cli/command-objects/port.html
Also see:
https://developer.openstack.org/api-ref/compute/#port-interfaces-servers-os-interface
"""
def __init__(self, id, state, driver, created=None, extra=None):
"""
:param id: Port Interface ID.
:type id: ``str``
:param state: State of the OpenStack_2_PortInterface.
:type state: :class:`.OpenStack_2_PortInterfaceState`
:param created: A datetime object that represents when the
port interface was created
:type created: ``datetime.datetime``
:param extra: Optional provided specific attributes associated with
this image.
:type extra: ``dict``
"""
self.id = str(id)
self.state = state
self.driver = driver
self.created = created
self.extra = extra or {}
UuidMixin.__init__(self)
def delete(self):
"""
Delete this Port Interface
:rtype: ``bool``
"""
return self.driver.ex_delete_port(self)
def __repr__(self):
return (('<OpenStack_2_PortInterface: id=%s, state=%s, '
'driver=%s ...>')
% (self.id, self.state, self.driver.name))
| apache-2.0 | -4,344,738,230,865,552,400 | 34.097555 | 120 | 0.538004 | false | 4.261132 | false | false | false |
drtuxwang/system-config | bin/tocapital.py | 1 | 2244 | #!/usr/bin/env python3
"""
Print arguments with first letter in upper case (camel case).
"""
import argparse
import glob
import os
import signal
import sys
from typing import List
class Options:
"""
Options class
"""
def __init__(self) -> None:
self._args: argparse.Namespace = None
self.parse(sys.argv)
def get_words(self) -> List[str]:
"""
Return list of words.
"""
return self._args.words
def _parse_args(self, args: List[str]) -> None:
parser = argparse.ArgumentParser(
            description='Print arguments with first letter in upper case.',
)
parser.add_argument('words', nargs='+', metavar='word', help='A word.')
self._args = parser.parse_args(args)
def parse(self, args: List[str]) -> None:
"""
Parse arguments
"""
self._parse_args(args[1:])
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
sys.exit(0)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
options = Options()
words = options.get_words()
cwords = []
for word in words:
cparts = []
for part in word.split('-'):
cparts.append(part[:1].upper() + part[1:].lower())
cwords.append('-'.join(cparts))
print(" ".join(cwords))
return 0
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()
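# Example behaviour (illustrative): each argument is split on '-' and every
# part is capitalised, so `tocapital.py harry o-brien` prints "Harry O-Brien".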
| gpl-2.0 | -7,888,770,518,914,146,000 | 21.44 | 79 | 0.499554 | false | 4.171004 | false | false | false |
staugur/SwarmOps | src/config.py | 1 | 3033 | # -*- coding: utf-8 -*-
"""
SwarmOps.config
~~~~~~~~~~~~~~
The program configuration file; each item reads its system environment
variable first and falls back to the default given here.
:copyright: (c) 2018 by staugur.
:license: MIT, see LICENSE for more details.
"""
from os import getenv
GLOBAL = {
"ProcessName": "SwarmOps",
    # Custom process name.
    "Host": getenv("swarmops_host", "0.0.0.0"),
    # Listening address
    "Port": getenv("swarmops_port", 10130),
    # Listening port
    "LogLevel": getenv("swarmops_loglevel", "DEBUG"),
    # Application log level, one of DEBUG, INFO, WARNING, ERROR, CRITICAL.
}
SSO = {
"app_name": getenv("swarmops_sso_app_name", GLOBAL["ProcessName"]),
    # Application name registered in Passport application management
    "app_id": getenv("swarmops_sso_app_id", "app_id"),
    # `app_id` returned when the application is registered in Passport
    "app_secret": getenv("swarmops_sso_app_secret", "app_secret"),
    # `app_secret` returned when the application is registered in Passport
    "sso_server": getenv("swarmops_sso_server", "YourPassportFQDN"),
    # Fully qualified root URL of the Passport deployment, e.g. the author's
    # `https://passport.saintic.com`
    "sso_allow": getenv("swarmops_sso_allow"),
    # List of uids allowed to log in, format: uid1,uid2,...,uidn
    "sso_deny": getenv("swarmops_sso_deny")
    # List of uids denied login, same format as above
}
# System configuration
SYSTEM = {
"HMAC_SHA256_KEY": getenv("swarmops_hmac_sha256_key", "273d32c8d797fa715190c7408ad73811"),
# hmac sha256 key
"AES_CBC_KEY": getenv("swarmops_aes_cbc_key", "YRRGBRYQqrV1gv5A"),
    # Encryption key used by the utils.aes_cbc.CBC class
"JWT_SECRET_KEY": getenv("swarmops_jwt_secret_key", "WBlE7_#qDf2vRb@vM!Zw#lqrg@rdd3A6"),
    # Encryption key used by the utils.jwt.JWTUtil class
}
# Storage configuration section
STORAGE = {
    "SwarmStorageMode": getenv("swarmops_swarmstoragemode", "local"),
    # How Swarm cluster information is stored; one of `local` (local file
    # storage) or `redis`.
    # With local storage, data is serialized into the logs/SwarmKey and
    # ActiveKey files; with redis storage, multi-node deployment is possible
    # and data is serialized into redis.
    "Connection": getenv("swarmops_StorageConnection", "redis://ip:port:password"),
    # Only meaningful when SwarmStorageMode is not local.
    # Connection information for the storage backend, e.g. redis; leave the
    # :password part empty if redis has no password.
    "SwarmKey": getenv("swarmops_StorageSwarmKey", "SwarmOps_All"),
    # Key under which the backend stores all Swarm cluster data
    "ActiveKey": getenv("swarmops_StorageActiveKey", "SwarmOps_Active"),
    # Key under which the backend stores the active cluster data
}
# Private registry configuration section
REGISTRY = {
    "RegistryAddr": getenv("swarmops_RegistryAddr", "https://registry.saintic.com"),
    # Private registry address, e.g. https://docker.io, http://ip:port
    "RegistryVersion": getenv("swarmops_RegistryVersion", 1),
    # Private registry version, 1 or 2
    "RegistryAuthentication": getenv("swarmops_RegistryAuthentication", None)
    # Authentication, currently unavailable
}
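# Hedged usage note: every item above reads its environment variable first,
# e.g. (shell, illustrative values only):
#   export swarmops_host=127.0.0.1
#   export swarmops_swarmstoragemode=redis
#   export swarmops_StorageConnection=redis://127.0.0.1:6379: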
| bsd-3-clause | -8,325,207,699,234,633,000 | 25.178947 | 114 | 0.673904 | false | 2.170157 | false | false | false |
richardingham/octopus | octopus/notifier/sms.py | 1 | 1629 | # Twisted Imports
from twisted.internet import reactor, protocol, defer
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.internet.ssl import ClientContextFactory
from twisted.python import log
# System Imports
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
# Sibling Imports
import util as notifier_util
class WebClientContextFactory(ClientContextFactory):
def getContext(self, hostname, port):
return ClientContextFactory.getContext(self)
class _Receiver (protocol.Protocol):
def __init__ (self, d):
self.buf = ''
self.d = d
def dataReceived (self, data):
self.buf += data
def connectionLost (self, reason):
# TODO: test if reason is twisted.web.client.ResponseDone, if not, do an errback
self.d.callback(self.buf)
class ClockworkSMS (object):
def __init__ (self, api_key):
contextFactory = WebClientContextFactory()
self.agent = Agent(reactor, contextFactory)
self._api_key = api_key
def notify (self, destination, message):
destinations = destination.split(",")
if len(destinations) > 50:
log.msg("Max 50 SMS recipients allowed")
params = {
"key": self._api_key,
"to": destination,
"content": message.encode("utf_8", "replace")
}
uri = "https://api.clockworksms.com/http/send.aspx?{:s}"
d = self.agent.request(
"GET",
uri.format(urlencode(params)),
Headers({
'User-Agent': ['octopus'],
}),
None
)
def handle_response (response):
d = defer.Deferred()
response.deliverBody(_Receiver(d))
return d
d.addCallback(handle_response)
return d
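# Hedged usage sketch (API key and number are placeholders; notify() returns a
# Twisted Deferred that fires with the raw HTTP response body):
#   sms = ClockworkSMS("my-clockwork-api-key")
#   d = sms.notify("447700900123", "octopus: run finished")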
| mit | -5,075,994,737,181,868,000 | 21.943662 | 82 | 0.711479 | false | 3.331288 | false | false | false |
OpenClovis/SAFplus-Availability-Scalability-Platform | doc/doclayout/jsclassindexpage.py | 1 | 1381 | """<module>
This module generates an html page that lists all classes
"""
import pdb
from PyHtmlGen.gen import *
from PyHtmlGen.document import *
from PyHtmlGen.htmldoc import *
from PyHtmlGen.bar import *
# from layoutstyle import *
from PyHtmlGen.layouttable import *
from PyHtmlGen.table import *
from PyHtmlGen.imagehtml import *
from PyHtmlGen.menu import *
from PyHtmlGen.layouthtml import *
from PyHtmlGen.form import *
from PyHtmlGen.attribute import *
from PyHtmlGen.json import *
from PyHtmlGen.cssclass import *
from common import *
from htmlcommon import *
from jscommon import *
from constants import *
def genClasses(cllist):
header = ["Class","Section","File"]
body = []
for obj in cllist:
body.append([obj2tlink(obj,PageLocCenter),parenttLink(obj,TagSection,PageLocCenter),parenttLink(obj,TagFile,PageLocCenter)])
grid = GridFromList(header, body )
#grid.RowBackground(Color(250,250,100),[Color(200,200,200),Color(240,240,240)])
grid.RowAttrs({"class":"classIndexHeaderRow"},[{"class":"classIndexRowA"},{"class":"classIndexRowB"}])
return grid
def generate(objs,cfg,args,tagDict):
objs.sort(key=lambda x: x.name)
mv = genClasses(objs)
hdr = VSplit([resize(2,"Class Directory")])
ctr = HSplit([BR,mv])
fname = "Class.html"
page = [hdr,ctr]
WriteFile(FilePrefix + fname,page,HtmlFragment())
return (fname,page)
#</module>
| gpl-2.0 | 4,678,383,709,413,481,000 | 26.078431 | 128 | 0.737871 | false | 3.219114 | false | false | false |
jj4jj/sdv | ws_server.py | 1 | 2339 | #-*-coding:utf8-*-
from threading import Thread
import tornado.ioloop
import tornado.web
import tornado.websocket
import json
import log
import config
try:
    # py3
from urllib.parse import urlparse
except ImportError:
    # py2
from urlparse import urlparse
class WebSocketMsgHandler():
def __init__(self):
self.client = None
pass
def on_client_open(self, client):
self.client = client
log.debug('open')
def on_client_message(self, msg):
log.debug('msg:'+msg)
self.reply(msg)
def on_client_close(self):
log.debug('close')
self.client = None
def reply(self, mtype, data):
jmsg = json.dumps({'type':mtype, 'data':data})
log.debug('reply msg:'+jmsg)
self.client.write_message(jmsg)
class WebSocketServer(Thread):
def __init__(self, uri, dispatcher, host=None, port=8888):
Thread.__init__(self)
#############################################
self.uri = uri
self.dispatcher = dispatcher
self.port = port
self.host = host
class _WebSocketServerHandlerProxy(tornado.websocket.WebSocketHandler):
hb_msg = json.dumps({u'type': u'pong', u'data': u'-*-heart-beat-*-'})
def open(self):
dispatcher.on_client_open(self)
def on_message(self, message):
objmsg = json.loads(message)
if objmsg['type'] == 'ping':
self.write_message(self.hb_msg)
else:
dispatcher.on_client_message(objmsg)
def on_close(self):
dispatcher.on_client_close()
def check_origin(self, origin):
return True
self.app = tornado.web.Application([(config.WSS_PREFIX_RGX, _WebSocketServerHandlerProxy)])
self.app.listen(address=host, port=port)
self.io = tornado.ioloop.IOLoop.current()
def stop(self):
self.io.stop()
pass
def run(self):
self.io.start()
if __name__ == "__main__":
ws = WebSocketServer('', WebSocketMsgHandler())
ws.setDaemon(True)
import signal
def stop_ws():
ws.stop()
signal.signal(signal.SIGINT, stop_ws)
import sys
signal.signal(signal.SIGTERM, sys.exit)
ws.start()
ws.join()
| mit | 4,949,130,193,569,616,000 | 24.703297 | 99 | 0.568619 | false | 3.924497 | false | false | false |
dzolnierz/mysql-utilities | mysql/utilities/__init__.py | 1 | 2324 | #
# Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""mysql.utilities"""
# Major, Minor, Patch, Status
VERSION = (1, 5, 6, 'GA', 0)
# Future versions will have to include only the X, Y (no Z).
VERSION_STRING = "%s.%s.%s" % VERSION[0:3]
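# With the VERSION tuple above this evaluates to "1.5.6".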
COPYRIGHT = "2010, 2015 Oracle and/or its affiliates. All rights reserved."
COPYRIGHT_FULL = "Copyright (c) " + COPYRIGHT + """
This is a release of dual licensed MySQL Utilities. For the avoidance of
doubt, this particular copy of the software is released
under the version 2 of the GNU General Public License.
MySQL Utilities is brought to you by Oracle.
"""
LICENSE = "GPLv2"
VERSION_FRM = ("MySQL Utilities {program} version %s \n"
"License type: %s" % (VERSION_STRING, LICENSE))
LICENSE_FRM = (VERSION_FRM + "\n" + COPYRIGHT_FULL)
PYTHON_MIN_VERSION = (2, 6, 0)
PYTHON_MAX_VERSION = (3, 0, 0)
CONNECTOR_MIN_VERSION = (1, 2, 1)
# This dictionary has to be updated whenever a utility is added.
# the format to use is:
# '<utility_name>': (<PYTHON_MIN_VERSION>, <PYTHON_MAX_VERSION>)
AVAILABLE_UTILITIES = {
'mysqlauditadmin': (),
'mysqlauditgrep': (),
'mysqldbcompare': (),
'mysqldbcopy': (),
'mysqldbexport': (),
'mysqldbimport': (),
'mysqldiff': (),
'mysqldiskusage': (),
'mysqlfailover': (),
'mysqlfrm': (),
'mysqlindexcheck': (),
'mysqlmetagrep': (),
'mysqlprocgrep': (),
'mysqlreplicate': (),
'mysqlrpladmin': (),
'mysqlrplcheck': (),
'mysqlrplms': (),
'mysqlrplshow': (),
'mysqlrplsync': (),
'mysqlserverclone': (),
'mysqlserverinfo': (),
'mysqluc': (),
'mysqluserclone': (),
}
| gpl-2.0 | 2,112,526,006,098,551,300 | 31.277778 | 78 | 0.667384 | false | 3.42268 | false | false | false |
syagev/kaggle_dsb | luna16/src/deep/unet/unet.py | 1 | 7592 | import theano
import theano.tensor as T
import lasagne
from lasagne.layers import InputLayer, Conv2DLayer, MaxPool2DLayer, batch_norm, DropoutLayer, GaussianNoiseLayer
from lasagne.init import HeNormal
from lasagne import nonlinearities
from lasagne.layers import ConcatLayer, Upscale2DLayer
from lasagne.regularization import l2, regularize_network_params
import logging
from params import params as P
import numpy as np
def output_size_for_input(in_size, depth):
in_size -= 4
for _ in range(depth-1):
in_size = in_size//2
in_size -= 4
for _ in range(depth-1):
in_size = in_size*2
in_size -= 4
return in_size
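# Worked example with the defaults below (depth 5, input 512): the first crop
# gives 508, the contracting loop then yields 250, 121, 56, 24, and the
# expanding loop yields 44, 84, 164, 324 -- so OUTPUT_SIZE becomes 324.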
NET_DEPTH = P.DEPTH #Default 5
INPUT_SIZE = P.INPUT_SIZE #Default 512
OUTPUT_SIZE = output_size_for_input(INPUT_SIZE, NET_DEPTH)
def filter_for_depth(depth):
return 2**(P.BRANCHING_FACTOR+depth)
def define_network(input_var):
batch_size = None
net = {}
net['input'] = InputLayer(shape=(batch_size,P.CHANNELS,P.INPUT_SIZE,P.INPUT_SIZE), input_var=input_var)
nonlinearity = nonlinearities.leaky_rectify
if P.GAUSSIAN_NOISE > 0:
net['input'] = GaussianNoiseLayer(net['input'], sigma=P.GAUSSIAN_NOISE)
def contraction(depth, deepest):
n_filters = filter_for_depth(depth)
incoming = net['input'] if depth == 0 else net['pool{}'.format(depth-1)]
net['conv{}_1'.format(depth)] = Conv2DLayer(incoming,
num_filters=n_filters, filter_size=3, pad='valid',
W=HeNormal(gain='relu'),
nonlinearity=nonlinearity)
net['conv{}_2'.format(depth)] = Conv2DLayer(net['conv{}_1'.format(depth)],
num_filters=n_filters, filter_size=3, pad='valid',
W=HeNormal(gain='relu'),
nonlinearity=nonlinearity)
if P.BATCH_NORMALIZATION:
net['conv{}_2'.format(depth)] = batch_norm(net['conv{}_2'.format(depth)], alpha=P.BATCH_NORMALIZATION_ALPHA)
if not deepest:
net['pool{}'.format(depth)] = MaxPool2DLayer(net['conv{}_2'.format(depth)], pool_size=2, stride=2)
def expansion(depth, deepest):
n_filters = filter_for_depth(depth)
incoming = net['conv{}_2'.format(depth+1)] if deepest else net['_conv{}_2'.format(depth+1)]
upscaling = Upscale2DLayer(incoming,4)
net['upconv{}'.format(depth)] = Conv2DLayer(upscaling,
num_filters=n_filters, filter_size=2, stride=2,
W=HeNormal(gain='relu'),
nonlinearity=nonlinearity)
if P.SPATIAL_DROPOUT > 0:
bridge_from = DropoutLayer(net['conv{}_2'.format(depth)], P.SPATIAL_DROPOUT)
else:
bridge_from = net['conv{}_2'.format(depth)]
net['bridge{}'.format(depth)] = ConcatLayer([
net['upconv{}'.format(depth)],
bridge_from],
axis=1, cropping=[None, None, 'center', 'center'])
net['_conv{}_1'.format(depth)] = Conv2DLayer(net['bridge{}'.format(depth)],
num_filters=n_filters, filter_size=3, pad='valid',
W=HeNormal(gain='relu'),
nonlinearity=nonlinearity)
#if P.BATCH_NORMALIZATION:
# net['_conv{}_1'.format(depth)] = batch_norm(net['_conv{}_1'.format(depth)])
if P.DROPOUT > 0:
net['_conv{}_1'.format(depth)] = DropoutLayer(net['_conv{}_1'.format(depth)], P.DROPOUT)
net['_conv{}_2'.format(depth)] = Conv2DLayer(net['_conv{}_1'.format(depth)],
num_filters=n_filters, filter_size=3, pad='valid',
W=HeNormal(gain='relu'),
nonlinearity=nonlinearity)
for d in range(NET_DEPTH):
#There is no pooling at the last layer
deepest = d == NET_DEPTH-1
contraction(d, deepest)
for d in reversed(range(NET_DEPTH-1)):
deepest = d == NET_DEPTH-2
expansion(d, deepest)
# Output layer
net['out'] = Conv2DLayer(net['_conv0_2'], num_filters=P.N_CLASSES, filter_size=(1,1), pad='valid',
nonlinearity=None)
#import network_repr
#print network_repr.get_network_str(net['out'])
logging.info('Network output shape '+ str(lasagne.layers.get_output_shape(net['out'])))
return net
def score_metrics(out, target_var, weight_map, l2_loss=0):
_EPSILON=1e-8
out_flat = out.dimshuffle(1,0,2,3).flatten(ndim=2).dimshuffle(1,0)
target_flat = target_var.dimshuffle(1,0,2,3).flatten(ndim=1)
weight_flat = weight_map.dimshuffle(1,0,2,3).flatten(ndim=1)
prediction = lasagne.nonlinearities.softmax(out_flat)
prediction_binary = T.argmax(prediction, axis=1)
dice_score = (T.sum(T.eq(2, prediction_binary+target_flat))*2.0 /
(T.sum(prediction_binary) + T.sum(target_flat)))
loss = lasagne.objectives.categorical_crossentropy(T.clip(prediction,_EPSILON,1-_EPSILON), target_flat)
loss = loss * weight_flat
loss = loss.mean()
loss += l2_loss
accuracy = T.mean(T.eq(prediction_binary, target_flat),
dtype=theano.config.floatX)
return loss, accuracy, dice_score, target_flat, prediction, prediction_binary
def define_updates(network, input_var, target_var, weight_var):
params = lasagne.layers.get_all_params(network, trainable=True)
out = lasagne.layers.get_output(network)
test_out = lasagne.layers.get_output(network, deterministic=True)
l2_loss = P.L2_LAMBDA * regularize_network_params(network, l2)
train_metrics = score_metrics(out, target_var, weight_var, l2_loss)
loss, acc, dice_score, target_prediction, prediction, prediction_binary = train_metrics
val_metrics = score_metrics(test_out, target_var, weight_var, l2_loss)
t_loss, t_acc, t_dice_score, t_target_prediction, t_prediction, t_prediction_binary = train_metrics
l_r = theano.shared(np.array(P.LEARNING_RATE, dtype=theano.config.floatX))
if P.OPTIMIZATION == 'nesterov':
updates = lasagne.updates.nesterov_momentum(
loss, params, learning_rate=l_r, momentum=P.MOMENTUM)
if P.OPTIMIZATION == 'adam':
updates = lasagne.updates.adam(
loss, params, learning_rate=l_r)
logging.info("Defining train function")
train_fn = theano.function([input_var, target_var, weight_var],[
loss, l2_loss, acc, dice_score, target_prediction, prediction, prediction_binary],
updates=updates)
logging.info("Defining validation function")
val_fn = theano.function([input_var, target_var, weight_var], [
t_loss, l2_loss, t_acc, t_dice_score, t_target_prediction, t_prediction, t_prediction_binary])
return train_fn, val_fn, l_r
def define_predict(network, input_var):
params = lasagne.layers.get_all_params(network, trainable=True)
out = lasagne.layers.get_output(network, deterministic=True)
out_flat = out.dimshuffle(1,0,2,3).flatten(ndim=2).dimshuffle(1,0)
prediction = lasagne.nonlinearities.softmax(out_flat)
print "Defining predict"
predict_fn = theano.function([input_var],[prediction])
return predict_fn
| apache-2.0 | 8,048,763,956,391,622,000 | 40.037838 | 126 | 0.597208 | false | 3.521336 | false | false | false |
kbsezginel/tee_mof | thermof/initialize/job.py | 1 | 2422 | # Date: September 2017
# Author: Kutay B. Sezginel
"""
Initializing job submission files for computing cluster
"""
import os
from thermof.sample import slurm_file, slurm_scratch_file, pbs_file
from . import read_lines, write_lines
def job_submission_file(simdir, parameters, verbose=True):
""" Generate job submission file from given parameters """
jobpar = parameters.job
file_name = os.path.join(simdir, '%s.%s' % (jobpar['prefix'], jobpar['name']))
print('III. Writing %s job submission file -> %s' % (jobpar['scheduler'], file_name)) if verbose else None
if jobpar['scheduler'] == 'slurm':
write_slurm_file(file_name, jobpar, sample=slurm_file)
elif jobpar['scheduler'] == 'slurm-scratch':
write_slurm_file(file_name, jobpar, sample=slurm_scratch_file)
elif jobpar['scheduler'] == 'pbs':
write_pbs_file(file_name, jobpar, sample=pbs_file)
else:
print('Select job scheduler: slurm / slurm-scratch / pbs')
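# The parameters.job dict used above is expected to carry at least these keys
# (the values shown here are illustrative only):
#   {'prefix': 'in', 'name': 'mof-run', 'scheduler': 'slurm', 'nodes': 2,
#    'ppn': 24, 'walltime': '24:00:00', 'cluster': 'mpi',
#    'input': 'in.mof-run', 'output': 'out.mof-run'}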
def write_slurm_file(file_name, jobpar, sample):
""" Write slurm job submission file """
job_lines = read_lines(sample)
job_lines[2] = '#SBATCH --job-name=%s\n' % jobpar['name']
job_lines[3] = '#SBATCH --output=%s.out\n' % jobpar['name']
job_lines[4] = '#SBATCH --nodes=%i\n' % jobpar['nodes']
job_lines[5] = '#SBATCH --ntasks-per-node=%i\n' % jobpar['ppn']
job_lines[6] = '#SBATCH --time=%s\n' % jobpar['walltime']
job_lines[7] = '#SBATCH --cluster=%s\n' % jobpar['cluster']
if jobpar['scheduler'] == 'slurm':
job_lines[18] = 'srun --mpi=pmi2 lmp_mpi -in %s > %s\n' % (jobpar['input'], jobpar['output'])
elif jobpar['scheduler'] == 'slurm-scratch':
job_lines[24] = 'zfs=%s\n' % jobpar['zfsdir']
job_lines[38] = 'lmpdir=%s\n' % (jobpar['lmpdir'])
job_lines[39] = 'srun --mpi=pmi2 $lmpdir -in %s > %s\n' % (jobpar['input'], jobpar['output'])
write_lines(file_name, job_lines)
def write_pbs_file(file_name, jobpar, sample):
""" Write PBS job submission file """
job_lines = read_lines(sample)
job_lines[3] = '#PBS -N %s\n' % jobpar['name']
job_lines[4] = '#PBS -q %s\n' % jobpar['queue']
job_lines[5] = '#PBS -l nodes=%i:ppn=%i\n' % (jobpar['nodes'], jobpar['ppn'])
job_lines[6] = '#PBS -l walltime=%s\n' % jobpar['walltime']
job_lines[15] = 'prun lammps < %s > %s' % (jobpar['input'], jobpar['output'])
write_lines(file_name, job_lines)
| mit | 3,736,780,771,206,809,600 | 45.576923 | 110 | 0.618084 | false | 2.813008 | false | false | false |
mcroydon/django-tumbleweed | tumbleweed/views.py | 1 | 11972 | from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.core.paginator import Paginator
from django.http import Http404, HttpResponseServerError
from haystack.query import SearchQuerySet
from django.conf import settings
import datetime, time
RESULTS_PER_PAGE = getattr(settings, 'TUMBLEWEED_RESULTS_PER_PAGE', 20)
def tumble(request, date_field='pub_date', template_name='tumbleweed/tumble.html', searchqueryset=None,
paginate_by=RESULTS_PER_PAGE, context_class=RequestContext, extra_context={}):
"""
A tumblelog view that harnesses the denormalized data in a haystack index.
Optional parameters:
date_field
The name of the field in your `haystack`_ index that you would like to order
your tumbles by. Default: ``pub_date``.
template_name
The name of the template to render. Default: :template:`tumbleweed/tumble.html`.
searchqueryset
You may pass in your own SearchQuerySet_ if you would like to further restrict
what items show up in the tumble view. This is useful for filtering only live
objects or only objects whose publication date has passed. Default: ``None``.
paginate_by
The number of objects to include in each page of the tumble. Default:
``TUMBLEWEED_RESULTS_PER_PAGE`` in your settings file, or 20.
context_class
Pass in your own `context class`_. Default: Django's ``RequestContext``.
extra_context
A dictionary containing extra variables to be included in the context, similar
to ``extra_context`` included in Django's generic views.
Template context:
page
The current page of haystack results.
paginator
The Paginator_ for access to information about the paginated list
for creating next/previous links, showing the total number of
tumbled items, etc.
.. _haystack: http://haystacksearch.org/
.. _SearchQuerySet: http://haystacksearch.org/docs/searchqueryset_api.html
.. _context class: http://docs.djangoproject.com/en/dev/ref/templates/api/#id1
.. _Paginator: http://docs.djangoproject.com/en/dev/topics/pagination/
"""
if not searchqueryset:
searchqueryset = SearchQuerySet().all()
things = searchqueryset.order_by('-%s' % date_field)
paginator = Paginator(things, paginate_by)
try:
page = paginator.page(int(request.GET.get('page', 1)))
except ValueError:
raise Http404
context_dict = {
'page': page,
'paginator': paginator,
}
context_dict.update(extra_context)
return render_to_response(template_name, context_dict, context_instance=context_class(request))
def archive_year(request, year, searchqueryset=None, date_field='pub_date', template_name='tumbleweed/tumble_archive_year.html', **kwargs):
"""
    A paginated list of tumbled items for a given year.
Required parameters:
year
The year to tumble, usually passed in as part of the URL.
Optional parameters:
date_field
The name of the field in your `haystack`_ index that you would like to order
your tumbles by. Default: ``pub_date``.
template_name
            The name of the template to render. Default: :template:`tumbleweed/tumble_archive_year.html`.
searchqueryset
You may pass in your own SearchQuerySet_ if you would like to further restrict
what items show up in the tumble view. This is useful for filtering only live
objects or only objects whose publication date has passed. Default: ``None``.
paginate_by
The number of objects to include in each page of the tumble. Default:
``TUMBLEWEED_RESULTS_PER_PAGE`` in your settings file, or 20.
context_class
Pass in your own `context class`_. Default: Django's ``RequestContext``.
extra_context
A dictionary containing extra variables to be included in the context, similar
to ``extra_context`` included in Django's generic views.
Template context:
page
The current page of haystack results.
paginator
The Paginator_ for access to information about the paginated list
for creating next/previous links, showing the total number of
tumbled items, etc.
.. _haystack: http://haystacksearch.org/
.. _SearchQuerySet: http://haystacksearch.org/docs/searchqueryset_api.html
.. _context class: http://docs.djangoproject.com/en/dev/ref/templates/api/#id1
.. _Paginator: http://docs.djangoproject.com/en/dev/topics/pagination/
"""
if not searchqueryset:
searchqueryset = SearchQuerySet().all()
try:
year = int(year)
except ValueError:
return HttpResponseServerError(u'An integer is required for year.')
# TODO: Less ugly, please.
lookup_kwargs = {
'%s__gte' % date_field: datetime.datetime(year, 1, 1),
'%s__lte' % date_field: datetime.datetime(year, 12, 31, 23, 59, 59)
}
return tumble(request, searchqueryset=searchqueryset.filter(**lookup_kwargs), template_name=template_name, **kwargs)
def archive_month(request, year, month, searchqueryset=None, date_field='pub_date', month_format='%b',
template_name='tumbleweed/tumble_archive_month.html', **kwargs):
"""
    A paginated list of tumbled items for a given month.
Required parameters:
year
The year to tumble, usually passed in as part of the URL.
month
The month to tumble, usually passed in as part of the URL.
Optional parameters:
month_format
The `date formatting`_ code used to interpret the month passed in as a string.
Default: ``%b``.
date_field
The name of the field in your `haystack`_ index that you would like to order
your tumbles by. Default: ``pub_date``.
template_name
            The name of the template to render. Default: :template:`tumbleweed/tumble_archive_month.html`.
searchqueryset
You may pass in your own SearchQuerySet_ if you would like to further restrict
what items show up in the tumble view. This is useful for filtering only live
objects or only objects whose publication date has passed. Default: ``None``.
paginate_by
The number of objects to include in each page of the tumble. Default:
``TUMBLEWEED_RESULTS_PER_PAGE`` in your settings file, or 20.
context_class
Pass in your own `context class`_. Default: Django's ``RequestContext``.
extra_context
A dictionary containing extra variables to be included in the context, similar
to ``extra_context`` included in Django's generic views.
Template context:
page
The current page of haystack results.
paginator
The Paginator_ for access to information about the paginated list
for creating next/previous links, showing the total number of
tumbled items, etc.
.. _date formatting: http://docs.djangoproject.com/en/dev/ref/templates/builtins/#now
.. _haystack: http://haystacksearch.org/
.. _SearchQuerySet: http://haystacksearch.org/docs/searchqueryset_api.html
.. _context class: http://docs.djangoproject.com/en/dev/ref/templates/api/#id1
.. _Paginator: http://docs.djangoproject.com/en/dev/topics/pagination/
"""
if not searchqueryset:
searchqueryset = SearchQuerySet().all()
# TODO: day list?
# This logic courtesy of Django's date-based generic views
try:
tt = time.strptime("%s-%s" % (year, month), '%s-%s' % ('%Y', month_format))
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
now = datetime.datetime.now()
# Calculate first and last day of month, for use in a date-range lookup.
first_day = date.replace(day=1)
if first_day.month == 12:
last_day = first_day.replace(year=first_day.year + 1, month=1)
else:
last_day = first_day.replace(month=first_day.month + 1)
lookup_kwargs = {
'%s__gte' % date_field: first_day,
'%s__lt' % date_field: last_day,
}
return tumble(request, searchqueryset=searchqueryset.filter(**lookup_kwargs), template_name=template_name, **kwargs)
def archive_day(request, year, month, day, searchqueryset=None, date_field='pub_date', month_format='%b', day_format='%d',
template_name='tumbleweed/tumble_archive_day.html', **kwargs):
"""
    A paginated list of tumbled items for a given day.
Required parameters:
year
The year to tumble, usually passed in as part of the URL.
month
The month to tumble, usually passed in as part of the URL.
day
            The day to tumble, usually passed in as part of the URL.
Optional parameters:
month_format
The `date formatting`_ code used to interpret the month passed in as a string.
Default: ``%b``.
day_format
            The `date formatting`_ code used to interpret the day passed in as a string.
Default: ``%d``.
date_field
The name of the field in your `haystack`_ index that you would like to order
your tumbles by. Default: ``pub_date``.
template_name
            The name of the template to render. Default: :template:`tumbleweed/tumble_archive_day.html`.
searchqueryset
You may pass in your own SearchQuerySet_ if you would like to further restrict
what items show up in the tumble view. This is useful for filtering only live
objects or only objects whose publication date has passed. Default: ``None``.
paginate_by
The number of objects to include in each page of the tumble. Default:
``TUMBLEWEED_RESULTS_PER_PAGE`` in your settings file, or 20.
context_class
Pass in your own `context class`_. Default: Django's ``RequestContext``.
extra_context
A dictionary containing extra variables to be included in the context, similar
to ``extra_context`` included in Django's generic views.
Template context:
page
The current page of haystack results.
paginator
The Paginator_ for access to information about the paginated list
for creating next/previous links, showing the total number of
tumbled items, etc.
.. _date formatting: http://docs.djangoproject.com/en/dev/ref/templates/builtins/#now
.. _haystack: http://haystacksearch.org/
.. _SearchQuerySet: http://haystacksearch.org/docs/searchqueryset_api.html
.. _context class: http://docs.djangoproject.com/en/dev/ref/templates/api/#id1
.. _Paginator: http://docs.djangoproject.com/en/dev/topics/pagination/
"""
if not searchqueryset:
searchqueryset = SearchQuerySet().all()
# More logic courtesy of Django
try:
tt = time.strptime('%s-%s-%s' % (year, month, day),
'%s-%s-%s' % ('%Y', month_format, day_format))
date = datetime.date(*tt[:3])
except ValueError:
raise Http404
lookup_kwargs = {
'%s__gte' % date_field: datetime.datetime.combine(date, datetime.time.min),
'%s__lte' % date_field: datetime.datetime.combine(date, datetime.time.max)
}
return tumble(request, searchqueryset=searchqueryset.filter(**lookup_kwargs), template_name=template_name, **kwargs)
| bsd-3-clause | 96,717,738,742,192,700 | 39.040134 | 139 | 0.635483 | false | 4.171429 | false | false | false |
Southpaw-TACTIC/TACTIC | src/pyasm/prod/web/prod_wdg.py | 1 | 7380 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['IntrospectWdg','IntrospectSelectWdg', 'ProdIconButtonWdg',
'ProdIconSubmitWdg', 'SnapshotInfoWdg', 'SnapshotLabelWdg',
'AssetLibrarySelectionWdg', 'SObjectSelectionWdg']
from pyasm.web import *
from pyasm.widget import *
from pyasm.search import Search, SObject
from pyasm.prod.biz import *
from pyasm.common import Container
from pyasm.prod.load import ProdLoaderContext
class ProdIconButtonWdg(IconButtonWdg):
def __init__(self, name=None, icon=None, long=True, icon_pos="left"):
super(ProdIconButtonWdg,self).__init__(name, icon, long, icon_pos)
self.add_style("line-height: 14px")
self.add_style("font-size: 0.8em")
self.add_style("padding: 3px 10px 3px 10px")
class ProdIconSubmitWdg(IconSubmitWdg):
def __init__(self, name=None, icon=None, long=True, icon_pos="left"):
super(ProdIconSubmitWdg,self).__init__(name, icon, long, icon_pos)
self.add_style("line-height: 14px")
self.add_style("font-size: 0.8em")
self.add_style("padding: 3px 10px 3px 10px")
class IntrospectWdg(ProdIconSubmitWdg):
'''a widget that does introspection to analyze/update what
assets(versions) are loaded in the session of the app'''
def __init__(self):
super(IntrospectWdg, self).__init__("Introspect", long=True)
self.add_style("height: 14px")
self.add_style("font-size: 0.8em")
#self.add_style("padding: 3px 10px 2px 10px")
self.add_behavior({'type': "click", 'cbjs_action': "introspect(bvr)"})
class IntrospectSelectWdg(ProdIconSubmitWdg):
'''a widget that does selected introspection to analyze/update
what assets(versions) are loaded in the session of the app'''
def __init__(self):
super(IntrospectSelectWdg, self).__init__("Introspect Select", long=True)
self.add_style("height: 14px")
self.add_style("font-size: 0.8em")
self.add_event("onclick", "introspect_select()")
class SnapshotInfoWdg(BaseTableElementWdg):
'''a widget that extracts the info of the xml snippet of a snapshot'''
def preprocess(self):
search_type_list = SObject.get_values(self.sobjects, 'search_type', unique=True)
search_id_dict = {}
self.ref_sobject_cache = {}
# initialize the search_id_dict
for type in search_type_list:
search_id_dict[type] = []
# cache it first
for sobject in self.sobjects:
search_type = sobject.get_value('search_type')
search_id_list = search_id_dict.get(search_type)
search_id_list.append(sobject.get_value('search_id'))
from pyasm.search import SearchException
for key, value in search_id_dict.items():
try:
ref_sobjects = Search.get_by_id(key, value)
sobj_dict = SObject.get_dict(ref_sobjects)
except SearchException as e:
print("WARNING: search_type [%s] with id [%s] does not exist" % (key, value))
print(str(e))
sobj_dict = {}
# store a dict of dict with the search_type as key
self.ref_sobject_cache[key] = sobj_dict
def get_display(self):
search_type = self.get_current_sobject().get_value('search_type')
search_id = self.get_current_sobject().get_value('search_id')
sobject = None
if self.ref_sobject_cache:
sobj_dict = self.ref_sobject_cache.get(search_type)
if sobj_dict:
sobject = sobj_dict.get(str(search_id))
else:
sobject = Search.get_by_id(search_type, search_id)
if sobject:
if isinstance(sobject, ShotInstance):
code = "%s-%s" %(sobject.get_value('shot_code'), sobject.get_code())
elif sobject.has_value('name'):
code = "%s-%s" %(sobject.get_value('name'), sobject.get_code())
else:
code = sobject.get_code()
else:
code = "n/a"
return code
class SnapshotLabelWdg(BaseTableElementWdg):
def get_snapshot(self, mode):
''' get the snapshot depending on the mode i.e. input, output'''
dict = self.get_current_aux_data()
output_snapshots = input_snapshots = None
if dict and '%s_snapshots'%mode in dict:
if mode == 'output':
output_snapshots = dict.get('%s_snapshots' %mode)
else:
input_snapshots = dict.get('%s_snapshots' %mode)
else:
sobject = self.get_current_sobject()
context = self.get_context()
loader = ProdLoaderContext()
output_snapshots = loader.get_output_snapshots(sobject, context)
input_snapshots = loader.get_input_snapshots(sobject, context)
# this is for sharing with AssetLoaderWdg
# should only be called once per sobject
self.append_aux_data({'output_snapshots': output_snapshots, \
'input_snapshots': input_snapshots})
if mode == 'output':
return output_snapshots
else:
return input_snapshots
def get_context(self):
context_select = Container.get('context_filter')
context = 'publish'
if context_select:
context = context_select.get_value()
if context == "":
values = context_select.get_option('values')
context = values[len(values)-1]
return context
def get_display(self):
snapshot = self.get_snapshot('output')
label = None
if snapshot:
label = snapshot.get_label()
widget = Widget()
if label:
widget.add(IconWdg(label, eval('IconWdg.%s' %label.upper())))
else:
widget.add('')
return widget
class AssetLibrarySelectionWdg(SelectWdg):
def get_display(self):
search = Search('prod/asset_library')
self.set_search_for_options(search, 'code', 'title')
self.set_option('web_state', 'true')
self.add_empty_option()
select = super(AssetLibrarySelectionWdg, self).get_display()
span = SpanWdg(select)
insert_wdg = IframeInsertLinkWdg(search.get_search_type())
insert_wdg.set_refresh_mode("page")
span.add(insert_wdg)
return span
class SObjectSelectionWdg(SelectWdg):
def get_display(self):
search_type = self.get_option('search_type')
if not search_type:
return
search = Search(search_type)
self.set_search_for_options(search, 'code', 'code')
self.set_option('web_state', 'true')
self.add_empty_option()
select = super(SObjectSelectionWdg, self).get_display()
span = SpanWdg(select)
insert_wdg = IframeInsertLinkWdg(search.get_search_type())
insert_wdg.set_refresh_mode("page")
span.add(insert_wdg)
return span
| epl-1.0 | -2,703,030,322,963,593,000 | 34.652174 | 93 | 0.595799 | false | 3.806086 | false | false | false |
k0001/mediasancion | extras/importers/mscrap_import.py | 1 | 18027 | # coding: utf8
# MediaSanción, a web application for accessing the public data of
# legislative activity in Argentina.
# Copyright (C) 2010,2011,2012 Renzo Carbonara <renzo @carbonara .com .ar>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# It's recommended that you import items in the following order:
# 1. LegisladorItem
# 2. ProyectoItem
# 3. FirmaProyecto|TramiteProyectoItem|DictamenProyectoItem
# This program is ugly as shit.
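# Example invocation (file names are placeholders; each input file is expected
# to contain one JSON-encoded [ItemType, item] pair per line):
#   python mscrap_import.py -v legisladores.jl proyectos.jl firmas.jl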
import json
import logging
import optparse
import os
import random
import re
import signal
import sys
import time
import isodate
from datetime import datetime
from pprint import pprint
from django.db.models import Q
from django.db import transaction
from mediasancion.core.models import Partido, Distrito, Bloque, Persona
from mediasancion.congreso.models import (Proyecto, FirmaProyecto, Legislador,
Comision, DictamenProyecto, TramiteProyecto)
logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(os.path.basename(__file__))
AUDIT_ORIGIN = u'mscrap_import:%s' % datetime.utcnow().isoformat()
def store_legislador_item(x):
try:
distrito = Distrito.objects.get(nombre=x['distrito_nombre'])
except Distrito.DoesNotExist:
distrito = Distrito(nombre=x['distrito_nombre'], origin=AUDIT_ORIGIN)
distrito.resource_source = x['resource_source']
distrito.resource_url = x['resource_url']
distrito.save()
if x.get('bloque_nombre'):
try:
bloque = Bloque.objects.get(nombre=x['bloque_nombre'])
except Bloque.DoesNotExist:
bloque = Bloque(nombre=x['bloque_nombre'], origin=AUDIT_ORIGIN)
bloque.resource_source = x['resource_source']
bloque.resource_url = x['resource_url']
bloque.save()
else:
bloque = None
if x.get('partido_nombre'):
try:
partido = Partido.objects.get(nombre=x['partido_nombre'])
except Partido.DoesNotExist:
partido = Partido(nombre=x['partido_nombre'], origin=AUDIT_ORIGIN)
partido.resource_source = x['resource_source']
partido.resource_url = x['resource_url']
partido.save()
else:
partido = None
persona_created = True
try:
persona = Persona.objects.get(nombre=x['nombre'],
apellido=x['apellido'],
legislador__camara=x['camara'],
legislador__bloque=bloque,
legislador__distrito=distrito,
legislador__inicio=isodate.parse_date(x['mandato_inicio']),
legislador__fin=isodate.parse_date(x['mandato_fin']))
persona_created = False
except Persona.DoesNotExist:
try:
persona = Persona.objects.get(nombre=x['nombre'], apellido=x['apellido'])
persona_created = False
except Persona.MultipleObjectsReturned:
log.error((u"This is an expected error! Aparently you have more than one Persona named: "
u"%(apellido)s, %(nombre)s. You'll have to fix this by hand. Set var 'persona' "
u"to the desired Persona instance and continue (c)") % x)
import ipdb; ipdb.set_trace()
except Persona.DoesNotExist:
persona = Persona(nombre=x['nombre'], apellido=x['apellido'], origin=AUDIT_ORIGIN)
try:
assert isinstance(persona, Persona)
except (NameError, AssertionError):
raise RuntimeError(u"Missing Persona, sorry, need to abort.")
persona.email = x.get('email') or None # the 'or None' thing is cuz we don't want empty strings.
persona.telefono = x.get('telefono') or None
persona.foto = x.get('foto_url') or None # <--- makes no sense, but we don't care right now.
persona.save()
if persona_created:
persona.resource_source = x['resource_source']
persona.resource_url = x['resource_url']
persona.resource_id = x['resource_id']
persona.save()
log.debug(u'Created %s Persona' % persona.uuid)
else:
log.debug(u'Updated %s Persona' % persona.uuid)
try:
legislador = Legislador.objects.get(persona=persona,
camara=x['camara'],
bloque=bloque,
distrito=distrito,
inicio=isodate.parse_date(x['mandato_inicio']),
fin=isodate.parse_date(x['mandato_fin']))
log.debug(u'Updated %s Legislador' % legislador.uuid)
except Legislador.DoesNotExist:
legislador = Legislador(persona=persona,
camara=x['camara'],
bloque=bloque,
partido=partido,
distrito=distrito,
inicio=isodate.parse_date(x['mandato_inicio']),
fin=isodate.parse_date(x['mandato_fin']))
legislador.resource_source = x['resource_source']
legislador.resource_url = x['resource_url']
legislador.resource_id = x['resource_id']
legislador.origin = AUDIT_ORIGIN
legislador.save()
log.debug(u'Created %s Legislador' % legislador.uuid)
return True
def store_proyecto_item(x):
try:
p = Proyecto.objects.get(camara_origen_expediente=x['camara_origen_expediente'],
camara_origen=x['camara_origen'])
proyecto_created = False
except Proyecto.DoesNotExist:
p = Proyecto(camara_origen_expediente=x['camara_origen_expediente'],
camara_origen=x['camara_origen'],
origin=AUDIT_ORIGIN)
proyecto_created = True
p.resource_source = x['resource_source']
p.resource_url = x['resource_url']
p.resource_id = x['resource_id']
p.origen = x['origen']
p.camara_revisora = x['camara_revisora'] if 'camara_revisora' in x else None
p.camara_revisora_expediente = x.get('camara_revisora_expediente') or ''
p.reproduccion_expediente = x.get('reproduccion_expediente') or ''
p.ley_numero = x.get('ley_numero')
p.tipo = x['tipo']
p.mensaje = x.get('mensaje_codigo') or ''
p.publicacion_en = x.get('publicacion_en') or ''
p.publicacion_fecha = isodate.parse_date(x['publicacion_fecha'])
p.texto_completo_url = x.get('texto_completo_url', '')
p.texto_mediasancion_senadores_url = x.get('texto_mediasancion_senadores_url', '')
p.texto_mediasancion_diputados_url = x.get('texto_mediasancion_diputados_url', '')
p.sumario = x['sumario']
p.save()
cd = x.get('comisiones_diputados', ())
for s in cd:
s = s.capitalize()
try:
c = Comision.objects.get(camara='D', nombre__iexact=s)
except Comision.DoesNotExist:
c = Comision(camara='D', nombre=s, origin=AUDIT_ORIGIN)
c.resource_source = x['resource_source']
c.resource_url = x['resource_url']
c.save()
if not c in p.comisiones.all():
p.comisiones.add(c)
for s in x.get('comisiones_senadores', ()):
s = s.capitalize()
try:
c = Comision.objects.get(camara='S', nombre__iexact=s)
except Comision.DoesNotExist:
c = Comision(camara='S', nombre=s, origin=AUDIT_ORIGIN)
c.resource_source = x['resource_source']
c.resource_url = x['resource_url']
c.save()
if not c in p.comisiones.all():
p.comisiones.add(c)
if proyecto_created:
log.debug(u'Created %s Proyecto' % p.uuid)
return True
else:
log.debug(u'Updated %s Proyecto' % p.uuid)
return True
def store_firmaproyecto_item(x):
try:
proyecto = Proyecto.objects.get(camara_origen_expediente=x['proyecto_camara_origen_expediente'],
camara_origen=x['proyecto_camara_origen'])
except Proyecto.DoesNotExist:
return False
if x.get('firmante_bloque'):
try:
bloque = Bloque.objects.get(nombre=x['firmante_bloque'])
except Bloque.DoesNotExist:
bloque = Bloque(nombre=x['firmante_bloque'], origin=AUDIT_ORIGIN)
bloque.resource_source = x['resource_source']
bloque.resource_url = x['resource_url']
bloque.save()
else:
bloque = None
if x.get('firmante_distrito'):
try:
distrito = Distrito.objects.get(nombre=x['firmante_distrito'])
except Distrito.DoesNotExist:
distrito = Distrito(nombre=x['firmante_distrito'], origin=AUDIT_ORIGIN)
distrito.resource_source = x['resource_source']
distrito.resource_url = x['resource_url']
distrito.save()
else:
distrito = None
poder =x['firmante_poder']
firmante_special = x.get('firmante_special') or u''
if not firmante_special:
firmante_apellido = x.get('firmante_apellido') or u''
firmante_nombre = x.get('firmante_nombre') or u''
try:
persona = Persona.objects.get(apellido=firmante_apellido,
nombre=firmante_nombre)
except Persona.DoesNotExist:
persona = Persona(apellido=firmante_apellido,
nombre=firmante_nombre,
origin=AUDIT_ORIGIN)
persona.resource_source = x['resource_source']
persona.resource_url = x['resource_url']
persona.save()
try:
legislador = Legislador.objects.get(persona=persona,
bloque=bloque,
distrito=distrito)
except Legislador.DoesNotExist:
# if legislador created, inicio and fin will be missing. Whatever.
legislador = Legislador(persona=persona,
bloque=bloque,
distrito=distrito,
camara='?',
origin=AUDIT_ORIGIN)
legislador.resource_source = x['resource_source']
legislador.resource_url = x['resource_url']
legislador.save()
else:
persona = legislador = None
try:
fp = FirmaProyecto.objects.get(proyecto=proyecto,
legislador=legislador,
poder=poder,
poder_who=firmante_special,
tipo_firma=x['tipo_firma'])
log.debug(u'Updated %s FirmaProyecto' % fp.uuid)
except FirmaProyecto.DoesNotExist:
fp = FirmaProyecto(proyecto=proyecto,
legislador=legislador,
poder=poder,
poder_who=firmante_special,
tipo_firma=x['tipo_firma'],
origin=AUDIT_ORIGIN)
fp.resource_source = x['resource_source']
fp.resource_url = x['resource_url']
fp.resource_id = x.get('resource_id')
fp.save()
log.debug(u'Created %s FirmaProyecto' % fp.uuid)
return True
def store_dictamenproyecto_item(x):
try:
proyecto = Proyecto.objects.get(camara_origen_expediente=x['proyecto_camara_origen_expediente'],
camara_origen=x['proyecto_camara_origen'])
except Proyecto.DoesNotExist:
return False
x_fecha = isodate.parse_date(x['fecha']) if 'fecha' in x else None
try:
dp = DictamenProyecto.objects.get(proyecto=proyecto,
camara=x['camara'],
index=int(x['index']))
except DictamenProyecto.DoesNotExist:
dp = DictamenProyecto(proyecto=proyecto,
camara=x['camara'],
index=int(x['index']),
fecha=x_fecha,
orden_del_dia=(x.get('orden_del_dia') or u''),
descripcion=(x.get('descripcion') or u''),
resultado=(x.get('resultado') or u''),
origin=AUDIT_ORIGIN)
dp.resource_source = x['resource_source']
dp.resource_url = x['resource_url']
dp.resource_id = x.get('resource_id')
dp.save()
log.debug(u'Created %s DictamenProyecto' % dp.uuid)
else:
dp_changed = False
if dp.resultado and x.get('resultado') and dp.resultado != x.get('resultado'):
dp.resultado = x.get('resultado', u'')
dp_changed = True
if dp.descripcion and x.get('descripcion') and dp.descripcion != x.get('descripcion'):
dp.descripcion = x.get('descripcion', u'')
dp_changed = True
if dp_changed:
dp.save()
log.debug(u'Updated %s DictamenProyecto' % dp.uuid)
return True
def store_tramiteproyecto_item(x):
try:
proyecto = Proyecto.objects.get(camara_origen_expediente=x['proyecto_camara_origen_expediente'],
camara_origen=x['proyecto_camara_origen'])
except Proyecto.DoesNotExist:
return False
x_fecha = isodate.parse_date(x['fecha']) if 'fecha' in x else None
try:
tp = TramiteProyecto.objects.get(proyecto=proyecto,
camara=x['camara'],
index=int(x['index']))
except TramiteProyecto.DoesNotExist:
tp = TramiteProyecto(proyecto=proyecto,
camara=x['camara'],
index=int(x['index']),
fecha=x_fecha,
descripcion=(x.get('descripcion') or u''),
resultado=(x.get('resultado') or u''),
origin=AUDIT_ORIGIN)
tp.resource_source = x['resource_source']
tp.resource_url = x['resource_url']
tp.resource_id = x.get('resource_id')
tp.save()
log.debug(u'Created %s TramiteProyecto' % tp.uuid)
else:
tp_changed = False
if tp.resultado and x.get('resultado') and tp.resultado != x.get('resultado'):
tp.resultado = x.get('resultado', u'')
tp_changed = True
if tp.descripcion and x.get('descripcion') and tp.descripcion != x.get('descripcion'):
tp.descripcion = x.get('descripcion', u'')
tp_changed = True
if tp_changed:
tp.save()
log.debug(u'Updated %s TramiteProyecto' % tp.uuid)
return True
@transaction.commit_manually
def store_item(t, x):
ts = { 'LegisladorItem': store_legislador_item,
'ProyectoItem': store_proyecto_item,
'FirmaProyectoItem': store_firmaproyecto_item,
'DictamenProyectoItem': store_dictamenproyecto_item,
'TramiteProyectoItem': store_tramiteproyecto_item }
try:
_store = ts[t]
except KeyError:
log.warning(u"Skiping %s" % t)
return
try:
s = _store(x)
except:
transaction.rollback()
raise
if s:
transaction.commit()
return True
else:
log.debug(u"Couldn't store %s" % t)
transaction.rollback()
return False
def store_raw(line):
t, x = json.loads(line)
return store_item(t, x)
def main_store(lines):
log.info('Storing...')
for line in lines:
if not store_raw(line):
return
def _sighandle_pdb(sig, frame):
import pdb
pdb.Pdb().set_trace(frame)
signal.signal(signal.SIGUSR1, _sighandle_pdb)
def parse_args():
parser = optparse.OptionParser(usage=u"usage: %prog [options] FILE [FILE..]")
parser.add_option('-v', '--verbose',
action='store_true', dest='verbose',
help=u"verbose output")
parser.add_option('--debug',
action='store_true', dest='debug',
help=u"debug output")
parser.add_option('--wtf',
action='store_true', dest='wtf',
help=u"enable WTF post-mortem debugger")
opts, args = parser.parse_args()
if not args:
parser.print_help()
sys.exit(1)
return opts, args
if __name__ == '__main__':
opts, args = parse_args()
if opts.debug:
log.setLevel(logging.DEBUG)
elif opts.verbose:
log.setLevel(logging.INFO)
log.info('PID: %d' % os.getpid())
log.info('SIGUSR1: Start debugger.')
log.info('SIGUSR2: Print status.')
if opts.wtf:
log.info(u"WTF Post-mortem debugger enabled")
try:
for fname in args:
with open(fname, 'rb') as f: # we use ascii-only input (JSON)
log.info(u"Opening %s..." % fname)
main_store(f)
except Exception:
log.error(u"Something bad happened!!! Nothing will saved.")
if opts.wtf:
from wtf import WTF
WTF()
else:
raise
| agpl-3.0 | 3,245,439,087,975,844,000 | 35.265594 | 104 | 0.563249 | false | 3.455521 | false | false | false |
flisboac/uksat | wafbuild.py | 1 | 3974 | #!/usr/bin/env python
#from waflib.Configure import conf
#from waflib.TaskGen import feature
def options(ctx):
ctx.add_option('-B', '--build', action='store', default="release",
help='Specifies which build to run.')
ctx.add_option('--list-builds', action='store_true',
help='Lists all available builds and their targets (NOT IMPLEMENTED YET).')
target = _get_all_all_target(ctx)
tools = _get_tools(ctx, {'all': target})
for tool in tools:
ctx.load(tool['tool'], **tool)
def configure(ctx):
targets = _get_build_targets(ctx, include_all = False)
tools = _get_tools(ctx, targets)
programs = _get_programs(ctx, targets)
for tool in tools:
ctx.load(tool['tool'])
for program in programs:
ctx.find_program(**program)
ctx.env.build = ctx.options.build
def build(ctx):
targets = _get_build_targets(ctx)
for targetname in targets:
ctx(**targets[targetname])
def _get_list(ctx, targets, key, defaultkey):
values = {}
for targetname in targets:
target = targets[targetname]
valuelist = target.get(key, [])
if type(valuelist) is list or type(valuelist) is tuple:
for value in valuelist:
if type(value) is dict:
values[value[defaultkey]] = value
#values.append(value)
else:
values[value] = {defaultkey: value}
#values.append({defaultkey: value})
else:
values[valuelist] = {defaultkey: valuelist}
#values.append({defaultkey: valuelist})
return list(values.values())
def _get_tools(ctx, targets):
return _get_list(ctx, targets, 'load', defaultkey = 'tool')
def _get_programs(ctx, targets):
return _get_list(ctx, targets, 'find_program', defaultkey = 'filename')
def _get_all_all_target(ctx):
targets = _get_build_targets(ctx, 'all', include_all = True)
all_target = targets['all'] or {}
return all_target
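# The wscript is expected to declare a BUILDS dict along these lines; the
# per-target options shown here are illustrative waf task-generator kwargs:
#   BUILDS = {
#       'all':     {'all': {'load': 'compiler_cxx', 'features': 'cxx cxxprogram'}},
#       'release': {'uksat': {'source': ['main.cpp'], 'cxxflags': ['-O2']}},
#   }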
def _get_build_targets(ctx, buildname = None, include_all = False):
from waflib import Context
if not buildname:
try:
buildname = ctx.env.build
if not buildname: buildname = ctx.options.build
except:
buildname = ctx.options.build
try:
builds = Context.g_module.BUILDS
except:
builds = {}
try:
allbuilddata = builds['all']
except:
allbuilddata = {}
# It's mandatory to have the build declared.
try:
targetbuilddata = builds[buildname]
except:
raise Exception("Build '" + buildname + "' is not declared.")
targetnames = set()
targets = {}
for targetname in allbuilddata: targetnames.add(targetname)
for targetname in targetbuilddata: targetnames.add(targetname)
for targetname in targetnames:
if include_all or targetname != 'all':
targets[targetname] = _get_build_target(ctx, targetname, buildname)
return targets
def _get_build_target(ctx, targetname, buildname = None):
from copy import copy
from waflib import Context
if not buildname:
try:
buildname = ctx.env.build
if not buildname: buildname = ctx.options.build
except:
buildname = ctx.options.build
try:
builds = Context.g_module.BUILDS
except:
raise Exception("BUILDS dictionary is not declared.")
try:
allbuilddata = builds['all']
except:
allbuilddata = {}
try:
allalldata = allbuilddata['all']
except:
allalldata = {}
try:
alldata = allbuilddata[targetname]
except:
alldata = {}
# It's mandatory to have the build declared.
try:
targetbuilddata = builds[buildname]
except:
targetbuilddata = {}
try:
targetalldata = targetbuilddata['all']
except:
targetalldata = {}
try:
targetdata = targetbuilddata[targetname]
except:
targetdata = {}
#if not allbuilddata and not targetbuilddata:
# raise Exception("Build '" + buildname + "' is not declared.")
data = copy(allalldata)
for key in alldata: data[key] = alldata[key]
for key in targetalldata: data[key] = targetalldata[key]
for key in targetdata: data[key] = targetdata[key]
if not data:
raise Exception("No target '" + targetname + "' for build '" + buildname + "'.")
else:
if 'target' not in data:
data['target'] = targetname
return data
| mit | 3,694,323,395,176,270,000 | 23.231707 | 82 | 0.694766 | false | 3.059276 | false | false | false |
joshuamorton/calc_three_proj | plot.py | 1 | 1969 | from matplotlib import pyplot as plt
import numpy as np
import iterative
import pascal
import power
plt.style.use('ggplot')
qr = []
lu = []
for i in range(2, 13):
q = pascal.solve_qr_b(pascal.pascal_matrix(i), pascal.harmonic_vector(i))
l = pascal.solve_lu_b(pascal.pascal_matrix(i), pascal.harmonic_vector(i))
qr.append(q)
lu.append(l)
plt.subplot(1, 1, 1)
x = range(2, 13)
y = [i[1] for i in qr]
z = [i[2] for i in qr]
plt.plot(x, y, color='blue') # error from householder
plt.plot(x, z, color='green') # solution error of qr
plt.yscale('log')
plt.savefig('./qr_err.png')
y = [i[1] for i in lu]
z = [i[2] for i in lu]
plt.clf()
plt.plot(x, y, color='blue')
plt.plot(x, z, color='green')
plt.yscale('log')
plt.savefig('./lu_err.png')
plt.clf()
jacobi, gs = iterative.generate_data()
j_vals = [i[1] for i in jacobi]
g_vals = [i[1] for i in gs]
jacobi_approx = sum(j_vals) / len(j_vals) # 2c
gs_approx = sum(g_vals) / len(g_vals)
print("Averages, jacobi then gauss-seidel, then iterations")
print(jacobi_approx)
print(gs_approx)
print(float(sum(j[2] for j in jacobi))/sum(g[2] for g in gs))
exact = np.array([9.0/190, 28.0/475, 33.0/475]).reshape(3,1)
errs_jacobi = [pascal.norm_inf(j-exact) for j in j_vals]
errs_gs = [pascal.norm_inf(g-exact) for g in g_vals]
plt.plot([j[2] for j in jacobi], errs_jacobi, 'ko', [g[2] for g in gs], errs_gs, 'bo')
plt.savefig('./iterative_err.png')
plt.clf()
powers = power.generate_data()
ds = [p[0] for p in powers if p[0] is not None]
ts = [p[1] for p in powers if p[1] is not None]
tis = [p[2] for p in powers if p[2] is not None]
maxs = [p[3] for p in powers if p[3] is not None]
mins = [p[4] for p in powers if p[4] is not None]
big = max(maxs)
small = max(mins)
maxs = [float(m)/big for m in maxs]
mins = [float(m)/small for m in mins]
plt.scatter(ds, ts, c=maxs)
plt.savefig('./power_mat.png')
plt.clf()
plt.scatter([1.0/d for d in ds], tis, c=mins)
plt.savefig('./power_inv.png')
plt.clf() | mit | -2,766,495,094,339,962,000 | 25.621622 | 90 | 0.648045 | false | 2.311033 | false | false | false |
fonttools/skia-pathops | ci/download_libskia.py | 1 | 2713 | import argparse
import glob
import logging
import platform
import os
import shutil
import struct
import tempfile
__requires__ = ["github_release"]
import github_release
GITHUB_REPO = "fonttools/skia-builder"
ASSET_TEMPLATE = "libskia-{plat}-{arch}.zip"
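# e.g. "libskia-linux-x64.zip" for plat="linux", arch="x64"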
DOWNLOAD_DIR = os.path.join("build", "download")
CPU_ARCH = "x64" if struct.calcsize("P") * 8 == 64 else "x86"
PLATFORM_TAGS = {"Linux": "linux", "Darwin": "mac", "Windows": "win"}
logger = logging.getLogger()
def get_latest_release(repo):
releases = github_release.get_releases(repo)
if not releases:
raise ValueError("no releases found for {!r}".format(repo))
return releases[0]
def download_unpack_assets(repo, tag, asset_name, dest_dir):
dest_dir = os.path.abspath(dest_dir)
os.makedirs(dest_dir, exist_ok=True)
with tempfile.TemporaryDirectory() as tmpdir:
curdir = os.getcwd()
os.chdir(tmpdir)
try:
downloaded = github_release.gh_asset_download(repo, tag, asset_name)
except:
raise
else:
if not downloaded:
raise ValueError(
"no assets found for {0!r} with name {1!r}".format(tag, asset_name)
)
for archive in glob.glob(asset_name):
shutil.unpack_archive(archive, dest_dir)
finally:
os.chdir(curdir)
if __name__ == "__main__":
logging.basicConfig(level="INFO")
parser = argparse.ArgumentParser()
parser.add_argument(
"-p",
"--platform",
default=PLATFORM_TAGS.get(platform.system(), "win"),
choices=["win", "mac", "linux"],
help="The desired platform (default: %(default)s)",
)
parser.add_argument(
"-a",
"--cpu-arch",
default=CPU_ARCH,
help="The desired CPU architecture (default: %(default)s)",
choices=["x86", "x64"],
)
parser.add_argument(
"-d",
"--download-dir",
default=DOWNLOAD_DIR,
help="directory where to download libskia (default: %(default)s)",
)
parser.add_argument(
"-t", "--tag-name", default=None, help="release tag name (default: latest)"
)
args = parser.parse_args()
tag_name = args.tag_name
if tag_name is None:
latest_release = get_latest_release(GITHUB_REPO)
tag_name = latest_release["tag_name"]
asset_name = ASSET_TEMPLATE.format(plat=args.platform, arch=args.cpu_arch)
logger.info(
"Downloading '%s' from '%s' at tag '%s' to %s",
asset_name,
GITHUB_REPO,
tag_name,
args.download_dir,
)
download_unpack_assets(GITHUB_REPO, tag_name, asset_name, args.download_dir)
| bsd-3-clause | 2,709,180,981,875,565,000 | 27.260417 | 87 | 0.596756 | false | 3.646505 | false | false | false |
biothings/biothings_explorer | biothings_explorer/smartapi_kg/__init__.py | 1 | 1114 | import sys
from .dataload import load_specs
from .smartapi_parser import SmartAPIParser
from .filter import filterOps
import traceback
class MetaKG:
def __init__(self):
self.ops = []
self.parser = SmartAPIParser()
def populateOpsFromSpecs(self, specs, verbose=False):
"""Populate meta-kg operations based on SmartAPI specifications"""
self.ops = []
for spec in specs:
try:
self.parser.load_spec(spec)
self.ops += self.parser.fetch_endpoint_info()
except:
if verbose:
print("Unexpected error:", sys.exc_info()[0])
print(
"failed to load the following spec {}".format(spec.get("info"))
)
def constructMetaKG(self, source="remote", tag="translator"):
"""Construct API Meta Knowledge Graph based on SmartAPI Specifications."""
specs = load_specs(source=source, tag=tag)
self.populateOpsFromSpecs(specs)
def filter(self, criteria):
return filterOps(self.ops, criteria)
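# Usage sketch (the filter criteria keys are illustrative only):
#   kg = MetaKG()
#   kg.constructMetaKG(source="remote", tag="translator")
#   ops = kg.filter({"input_type": "Gene", "output_type": "Disease"})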
| apache-2.0 | 8,065,000,017,719,369,000 | 31.764706 | 87 | 0.590664 | false | 4.420635 | false | false | false |
dbcli/vcli | vcli/packages/sqlcompletion.py | 1 | 13865 | from __future__ import print_function
import sys
import sqlparse
from sqlparse.sql import Comparison, Identifier, Where
from .parseutils import last_word, extract_tables, find_prev_keyword
from .vspecial import parse_special_command
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str
else:
string_types = basestring
def suggest_type(full_text, text_before_cursor):
"""Takes the full_text that is typed so far and also the text before the
cursor to suggest completion type and scope.
    Returns a list of suggestion dicts, each with a type of entity ('table', 'column' etc) and a scope.
A scope for a column category will be a list of tables.
"""
word_before_cursor = last_word(text_before_cursor,
include='many_punctuations')
identifier = None
# If we've partially typed a word then word_before_cursor won't be an empty
# string. In that case we want to remove the partially typed string before
# sending it to the sqlparser. Otherwise the last token will always be the
# partially typed string which renders the smart completion useless because
# it will always return the list of keywords as completion.
if word_before_cursor:
if word_before_cursor[-1] == '(' or word_before_cursor[0] == '\\':
parsed = sqlparse.parse(text_before_cursor)
else:
parsed = sqlparse.parse(
text_before_cursor[:-len(word_before_cursor)])
# word_before_cursor may include a schema qualification, like
# "schema_name.partial_name" or "schema_name.", so parse it
# separately
p = sqlparse.parse(word_before_cursor)[0]
if p.tokens and isinstance(p.tokens[0], Identifier):
identifier = p.tokens[0]
else:
parsed = sqlparse.parse(text_before_cursor)
if len(parsed) > 1:
# Multiple statements being edited -- isolate the current one by
# cumulatively summing statement lengths to find the one that bounds the
# current position
current_pos = len(text_before_cursor)
stmt_start, stmt_end = 0, 0
for statement in parsed:
stmt_len = len(statement.to_unicode())
stmt_start, stmt_end = stmt_end, stmt_end + stmt_len
if stmt_end >= current_pos:
text_before_cursor = full_text[stmt_start:current_pos]
full_text = full_text[stmt_start:]
break
elif parsed:
# A single statement
statement = parsed[0]
else:
# The empty string
statement = None
# Check for special commands and handle those separately
if statement:
# Be careful here because trivial whitespace is parsed as a statement,
# but the statement won't have a first token
tok1 = statement.token_first()
if tok1 and tok1.value == '\\':
return suggest_special(text_before_cursor)
last_token = statement and statement.token_prev(len(statement.tokens)) or ''
return suggest_based_on_last_token(last_token, text_before_cursor,
full_text, identifier)
def suggest_special(text):
text = text.lstrip()
cmd, _, arg = parse_special_command(text)
if cmd == text:
# Trying to complete the special command itself
return [{'type': 'special'}]
if cmd in ('\\c', '\\connect'):
return [{'type': 'database'}]
if cmd == '\\dn':
return [{'type': 'schema'}]
if arg:
# Try to distinguish "\d name" from "\d schema.name"
# Note that this will fail to obtain a schema name if wildcards are
# used, e.g. "\d schema???.name"
parsed = sqlparse.parse(arg)[0].tokens[0]
try:
schema = parsed.get_parent_name()
except AttributeError:
schema = None
else:
schema = None
if cmd[1:] == 'd':
# \d can descibe tables or views
if schema:
return [{'type': 'table', 'schema': schema},
{'type': 'view', 'schema': schema}]
else:
return [{'type': 'schema'},
{'type': 'table', 'schema': []},
{'type': 'view', 'schema': []}]
elif cmd[1:] in ('dt', 'dv', 'df', 'dT'):
rel_type = {'dt': 'table',
'dv': 'view',
'df': 'function',
'dT': 'datatype',
}[cmd[1:]]
if schema:
return [{'type': rel_type, 'schema': schema}]
else:
return [{'type': 'schema'},
{'type': rel_type, 'schema': []}]
if cmd in ['\\n', '\\ns', '\\nd']:
return [{'type': 'namedquery'}]
return [{'type': 'keyword'}, {'type': 'special'}]
def suggest_based_on_last_token(token, text_before_cursor, full_text, identifier):
if isinstance(token, string_types):
token_v = token.lower()
elif isinstance(token, Comparison):
# If 'token' is a Comparison type such as
# 'select * FROM abc a JOIN def d ON a.id = d.'. Then calling
# token.value on the comparison type will only return the lhs of the
# comparison. In this case a.id. So we need to do token.tokens to get
# both sides of the comparison and pick the last token out of that
# list.
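        # e.g. here token.tokens[-1] is the trailing 'd.' fragment, which is
        # the part the user is actually completing.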
token_v = token.tokens[-1].value.lower()
elif isinstance(token, Where):
# sqlparse groups all tokens from the where clause into a single token
# list. This means that token.value may be something like
# 'where foo > 5 and '. We need to look "inside" token.tokens to handle
# suggestions in complicated where clauses correctly
prev_keyword, text_before_cursor = find_prev_keyword(text_before_cursor)
return suggest_based_on_last_token(prev_keyword, text_before_cursor,
full_text, identifier)
elif isinstance(token, Identifier):
# If the previous token is an identifier, we can suggest datatypes if
# we're in a parenthesized column/field list, e.g.:
# CREATE TABLE foo (Identifier <CURSOR>
# CREATE FUNCTION foo (Identifier <CURSOR>
# If we're not in a parenthesized list, the most likely scenario is the
# user is about to specify an alias, e.g.:
# SELECT Identifier <CURSOR>
# SELECT foo FROM Identifier <CURSOR>
prev_keyword, _ = find_prev_keyword(text_before_cursor)
if prev_keyword and prev_keyword.value == '(':
# Suggest datatypes
return suggest_based_on_last_token('type', text_before_cursor,
full_text, identifier)
else:
return [{'type': 'keyword'}]
else:
token_v = token.value.lower()
if not token:
return [{'type': 'keyword'}, {'type': 'special'}]
elif token_v.endswith('('):
p = sqlparse.parse(text_before_cursor)[0]
if p.tokens and isinstance(p.tokens[-1], Where):
# Four possibilities:
# 1 - Parenthesized clause like "WHERE foo AND ("
# Suggest columns/functions
# 2 - Function call like "WHERE foo("
# Suggest columns/functions
# 3 - Subquery expression like "WHERE EXISTS ("
# Suggest keywords, in order to do a subquery
# 4 - Subquery OR array comparison like "WHERE foo = ANY("
# Suggest columns/functions AND keywords. (If we wanted to be
# really fancy, we could suggest only array-typed columns)
column_suggestions = suggest_based_on_last_token('where',
text_before_cursor, full_text, identifier)
# Check for a subquery expression (cases 3 & 4)
where = p.tokens[-1]
prev_tok = where.token_prev(len(where.tokens) - 1)
if isinstance(prev_tok, Comparison):
# e.g. "SELECT foo FROM bar WHERE foo = ANY("
prev_tok = prev_tok.tokens[-1]
prev_tok = prev_tok.value.lower()
if prev_tok == 'exists':
return [{'type': 'keyword'}]
elif prev_tok in ('any', 'some', 'all'):
return column_suggestions + [{'type': 'keyword'}]
elif prev_tok == 'in':
# Technically, we should suggest columns AND keywords, as
# per case 4. However, IN is different from ANY, SOME, ALL
# in that it can accept a *list* of columns, or a subquery.
                    # But suggesting keywords for "SELECT * FROM foo WHERE bar IN
# (baz, qux, " would be overwhelming. So we special case 'IN'
# to not suggest keywords.
return column_suggestions
else:
return column_suggestions
# Get the token before the parens
prev_tok = p.token_prev(len(p.tokens) - 1)
if prev_tok and prev_tok.value and prev_tok.value.lower() == 'using':
# tbl1 INNER JOIN tbl2 USING (col1, col2)
tables = extract_tables(full_text)
# suggest columns that are present in more than one table
return [{'type': 'column', 'tables': tables, 'drop_unique': True}]
elif p.token_first().value.lower() == 'select':
            # If the lparen is preceded by a space, chances are we're about to
            # do a sub-select.
if last_word(text_before_cursor,
'all_punctuations').startswith('('):
return [{'type': 'keyword'}]
# We're probably in a function argument list
return [{'type': 'column', 'tables': extract_tables(full_text)}]
elif token_v in ('set', 'by', 'distinct'):
return [{'type': 'column', 'tables': extract_tables(full_text)}]
elif token_v in ('select', 'where', 'having'):
# Check for a table alias or schema qualification
parent = (identifier and identifier.get_parent_name()) or []
if parent:
tables = extract_tables(full_text)
tables = [t for t in tables if identifies(parent, *t)]
return [{'type': 'column', 'tables': tables},
{'type': 'table', 'schema': parent},
{'type': 'view', 'schema': parent},
{'type': 'function', 'schema': parent}]
else:
return [{'type': 'column', 'tables': extract_tables(full_text)},
{'type': 'function', 'schema': []}]
elif (token_v.endswith('join') and token.is_keyword) or (token_v in
('copy', 'from', 'update', 'into', 'describe', 'truncate')):
schema = (identifier and identifier.get_parent_name()) or []
# Suggest tables from either the currently-selected schema or the
# public schema if no schema has been specified
suggest = [{'type': 'table', 'schema': schema}]
if not schema:
# Suggest schemas
suggest.insert(0, {'type': 'schema'})
# Only tables can be TRUNCATED, otherwise suggest views
if token_v != 'truncate':
suggest.append({'type': 'view', 'schema': schema})
return suggest
elif token_v in ('table', 'view', 'function'):
        # E.g. 'DROP FUNCTION <funcname>', 'ALTER TABLE <tablename>'
rel_type = token_v
schema = (identifier and identifier.get_parent_name()) or []
if schema:
return [{'type': rel_type, 'schema': schema}]
else:
return [{'type': 'schema'}, {'type': rel_type, 'schema': []}]
elif token_v == 'on':
tables = extract_tables(full_text) # [(schema, table, alias), ...]
parent = (identifier and identifier.get_parent_name()) or []
if parent:
# "ON parent.<suggestion>"
# parent can be either a schema name or table alias
tables = [t for t in tables if identifies(parent, *t)]
return [{'type': 'column', 'tables': tables},
{'type': 'table', 'schema': parent},
{'type': 'view', 'schema': parent},
{'type': 'function', 'schema': parent}]
else:
# ON <suggestion>
# Use table alias if there is one, otherwise the table name
aliases = [t[2] or t[1] for t in tables]
return [{'type': 'alias', 'aliases': aliases}]
elif token_v in ('c', 'use', 'database', 'template'):
# "\c <db", "use <db>", "DROP DATABASE <db>",
# "CREATE DATABASE <newdb> WITH TEMPLATE <db>"
return [{'type': 'database'}]
elif token_v == 'schema':
# DROP SCHEMA schema_name
return [{'type': 'schema'}]
elif token_v.endswith(',') or token_v == '=':
prev_keyword, text_before_cursor = find_prev_keyword(text_before_cursor)
if prev_keyword:
return suggest_based_on_last_token(
prev_keyword, text_before_cursor, full_text, identifier)
else:
return []
elif token_v in ('type', '::'):
# ALTER TABLE foo SET DATA TYPE bar
# SELECT foo::bar
# Note that tables are a form of composite type in postgresql, so
# they're suggested here as well
schema = (identifier and identifier.get_parent_name()) or []
suggestions = [{'type': 'datatype', 'schema': schema},
{'type': 'table', 'schema': schema}]
if not schema:
suggestions.append({'type': 'schema'})
return suggestions
else:
return [{'type': 'keyword'}]
def identifies(id, schema, table, alias):
return id == alias or id == table or (
schema and (id == schema + '.' + table))
| bsd-3-clause | -1,566,311,390,984,765,200 | 41.015152 | 82 | 0.560765 | false | 4.185029 | false | false | false |
mgh14/470ai | bzagents/Agent.py | 1 | 7556 | import telnetlib
import sys
import time
import random
import math
import GnuplotUtil
from tankUtil import *
class Agent(object):
# constants
SERVER_DELIMITER = "\n"
LIST_START = "start" + SERVER_DELIMITER
LIST_END = "end" + SERVER_DELIMITER
SERVER_CONNECT_ACKNOWLEDGED = "bzrobots 1" + SERVER_DELIMITER
NOT_SET = "not_set"
# member variables
ipAddr = NOT_SET
port = NOT_SET
socket = NOT_SET
constants = dict()
iHaveEnemyFlag = False
worldHalfSize = NOT_SET
myBaseCoords = NOT_SET
myFlagStand = NOT_SET
def __init__(self, ip, port):
self.ipAddr = ip
self.port = port
# connect to telnet bzrflag server
self.socket = telnetlib.Telnet(ip, port)
response = self.socket.read_until(self.SERVER_DELIMITER)
if (response == self.SERVER_CONNECT_ACKNOWLEDGED):
print "connect to server: successful"
else:
print "failed connection!"
sys.exit(-1)
# register and prepare agent
self.registerAgent()
self.loadConstants()
self.setMyBase()
self.setMyFlagStand()
def registerAgent(self):
self.socket.write("agent 1" + self.SERVER_DELIMITER)
print "Registration Successful on port " + str(self.port)
def loadConstants(self):
constList = self._query("constants")
for item in constList:
self.constants[item[0]] = item[1]
self.worldHalfSize = int(self.constants["worldsize"]) / 2
print self.constants
def setMyBase(self):
bases = self._query("bases")
for base in bases:
if(base[0] == self.constants["team"]):
point1 = self.getAdjustedPoint((int(float(base[1])),int(float(base[2]))))
point2 = self.getAdjustedPoint((int(float(base[3])),int(float(base[4]))))
point3 = self.getAdjustedPoint((int(float(base[5])),int(float(base[6]))))
point4 = self.getAdjustedPoint((int(float(base[7])),int(float(base[8]))))
self.myBaseCoords = [point1,point2,point3,point4]
return
print "Error: no base assigned!"
def setMyFlagStand(self):
flags = self._query("flags")
for flag in flags:
if(flag[0] == self.constants["team"]):
flagPoint = self.getAdjustedPoint((int(float(flag[2])),int(float(flag[3]))))
self.myFlagStand = [flagPoint[0],flagPoint[1]]
def commandAgent(self, command):
#print "Cmd: " + command
self.socket.write(command + self.SERVER_DELIMITER)
responseLine1 = self.socket.read_until(self.SERVER_DELIMITER).rstrip()
responseLine2 = self.socket.read_until(self.SERVER_DELIMITER)
#print "ResponseL1: " + responseLine1
#print "ResponseL2: " + responseLine2
def stop(self, tankNum):
self.commandAgent("angvel " + str(tankNum) + " 0")
self.commandAgent("speed " + str(tankNum) + " 0")
def closeSocket(self):
self.socket.close()
# for game queries
def _query(self, queryCommand):
self.socket.write(queryCommand + self.SERVER_DELIMITER)
response = self.socket.read_until(self.SERVER_DELIMITER).rstrip();
stringList = self.socket.read_until(self.LIST_END)
stringList = stringList[len(self.LIST_START):-1*(len(self.LIST_END) + 1)] # parse off 'begin\n' and 'end\n'
listOfLines = stringList.split(self.SERVER_DELIMITER) # split strings by newline
# split each line by whitespace
lineArrays = []
for line in listOfLines:
array = line.split()
array.pop(0)
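			# the leading token is the record type (e.g. 'base', 'flag'),
			# so callers index the remaining fields from 0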
lineArrays.append(array)
return lineArrays
def _getRawResponse(self, queryCommand):
#print "query: " + query
self.socket.write(queryCommand + self.SERVER_DELIMITER)
response = self.socket.read_until(self.SERVER_DELIMITER).rstrip();
#print "ResponseL1: " + response
stringList = self.socket.read_until(self.LIST_END)
return stringList
def printList(self,listToPrint):
print "List:"
for current in listToPrint:
print str(current)
print "(end list)"
def _isCoordinateInBase(self, coords):
# top-right corner check
trCorner = (coords[0] < self.myBaseCoords[0][0] and coords[1] < self.myBaseCoords[0][1])
# bottom-right corner check
brCorner = (coords[0] < self.myBaseCoords[1][0] and coords[1] > self.myBaseCoords[1][1])
# bottom-left corner check
blCorner = (coords[0] > self.myBaseCoords[2][0] and coords[1] > self.myBaseCoords[2][1])
# top-left corner check
tlCorner = (coords[0] > self.myBaseCoords[3][0] and coords[1] < self.myBaseCoords[3][1])
return (trCorner and brCorner and blCorner and tlCorner)
def _isMyFlagInMyBase(self):
flags = self._query("flags")
for flag in flags:
if(flag[0] == self.constants["team"]):
return self._isCoordinateInBase(self._getMyFlagPosition())
return -1
def _isMyFlagCaptured(self):
flags = self._query("flags")
for flag in flags:
if(flag[0] == self.constants["team"]):
return (not (flag[1] == self.constants["team"]))
return -1
def _getMyFlagPosition(self):
flags = self._query("flags")
for flag in flags:
if(flag[0] == self.constants["team"]):
flagPoint = self.getAdjustedPoint((int(float(flag[2])),int(float(flag[3]))))
return [flagPoint[0],flagPoint[1]]
return [-10000,-10000] # represents an error (should be found above)
def _getEnemyFlagPositions(self):
flags = self._query("flags")
positions = []
for flag in flags:
if(flag[0] == self.constants["team"]):
continue
flagPos = self.getAdjustedPoint((int(float(flag[2])),int(float(flag[3]))))
positions.append(flagPos)
return positions
def _iHaveEnemyFlag(self):
flags = self._query("flags")
for flag in flags:
if(flag[0] == self.constants["team"]): # don't count my own flag
continue
if(flag[1] == self.constants["team"]):
return True
return False
def _getCurrentPositionOfTank(self,tankNum):
tankInfo = self._query("mytanks")[tankNum]
return self.getAdjustedPoint([float(tankInfo[6]),float(tankInfo[7])])
def distance(self, a , b):
return math.sqrt((b[1]-a[1])**2+(b[0]-a[0])**2)
def getDesiredAngle(self, tankNum, pointToVisit):
currentPosition = self._getCurrentPositionOfTank(tankNum)
return self.getAdjustedAngle(math.atan2(pointToVisit[1]-currentPosition[1],
pointToVisit[0]-currentPosition[0]))
def setAngularVelocity(self, tankNum, angVel, desiredAngle):
tankInfo = self._query("mytanks")[tankNum]
currAngle = self.getAdjustedAngle(tankInfo[8])
absAngVel = abs(angVel)
# figure out which way the tank should turn
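		# (i.e. pick the rotation direction that sweeps the smaller arc; both
		# angles are normalized to [0, 2*pi) by getAdjustedAngle)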
if(desiredAngle - currAngle > 0):
if(desiredAngle - math.pi > currAngle):
angVel = -1 * absAngVel
else:
angVel = absAngVel
else:
if(desiredAngle + math.pi > currAngle):
angVel = -1 * absAngVel
else:
angVel = absAngVel
self.commandAgent("angvel " + str(tankNum) + " " + str(angVel))
def setAngularVelocityByPoint(self, tankNum, angVel, pointToVisit):
self.setAngularVelocity(tankNum, angVel, self.getDesiredAngle(tankNum,pointToVisit))
def getAdjustedAngle(self,rawAngle):
rawAngle = float(rawAngle)
twoPi = 2*math.pi
if(rawAngle > twoPi):
return math.fmod(rawAngle,twoPi)
if(rawAngle >= 0) and (rawAngle < math.pi):
return rawAngle
if(rawAngle < 0):
return twoPi + rawAngle
return rawAngle
def getAdjustedPoint(self,point):
return [self.worldHalfSize + point[0],self.worldHalfSize + point[1]]
def getMyPosition(self, tankNum):
mytanks = self._query("mytanks")
tankInfo = mytanks[tankNum]
return self.getAdjustedPoint([float(tankInfo[6]),float(tankInfo[7])])
def getMyAngle(self, tankNum):
mytanks = self._query("mytanks")
tankInfo = mytanks[tankNum]
return self.getAdjustedAngle(float(tankInfo[8]))
def play(self): # driver function for beginning AI simulation
print "no implemented play method: tanks will just sit."
| gpl-3.0 | -7,796,457,630,180,964,000 | 27.730038 | 110 | 0.69468 | false | 2.861037 | false | false | false |
ssindow/ZMeter | MeterGUI.py | 1 | 29977 | # -*- coding: utf-8 -*-
"""
GUI Code
"""
import LinkGPIB
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
from pyqtgraph.widgets.RemoteGraphicsView import RemoteGraphicsView
import numpy as np
from time import sleep, strftime
## Constants
versionTxt = 'CellShot V2.6.3'
observerModel = 2602 #2461
sourceModel = 2401
timerPeriod = .4
bufferDepth = 1
plotWindow = 2048*4
plotSubsampling = 22
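# plotWindow: number of most-recent samples kept on screen; plotSubsampling:
# redraw the curve only every Nth timer update to keep the GUI responsive
# (see MeterGUI.update()).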
remotePlot = False
setAntialias = False
class MeterGUI(QtGui.QMainWindow):
# User-Defined constants
triggered = False
defaultTopThreshold = '3.6E+6'
defaultBotThreshold = '3.4E+6'
defaultPercentage = '8'
obSettingUpdated = False # True
ijSettingUpdated = False # True
revertPolarityClicked = False
defaultYMax = '2.5E+6'
defaultYMin = '0.5E+0'
def __init__(self, readport='COM8', baudrate=230400, bufferSize=bufferDepth,
winSize=(800,400)): ## COM port and baudrate are for serial comm.
super(MeterGUI, self).__init__()
self.lastUpdate = 0.0
self.startTime = 0.1
self.data = np.empty((16 * plotWindow, 2))
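        # column 0: measured |Z| (ohms), column 1: elapsed time (s); matches
        # the csvExport() header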
self.ptr = 0
self.avgFPS = 0.0
# self.kb = KeyBoard()
self.baseLineValue = float(self.defaultBotThreshold)
self.topLineValue = float(self.defaultTopThreshold)
self.botLineValue = float(self.defaultBotThreshold)
self.percentThreshold = float(self.defaultPercentage)
self.placeholder = 'Messages\nto be shown\nhere.'
self.injectionCount = 0
# QTimer
self.timer = QtCore.QTimer()
# GUI Layout
pg.setConfigOptions(antialias=setAntialias)
self.layout = pg.LayoutWidget()
# self.kb = QtGui.QGraphicsView(self)
self.layout.installEventFilter(self)
# self.setCentralWidget(self.layout)
self.createLayout(winSize)
def initLink(self, externalInjector = True):
# Initialize Link
if self.dcheck.isChecked():
self.lastUpdate = pg.ptime.time()
self.startTime = pg.ptime.time()
else:
try:
handler.connGPIB(bufferDepth)
handler.initMeter()
sleep(3.5)
# handler.setMeterBuffer(bufferDepth)
if observerModel == 2401:
handler.resetTimer()
self.startTime = float(handler.dev[0].query('SYST:TIME?'))
except:
print('Failed.')
# Fill in GUI Boxes
self.vsenseBox.setText(str(handler.sourceVolt))
self.ilimitBox.setText(str(handler.sourceILim))
self.ishotBox.setText(str(handler.injectCurr))
self.vclampBox.setText(str(handler.injectVLim))
self.tPulseBox.setText(str(handler.injectWidth))
def runTimer(self):
self.timer.timeout.connect(self.update)
self.timer.start(timerPeriod)
## All GPIB assumed.
def createLayout(self, winSize):
# GUI Items Declaration
self.revertPolarityBtn = QtGui.QPushButton('Revert\nPolarity')
self.revertPolarityBtn.clicked.connect(self.revertPolarity)
self.expbtn = QtGui.QPushButton('Export')
self.expbtn.clicked.connect(self.csvExport)
self.rstbtn = QtGui.QPushButton('Reset')
self.rstbtn.clicked.connect(self.resetData)
self.filterEnChk = QtGui.QCheckBox('Analog Filter')
self.filterEnChk.clicked.connect(self.filterSetting)
self.filterEnChk.setChecked(False)
self.ymaxBox = QtGui.QLineEdit(self.defaultYMax)
self.ymaxBox.editingFinished.connect(self.yrangeSetting)
self.ymaxBox.setFixedWidth(60)
self.ymaxBox.setAlignment(QtCore.Qt.AlignRight)
self.yminBox = QtGui.QLineEdit(self.defaultYMin)
self.yminBox.editingFinished.connect(self.yrangeSetting)
self.yminBox.setFixedWidth(60)
self.yminBox.setAlignment(QtCore.Qt.AlignRight)
self.autoYChk = QtGui.QCheckBox('Auto Y-Range')
self.autoYChk.clicked.connect(self.yrangeSetting)
self.autoYChk.setChecked(True)
self.dcheck = QtGui.QCheckBox('Debug')
self.dcheck.setChecked(False)
self.measureEnChk = QtGui.QCheckBox('Measure Enable')
self.measureEnChk.setChecked(False)
self.startBtn = QtGui.QPushButton('Start')
self.startBtn.clicked.connect(self.startObserve)
self.plotEnChk = QtGui.QCheckBox('Plot Enable')
self.plotEnChk.setChecked(True)
self.msgLab = QtGui.QLabel(self.placeholder)
self.autoInjectChk = QtGui.QCheckBox('Auto Injection')
self.autoInjectChk.setChecked(True)
self.autoInjectChk.clicked.connect(self.aijUpdate)
self.topThresholdBox = QtGui.QLineEdit(self.defaultTopThreshold)
self.topThresholdBox.editingFinished.connect(self.thresholdSetting)
self.topThresholdBox.setFixedWidth(60)
self.topThresholdBox.setAlignment(QtCore.Qt.AlignRight)
self.botThresholdBox = QtGui.QLineEdit(self.defaultBotThreshold)
self.botThresholdBox.editingFinished.connect(self.thresholdSetting)
self.botThresholdBox.setFixedWidth(60)
self.botThresholdBox.setAlignment(QtCore.Qt.AlignRight)
self.adaptThresholdChk = QtGui.QCheckBox('Adaptive Th')
self.adaptThresholdChk.setChecked(True)
self.adaptThresholdChk.clicked.connect(self.adaptUpdate)
self.percentThresholdBox = QtGui.QLineEdit(self.defaultPercentage)
self.percentThresholdBox.editingFinished.connect(self.tpSetting)
self.percentThresholdBox.setFixedWidth(60)
self.percentThresholdBox.setAlignment(QtCore.Qt.AlignRight)
self.baseLineLab = QtGui.QLabel()
self.bsline = pg.InfiniteLine(self.baseLineValue, 0)
self.ttline = pg.InfiniteLine(self.topLineValue, 0, pen='c')
self.tbline = pg.InfiniteLine(self.botLineValue, 0, pen='m')
self.manualTrigBtn = QtGui.QPushButton('Manual\nTrigger') # trigger
self.manualTrigBtn.clicked.connect(self.setManualTrigger) #handler.dev[1].write(':INIT'))#
self.injectionCountTitleLab = QtGui.QLabel('Injection#:')
self.injectionCountLab = QtGui.QLabel(str(self.injectionCount))
self.rebaseBtn = QtGui.QPushButton('RB')
self.rebaseBtn.clicked.connect(self.resetBaseLine)
self.rezeroCountBtn = QtGui.QPushButton('RZ')
self.rezeroCountBtn.clicked.connect(self.resetInjectionCount)
self.vsenseBox = QtGui.QLineEdit() # vsense
self.ilimitBox = QtGui.QLineEdit() # ilimit
self.ishotBox = QtGui.QLineEdit() # ishot
self.vclampBox = QtGui.QLineEdit() # vclamp
self.tPulseBox = QtGui.QLineEdit() # tpulse
self.filterCntBox = QtGui.QLineEdit('22') # filterCnt
self.revertThresholdBox = QtGui.QLineEdit('2E+6') # Revert at this value
self.autoRevertChk = QtGui.QCheckBox('Auto Revert')
self.autoRevertChk.setChecked(True)
self.autoRevertChk.clicked.connect(self.autoRevert)
self.manualRevertChk = QtGui.QCheckBox('Manual Revert')
self.manualRevertChk.setChecked(True)
self.manualRevertChk.clicked.connect(self.manualRevert)
self.vsenseBox.textEdited.connect(self.setObSettingsUpdated)
self.ilimitBox.textEdited.connect(self.setObSettingsUpdated)
self.ishotBox.textEdited.connect(self.setIjSettingsUpdated)
self.vclampBox.textEdited.connect(self.setIjSettingsUpdated)
self.tPulseBox.textEdited.connect(self.setIjSettingsUpdated)
self.updateSettingsBtn = QtGui.QPushButton('Update\nSettings') # updateSettings
self.updateSettingsBtn.clicked.connect(self.updateInstSettings)
self.logBox = QtGui.QTextEdit()
# Plot Area
if remotePlot:
self.rview = RemoteGraphicsView()
self.rview.pg.setConfigOptions(antialias=setAntialias)
self.plt = self.rview.pg.PlotItem(title='Real-time Impedance Plot')
self.plt._setProxyOptions(deferGetattr=True)
self.rview.setCentralItem(self.plt)
self.tline = self.plt.plot()
else:
self.plt = pg.PlotWidget(title='Real-time Impedance Plot')
self.plt.addItem(self.bsline)
self.plt.addItem(self.ttline)
self.plt.addItem(self.tbline)
self.plt.showGrid(x=True, y=True)
self.plt.setClipToView(True)
self.plt.setLabel('bottom', 'Time', 's')
self.plt.setLabel('left', 'Impedance', 'Ω')
####################
## Set GUI Layout ##
#self.layout.addWidget(self.dcheck, row=0, col=0)
self.layout.addWidget(self.expbtn, row=0, col=0)
self.layout.addWidget(self.startBtn, row=1, col=0)
self.layout.addWidget(self.rstbtn, row=2, col=0)
self.logo = QtGui.QLabel()
self.logo.setPixmap(QtGui.QPixmap('resources\\img\\FBM_logo02.png'))
#self.logo.setFixedHeight(40)
#self.logo.setFixedWidth(300)
self.layout.addWidget(self.logo, row=0, col=1, colspan=3)
self.layout.addWidget(self.measureEnChk, row=1, col=1)
self.filterCntLab = QtGui.QLabel('Moving Avg.(2~100):')
self.layout.addWidget(self.filterCntLab, row=2, col=1)
self.filterCntLab.setFixedWidth(160)
self.filterCntLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.filterEnChk, row=1, col=2)
self.filterCntBox.setFixedWidth(40)
self.layout.addWidget(self.filterCntBox, row=2, col=2)
self.vsenseLab = QtGui.QLabel('V_sense:')
self.vsenseLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.vsenseLab, row=1, col=3)
self.ilimitLab = QtGui.QLabel('I_limit:')
self.ilimitLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.ilimitLab, row=2, col=3)
# Polarity Reversion
self.revertPolarityBtn.setFixedWidth(60)
self.layout.addWidget(self.revertPolarityBtn, row=0, col=4)
# V-sense
self.vsenseBox.setFixedWidth(60)
self.layout.addWidget(self.vsenseBox, row=1, col=4)
# I-limit
self.ilimitBox.setFixedWidth(60)
self.layout.addWidget(self.ilimitBox, row=2, col=4)
# Update Settings
self.updateSettingsBtn.setFixedWidth(60)
self.layout.addWidget(self.updateSettingsBtn, row=0, col=5)
# Ishot Box
self.ishotLab = QtGui.QLabel('I_shot:')
self.ishotLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.ishotLab, row=1, col=5)
self.ishotBox.setFixedWidth(60)
self.layout.addWidget(self.ishotBox, row=1, col=6)
# Vclamp box
self.vclampLab = QtGui.QLabel('V_clamp:')
self.vclampLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.vclampLab, row=2, col=5)
self.vclampBox.setFixedWidth(60)
self.layout.addWidget(self.vclampBox, row=2, col=6)
# tPulse Box
self.tPulseLab = QtGui.QLabel('t_Pulse:')
self.tPulseLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.tPulseLab, row=3, col=5)
self.tPulseBox.setFixedWidth(60)
self.layout.addWidget(self.tPulseBox, row=3, col=6)
# Manual Trig
self.manualTrigBtn.setFixedWidth(60)
self.layout.addWidget(self.manualTrigBtn, row=0, col=6)
self.layout.addWidget(self.autoRevertChk, row=4, col=5, colspan=2)
self.revertThresholdLab = QtGui.QLabel('Flip @')
self.revertThresholdLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.revertThresholdLab, row=5, col=5)
self.revertThresholdBox.setFixedWidth(60)
self.layout.addWidget(self.revertThresholdBox, row=5, col=6)
self.layout.addWidget(self.autoInjectChk, row=6, col=5, colspan=2)
self.layout.addWidget(self.botThresholdBox, row=7, col=5)
self.layout.addWidget(self.topThresholdBox, row=7, col=6)
self.layout.addWidget(self.baseLineLab, row=8, col=5)
self.rebaseBtn.setFixedWidth(30)
self.layout.addWidget(self.rebaseBtn, row=8, col=6)
# Peak counter
self.injectionCountTitleLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignBottom)
self.layout.addWidget(self.injectionCountTitleLab, row=9, col=5, colspan=1)
self.injectionCountLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.layout.addWidget(self.injectionCountLab, row=10, col=5)
self.rezeroCountBtn.setFixedWidth(30)
self.layout.addWidget(self.rezeroCountBtn, row=10, col=6)
# Version
self.versionLab = QtGui.QLabel(versionTxt)
self.versionLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
self.layout.addWidget(self.versionLab, row=0, col=7, colspan=2)
# Plot Speed
self.layout.addWidget(self.msgLab, row=1, col=7, rowspan=2, colspan=2)
self.msgLab.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
self.layout.addWidget(self.manualRevertChk, row=4, col=7, colspan=2)
self.layout.addWidget(self.plotEnChk, row=5, col=7, colspan=2)
self.layout.addWidget(self.autoYChk, row=6, col=7, colspan=2)
self.layout.addWidget(self.yminBox, row=7, col=7)
self.layout.addWidget(self.ymaxBox, row=7, col=8)
self.layout.addWidget(self.adaptThresholdChk, row=9, col=7, colspan=2)
self.layout.addWidget(self.percentThresholdBox, row=10, col=7)
self.layout.addWidget(QtGui.QLabel('%'), row=10, col=8)
self.layout.addWidget(self.logBox, row=11, col=5, colspan=4)
if remotePlot:
self.layout.addWidget(self.rview, row=3, col=0, rowspan=10, colspan=5)
else:
self.layout.addWidget(self.plt, row=3, col=0, rowspan=10, colspan=5)
self.layout.resize(*winSize)
self.layout.show()
self.curve = self.plt.plot()
# if remotePlot:
def startObserve(self):
if (observerModel == 2602) :
handler.dev[0].write('startObserve()')
print('Starting...')
self.measureEnChk.setChecked(True)
def aijUpdate(self):
if observerModel == 2602:
if self.autoInjectChk.isChecked():
handler.dev[0].write('Aij(true)')
else:
handler.dev[0].write('Aij(false)')
def resetBaseLine(self):
self.baseLineValue = float(self.defaultBotThreshold)
self.topLineValue = float(self.defaultTopThreshold)
self.botLineValue = float(self.defaultBotThreshold)
handler.dev[0].write('reb()')
def resetInjectionCount(self):
self.injectionCount = 0
handler.dev[0].write('rez()')
def setManualTrigger(self):
if observerModel == 2602:
handler.dev[0].write('genPulse(ijWidth)')
else:
handler.dev[1].write(':INIT')
def setObSettingsUpdated(self):
self.obSettingUpdated = True
def setIjSettingsUpdated(self):
self.ijSettingUpdated = True
print('Injector setting has been changed.')
#print(self.ijSettingUpdated)
#print((self.ijSettingUpdated == True) )
#print((self.obSettingUpdated != True) & (self.ijSettingUpdated != True))
def updateInstSettings(self):
if ((self.obSettingUpdated != True) & (self.ijSettingUpdated != True)):
print('No setting has been changed.')
else:
if self.obSettingUpdated:
self.obSettingUpdated = False
self.ijSettingUpdated = False
handler.sourceVolt = float(self.vsenseBox.text())
handler.sourceILim = float(self.ilimitBox.text())
if observerModel == 2602:
handler.dev[0].write('updateSMUA('+ str(handler.sourceVolt)
+ ',' + str(handler.sourceILim) + ')')
print('updateSMUA('+ str(handler.sourceVolt)
+ ',' + str(handler.sourceILim) + ')')
# updating function
if self.ijSettingUpdated:
self.ijSettingUpdated = False
handler.injectCurr = float(self.ishotBox.text())
handler.injectVLim = float(self.vclampBox.text())
handler.injectWidth = float(self.tPulseBox.text())
if observerModel == 2602:
handler.dev[0].write('updateSMUB(' + str(handler.injectCurr)
+ ',' + str(handler.injectVLim) + ')')
handler.dev[0].write('ijWidth = '+str(handler.injectWidth))
else:
handler.dev[1].write(':SOUR:CURR:LEV ' + str(handler.injectCurr))
handler.dev[1].write(':SENS:VOLT:PROT ' + str(handler.injectVLim))
def adaptUpdate(self):
if observerModel == 2602:
if self.adaptThresholdChk.isChecked():
percent = self.percentThresholdBox.text()
else:
percent = '0'
print('adaptiveT(' + percent + ')')
handler.dev[0].write('adaptiveT(' + percent + ')')
def revertPolarity(self):
if observerModel == 2602:
#handler.dev[0].write('rvt()')
print('Obsolete function since V2.5.6')
else:
self.obSettingUpdated = ~self.obSettingUpdated # Why?
self.revertPolarityClicked = True
handler.sourceVolt = -handler.sourceVolt
self.vsenseBox.setText(str(handler.sourceVolt))
print('Source is changed to %02.1f' % handler.sourceVolt)
def csvExport(self):
filename = strftime('%Y%m%d_%H%M')+'.csv'
np.savetxt(filename, self.data, fmt='%.06f',delimiter=',',header='|Z|(ohms),time(s)')
choice = QtGui.QMessageBox.information(self, 'Message',
"Impedance data is saved in "+filename+'!',
QtGui.QMessageBox.Ok)
if choice == QtGui.QMessageBox.Ok:
print("Exporting confirmed.")
def resetData(self):
self.ptr = 0
self.data = np.empty((16 * plotWindow, 2))
self.avgFPS = 0.0
self.baseLineValue = float(self.defaultBotThreshold)
self.topLineValue = float(self.defaultTopThreshold)
self.botLineValue = float(self.defaultBotThreshold)
if self.dcheck.isChecked():
self.startTime = pg.ptime.time()
elif observerModel == 2401:
self.startTime = 0
handler.dev[0].write(':SYST:TIME:RES')
return
def autoRevert(self):
global handler
if self.autoRevertChk.isChecked():
if observerModel == 2602:
handler.dev[0].write('revertThreshold = ' + self.revertThresholdBox.text())
handler.dev[0].write('autoRevert = true')
print('Auto polarity reversion is applied.')
else:
if observerModel == 2602:
handler.dev[0].write('autoRevert = false')
print('Auto polarity reversion is cancelled.')
return
def manualRevert(self):
global handler
if self.manualRevertChk.isChecked():
if observerModel == 2602:
handler.dev[0].write('manualRevert = true')
print('Manual polarity reversion is enabled.')
else:
if observerModel == 2602:
handler.dev[0].write('manualRevert = false')
print('Manual polarity reversion is disabled.')
return
def filterSetting(self):
global handler
if self.filterEnChk.isChecked():
#self.ser.write(b'RES:FILT ON\n')
if observerModel == 2602:
handler.dev[0].write('smua.measure.filter.count = '+ self.filterCntBox.text())
handler.dev[0].write('smua.measure.filter.enable = smua.FILTER_ON')
elif observerModel == 2401:
handler.dev[0].write(':SENS:AVER ON')
elif observerModel == 2461:
#if observerModel == 2461:
handler.dev[0].write(':SENS:CURR:AVER ON')
print('Analog LPF is applied.')
else:
#self.ser.write(b'RES:FILT OFF\n')
if observerModel == 2602:
handler.dev[0].write('smua.measure.filter.enable = smua.FILTER_OFF')
elif observerModel == 2401:
handler.dev[0].write(':SENS:AVER OFF')
elif observerModel == 2461:
handler.dev[0].write(':SENS:CURR:AVER OFF')
print('Analog LPF is disabled.')
return
def yrangeUpdate(self):
self.defaultYMax = self.ymaxBox.text()
self.defaultYMin = self.yminBox.text()
print(self.defaultYMax, self.defaultYMin)
return
def yrangeSetting(self):
self.yrangeUpdate()
if self.autoYChk.isChecked():
self.plt.enableAutoRange(axis='y')
print('Auto Y-range is set.')
else:
# self.plt.setRange(yRange=[float(self.ymin),float(self.ymax)],update=True,disableAutoRange=True)
self.plt.disableAutoRange(axis='y')
self.plt.setYRange(float(self.defaultYMin), float(self.defaultYMax))
print('Manual Y-range is set.')
return
def thresholdSetting(self):
self.defaultTopThreshold = self.topThresholdBox.text()
self.defaultBotThreshold = self.botThresholdBox.text()
self.topLineValue = float(self.defaultTopThreshold)
self.botLineValue = float(self.defaultBotThreshold)
self.ttline.setValue(self.topLineValue)
self.tbline.setValue(self.botLineValue)
def tpSetting(self):
self.percentThreshold = float(self.percentThresholdBox.text())
def update(self):
if self.measureEnChk.isChecked():
## Expanding data buffer
self.ptr += bufferDepth
if self.ptr >= self.data.shape[0]:
tmp = self.data
self.data = np.empty((self.data.shape[0]*2,2))
self.data[:tmp.shape[0],:] = tmp
print('Expanding data buffer...')
# Updating Z-data
if self.dcheck.isChecked():
now = pg.ptime.time()
self.data[self.ptr - bufferDepth:self.ptr, 0] = np.random.normal(size=bufferDepth)
self.data[self.ptr, 1] = now - self.startTime
for i in range(1, bufferDepth):
self.data[self.ptr - i, 1] = self.data[self.ptr, 1] - (now - self.lastUpdate) * i / float(bufferDepth)
else:
# Pre-processing may be necessary
self.data[self.ptr-bufferDepth : self.ptr] = self.getData()
if self.revertPolarityClicked:
self.revertPolarityClicked = False
if observerModel == 2461:
handler.dev[0].write(':SOUR:VOLT '+str(handler.sourceVolt)+'\n')
if self.plotEnChk.isChecked() & ((self.ptr / bufferDepth) % plotSubsampling == 0):
now = self.data[self.ptr-1, 1]
try:
fps = 1 / (now - self.lastUpdate)
except:
fps = 1
if self.ptr < plotWindow: ## Plot is not moving at this point
self.curve.setData(x=self.data[:self.ptr, 1], y=self.data[:self.ptr, 0], _callSync='off')
self.sigma = np.std(self.data[:self.ptr, 0])
else: # Moving plot
self.curve.setData(x=self.data[self.ptr - plotWindow:self.ptr, 1],
y=self.data[self.ptr - plotWindow:self.ptr, 0], _callSync='off')
self.sigma = np.std(self.data[self.ptr - plotWindow:self.ptr, 0])
self.bsline.setValue(self.baseLineValue)
self.ttline.setValue(self.topLineValue)
self.tbline.setValue(self.botLineValue)
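                # exponentially-weighted moving average of the refresh rate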
self.avgFPS = self.avgFPS * 0.95 + fps * 0.05
self.msgLab.setText('Plotting\n@%02.2ffps\n(%0.1fpoints/s)\nσ=%.2e' % \
(self.avgFPS, self.avgFPS * bufferDepth * plotSubsampling, self.sigma))
self.baseLineLab.setText('%02.2e' % self.baseLineValue)
self.injectionCountLab.setText(str(self.injectionCount))
self.lastUpdate = now
return
def getData(self):
global handler
## GPIB
try:
#received = handler.dev[0].read().split(',')
if observerModel == 2602:
try:
received = handler.dev[0].read().rstrip('\n').split(',')
except:
print(received)
#handler.dev[0].read()
#if (handler.dev[0].read()!="TSP>\n"): print('Non-prompt output detected.')
elif observerModel == 2461:
handler.dev[0].write(':READ:DIG? "defbuffer1",READ,REL')
#handler.dev[0].write(':READ? "defbuffer1",READ,REL')
received = LinkGPIB.formatSRE(handler.dev[0].read_raw())
elif observerModel == 2401:
received = LinkGPIB.formatSRE(handler.dev[0].read_raw())
#received = handler.dev[0].query(':READ?').split(',')
#received = handler.readMeterBuffer().split(',')
#print(received)
readValues = np.empty((bufferDepth,2)) # Z, t
if observerModel == 2602: # re-coded for V2.5
# Cell detection: Source Voltage and Read Current
readValues[:,0] = received[0::6] # Z, t, base, bot, top, ijCnt
readValues[:,1] = received[1::6]
self.baseLineValue = float(received[2])
self.botLineValue = received[3]
self.topLineValue = received[4]
self.injectionCount = int(received[5])
#
# if self.autoInjectChk.isChecked():
# if ~self.triggered & (np.max(readValues[:, 0]) > self.topLineValue):
# handler.dev[1].write(':INIT')
# self.triggered = True
# self.injectionCount += 1 # Counts number of injections
# elif (np.min(readValues[:,0]) < self.botLineValue):
# if (np.min(readValues[:,0]) < self.baseLineValue):
# self.baseLineValue = self.baseLineValue * 0.996 + np.average(readValues[:, 0]) * 0.004
# else:
# self.baseLineValue = self.baseLineValue * 0.98 + np.average(readValues[:, 0]) * 0.02
# if self.adaptThresholdChk.isChecked():
# self.botLineValue = self.baseLineValue * (1 + 0.006 * self.percentThreshold)
# self.topLineValue = self.baseLineValue * (1 + 0.01 * self.percentThreshold)
# #print(self.baseLineValue)
# if self.triggered:
# self.triggered = False
elif observerModel == 2461:
# Cell detection: Source Voltage and Read Current
readValues[:,0] = [ handler.sourceVolt / x for x in received[0::2]]
## debugging - source current and read voltage
#readValues[:,0] = [x / handler.sourceCurr for x in received[0::2]]
#print(readValues[:,0])
if self.autoInjectChk.isChecked():
if ~self.triggered & (np.max(readValues[:, 0]) > self.topLineValue):
handler.dev[1].write(':INIT')
self.triggered = True
elif (np.min(readValues[:,0]) < self.botLineValue):
self.baseLineValue = self.baseLineValue * 0.95 + np.average(readValues[:, 0]) * 0.05
if self.adaptThresholdChk.isChecked():
self.botLineValue = self.baseLineValue * (1 + 0.006 * self.percentThreshold)
self.topLineValue = self.baseLineValue * (1 + 0.01 * self.percentThreshold)
#print(self.baseLineValue)
if self.triggered:
self.triggered = False
readValues[:,1] = received[1::2]
else:
readValues[:,0] = received[0::2]
readValues[:,1] = received[1::2]
#print(readValues[:,1])
#print(readValues)
except:
readValues = np.random.normal(size=(bufferDepth,2))
return readValues
def eventFilter(self, source, event):
if (source is self.layout and
event.type() == QtCore.QEvent.KeyPress):
key = event.key()
if key == QtCore.Qt.Key_Escape:
print('Esc button is pressed.')
# sys.exit(1)
elif key == QtCore.Qt.Key_Space:
self.setManualTrigger()
return QtGui.QMainWindow.eventFilter(self, source, event)
# def closeEvent(self, event):
# print('Calling')
# print('event: {0}'.format(event))
# event.accept()
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
app = QtGui.QApplication(sys.argv)
handler = LinkGPIB.LinkHandler()
m = MeterGUI()
m.initLink()
#handler.dev[0].write('startObserve()')
m.runTimer()
sys.exit(app.exec_())
| mit | -8,308,357,177,591,892,000 | 42.695335 | 122 | 0.598766 | false | 3.717599 | false | false | false |
edx-solutions/api-integration | edx_solutions_api_integration/management/commands/convert_ooyala_to_bcove.py | 1 | 3014 | import datetime
import logging
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from edx_solutions_api_integration.tasks import convert_ooyala_to_bcove
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from pytz import UTC
logger = logging.getLogger(__name__) # pylint: disable=locally-disabled, invalid-name
class Command(BaseCommand):
"""
Command to update Ooyala Xblock Content IDs to corresponding Brightcove IDs
"""
help = 'Convert Ooyala IDs to corresponding Brightcove IDs in Xblock and embeds'
batch_size = 100
def add_arguments(self, parser):
parser.add_argument(
"--user-id",
dest="user_id",
help="Staff User ID",
),
parser.add_argument(
"--course-ids",
dest="course_ids",
help="Course IDs to process Ooyala instances in",
),
parser.add_argument(
"--revert",
dest="revert",
action="store_true",
default=False,
help="Revert all the converted Ids back to previous state"
),
def handle(self, *args, **options):
course_ids = options.get('course_ids')
user_id = options.get('user_id')
revert = options.get('revert')
if not user_id:
raise CommandError("--user-id parameter is missing. Please provide a staff user id")
else:
try:
User.objects.get(id=user_id)
except User.DoesNotExist:
raise CommandError("Invalid user id: {}. Please provide a valid staff user id".format(user_id))
if course_ids:
course_ids = course_ids.split(',')
logger.info('Ooyala IDs update task queued for Courses: {}'.format(course_ids))
convert_ooyala_to_bcove.delay(
staff_user_id=user_id,
course_ids=course_ids,
revert=revert,
callback="conversion_script_success_callback",
)
else:
# run on all open courses
open_courses = CourseOverview.objects.filter(
Q(end__gte=datetime.datetime.today().replace(tzinfo=UTC)) |
Q(end__isnull=True)
).values_list('id', flat=True)
logger.info('Ooyala IDs update command: queuing task for {} Open Courses'.format(len(open_courses)))
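            # queue one task per batch of self.batch_size (100) course ids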
for course_ids in self.chunks(open_courses, self.batch_size):
convert_ooyala_to_bcove.delay(
staff_user_id=user_id,
course_ids=course_ids,
revert=revert,
callback="conversion_script_success_callback",
)
def chunks(self, l, n):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i + n]
| agpl-3.0 | -6,302,106,137,230,728,000 | 35.756098 | 112 | 0.584605 | false | 4.197772 | false | false | false |
kamyu104/GoogleCodeJam-2014 | World Finals/aram.py | 1 | 1450 | # Copyright (c) 2015 kamyu. All rights reserved.
#
# Google Code Jam 2014 World Finals - Problem F. ARAM
# https://code.google.com/codejam/contest/7214486/dashboard#s=p5
#
# Time: O(60 * N * R * G)
# Space: O(1)
#
# Can you win at least X fraction of the time?
def CanWin(X):
A = []
last_G_values = 0
# C < G, not enough coins for a reroll.
for C in xrange(0, G):
A.append(avg_win_prob_top[N] - X)
last_G_values += A[C]
# C >= G, enough coins for a reroll.
for C in xrange(G, R * G + 1):
A.append(-1e100)
for K in xrange(1, N + 1):
p = 1.0 * (N - K) / N # Probability of rerolling.
p_reroll = p / (1 - p) * last_G_values
p_not_reroll = avg_win_prob_top[K] - X
A[C] = max(A[C], p_reroll + p_not_reroll)
if A[C] >= 0:
return True
last_G_values += A[C] - A[C - G]
return False
for case in xrange(input()):
N, R, G = map(int, raw_input().strip().split())
win_prob = map(float, raw_input().strip().split())
win_prob = sorted(win_prob, reverse=True)
avg_win_prob_top = [0]
for topK in xrange(1, N + 1):
avg_win_prob_top.append(sum(win_prob[0:topK]) / topK)
left = 0.0
right = 1.0
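    # Bisect on X: `left` converges to the largest X for which CanWin(X) holds.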
for i in xrange(60):
mid = (left + right) / 2
if not CanWin(mid):
right = mid
else:
left = mid
print "Case #%d: %.15f" % (case+1, left) | mit | 4,796,693,524,528,156,000 | 26.377358 | 64 | 0.522759 | false | 2.799228 | false | false | false |
Intel-Corporation/tensorflow | tensorflow/python/framework/func_graph.py | 1 | 42379 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""FuncGraph and related functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections as py_collections
import itertools
import weakref
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import tape
from tensorflow.python.eager.graph_only_ops import graph_placeholder
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework.auto_control_deps import AutomaticControlDependencies
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import compat
from tensorflow.python.util import memory
from tensorflow.python.util import nest
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_decorator
from tensorflow.python.util.lazy_loader import LazyLoader
# This is to avoid a circular dependency:
# function -> func_graph
function = LazyLoader("function", globals(),
"tensorflow.python.eager.function")
def_function = LazyLoader(
"def_function", globals(),
"tensorflow.python.eager.def_function")
WHITELIST_COLLECTIONS = [
ops.GraphKeys.GLOBAL_VARIABLES,
ops.GraphKeys.LOCAL_VARIABLES,
ops.GraphKeys.TRAINABLE_VARIABLES,
variable_scope._VARSTORE_KEY, # pylint: disable=protected-access
variable_scope._VARSCOPESTORE_KEY # pylint: disable=protected-access
]
class UnknownArgument(object):
"""Signifies an argument which is not currently handled."""
pass
def convert_structure_to_signature(structure, arg_names=None):
"""Convert a potentially nested structure to a signature.
Args:
structure: Structure to convert, where top level collection is a list or a
tuple.
arg_names: Optional list of arguments that has equal number of elements as
`structure` and is used for naming corresponding TensorSpecs.
Returns:
Identical structure that has TensorSpec objects instead of Tensors and
    UnknownArgument instead of any unsupported types.
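
  Example (illustrative sketch; the exact TensorSpec depends on the input):
    # Assuming `t` is a float32 Tensor of shape [2, 3]:
    convert_structure_to_signature([t, {"flag": True}], arg_names=["x", "opts"])
    # -> [TensorSpec(shape=(2, 3), dtype=tf.float32, name="x"), {"flag": True}]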
"""
structure = composite_tensor.replace_composites_with_components(structure)
def encode_arg(arg, path):
"""A representation for this argument, for converting into signatures."""
if isinstance(arg, ops.Tensor):
user_specified_name = None
try:
user_specified_name = compat.as_str(
arg.op.get_attr("_user_specified_name"))
except ValueError:
pass
if path and user_specified_name and user_specified_name != path[0]:
# The user has explicitly named the argument differently than the name
# of the function argument.
name = user_specified_name
else:
name = "/".join([str(p) for p in path])
return tensor_spec.TensorSpec(arg.shape, arg.dtype, name)
if isinstance(arg, (
int,
float,
bool,
type(None),
dtypes.DType,
tensor_spec.TensorSpec,
)):
return arg
return UnknownArgument()
# We are using the flattened paths to name the TensorSpecs. We need an
# explicit name for them downstream.
flattened = nest.flatten_with_tuple_paths(structure, expand_composites=True)
if arg_names:
if len(arg_names) != len(structure):
raise ValueError(
"Passed in arg_names don't match actual signature (%s)." % arg_names)
# Replace all top-level names with their actual arg_names. If a path before
# was "(2,'a',1)", it will become "(arg_names[2],'a',1)".
flattened = [
((arg_names[path[0]],) + path[1:], arg) for path, arg in flattened
]
mapped = [encode_arg(arg, path) for path, arg in flattened]
return nest.pack_sequence_as(structure, mapped, expand_composites=True)
class FuncGraph(ops.Graph):
"""Graph representing a function body.
Attributes:
name: The name of the function.
inputs: Placeholder tensors representing the inputs to this function. The
tensors are in this FuncGraph. This represents "regular" inputs as well as
captured inputs (i.e. the values of self.captures), with the regular
inputs coming first.
outputs: Tensors that will be returned by this function. The tensors are in
this FuncGraph.
control_outputs: Operations that must be executed before the function
represented by this graph can be said to have been executed.
structured_input_signature: A tuple of (args, kwargs), which are both
possibly-nested python objects that were received by this function. Note
that these structures might contain Python `None`s.
structured_outputs: A possibly-nested python object which will be returned
by this function. The Tensors in this structure are the same as those of
self.outputs. Note that this structure might contain Python `None`s.
variables: Variables that should be watched during function execution.
outer_graph: The graph this function is defined in. May be another FuncGraph
or the global default Graph.
captures: Maps external tensor -> internal tensor (i.e. input placeholder).
The entries are in the order they were captured.
control_captures: Set of external ops on which this graph has a control
dependency.
seed: The graph-level random seed.
capture_by_value: If True, the func graph will capture Variables by value
instead of reference.
"""
def __init__(self, name, collections=None, capture_by_value=None):
"""Construct a new FuncGraph.
The graph will inherit its graph key, collections, seed, and distribution
strategy stack from the current context or graph.
Args:
name: the name of the function.
collections: a dictionary of collections this FuncGraph should start
with. If not specified (None), the FuncGraph will read (but not write
to) the outer graph's collections that are not whitelisted, and both
read and write to the outer graph's collections that are whitelisted.
The current whitelisted collections are the global variables, the
local variables, and the trainable variables.
Defaults to None.
capture_by_value: An optional boolean. If True, the func graph will
capture Variables by value instead of reference. By default inherit
from outer graphs, and failing that will default to False.
"""
super(FuncGraph, self).__init__()
self.name = name
self.inputs = []
self.outputs = []
self.control_outputs = []
self.control_captures = set()
self.structured_input_signature = None
self.structured_outputs = None
self._weak_variables = []
self._watched_variables = weakref.WeakSet()
self.outer_graph = ops.get_default_graph()
self.captures = py_collections.OrderedDict()
# Inherit capture-by-value from outer graph.
if capture_by_value is not None:
self.capture_by_value = capture_by_value
elif self.outer_graph is not None and isinstance(
self.outer_graph, FuncGraph):
self.capture_by_value = self.outer_graph.capture_by_value
else:
self.capture_by_value = False
self._building_function = True
# Map from resource tensor name to last op (in program order) which uses
# this tensor. Used to enforce that execution order matches program order
# for resource tensors.
self._last_op_using_resource_tensor = {}
graph = self.outer_graph
if context.executing_eagerly():
self.seed = context.global_seed()
      # [for tf-data user migration from TF1.0 to 2.0] seed_used keeps track of
# any None op_seed for random_op in the function, in which case we end up
# using function seed, which could be unintended behavior for the op.
self._seed_used = False
else:
self.seed = graph.seed
self._seed_used = False
# TODO(allenl): Figure out if we can remove colocation stack
# specialization (currently used in cond_v2), here and in the cache key.
self._colocation_stack = graph._colocation_stack.copy() # pylint: disable=protected-access
if collections is None:
for collection_name in graph.get_all_collection_keys():
if collection_name not in WHITELIST_COLLECTIONS:
self._collections[collection_name] = graph.get_collection(
collection_name)
for collection_name in WHITELIST_COLLECTIONS:
self._collections[collection_name] = graph.get_collection_ref(
collection_name)
else:
self._collections = collections
def __str__(self):
return "FuncGraph(name=%s, id=%s)" % (self.name, id(self))
def watch_variable(self, v):
"""Marks the variable v as accessed while building this graph."""
while self is not None and isinstance(self, FuncGraph):
self._watched_variables.add(v)
self = self.outer_graph
def control_dependencies(self, control_inputs):
"""Handles control dependencies.
FuncGraph wraps Graph's control_dependencies logic by first filtering out
any external tensors / operations and storing them in the graph's
control_captures member. Any consumers of this function graph must then
decide how to handle the control captures.
Args:
control_inputs: A list of `Operation` or `Tensor` objects which
must be executed or computed before running the operations
defined in the context. Can also be `None` to clear the control
dependencies.
Returns:
A context manager that specifies control dependencies for all
operations constructed within the context.
Raises:
TypeError: If `control_inputs` is not a list of `Operation` or
`Tensor` objects.
"""
if control_inputs is None:
return super(FuncGraph, self).control_dependencies(control_inputs)
filtered_control_inputs = []
for c in control_inputs:
# Check for _UnreadVariable
if (isinstance(c, ops.IndexedSlices) or
(hasattr(c, "_handle") and hasattr(c, "op"))):
c = c.op
graph_element = ops._as_graph_element(c) # pylint: disable=protected-access
if graph_element is None:
graph_element = c
if graph_element is not None and getattr(
graph_element, "graph", None) is not self:
self.control_captures.add(graph_element)
else:
filtered_control_inputs.append(graph_element)
return super(FuncGraph, self).control_dependencies(filtered_control_inputs)
def as_default(self):
outer_cm = super(FuncGraph, self).as_default()
@tf_contextlib.contextmanager
def inner_cm():
"""Context manager for copying distribute.Strategy scope information."""
graph = ops.get_default_graph()
# pylint: disable=protected-access
# TODO(b/112906995, nareshmodi): distribution strategy depends on
# inheriting this stack from the default graph even in eager mode. Maybe
# it should be part of the eager context? This would also allow us to
# remove a get_default_graph() call from the function cache lookup.
old_strategy_stack = self._distribution_strategy_stack
self._distribution_strategy_stack = list(
graph._distribution_strategy_stack)
# We ignore device placements from any outer scopes while tracing the
# function when possible, to avoid hard-coding them in the function
# graph. "Default" placements come from the PartitionedCallOp's placement,
# so that the same trace of the Python function may be placed on several
# different devices and saved functions may be placed on new devices when
# restored.
old_device_stack = self._device_function_stack
if context.executing_eagerly():
if self._distribution_strategy_stack:
self._add_device_to_stack(context.context().device_name)
else:
if (self._distribution_strategy_stack
or device_stack_has_callable(graph._device_function_stack)):
# Hard-code devices from device functions in the function body
self._device_function_stack = graph._device_function_stack.copy()
old_creator_stack = self._variable_creator_stack
self._variable_creator_stack = graph._variable_creator_stack
# Inherit the graph key, since this is used for matching variables in
# optimizers.
old_graph_key = self._graph_key
self._graph_key = graph._graph_key
# pylint: enable=protected-access
with outer_cm as g:
try:
yield g
finally:
self._distribution_strategy_stack = old_strategy_stack
self._device_function_stack = old_device_stack
self._variable_creator_stack = old_creator_stack
self._graph_key = old_graph_key
return inner_cm()
@property
def output_types(self):
return [t.dtype for t in self.outputs]
@property
def output_shapes(self):
return [t.shape for t in self.outputs]
@property
def variables(self):
"""A list of variables accessed by this FuncGraph.
Note that functions keep only weak references to variables. Calling the
function after a variable it accesses has been deleted is an error.
Yields:
Strong references to variables accessed by this FuncGraph.
"""
for weak_v in self._weak_variables:
v = weak_v()
if v is None:
raise AssertionError(
"Called a function referencing variables which have been deleted. "
"This likely means that function-local variables were created and "
"not referenced elsewhere in the program. This is generally a "
"mistake; consider storing variables in an object attribute on "
"first call.")
yield v
@variables.setter
def variables(self, var_list):
self._weak_variables = [weakref.ref(v) for v in var_list]
def _capture_by_value(
self,
op_type,
inputs,
dtypes, # pylint: disable=redefined-outer-name
input_types=None,
name=None,
attrs=None,
op_def=None,
compute_shapes=True,
compute_device=True):
# When capturing by value, do the read outside
reverse_captures = dict((v, k) for k, v in self.captures.items())
uncaptured_inputs = [reverse_captures.get(t, t) for t in inputs]
with ops.init_scope():
if context.executing_eagerly():
attr_list = ("dtype", int(attrs["dtype"].type))
value, = execute.execute(
compat.as_bytes(op_type), 1, uncaptured_inputs, attr_list,
context.context())
else:
op = ops.get_default_graph().create_op(
op_type, uncaptured_inputs, dtypes, input_types, name, attrs,
op_def, compute_shapes, compute_device)
value = op.outputs[0]
captured_value = self.capture(value)
return captured_value.op
def create_op(
self,
op_type,
inputs,
dtypes=None, # pylint: disable=redefined-outer-name
input_types=None,
name=None,
attrs=None,
op_def=None,
compute_shapes=True,
compute_device=True):
"""Like Graph.create_op, except handles external input tensors.
This overload adds functionality to create_op to "capture" any external
input tensors, i.e. tensors from the eager context or outer function graphs
if this is a nested function. See `capture` for more information.
Args:
op_type: The `Operation` type to create. This corresponds to the
`OpDef.name` field for the proto that defines the operation.
inputs: A list of `Tensor` objects that will be inputs to the `Operation`.
dtypes: (Optional) A list of `DType` objects that will be the types of the
tensors that the operation produces.
input_types: (Optional.) A list of `DType`s that will be the types of
the tensors that the operation consumes. By default, uses the base
`DType` of each input in `inputs`. Operations that expect
reference-typed inputs must specify `input_types` explicitly.
name: (Optional.) A string name for the operation. If not specified, a
name is generated based on `op_type`.
attrs: (Optional.) A dictionary where the key is the attribute name (a
string) and the value is the respective `attr` attribute of the
`NodeDef` proto that will represent the operation (an `AttrValue`
proto).
op_def: (Optional.) The `OpDef` proto that describes the `op_type` that
the operation will have.
compute_shapes: (Optional.) Deprecated. Has no effect (shapes are always
computed).
compute_device: (Optional.) If True, device functions will be executed
to compute the device property of the Operation.
Returns:
An `Operation` object.
"""
if self.capture_by_value and op_type in ["ReadVariableOp",
"ResourceGather"]:
return self._capture_by_value(
op_type, inputs, dtypes, input_types, name, attrs, op_def,
compute_shapes, compute_device)
# This capturing logic interacts poorly with control flow contexts which
# want to replace inputs of ops far too late in the process. This can lead
# the context to get confused and try to create an Enter for an Enter. We
# can detect this here and skip the additional Enter which can confuse loop
# validation logic.
if op_type == "Enter" and inputs[0].op.type == "Enter":
if inputs[0].op.get_attr("frame_name") == attrs["frame_name"].s:
return inputs[0].op
# Calling AddValue on the control flow contexts to force creation of the
# backward accumulators in the original graph before we create placeholders
# to capture the inputs.
ctxt = ops.get_default_graph()._control_flow_context # pylint: disable=protected-access
for i, inp in enumerate(inputs):
# TPU Estimator defines a control flow context with no AddValue method.
if ctxt is not None and hasattr(ctxt, "AddValue"):
inp = ctxt.AddValue(inp)
inp = self.capture(inp)
inputs[i] = inp
return super(FuncGraph, self).create_op(
op_type, inputs, dtypes, input_types, name, attrs, op_def,
compute_device=compute_device)
def capture(self, tensor, name=None):
"""Captures `tensor` if it's external to this graph.
If `tensor` is from a different graph, returns a placeholder for it.
`tensor` and the placeholder will appear in self.captures, and the
placeholder will appear in self.inputs. Multiple calls to this method with
the same `tensor` argument will return the same placeholder. If `tensor` is
from this graph, returns `tensor`.
Args:
tensor: Tensor. May be from this FuncGraph or a different graph.
name: Optional name if a placeholder is created.
Returns:
Tensor from this FuncGraph.
"""
# Note: _forward_func_graph is currently only set when building the gradient
    # graph of a defun call. If the backwards graph tries to capture
    # tensors, those will be captured first in the forward graph. This
# makes sure that any tensor needed by a custom_gradient is correctly
# captured.
if (getattr(tensor, "graph", None) is not self and
hasattr(self, "_forward_func_graph") and
isinstance(self._forward_func_graph, FuncGraph)):
tensor = self._forward_func_graph.capture(tensor)
if isinstance(tensor, ops.EagerTensor):
if name is None:
name = str(ops.uid())
return self._capture_helper(tensor, name)
if tensor.graph is not self:
if name is None:
name = tensor.op.name
inner_graph = tensor.graph
while inner_graph is not None and isinstance(inner_graph, FuncGraph):
if inner_graph is self:
raise ValueError(
"Trying to capture a tensor from an inner function. This can be "
"caused by accessing a tensor defined inside a loop or "
"conditional body, or a subfunction, from a calling function, "
"without going through the proper return value mechanism. "
"Consider using TensorFlow mechanisms such as TensorArrays "
"to return tensors from inner functions or loop / conditional "
"bodies. Tensor: %s; tensor graph: %s; this graph: %s"
% (tensor, tensor.graph, self))
inner_graph = inner_graph.outer_graph
return self._capture_helper(tensor, name)
return tensor
def _capture_helper(self, tensor, name):
captured_tensor = self.captures.get(tensor, None)
if captured_tensor is None:
captured_tensor = _create_substitute_placeholder(tensor, name=name,
dtype=tensor.dtype)
self.captures[tensor] = captured_tensor
self.inputs.append(captured_tensor)
tape.record_operation("captured_value", [captured_tensor], [tensor],
lambda x: [x])
return captured_tensor
@property
def external_captures(self):
"""External tensors captured by this function."""
return list(self.captures.keys())
@property
def internal_captures(self):
"""Placeholders in this function corresponding captured tensors."""
return list(self.captures.values())
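# Illustrative sketch (not part of the original file): FuncGraph.capture maps
# an external tensor to a placeholder inside the graph, records the pair in
# `captures`, and appends the placeholder to `inputs`. The helper below is a
# hypothetical usage example; `outer_tensor` is assumed to be an eager tensor
# or a tensor from a different graph.
def _example_capture(outer_tensor):
  g = FuncGraph("capture_example")
  with g.as_default():
    inner = g.capture(outer_tensor)  # placeholder living inside `g`
  assert g.captures[outer_tensor] is inner
  return g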
def func_graph_from_py_func(name,
python_func,
args,
kwargs,
signature=None,
func_graph=None,
autograph=False,
autograph_options=None,
add_control_dependencies=True,
arg_names=None,
op_return_value=None,
collections=None,
capture_by_value=None,
override_flat_arg_shapes=None):
"""Returns a `FuncGraph` generated from `python_func`.
Args:
name: an identifier for the function.
python_func: the Python function to trace.
args: the positional args with which the Python function should be called;
ignored if a signature is provided.
kwargs: the keyword args with which the Python function should be called;
ignored if a signature is provided.
signature: a possibly nested sequence of `TensorSpecs` specifying the shapes
and dtypes of the arguments. When a signature is provided, `args` and
`kwargs` are ignored, and `python_func` is traced with Tensors conforming
to `signature`. If `None`, the shapes and dtypes are inferred from the
inputs.
func_graph: Optional. An instance of FuncGraph. If provided, we will use
this graph else a new one is built and returned.
autograph: whether to use autograph to compile `python_func`.
See https://www.tensorflow.org/guide/autograph for more information.
autograph_options: additional knobs to control when `autograph=True`.
See https://www.tensorflow.org/guide/autograph for more information.
add_control_dependencies: If True, automatically adds control dependencies
to ensure program order matches execution order and stateful ops always
execute.
arg_names: Optional list of argument names, used to give input placeholders
recognizable names.
op_return_value: Optional. A Tensor. If set and `python_func` returns
Operations, those return values will be replaced with this value. If not
set, returning an Operation triggers an error.
collections: a dictionary of collections this FuncGraph should start
with. If not specified (None), the FuncGraph will read (but not write to)
the outer graph's collections that are not whitelisted, and both
read and write to the outer graph's collections that are whitelisted.
The current whitelisted collections are the global variables, the
local variables, and the trainable variables.
Defaults to None.
capture_by_value: An optional boolean. If True, the func graph will capture
Variables by value instead of reference. By default inherit from outer
graphs, and failing that will default to False.
override_flat_arg_shapes: An optional list of instances that are either
`None` or `TensorShape`. The length must match that of
`nest.flatten((args, kwargs), expand_composites=True)`. The entries
containing value `None` must match entries in flattened arguments
containing non-tensors, while entries containing a `TensorShape` must
match entries in the flattened arguments containing tensors.
Returns:
A FuncGraph.
Raises:
TypeError: If any of `python_func`'s return values is neither `None` nor a
`Tensor`.
ValueError: If both `signature` and `override_flat_arg_shapes` are
passed in.
"""
if op_return_value is not None:
assert isinstance(op_return_value, ops.Tensor), op_return_value
if func_graph is None:
func_graph = FuncGraph(name, collections=collections,
capture_by_value=capture_by_value)
assert isinstance(func_graph, FuncGraph)
if add_control_dependencies:
control_manager = AutomaticControlDependencies()
else:
control_manager = ops.NullContextmanager()
with func_graph.as_default(), control_manager as a:
current_scope = variable_scope.get_variable_scope()
    default_use_resource = current_scope.use_resource
current_scope.set_use_resource(True)
if signature is not None and override_flat_arg_shapes is not None:
raise ValueError(
"Passed both signature and override_flat_arg_shapes: %s and %s."
% (signature, override_flat_arg_shapes))
if signature is not None:
args = signature
kwargs = {}
# Creates and names placeholders for all arguments.
if override_flat_arg_shapes is not None:
flat_args = nest.flatten(args, expand_composites=True)
arg_shapes = override_flat_arg_shapes[:len(flat_args)]
kwarg_shapes = override_flat_arg_shapes[len(flat_args):]
else:
arg_shapes = None
kwarg_shapes = None
func_args = _get_defun_inputs_from_args(
args, arg_names, flat_shapes=arg_shapes)
func_kwargs = _get_defun_inputs_from_kwargs(
kwargs, flat_shapes=kwarg_shapes)
# Convert all Tensors into TensorSpecs before saving the structured inputs.
# If storing pure concrete functions that are not called through polymorphic
# functions, we don't have access to FunctionSpec, so we need to call the
# TensorSpecs by their `arg_names` for later binding.
func_graph.structured_input_signature = (
convert_structure_to_signature(func_args, arg_names),
convert_structure_to_signature(func_kwargs))
flat_func_args = nest.flatten(func_args, expand_composites=True)
flat_func_kwargs = nest.flatten(func_kwargs, expand_composites=True)
# Temporarily set inputs to allow graph building code to inspect
# them. Reassigned below.
func_graph.inputs = [arg for arg in flat_func_args + flat_func_kwargs
if isinstance(arg, ops.Tensor)]
# Note: `nest.flatten` sorts by keys, as does `_deterministic_dict_values`.
# Variables to help check whether mutation happens in calling the function
# Copy the recursive list, tuple and map structure, but not base objects
func_args_before = nest.pack_sequence_as(func_args, flat_func_args,
expand_composites=True)
func_kwargs_before = nest.pack_sequence_as(
func_kwargs, flat_func_kwargs, expand_composites=True)
def convert(x):
"""Converts a function output to a Tensor."""
if x is None:
return None
if op_return_value is not None and isinstance(x, ops.Operation):
# TODO(b/79881896): we currently can't capture external control deps, so
# this won't work if x needs to be captured (i.e. if python_func returns
# captured Operations).
with ops.control_dependencies([x]):
x = array_ops.identity(op_return_value)
elif not isinstance(x, tensor_array_ops.TensorArray):
try:
x = ops.convert_to_tensor_or_composite(x)
except (ValueError, TypeError):
raise TypeError(
"To be compatible with tf.contrib.eager.defun, Python functions "
"must return zero or more Tensors; in compilation of %s, found "
"return value of type %s, which is not a Tensor." %
(str(python_func), type(x)))
if add_control_dependencies:
x = a.mark_as_return(x)
return x
try:
if autograph:
from tensorflow.python import autograph # pylint: disable=g-import-not-at-top
_, original_func = tf_decorator.unwrap(python_func)
def wrapper(*args, **kwargs):
# Note: functions annotated with @tf.function should always be
# converted even though they would meet autograph's whitelisting
# criteria.
# If this assumption is ever broken, converted_call will need to
# handle the possibility of original_func still being a shim, e.g.
# bound to WeakrefSelf.
return autograph.converted_call(
original_func, None,
autograph.ConversionOptions(
recursive=True,
optional_features=autograph_options,
force_conversion=True,
), args, kwargs)
# Wrapping around a decorator allows checks like tf_inspect.getargspec
# to be accurate.
converted_func = tf_decorator.make_decorator(original_func, wrapper)
python_func = tf_decorator.rewrap(python_func, original_func,
converted_func)
func_outputs = python_func(*func_args, **func_kwargs)
# invariant: `func_outputs` contains only Tensors, CompositeTensors,
# TensorArrays and `None`s.
func_outputs = nest.map_structure(convert, func_outputs,
expand_composites=True)
check_mutation(func_args_before, func_args)
check_mutation(func_kwargs_before, func_kwargs)
finally:
      current_scope.set_use_resource(default_use_resource)
# Variables in `func_args`, `func_kwargs` should be explicit inputs
# to the function, not captured inputs.
graph_variables = list(func_graph._watched_variables) # pylint: disable=protected-access
arg_variables = set()
inputs = []
for arg in (nest.flatten(func_args, expand_composites=True) +
nest.flatten(func_kwargs, expand_composites=True)):
if isinstance(arg, resource_variable_ops.ResourceVariable):
# Even if an argument variable was not used in the function, we've
# already manually captured the resource Tensor when creating argument
# placeholders.
resource_placeholder = func_graph.captures.pop(arg.handle, None)
if resource_placeholder is None:
continue
arg_variables.add(arg)
inputs.append(resource_placeholder)
elif isinstance(arg, ops.Tensor):
inputs.append(arg)
variables = [v for v in graph_variables if v not in arg_variables]
func_graph.inputs = inputs + list(func_graph.captures.values())
func_graph.structured_outputs = func_outputs
# Returning a closed-over tensor does not trigger convert_to_tensor.
func_graph.outputs.extend(
func_graph.capture(x)
for x in flatten(func_graph.structured_outputs)
if x is not None)
func_graph.variables = variables
if add_control_dependencies:
func_graph.control_outputs.extend(control_manager.ops_which_must_run)
# Register any other functions defined in the graph.
with ops.init_scope():
if context.executing_eagerly():
for f in func_graph._functions.values(): # pylint: disable=protected-access
# TODO(ashankar): What about the gradient registry?
context.add_function(f._c_func.func) # pylint: disable=protected-access
return func_graph
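# Illustrative sketch (not part of the original file): tracing a plain Python
# function into a FuncGraph via func_graph_from_py_func. The local import of
# constant_op is an assumption about what is available in this environment.
def _example_trace():
  from tensorflow.python.framework import constant_op  # assumed import
  def square(x):
    return x * x
  g = func_graph_from_py_func(
      "square", square, (constant_op.constant(2.0),), {})
  # g.inputs holds the placeholder created for `x`; g.outputs holds x * x.
  return g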
def maybe_captured(tensor):
"""If t is a captured value placeholder, returns the original captured value.
Args:
tensor: Tensor.
Returns:
A tensor, potentially from a different Graph/FuncGraph.
"""
if (not isinstance(tensor, ops.EagerTensor) and
tensor.op.graph.building_function and tensor.op.type == "Placeholder"):
for input_t, placeholder_t in tensor.op.graph.captures.items():
if tensor == placeholder_t:
return maybe_captured(input_t)
# pylint: enable=protected-access
return tensor
def device_stack_has_callable(device_stack):
"""Checks whether a device stack contains a callable."""
return any(callable(spec._device_name_or_function) # pylint: disable=protected-access
for spec in device_stack.peek_objs())
def check_mutation(n1, n2):
"""Check if two list of arguments are exactly the same."""
errmsg = ("Function to be traced should not modify structure of input "
"arguments. Check if your function has list and dictionary "
"operations that alter input arguments, "
"such as `list.pop`, `list.append`")
try:
nest.assert_same_structure(n1, n2, expand_composites=True)
except ValueError:
raise ValueError(errmsg)
for arg1, arg2 in zip(nest.flatten(n1, expand_composites=True),
nest.flatten(n2, expand_composites=True)):
if arg1 is not arg2:
raise ValueError(errmsg)
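# Illustrative sketch (not part of the original file): check_mutation raises if
# tracing changed the structure of the arguments, e.g. when the traced function
# appends to a list argument.
def _example_check_mutation():
  before = {"xs": [1, 2]}
  after = {"xs": [1, 2, 3]}  # structure changed while tracing
  try:
    check_mutation(before, after)
  except ValueError:
    return True  # mutation detected
  return False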
# TODO(edloper): If TensorArray becomes a CompositeTensor, then delete this.
def flatten(sequence):
"""Like nest.flatten w/ expand_composites, but returns flow for TensorArrays.
Args:
sequence: A nested structure of Tensors, CompositeTensors, and
TensorArrays.
Returns:
A list of tensors.
"""
flat_sequence = nest.flatten(sequence, expand_composites=True)
return [
item.flow if isinstance(item, tensor_array_ops.TensorArray) else item
for item in flat_sequence]
# TODO(edloper): If TensorArray becomes a CompositeTensor, then delete this.
def pack_sequence_as(structure, flat_sequence):
"""Like `nest.pack_sequence_as` but also builds TensorArrays from flows.
Args:
structure: The structure to pack into. May contain Tensors,
CompositeTensors, or TensorArrays.
flat_sequence: An iterable containing tensors.
Returns:
A nested structure.
Raises:
AssertionError if `structure` and `flat_sequence` are not compatible.
"""
flat_sequence = list(flat_sequence)
flattened_structure = nest.flatten(structure, expand_composites=True)
if len(flattened_structure) != len(flat_sequence):
raise ValueError("Mismatch in element count")
for i in range(len(flat_sequence)):
if isinstance(flattened_structure[i], tensor_array_ops.TensorArray):
flat_sequence[i] = tensor_array_ops.build_ta_with_new_flow(
old_ta=flattened_structure[i], flow=flat_sequence[i])
return nest.pack_sequence_as(structure, flat_sequence, expand_composites=True)
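# Illustrative sketch (not part of the original file): `flatten` and
# `pack_sequence_as` round-trip a structure, swapping TensorArrays for their
# flow tensors on the way out and rebuilding them on the way back in.
def _example_roundtrip(structure):
  flat = flatten(structure)  # TensorArrays become their flow tensors
  return pack_sequence_as(structure, flat)  # flows become TensorArrays again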
def _create_substitute_placeholder(value, name=None, dtype=None):
"""Creates a placeholder for `value` and propagates shape info to it."""
# Note: setting ops.control_dependencies(None) ensures we always put
# capturing placeholders outside of any control flow context.
with ops.control_dependencies(None):
placeholder = graph_placeholder(
dtype=dtype or value.dtype, shape=value.shape, name=name)
custom_gradient.copy_handle_data(value, placeholder)
return placeholder
def _get_defun_inputs_from_args(args, names, flat_shapes=None):
"""Maps Python function positional args to graph-construction inputs."""
return _get_defun_inputs(
args, names, structure=args, flat_shapes=flat_shapes)
def _get_defun_inputs(args, names, structure, flat_shapes=None):
"""Maps python function args to graph-construction inputs.
Args:
args: A flat list of user-specified arguments.
names: A list of strings with user-specified argument names, same length as
`args`. May be `None`, in which case a generic name is used.
structure: The original argument list or dictionary.
flat_shapes: A flat list of values that are either `None` or
instances of `TensorShape`. If provided, then length must match
that of `nest.flatten(args, expand_composites=True)`; and locations where
`args` are instances of `Tensor` must have a corresponding `TensorShape`
in `flat_shapes`. May be `None`, in which case exact shapes are read
directly from the args.
Returns:
Placeholders with the same structure as `structure`.
Raises:
RuntimeError: if `flat_shapes` is provided, but
`len(flat_shapes) != len(nest.flatten(args, expand_composites=True))`.
RuntimeError: if a shape from `flat_shapes` is not None
for an argument that is not a `Tensor`, `TensorSpec`,
or `ResourceVariable`.
"""
func_graph = ops.get_default_graph()
function_inputs = []
if names is None:
names = [None] * len(args)
if flat_shapes is None:
shapes_iter = itertools.repeat(None)
else:
len_flat_args = len(nest.flatten(args, expand_composites=True))
if len_flat_args != len(flat_shapes):
raise RuntimeError(
"Length of fully flat shapes (%d) must match that of "
"flatten(args) (%d). args: %s, flat_shapes: %s"
% (len(flat_shapes),
len_flat_args,
args,
flat_shapes))
shapes_iter = iter(flat_shapes)
for arg_value, name in zip(args, names):
flattened = nest.flatten(arg_value, expand_composites=True)
tensor_specs = [
arg for arg in flattened if isinstance(arg, tensor_spec.TensorSpec)
]
specified_names = [arg.name for arg in tensor_specs if arg.name]
if specified_names and len(specified_names) < len(tensor_specs):
raise ValueError("If specifying TensorSpec names for nested structures, "
"either zero or all names have to be specified.")
for arg in flattened:
      # We have a shape entry for each arg, regardless of whether it's a real
# Tensor or not. For non-tensor entries it should be None.
shape = next(shapes_iter)
if isinstance(arg, (ops.Tensor, tensor_spec.TensorSpec)):
if isinstance(arg, tensor_spec.TensorSpec) and arg.name:
requested_name = arg.name
else:
requested_name = name
placeholder_shape = shape if shape is not None else arg.shape
try:
placeholder = graph_placeholder(
arg.dtype, placeholder_shape,
name=requested_name)
except ValueError:
# Sometimes parameter names are not valid op names, so fall back to
# unnamed placeholders.
placeholder = graph_placeholder(arg.dtype, placeholder_shape)
if name is not None:
# Record the requested/user-specified name in case it's different than
# the uniquified name, for validation when exporting signatures.
placeholder.op._set_attr( # pylint: disable=protected-access
"_user_specified_name",
attr_value_pb2.AttrValue(s=compat.as_bytes(requested_name)))
function_inputs.append(placeholder)
elif isinstance(arg, resource_variable_ops.ResourceVariable):
# Capture arg variables to create placeholders for them. These will be
# removed as captures after the function is traced (since otherwise we'd
# just add it back with a new placeholder when the variable was
# referenced).
placeholder = func_graph.capture(arg.handle, name=name)
placeholder.op._set_attr( # pylint: disable=protected-access
"_user_specified_name",
attr_value_pb2.AttrValue(s=compat.as_bytes(name)))
function_inputs.append(arg)
else:
if shape is not None:
raise RuntimeError(
"Expected provided shape override to be None for arg that isn't "
"a Tensor, but saw arg: '%s', shape: '%s'. args: %s"
% (arg, shape, args))
function_inputs.append(arg)
return nest.pack_sequence_as(structure, function_inputs,
expand_composites=True)
def _get_defun_inputs_from_kwargs(kwargs, flat_shapes):
"""Maps Python function keyword args to graph-construction inputs."""
if kwargs:
names, args = zip(*sorted(kwargs.items()))
else:
names = []
args = []
return _get_defun_inputs(
args, names, structure=kwargs, flat_shapes=flat_shapes)
def dismantle_func_graph(func_graph):
"""Removes reference cycles in `func_graph` FuncGraph.
Helpful for making sure the garbage collector doesn't need to run when
the FuncGraph goes out of scope, e.g. in tests using defun with
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True).
Args:
func_graph: A `FuncGraph` object to destroy. `func_graph` is unusable
after this function.
"""
# TODO(b/115366440): Delete this method when a custom OrderedDict is added.
# Clearing captures using clear() leaves some cycles around.
while func_graph.captures:
func_graph.captures.popitem()
memory.dismantle_ordered_dict(func_graph.captures)
ops.dismantle_graph(func_graph)
| apache-2.0 | -8,036,142,726,043,407,000 | 41.763875 | 97 | 0.676845 | false | 4.197603 | false | false | false |
mrcrgl/django_distributed_task | distributed_task/broker/backends/db.py | 1 | 1101 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from ..interface import BrokerInterface
from distributed_task import settings
from distributed_task.core.serializer import serialize, deserialize
from distributed_task.models import Message
import time
class DatabaseMessageBroker(BrokerInterface):
queue = 'distributed_task_queue'
def prepare(self):
self.load_config()
def load_config(self):
OPTIONS = getattr(settings, 'BROKER_OPTIONS')
self.queue = OPTIONS.get('QUEUE', 'distributed_task_queue')
def produce_message(self, data):
m = Message(message=serialize(data), queue=self.queue)
m.save()
def consume_message(self, handler):
while True:
next = Message.objects.filter(queue=self.queue).order_by('created').first()
if not next:
return True
body = next.message
next.delete()
handler(deserialize(body))
def keep_consuming(self, handler):
while True:
self.consume_message(handler)
time.sleep(10) | mit | 4,212,467,058,290,029,600 | 26.55 | 87 | 0.64396 | false | 4.317647 | false | false | false |
gazpachoking/deluge-old | deluge/ui/gtkui/createtorrentdialog.py | 1 | 18151 | #
# createtorrentdialog.py
#
# Copyright (C) 2008 Andrew Resch <[email protected]>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
#
import gtk
import sys
import os.path
import gobject
import base64
import logging
from twisted.internet.threads import deferToThread
from deluge.ui.client import client
import listview
import deluge.component as component
import deluge.common
from deluge.configmanager import ConfigManager
log = logging.getLogger(__name__)
class CreateTorrentDialog:
def show(self):
self.builder = gtk.Builder()
# The main dialog
self.builder.add_from_file(deluge.common.resource_filename(
"deluge.ui.gtkui", os.path.join("glade", "create_torrent_dialog.ui")
))
# The remote path dialog
self.builder.add_from_file(deluge.common.resource_filename(
"deluge.ui.gtkui", os.path.join("glade", "create_torrent_dialog.remote_path.ui")
))
# The remote save dialog
self.builder.add_from_file(deluge.common.resource_filename(
"deluge.ui.gtkui", os.path.join("glade", "create_torrent_dialog.remote_save.ui")
))
# The progress dialog
self.builder.add_from_file(deluge.common.resource_filename(
"deluge.ui.gtkui", os.path.join("glade", "create_torrent_dialog.progress.ui")
))
self.config = ConfigManager("gtkui.conf")
self.dialog = self.builder.get_object("create_torrent_dialog")
self.dialog.set_transient_for(component.get("MainWindow").window)
self.builder.connect_signals({
"on_button_file_clicked": self._on_button_file_clicked,
"on_button_folder_clicked": self._on_button_folder_clicked,
"on_button_remote_path_clicked": self._on_button_remote_path_clicked,
"on_button_cancel_clicked": self._on_button_cancel_clicked,
"on_button_save_clicked": self._on_button_save_clicked,
"on_button_up_clicked": self._on_button_up_clicked,
"on_button_add_clicked": self._on_button_add_clicked,
"on_button_remove_clicked": self._on_button_remove_clicked,
"on_button_down_clicked": self._on_button_down_clicked
})
# path, icon, size
self.files_treestore = gtk.TreeStore(str, str, gobject.TYPE_UINT64)
column = gtk.TreeViewColumn(_("Filename"))
render = gtk.CellRendererPixbuf()
column.pack_start(render, False)
column.add_attribute(render, "stock-id", 1)
render = gtk.CellRendererText()
column.pack_start(render, True)
column.add_attribute(render, "text", 0)
column.set_expand(True)
self.builder.get_object("treeview_files").append_column(column)
column = gtk.TreeViewColumn(_("Size"))
render = gtk.CellRendererText()
column.pack_start(render)
column.set_cell_data_func(render, listview.cell_data_size, 2)
self.builder.get_object("treeview_files").append_column(column)
self.builder.get_object("treeview_files").set_model(self.files_treestore)
self.builder.get_object("treeview_files").set_show_expanders(False)
# tier, url
self.trackers_liststore = gtk.ListStore(int, str)
self.builder.get_object("tracker_treeview").append_column(
gtk.TreeViewColumn(_("Tier"), gtk.CellRendererText(), text=0))
self.builder.get_object("tracker_treeview").append_column(
gtk.TreeViewColumn(_("Tracker"), gtk.CellRendererText(), text=1))
self.builder.get_object("tracker_treeview").set_model(self.trackers_liststore)
self.trackers_liststore.set_sort_column_id(0, gtk.SORT_ASCENDING)
if not client.is_localhost() and client.connected():
self.builder.get_object("button_remote_path").show()
else:
self.builder.get_object("button_remote_path").hide()
self.dialog.show()
def parse_piece_size_text(self, value):
psize, metric = value.split()
psize = int(psize)
if psize < 32:
# This is a MiB value
psize = psize * 1024 * 1024
else:
# This is a KiB value
psize = psize * 1024
return psize
def adjust_piece_size(self):
"""Adjusts the recommended piece based on the file/folder/path selected."""
size = self.files_treestore[0][2]
model = self.builder.get_object("combo_piece_size").get_model()
        for index, value in enumerate(model):
psize = self.parse_piece_size_text(value[0])
pieces = size / psize
if pieces < 2048 or (index + 1) == len(model):
self.builder.get_object("combo_piece_size").set_active(index)
break
def _on_button_file_clicked(self, widget):
log.debug("_on_button_file_clicked")
# Setup the filechooserdialog
chooser = gtk.FileChooserDialog(_("Choose a file"),
self.dialog,
gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,
gtk.RESPONSE_OK))
chooser.set_transient_for(self.dialog)
chooser.set_select_multiple(False)
chooser.set_property("skip-taskbar-hint", True)
# Run the dialog
response = chooser.run()
if response == gtk.RESPONSE_OK:
result = chooser.get_filename()
else:
chooser.destroy()
return
path = result.decode('utf-8')
self.files_treestore.clear()
self.files_treestore.append(None, [result, gtk.STOCK_FILE, deluge.common.get_path_size(path)])
self.adjust_piece_size()
chooser.destroy()
def _on_button_folder_clicked(self, widget):
log.debug("_on_button_folder_clicked")
# Setup the filechooserdialog
chooser = gtk.FileChooserDialog(_("Choose a folder"),
self.dialog,
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN,
gtk.RESPONSE_OK))
chooser.set_transient_for(self.dialog)
chooser.set_select_multiple(False)
chooser.set_property("skip-taskbar-hint", True)
# Run the dialog
response = chooser.run()
if response == gtk.RESPONSE_OK:
result = chooser.get_filename()
else:
chooser.destroy()
return
path = result.decode('utf-8')
self.files_treestore.clear()
self.files_treestore.append(None, [result, gtk.STOCK_OPEN, deluge.common.get_path_size(path)])
self.adjust_piece_size()
chooser.destroy()
def _on_button_remote_path_clicked(self, widget):
log.debug("_on_button_remote_path_clicked")
dialog = self.builder.get_object("remote_path_dialog")
entry = self.builder.get_object("entry_path")
dialog.set_transient_for(self.dialog)
entry.set_text("/")
entry.grab_focus()
response = dialog.run()
if response == gtk.RESPONSE_OK:
result = entry.get_text()
def _on_get_path_size(size):
log.debug("size: %s", size)
if size > 0:
self.files_treestore.clear()
self.files_treestore.append(None, [result, gtk.STOCK_NETWORK, size])
self.adjust_piece_size()
client.core.get_path_size(result).addCallback(_on_get_path_size)
client.force_call(True)
dialog.hide()
def _on_button_cancel_clicked(self, widget):
log.debug("_on_button_cancel_clicked")
self.dialog.destroy()
def _on_button_save_clicked(self, widget):
log.debug("_on_button_save_clicked")
if len(self.files_treestore) == 0:
return
is_remote = self.files_treestore[0][1] == gtk.STOCK_NETWORK
torrent_filename = "%s.torrent" % os.path.split(self.files_treestore[0][0].rstrip('/'))[-1]
if is_remote:
# This is a remote path
dialog = self.builder.get_object("remote_save_dialog")
dialog.set_transient_for(self.dialog)
self.builder.get_object("entry_save_path").set_text(torrent_filename)
response = dialog.run()
if response == gtk.RESPONSE_OK:
result = self.builder.get_object("entry_save_path").get_text()
else:
dialog.hide()
return
dialog.hide()
else:
# Setup the filechooserdialog
chooser = gtk.FileChooserDialog(_("Save .torrent file"),
self.dialog,
gtk.FILE_CHOOSER_ACTION_SAVE,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE, gtk.RESPONSE_OK))
chooser.set_transient_for(self.dialog)
chooser.set_select_multiple(False)
chooser.set_property("skip-taskbar-hint", True)
# Add .torrent and * file filters
file_filter = gtk.FileFilter()
file_filter.set_name(_("Torrent files"))
file_filter.add_pattern("*." + "torrent")
chooser.add_filter(file_filter)
file_filter = gtk.FileFilter()
file_filter.set_name(_("All files"))
file_filter.add_pattern("*")
chooser.add_filter(file_filter)
chooser.set_current_name(torrent_filename)
# Run the dialog
response = chooser.run()
if response == gtk.RESPONSE_OK:
result = chooser.get_filename()
else:
chooser.destroy()
return
chooser.destroy()
# Fix up torrent filename
if len(result) < 9:
result += ".torrent"
elif result[-8:] != ".torrent":
result += ".torrent"
# Get the path
path = self.files_treestore[0][0]
# Get a list of trackers
trackers = []
if not len(self.trackers_liststore):
tracker = None
else:
# Create a list of lists [[tier0, ...], [tier1, ...], ...]
tier_dict = {}
for tier, tracker in self.trackers_liststore:
tier_dict.setdefault(tier, []).append(tracker)
trackers = [tier_dict[tier] for tier in sorted(tier_dict)]
# Get the first tracker in the first tier
tracker = trackers[0][0]
# Get a list of webseeds
webseeds = []
b = self.builder.get_object("textview_webseeds").get_buffer()
lines = b.get_text(b.get_start_iter(), b.get_end_iter()).strip().split("\n")
import deluge.common
for l in lines:
if deluge.common.is_url(l):
webseeds.append(l)
# Get the piece length in bytes
combo = self.builder.get_object("combo_piece_size")
piece_length = self.parse_piece_size_text(combo.get_model()[combo.get_active()][0])
author = self.builder.get_object("entry_author").get_text()
comment = self.builder.get_object("entry_comments").get_text()
private = self.builder.get_object("chk_private_flag").get_active()
add_to_session = self.builder.get_object("chk_add_to_session").get_active()
if is_remote:
def torrent_created():
self.builder.get_object("progress_dialog").hide_all()
client.deregister_event_handler("CreateTorrentProgressEvent", on_create_torrent_progress_event)
def on_create_torrent_progress_event(piece_count, num_pieces):
self._on_create_torrent_progress(piece_count, num_pieces)
if piece_count == num_pieces:
from twisted.internet import reactor
reactor.callLater(0.5, torrent_created) # pylint: disable-msg=E1101
client.register_event_handler("CreateTorrentProgressEvent", on_create_torrent_progress_event)
client.core.create_torrent(
path,
tracker,
piece_length,
comment,
result,
webseeds,
private,
author,
trackers,
add_to_session)
else:
def hide_progress(result):
self.builder.get_object("progress_dialog").hide_all()
deferToThread(self.create_torrent,
path.decode('utf-8'),
tracker,
piece_length,
self._on_create_torrent_progress,
comment,
result.decode('utf-8'),
webseeds,
private,
author,
trackers,
add_to_session).addCallback(hide_progress)
# Setup progress dialog
self.builder.get_object("progress_dialog").set_transient_for(component.get("MainWindow").window)
self.builder.get_object("progress_dialog").show_all()
self.dialog.destroy()
def create_torrent(self, path, tracker, piece_length, progress, comment, target,
webseeds, private, created_by, trackers, add_to_session):
import deluge.metafile
deluge.metafile.make_meta_file(
path,
tracker,
piece_length,
progress=progress,
comment=comment,
target=target,
webseeds=webseeds,
private=private,
created_by=created_by,
trackers=trackers)
if add_to_session:
client.core.add_torrent_file(
os.path.split(target)[-1],
base64.encodestring(open(target, "rb").read()),
{"download_location": os.path.split(path)[0]})
def _on_create_torrent_progress(self, value, num_pieces):
percent = float(value)/float(num_pieces)
def update_pbar_with_gobject(percent):
pbar = self.builder.get_object("progressbar")
pbar.set_text("%.2f%%" % (percent*100))
pbar.set_fraction(percent)
return False
if percent >= 0 and percent <= 1.0:
# Make sure there are no threads race conditions that can
# crash the UI while updating it.
gobject.idle_add(update_pbar_with_gobject, percent)
def _on_button_up_clicked(self, widget):
log.debug("_on_button_up_clicked")
row = self.builder.get_object("tracker_treeview").get_selection().get_selected()[1]
if row is None:
return
if self.trackers_liststore[row][0] == 0:
return
else:
self.trackers_liststore[row][0] -= 1
def _on_button_down_clicked(self, widget):
log.debug("_on_button_down_clicked")
row = self.builder.get_object("tracker_treeview").get_selection().get_selected()[1]
if row is None:
return
self.trackers_liststore[row][0] += 1
def _on_button_add_clicked(self, widget):
log.debug("_on_button_add_clicked")
builder = gtk.Builder()
builder.add_from_file(deluge.common.resource_filename(
"deluge.ui.gtkui", os.path.join("glade", "edit_trackers.ui")
))
dialog = builder.get_object("add_tracker_dialog")
dialog.set_transient_for(self.dialog)
textview = builder.get_object("textview_trackers")
if self.config["createtorrent.trackers"]:
textview.get_buffer().set_text("\n".join(self.config["createtorrent.trackers"]))
else:
textview.get_buffer().set_text("")
textview.grab_focus()
response = dialog.run()
if response == gtk.RESPONSE_OK:
# Create a list of trackers from the textview buffer
trackers = []
b = textview.get_buffer()
lines = b.get_text(b.get_start_iter(), b.get_end_iter()).strip().split("\n")
self.config["createtorrent.trackers"] = lines
log.debug("lines: %s", lines)
for l in lines:
if deluge.common.is_url(l):
trackers.append(l)
# We are going to add these trackers to the highest tier + 1
tier = 0
for row in self.trackers_liststore:
if row[0] > tier:
tier = row[0]
for tracker in trackers:
self.trackers_liststore.append([tier, tracker])
dialog.destroy()
def _on_button_remove_clicked(self, widget):
log.debug("_on_button_remove_clicked")
row = self.builder.get_object("tracker_treeview").get_selection().get_selected()[1]
if row is None:
return
self.trackers_liststore.remove(row)
| gpl-3.0 | -3,731,345,788,369,979,000 | 37.455508 | 111 | 0.590381 | false | 3.8959 | true | false | false |
partofthething/laserComm | laserComm/receiver.py | 1 | 2975 | '''
receiver runs the ADC and photoresistor to receive an input signal.
USes MCP3008 ADC via the hardware SPI interface.
Connections are:
MCP3008 VDD -> 3.3V (red)
MCP3008 VREF -> 3.3V (red)
MCP3008 AGND -> GND (orange)
MCP3008 CLK -> SCLK (yellow)
MCP3008 DOUT -> MISO (green)
MCP3008 DIN -> MOSI (yellow)
MCP3008 CS -> CE0 (red)
MCP3008 DGND -> GND (orange)
The photoresistor goes from about 4 kOhms (dark) down to roughly 90 Ohms (flashlight).
Output is 1024*Vin/Vref.
Build a voltage divider with a ~200 Ohm resistor in series w/ the photoR and measure
Vout between them. I put the photoresistor between Vout and ground.
The signal is intended to be processed using signal_processor
'''
import time
import numpy
import matplotlib
matplotlib.use('Agg') # works headless (e.g. on Raspberry Pi)
import matplotlib.pyplot as plt
try:
import spidev
except ImportError:
print('no spidev')
GAP = 0.001
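# Illustrative helper (not part of the original module): convert a raw 10-bit
# MCP3008 reading back into a voltage and an estimated photoresistor value for
# the divider described above. Vref = 3.3 V and the 200 Ohm series resistor
# are assumptions taken from the docstring.
V_REF = 3.3
R_SERIES = 200.0
def adc_to_photoresistance(adc_value):
    """Return (voltage, resistance) implied by an ADC reading of 0..1023."""
    v_out = V_REF * adc_value / 1023.0
    if v_out >= V_REF:
        return v_out, float('inf')
    # photoresistor sits between Vout and ground: R = Vout * Rs / (Vref - Vout)
    return v_out, v_out * R_SERIES / (V_REF - v_out)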
class ADC(object):
"""
The Analog-to-digital converter
"""
def __init__(self):
self.adc = None
def __enter__(self):
self.adc = spidev.SpiDev()
self.adc.open(0, 0)
def __exit__(self, exc_type, exc_value, traceback):
self.adc.close()
def read(self, input_number):
"""
read SPI data from MCP3008 chip
There are 8 possible channels (0 through 7)
Will return value between 0 and 1023
"""
if ((input_number > 7) or (input_number < 0)):
return -1
r = self.adc.xfer2([1, (8 + input_number) << 4, 0])
adcValue = ((r[1] & 3) << 8) + r[2]
return adcValue
class Receiver(object):
"""
Stream processor that uses adc
"""
@property
def times(self):
return numpy.linspace(0, 10, len(self.vals))
def receive(self, adc):
self.vals = []
        # receive for 30 seconds
print('Receiving')
start = time.time()
while time.time() - start < 30.0:
self.vals.append(adc.read(0))
time.sleep(GAP / 10)
def plot(self, fname='adc.pdf'):
print('Plotting')
t = self.times
plt.figure(figsize=(12, 10))
plt.plot(t, self.vals, '-')
plt.xlabel('Time (s)')
plt.ylabel('ADC signal')
plt.title('ADC Signal Trace')
plt.grid(color='0.7')
if fname:
plt.savefig(fname)
def save(self, fname='adc.txt'):
"""
Save results to file
"""
print('Saving')
with open(fname, 'w') as f:
f.writelines(['{0:04d}\n'.format(vi) for vi in self.vals])
def load(self, fname='adc.txt'):
print('Loading')
with open(fname) as f:
vals = f.readlines()
self.vals = [float(vi) for vi in vals]
if __name__ == '__main__':
adc = ADC()
receiver = Receiver()
with adc:
vals = receiver.receive(adc)
receiver.plot()
receiver.save()
| mit | -8,309,013,175,638,978,000 | 23.791667 | 88 | 0.572437 | false | 3.331467 | false | false | false |
mikeh77/mi-instrument | mi/core/instrument/instrument_driver.py | 1 | 46251 | #!/usr/bin/env python
"""
@package ion.services.mi.instrument_driver Instrument driver structures
@file ion/services/mi/instrument_driver.py
@author Edward Hunter
@brief Instrument driver classes that provide structure towards interaction
with individual instruments in the system.
"""
__author__ = 'Steve Foley'
__license__ = 'Apache 2.0'
import time
import json
from threading import Thread
from mi.core.common import BaseEnum
from mi.core.exceptions import TestModeException
from mi.core.exceptions import NotImplementedException
from mi.core.exceptions import InstrumentException
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import InstrumentConnectionException
from mi.core.instrument.instrument_fsm import InstrumentFSM, ThreadSafeFSM
from mi.core.instrument.port_agent_client import PortAgentClient
from mi.core.log import get_logger, LoggerManager
log = get_logger()
class ConfigMetadataKey(BaseEnum):
"""
Keys used in the metadata structure that describes the driver, commands,
and parameters used in the driver and protocol.
"""
DRIVER = 'driver'
COMMANDS = 'commands'
PARAMETERS = 'parameters'
class DriverConfigKey(BaseEnum):
"""
Dictionary keys for driver config objects
"""
PARAMETERS = 'parameters'
SCHEDULER = 'scheduler'
# This is a copy since we can't import from pyon.
class ResourceAgentState(BaseEnum):
"""
Resource agent common states.
"""
POWERED_DOWN = 'RESOURCE_AGENT_STATE_POWERED_DOWN'
UNINITIALIZED = 'RESOURCE_AGENT_STATE_UNINITIALIZED'
INACTIVE = 'RESOURCE_AGENT_STATE_INACTIVE'
IDLE = 'RESOURCE_AGENT_STATE_IDLE'
STOPPED = 'RESOURCE_AGENT_STATE_STOPPED'
COMMAND = 'RESOURCE_AGENT_STATE_COMMAND'
STREAMING = 'RESOURCE_AGENT_STATE_STREAMING'
TEST = 'RESOURCE_AGENT_STATE_TEST'
CALIBRATE = 'RESOURCE_AGENT_STATE_CALIBRATE'
DIRECT_ACCESS = 'RESOUCE_AGENT_STATE_DIRECT_ACCESS'
BUSY = 'RESOURCE_AGENT_STATE_BUSY'
LOST_CONNECTION = 'RESOURCE_AGENT_STATE_LOST_CONNECTION'
ACTIVE_UNKNOWN = 'RESOURCE_AGENT_STATE_ACTIVE_UNKNOWN'
class ResourceAgentEvent(BaseEnum):
"""
Resource agent common events.
"""
ENTER = 'RESOURCE_AGENT_EVENT_ENTER'
EXIT = 'RESOURCE_AGENT_EVENT_EXIT'
POWER_UP = 'RESOURCE_AGENT_EVENT_POWER_UP'
POWER_DOWN = 'RESOURCE_AGENT_EVENT_POWER_DOWN'
INITIALIZE = 'RESOURCE_AGENT_EVENT_INITIALIZE'
RESET = 'RESOURCE_AGENT_EVENT_RESET'
GO_ACTIVE = 'RESOURCE_AGENT_EVENT_GO_ACTIVE'
GO_INACTIVE = 'RESOURCE_AGENT_EVENT_GO_INACTIVE'
RUN = 'RESOURCE_AGENT_EVENT_RUN'
CLEAR = 'RESOURCE_AGENT_EVENT_CLEAR'
PAUSE = 'RESOURCE_AGENT_EVENT_PAUSE'
RESUME = 'RESOURCE_AGENT_EVENT_RESUME'
GO_COMMAND = 'RESOURCE_AGENT_EVENT_GO_COMMAND'
GO_DIRECT_ACCESS = 'RESOURCE_AGENT_EVENT_GO_DIRECT_ACCESS'
GET_RESOURCE = 'RESOURCE_AGENT_EVENT_GET_RESOURCE'
SET_RESOURCE = 'RESOURCE_AGENT_EVENT_SET_RESOURCE'
EXECUTE_RESOURCE = 'RESOURCE_AGENT_EVENT_EXECUTE_RESOURCE'
GET_RESOURCE_STATE = 'RESOURCE_AGENT_EVENT_GET_RESOURCE_STATE'
GET_RESOURCE_CAPABILITIES = 'RESOURCE_AGENT_EVENT_GET_RESOURCE_CAPABILITIES'
DONE = 'RESOURCE_AGENT_EVENT_DONE'
PING_RESOURCE = 'RESOURCE_AGENT_PING_RESOURCE'
LOST_CONNECTION = 'RESOURCE_AGENT_EVENT_LOST_CONNECTION'
AUTORECONNECT = 'RESOURCE_AGENT_EVENT_AUTORECONNECT'
GET_RESOURCE_SCHEMA = 'RESOURCE_AGENT_EVENT_GET_RESOURCE_SCHEMA'
CHANGE_STATE_ASYNC = 'RESOURCE_AGENT_EVENT_CHANGE_STATE_ASYNC'
class DriverState(BaseEnum):
"""Common driver state enum"""
UNCONFIGURED = 'DRIVER_STATE_UNCONFIGURED'
DISCONNECTED = 'DRIVER_STATE_DISCONNECTED'
CONNECTING = 'DRIVER_STATE_CONNECTING'
DISCONNECTING = 'DRIVER_STATE_DISCONNECTING'
CONNECTED = 'DRIVER_STATE_CONNECTED'
ACQUIRE_SAMPLE = 'DRIVER_STATE_ACQUIRE_SAMPLE'
UPDATE_PARAMS = 'DRIVER_STATE_UPDATE_PARAMS'
SET = 'DRIVER_STATE_SET'
SLEEP = 'DRIVER_STATE_SLEEP'
class DriverProtocolState(BaseEnum):
"""
Base states for driver protocols. Subclassed for specific driver
protocols.
"""
AUTOSAMPLE = 'DRIVER_STATE_AUTOSAMPLE'
TEST = 'DRIVER_STATE_TEST'
CALIBRATE = 'DRIVER_STATE_CALIBRATE'
COMMAND = 'DRIVER_STATE_COMMAND'
DIRECT_ACCESS = 'DRIVER_STATE_DIRECT_ACCESS'
UNKNOWN = 'DRIVER_STATE_UNKNOWN'
POLL = 'DRIVER_STATE_POLL'
class DriverConnectionState(BaseEnum):
"""
Base states for driver connections.
"""
UNCONFIGURED = 'DRIVER_STATE_UNCONFIGURED'
DISCONNECTED = 'DRIVER_STATE_DISCONNECTED'
CONNECTED = 'DRIVER_STATE_CONNECTED'
class DriverEvent(BaseEnum):
"""
Base events for driver state machines. Commands and other events
are transformed into state machine events for handling.
"""
ENTER = 'DRIVER_EVENT_ENTER'
EXIT = 'DRIVER_EVENT_EXIT'
INITIALIZE = 'DRIVER_EVENT_INITIALIZE'
CONFIGURE = 'DRIVER_EVENT_CONFIGURE'
CONNECT = 'DRIVER_EVENT_CONNECT'
CONNECTION_LOST = 'DRIVER_CONNECTION_LOST'
DISCONNECT = 'DRIVER_EVENT_DISCONNECT'
SET = 'DRIVER_EVENT_SET'
GET = 'DRIVER_EVENT_GET'
DISCOVER = 'DRIVER_EVENT_DISCOVER'
EXECUTE = 'DRIVER_EVENT_EXECUTE'
ACQUIRE_SAMPLE = 'DRIVER_EVENT_ACQUIRE_SAMPLE'
START_AUTOSAMPLE = 'DRIVER_EVENT_START_AUTOSAMPLE'
STOP_AUTOSAMPLE = 'DRIVER_EVENT_STOP_AUTOSAMPLE'
TEST = 'DRIVER_EVENT_TEST'
RUN_TEST = 'DRIVER_EVENT_RUN_TEST'
STOP_TEST = 'DRIVER_EVENT_STOP_TEST'
CALIBRATE = 'DRIVER_EVENT_CALIBRATE'
RESET = 'DRIVER_EVENT_RESET'
UPDATE_PARAMS = 'DRIVER_EVENT_UPDATE_PARAMS'
BREAK = 'DRIVER_EVENT_BREAK'
EXECUTE_DIRECT = 'EXECUTE_DIRECT'
START_DIRECT = 'DRIVER_EVENT_START_DIRECT'
STOP_DIRECT = 'DRIVER_EVENT_STOP_DIRECT'
PING_DRIVER = 'DRIVER_EVENT_PING_DRIVER'
FORCE_STATE = 'DRIVER_FORCE_STATE'
CLOCK_SYNC = 'DRIVER_EVENT_CLOCK_SYNC'
SCHEDULED_CLOCK_SYNC = 'DRIVER_EVENT_SCHEDULED_CLOCK_SYNC'
ACQUIRE_STATUS = 'DRIVER_EVENT_ACQUIRE_STATUS'
INIT_PARAMS = 'DRIVER_EVENT_INIT_PARAMS'
GAP_RECOVERY = 'DRIVER_EVENT_GAP_RECOVERY'
GAP_RECOVERY_COMPLETE = 'DRIVER_EVENT_GAP_RECOVERY_COMPLETE'
class DriverAsyncEvent(BaseEnum):
"""
Asynchronous driver event types.
"""
STATE_CHANGE = 'DRIVER_ASYNC_EVENT_STATE_CHANGE'
CONFIG_CHANGE = 'DRIVER_ASYNC_EVENT_CONFIG_CHANGE'
SAMPLE = 'DRIVER_ASYNC_EVENT_SAMPLE'
ERROR = 'DRIVER_ASYNC_EVENT_ERROR'
RESULT = 'DRIVER_ASYNC_RESULT'
DIRECT_ACCESS = 'DRIVER_ASYNC_EVENT_DIRECT_ACCESS'
AGENT_EVENT = 'DRIVER_ASYNC_EVENT_AGENT_EVENT'
class DriverParameter(BaseEnum):
"""
Base driver parameters. Subclassed by specific drivers with device
specific parameters.
"""
ALL = 'DRIVER_PARAMETER_ALL'
class InstrumentDriver(object):
"""
Base class for instrument drivers.
"""
def __init__(self, event_callback):
"""
Constructor.
@param event_callback The driver process callback used to send
        asynchronous driver events to the agent.
"""
LoggerManager()
self._send_event = event_callback
self._test_mode = False
#############################################################
# Device connection interface.
#############################################################
def set_test_mode(self, mode):
"""
        Enable test mode for the driver. If this mode is invoked
then the user has access to test_ commands.
@param mode: test mode state
"""
self._test_mode = True if mode else False
def initialize(self, *args, **kwargs):
"""
Initialize driver connection, bringing communications parameters
into unconfigured state (no connection object).
@raises InstrumentStateException if command not allowed in current state
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('initialize() not implemented.')
def configure(self, *args, **kwargs):
"""
Configure the driver for communications with the device via
port agent / logger (valid but unconnected connection object).
@param arg[0] comms config dict.
@raises InstrumentStateException if command not allowed in current state
@throws InstrumentParameterException if missing comms or invalid config dict.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('configure() not implemented.')
def connect(self, *args, **kwargs):
"""
Establish communications with the device via port agent / logger
(connected connection object).
@raises InstrumentStateException if command not allowed in current state
@throws InstrumentConnectionException if the connection failed.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('connect() not implemented.')
def disconnect(self, *args, **kwargs):
"""
Disconnect from device via port agent / logger.
@raises InstrumentStateException if command not allowed in current state
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('disconnect() not implemented.')
#############################################################
# Command and control interface.
#############################################################
def discover_state(self, *args, **kwargs):
"""
Determine initial state upon establishing communications.
@param timeout=timeout Optional command timeout.
@retval Current device state.
@raises InstrumentTimeoutException if could not wake device.
@raises InstrumentStateException if command not allowed in current state or if
device state not recognized.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('discover_state() is not implemented.')
def get_resource_capabilities(self, *args, **kwargs):
"""
Return driver commands and parameters.
@param current_state True to retrieve commands available in current
        state, otherwise return all commands.
        @retval list of AgentCapability objects representing the driver's
capabilities.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('get_resource_capabilities() is not implemented.')
def get_resource_state(self, *args, **kwargs):
"""
Return the current state of the driver.
@retval str current driver state.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('get_resource_state() is not implemented.')
def get_resource(self, *args, **kwargs):
"""
Retrieve device parameters.
        @param args[0] DriverParameter.ALL or a list of parameters to retrieve.
@retval parameter : value dict.
@raises InstrumentParameterException if missing or invalid get parameters.
@raises InstrumentStateException if command not allowed in current state
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('get_resource() is not implemented.')
def set_resource(self, *args, **kwargs):
"""
Set device parameters.
@param args[0] parameter : value dict of parameters to set.
@param timeout=timeout Optional command timeout.
@raises InstrumentParameterException if missing or invalid set parameters.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if set command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('set_resource() not implemented.')
def execute_resource(self, *args, **kwargs):
"""
Execute a driver command.
@param timeout=timeout Optional command timeout.
        @retval Command specific.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
raise NotImplementedException('execute_resource() not implemented.')
def start_direct(self, *args, **kwargs):
"""
Start direct access mode
@param timeout=timeout Optional command timeout.
        @retval Command specific.
@raises NotImplementedException if not implemented by subclass.
"""
        raise NotImplementedException('start_direct() not implemented.')
def stop_direct(self, *args, **kwargs):
"""
Stop direct access mode
@param timeout=timeout Optional command timeout.
        @retval Command specific.
@raises NotImplementedException if not implemented by subclass.
"""
        raise NotImplementedException('stop_direct() not implemented.')
########################################################################
# Event interface.
########################################################################
def _driver_event(self, type, val=None):
"""
Construct and send an asynchronous driver event.
@param type a DriverAsyncEvent type specifier.
@param val event value for sample and test result events.
"""
event = {
'type' : type,
'value' : None,
'time' : time.time()
}
if type == DriverAsyncEvent.STATE_CHANGE:
state = self.get_resource_state()
event['value'] = state
self._send_event(event)
elif type == DriverAsyncEvent.CONFIG_CHANGE:
config = self.get_resource(DriverParameter.ALL)
event['value'] = config
self._send_event(event)
elif type == DriverAsyncEvent.SAMPLE:
event['value'] = val
self._send_event(event)
elif type == DriverAsyncEvent.ERROR:
event['value'] = val
self._send_event(event)
elif type == DriverAsyncEvent.RESULT:
event['value'] = val
self._send_event(event)
elif type == DriverAsyncEvent.DIRECT_ACCESS:
event['value'] = val
self._send_event(event)
elif type == DriverAsyncEvent.AGENT_EVENT:
event['value'] = val
self._send_event(event)
########################################################################
# Test interface.
########################################################################
def driver_ping(self, msg):
"""
Echo a message.
@param msg the message to prepend and echo back to the caller.
"""
reply = 'driver_ping: '+msg
return reply
def test_exceptions(self, msg):
"""
Test exception handling in the driver process.
@param msg message string to put in a raised exception to be caught in
a test.
        @raises InstrumentException always.
"""
raise InstrumentException(msg)
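# Illustrative sketch (not part of the original module): the event_callback
# handed to an InstrumentDriver is simply a callable that receives the dict
# built by _driver_event(), i.e. {'type': ..., 'value': ..., 'time': ...}.
def _example_event_callback(event):
    """Minimal callback that logs asynchronous driver events."""
    log.info("driver event type=%s time=%s value=%s",
             event.get('type'), event.get('time'), event.get('value'))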
class SingleConnectionInstrumentDriver(InstrumentDriver):
"""
Base class for instrument drivers with a single device connection.
    Provides connection state logic for single connection drivers. This is
the base class for the majority of driver implementation classes.
"""
def __init__(self, event_callback):
"""
Constructor for singly connected instrument drivers.
@param event_callback Callback to the driver process to send asynchronous
driver events back to the agent.
"""
InstrumentDriver.__init__(self, event_callback)
        # The one and only instrument connection.
# Exists in the connected state.
self._connection = None
# The one and only instrument protocol.
self._protocol = None
# Build connection state machine.
self._connection_fsm = ThreadSafeFSM(DriverConnectionState,
DriverEvent,
DriverEvent.ENTER,
DriverEvent.EXIT)
# Add handlers for all events.
self._connection_fsm.add_handler(DriverConnectionState.UNCONFIGURED, DriverEvent.ENTER, self._handler_unconfigured_enter)
self._connection_fsm.add_handler(DriverConnectionState.UNCONFIGURED, DriverEvent.EXIT, self._handler_unconfigured_exit)
self._connection_fsm.add_handler(DriverConnectionState.UNCONFIGURED, DriverEvent.INITIALIZE, self._handler_unconfigured_initialize)
self._connection_fsm.add_handler(DriverConnectionState.UNCONFIGURED, DriverEvent.CONFIGURE, self._handler_unconfigured_configure)
self._connection_fsm.add_handler(DriverConnectionState.DISCONNECTED, DriverEvent.ENTER, self._handler_disconnected_enter)
self._connection_fsm.add_handler(DriverConnectionState.DISCONNECTED, DriverEvent.EXIT, self._handler_disconnected_exit)
self._connection_fsm.add_handler(DriverConnectionState.DISCONNECTED, DriverEvent.INITIALIZE, self._handler_disconnected_initialize)
self._connection_fsm.add_handler(DriverConnectionState.DISCONNECTED, DriverEvent.CONFIGURE, self._handler_disconnected_configure)
self._connection_fsm.add_handler(DriverConnectionState.DISCONNECTED, DriverEvent.CONNECT, self._handler_disconnected_connect)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.ENTER, self._handler_connected_enter)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.EXIT, self._handler_connected_exit)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.DISCONNECT, self._handler_connected_disconnect)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.CONNECTION_LOST, self._handler_connected_connection_lost)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.DISCOVER, self._handler_connected_protocol_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.GET, self._handler_connected_protocol_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.SET, self._handler_connected_protocol_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.EXECUTE, self._handler_connected_protocol_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.FORCE_STATE, self._handler_connected_protocol_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.START_DIRECT, self._handler_connected_start_direct_event)
self._connection_fsm.add_handler(DriverConnectionState.CONNECTED, DriverEvent.STOP_DIRECT, self._handler_connected_stop_direct_event)
# Start state machine.
self._connection_fsm.start(DriverConnectionState.UNCONFIGURED)
self._pre_da_config = {}
self._startup_config = {}
# Idempotency flag for lost connections.
        # This is set to False when a connection is established so that the
        # lost-connection callback can be activated.
self._connection_lost = True
#############################################################
# Device connection interface.
#############################################################
def initialize(self, *args, **kwargs):
"""
Initialize driver connection, bringing communications parameters
into unconfigured state (no connection object).
@raises InstrumentStateException if command not allowed in current state
"""
# Forward event and argument to the connection FSM.
return self._connection_fsm.on_event(DriverEvent.INITIALIZE, *args, **kwargs)
def configure(self, *args, **kwargs):
"""
Configure the driver for communications with the device via
port agent / logger (valid but unconnected connection object).
@param arg[0] comms config dict.
@raises InstrumentStateException if command not allowed in current state
@throws InstrumentParameterException if missing comms or invalid config dict.
"""
# Forward event and argument to the connection FSM.
return self._connection_fsm.on_event(DriverEvent.CONFIGURE, *args, **kwargs)
def connect(self, *args, **kwargs):
"""
Establish communications with the device via port agent / logger
(connected connection object).
@raises InstrumentStateException if command not allowed in current state
@throws InstrumentConnectionException if the connection failed.
"""
# Forward event and argument to the connection FSM.
result = self._connection_fsm.on_event(DriverEvent.CONNECT, *args, **kwargs)
init_config = {}
if len(args) > 0 and isinstance(args[0], dict):
init_config = args[0]
self.set_init_params(init_config)
return result
def disconnect(self, *args, **kwargs):
"""
Disconnect from device via port agent / logger.
@raises InstrumentStateException if command not allowed in current state
"""
# Forward event and argument to the connection FSM.
return self._connection_fsm.on_event(DriverEvent.DISCONNECT, *args, **kwargs)
#############################################################
# Configuration logic
#############################################################
def get_init_params(self):
"""
get the driver initialization parameters
@return driver configuration dictionary
"""
return self._startup_config
def set_init_params(self, config):
"""
Set the initialization parameters down in the protocol and store the
driver configuration in the driver.
        If the protocol hasn't been set up yet, cache the config. The next time
        this method is called, if you call it with an empty config it will
        read from the cache.
        @param config This default implementation assumes a structured driver
        configuration dict with keys named in DriverConfigKey.
        Other parameter structures can be handled by overriding this method.
@raise InstrumentParameterException If the config cannot be applied
"""
if not isinstance(config, dict):
raise InstrumentParameterException("Incompatible initialization parameters")
if self._protocol:
param_config = None
if config:
param_config = config
elif self._startup_config:
param_config = self._startup_config
if param_config:
self._protocol.set_init_params(param_config)
self._protocol.initialize_scheduler()
if config:
self._startup_config = config
def apply_startup_params(self):
"""
Apply the startup values previously stored in the protocol to
the running config of the live instrument. The startup values are the
values that are (1) marked as startup parameters and are (2) the "best"
value to use at startup. Preference is given to the previously-set init
value, then the default value, then the currently used value.
This default implementation simply pushes the logic down into the protocol
for processing should the action be better accomplished down there.
The driver writer can decide to overload this method in the derived
driver class and apply startup parameters in the driver (likely calling
some get and set methods for the resource). If the driver does not
implement an apply_startup_params() method in the driver, this method
will call into the protocol. Deriving protocol classes are expected to
implement an apply_startup_params() method lest they get the exception
from the base InstrumentProtocol implementation.
"""
log.debug("Base driver applying startup params...")
self._protocol.apply_startup_params()
def get_cached_config(self):
"""
Return the configuration object that shows the instrument's
configuration as cached in the protocol parameter dictionary.
        @retval The running configuration in the instrument's config format. By
default, it is a dictionary of parameter names and values.
"""
if self._protocol:
return self._protocol.get_cached_config()
def get_config_metadata(self):
"""
Return the configuration metadata object in JSON format
@retval The description of the parameters, commands, and driver info
in a JSON string
@see https://confluence.oceanobservatories.org/display/syseng/CIAD+MI+SV+Instrument+Driver-Agent+parameter+and+command+metadata+exchange
"""
log.debug("Getting metadata from driver...")
protocol = self._protocol
# Because the config requires information from the protocol param dict
# we temporarily instantiate a protocol object to get at the static
# information.
if not protocol:
self._build_protocol()
log.debug("Getting metadata from protocol...")
return self._protocol.get_config_metadata_dict()
def restore_direct_access_params(self, config):
"""
Restore the correct values out of the full config that is given when
returning from direct access. By default, this takes a simple dict of
        param name and value. Override this method as needed as it makes some
simple assumptions about how your instrument sets things.
@param config The configuration that was previously saved (presumably
to disk somewhere by the driver that is working with this protocol)
"""
vals = {}
# for each parameter that is read only, restore
da_params = self._protocol.get_direct_access_params()
for param in da_params:
vals[param] = config[param]
log.debug("Restore DA Parameters: %s" % vals)
self.set_resource(vals, True)
#############################################################
    # Command and control interface.
#############################################################
def discover_state(self, *args, **kwargs):
"""
Determine initial state upon establishing communications.
@param timeout=timeout Optional command timeout.
@retval Current device state.
@raises InstrumentTimeoutException if could not wake device.
@raises InstrumentStateException if command not allowed in current state or if
device state not recognized.
@raises NotImplementedException if not implemented by subclass.
"""
# Forward event and argument to the protocol FSM.
return self._connection_fsm.on_event(DriverEvent.DISCOVER, DriverEvent.DISCOVER, *args, **kwargs)
def get_resource_capabilities(self, current_state=True, *args, **kwargs):
"""
Return driver commands and parameters.
@param current_state True to retrieve commands available in current
        state, otherwise return all commands.
@retval list of AgentCapability objects representing the drivers
capabilities.
@raises NotImplementedException if not implemented by subclass.
"""
if self._protocol:
return self._protocol.get_resource_capabilities(current_state)
else:
return [['foobb'], ['fooaa']]
def get_resource_state(self, *args, **kwargs):
"""
Return the current state of the driver.
@retval str current driver state.
@raises NotImplementedException if not implemented by subclass.
"""
connection_state = self._connection_fsm.get_current_state()
if connection_state == DriverConnectionState.CONNECTED:
return self._protocol.get_current_state()
else:
return connection_state
def get_resource(self, *args, **kwargs):
"""
Retrieve device parameters.
        @param args[0] DriverParameter.ALL or a list of parameters to retrieve.
@retval parameter : value dict.
@raises InstrumentParameterException if missing or invalid get parameters.
@raises InstrumentStateException if command not allowed in current state
@raises NotImplementedException if not implemented by subclass.
"""
# Forward event and argument to the protocol FSM.
return self._connection_fsm.on_event(DriverEvent.GET, DriverEvent.GET, *args, **kwargs)
def set_resource(self, *args, **kwargs):
"""
Set device parameters.
@param args[0] parameter : value dict of parameters to set.
@param timeout=timeout Optional command timeout.
@raises InstrumentParameterException if missing or invalid set parameters.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if set command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
# Forward event and argument to the protocol FSM.
return self._connection_fsm.on_event(DriverEvent.SET, DriverEvent.SET, *args, **kwargs)
def execute_resource(self, resource_cmd, *args, **kwargs):
"""
Poll for a sample.
@param timeout=timeout Optional command timeout.
        @retval Device sample dict.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if acquire command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
# Forward event and argument to the protocol FSM.
return self._connection_fsm.on_event(DriverEvent.EXECUTE, resource_cmd, *args, **kwargs)
def start_direct(self, *args, **kwargs):
"""
start direct access mode
@param timeout=timeout Optional command timeout.
        @retval Device sample dict.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if acquire command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
# Need to pass the event as a parameter because the event handler to capture the current
# pre-da config requires it.
return self._connection_fsm.on_event(DriverEvent.START_DIRECT, DriverEvent.START_DIRECT)
def execute_direct(self, *args, **kwargs):
"""
        execute direct access command
@param timeout=timeout Optional command timeout.
        @retval Device sample dict.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if acquire command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
return self.execute_resource(DriverEvent.EXECUTE_DIRECT, *args, **kwargs)
def stop_direct(self, *args, **kwargs):
"""
stop direct access mode
@param timeout=timeout Optional command timeout.
        @retval Device sample dict.
@raises InstrumentTimeoutException if could not wake device or no response.
@raises InstrumentProtocolException if acquire command not recognized.
@raises InstrumentStateException if command not allowed in current state.
@raises NotImplementedException if not implemented by subclass.
"""
return self._connection_fsm.on_event(DriverEvent.STOP_DIRECT, DriverEvent.STOP_DIRECT)
def test_force_state(self, *args, **kwargs):
"""
Force driver into a given state for the purposes of unit testing
@param state=desired_state Required desired state to change to.
@raises InstrumentParameterException if no state parameter.
@raises TestModeException if not in test mode
"""
        if not self._test_mode:
            raise TestModeException()
# Get the required param
state = kwargs.get('state', None) # via kwargs
if state is None:
raise InstrumentParameterException('Missing state parameter.')
# We are mucking with internal FSM parameters which may be bad.
        # The alternative was to raise an event to change the state. Don't
# know which is better.
self._protocol._protocol_fsm.current_state = state
########################################################################
# Unconfigured handlers.
########################################################################
def _handler_unconfigured_enter(self, *args, **kwargs):
"""
Enter unconfigured state.
"""
# Send state change event to agent.
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_unconfigured_exit(self, *args, **kwargs):
"""
Exit unconfigured state.
"""
pass
def _handler_unconfigured_initialize(self, *args, **kwargs):
"""
Initialize handler. We are already in unconfigured state, do nothing.
@retval (next_state, result) tuple, (None, None).
"""
next_state = None
result = None
return (next_state, result)
def _handler_unconfigured_configure(self, *args, **kwargs):
"""
Configure driver for device comms.
        @param args[0] Communications config dictionary.
@retval (next_state, result) tuple, (DriverConnectionState.DISCONNECTED,
None) if successful, (None, None) otherwise.
@raises InstrumentParameterException if missing or invalid param dict.
"""
next_state = None
result = None
# Get the required param dict.
config = kwargs.get('config', None) # via kwargs
# TODO use kwargs as the only mechanism
if config is None:
try:
config = args[0] # via first argument
except IndexError:
pass
if config is None:
raise InstrumentParameterException('Missing comms config parameter.')
# Verify dict and construct connection client.
self._connection = self._build_connection(config)
next_state = DriverConnectionState.DISCONNECTED
return (next_state, result)
########################################################################
# Disconnected handlers.
########################################################################
def _handler_disconnected_enter(self, *args, **kwargs):
"""
Enter disconnected state.
"""
# Send state change event to agent.
self._connection_lost = True
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_disconnected_exit(self, *args, **kwargs):
"""
Exit disconnected state.
"""
pass
def _handler_disconnected_initialize(self, *args, **kwargs):
"""
Initialize device communications. Causes the connection parameters to
be reset.
@retval (next_state, result) tuple, (DriverConnectionState.UNCONFIGURED,
None).
"""
next_state = None
result = None
self._connection = None
next_state = DriverConnectionState.UNCONFIGURED
return (next_state, result)
def _handler_disconnected_configure(self, *args, **kwargs):
"""
Configure driver for device comms.
        @param args[0] Communications config dictionary.
@retval (next_state, result) tuple, (None, None).
@raises InstrumentParameterException if missing or invalid param dict.
"""
next_state = None
result = None
# Get required config param dict.
config = kwargs.get('config', None) # via kwargs
# TODO use kwargs as the only mechanism
if config is None:
try:
config = args[0] # via first argument
except IndexError:
pass
if config is None:
raise InstrumentParameterException('Missing comms config parameter.')
# Verify configuration dict, and update connection if possible.
self._connection = self._build_connection(config)
return (next_state, result)
def _handler_disconnected_connect(self, *args, **kwargs):
"""
Establish communications with the device via port agent / logger and
        construct and initialize a protocol FSM for device interaction.
@retval (next_state, result) tuple, (DriverConnectionState.CONNECTED,
None) if successful.
@raises InstrumentConnectionException if the attempt to connect failed.
"""
next_state = None
result = None
self._build_protocol()
try:
self._connection.init_comms(self._protocol.got_data,
self._protocol.got_raw,
self._got_exception,
self._lost_connection_callback)
self._protocol._connection = self._connection
next_state = DriverConnectionState.CONNECTED
except InstrumentConnectionException as e:
log.error("Connection Exception: %s", e)
log.error("Instrument Driver remaining in disconnected state.")
# Re-raise the exception
raise
return (next_state, result)
########################################################################
# Connected handlers.
########################################################################
def _handler_connected_enter(self, *args, **kwargs):
"""
Enter connected state.
"""
# Send state change event to agent.
self._connection_lost = False
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_connected_exit(self, *args, **kwargs):
"""
Exit connected state.
"""
pass
def _handler_connected_disconnect(self, *args, **kwargs):
"""
        Disconnect from the device via port agent / logger and destroy the
protocol FSM.
@retval (next_state, result) tuple, (DriverConnectionState.DISCONNECTED,
None) if successful.
"""
next_state = None
result = None
log.info("_handler_connected_disconnect: invoking stop_comms().")
self._connection.stop_comms()
self._protocol = None
next_state = DriverConnectionState.DISCONNECTED
return (next_state, result)
def _handler_connected_connection_lost(self, *args, **kwargs):
"""
The device connection was lost. Stop comms, destroy protocol FSM and
revert to disconnected state.
@retval (next_state, result) tuple, (DriverConnectionState.DISCONNECTED,
None).
"""
next_state = None
result = None
log.info("_handler_connected_connection_lost: invoking stop_comms().")
self._connection.stop_comms()
self._protocol = None
# Send async agent state change event.
log.info("_handler_connected_connection_lost: sending LOST_CONNECTION " \
"event, moving to DISCONNECTED state.")
self._driver_event(DriverAsyncEvent.AGENT_EVENT,
ResourceAgentEvent.LOST_CONNECTION)
next_state = DriverConnectionState.DISCONNECTED
return (next_state, result)
def _handler_connected_protocol_event(self, event, *args, **kwargs):
"""
Forward a driver command event to the protocol FSM.
@param args positional arguments to pass on.
@param kwargs keyword arguments to pass on.
@retval (next_state, result) tuple, (None, protocol result).
"""
next_state = None
result = self._protocol._protocol_fsm.on_event(event, *args, **kwargs)
return (next_state, result)
def _handler_connected_start_direct_event(self, event, *args, **kwargs):
"""
Stash the current config first, then forward a driver command event
to the protocol FSM.
@param args positional arguments to pass on.
@param kwargs keyword arguments to pass on.
@retval (next_state, result) tuple, (None, protocol result).
"""
next_state = None
# Get the value for all direct access parameters and store them in the protocol
self._pre_da_config = self.get_resource(self._protocol.get_direct_access_params())
self._protocol.store_direct_access_config(self._pre_da_config)
self._protocol.enable_da_initialization()
log.debug("starting DA. Storing DA parameters for restore: %s", self._pre_da_config)
result = self._protocol._protocol_fsm.on_event(event, *args, **kwargs)
return (next_state, result)
def _handler_connected_stop_direct_event(self, event, *args, **kwargs):
"""
Restore previous config first, then forward a driver command event
to the protocol FSM.
@param args positional arguments to pass on.
@param kwargs keyword arguments to pass on.
@retval (next_state, result) tuple, (None, protocol result).
"""
next_state = None
result = self._protocol._protocol_fsm.on_event(event, *args, **kwargs)
# Moving the responsibility for applying DA parameters to the
# protocol.
#self.restore_direct_access_params(self._pre_da_config)
return (next_state, result)
########################################################################
# Helpers.
########################################################################
def _build_connection(self, config):
"""
Constructs and returns a Connection object according to the given
configuration. The connection object is a LoggerClient instance in
        this base class. Subclasses can override this operation as needed.
The value returned by this operation is assigned to self._connection
and also to self._protocol._connection upon entering in the
DriverConnectionState.CONNECTED state.
@param config configuration dict
@retval a Connection instance, which will be assigned to
self._connection
@throws InstrumentParameterException Invalid configuration.
"""
if 'mock_port_agent' in config:
mock_port_agent = config['mock_port_agent']
# check for validity here...
if (mock_port_agent is not None):
return mock_port_agent
try:
addr = config['addr']
port = config['port']
cmd_port = config.get('cmd_port')
if isinstance(addr, str) and isinstance(port, int) and len(addr)>0:
return PortAgentClient(addr, port, cmd_port)
else:
raise InstrumentParameterException('Invalid comms config dict.')
except (TypeError, KeyError):
raise InstrumentParameterException('Invalid comms config dict.')
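    # Illustrative comms config accepted by _build_connection() above (the
    # host/port values are assumptions and must match the running port agent):
    #   {'addr': 'localhost', 'port': 4001, 'cmd_port': 4002}
    # Alternatively, tests may pass {'mock_port_agent': <mock object>} instead.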
def _got_exception(self, exception):
"""
Callback for the client for exception handling with async data. Exceptions
are wrapped in an event and sent up to the agent.
"""
try:
log.error("ASYNC Data Exception Detected: %s (%s)", exception.__class__.__name__, str(exception))
finally:
self._driver_event(DriverAsyncEvent.ERROR, exception)
def _lost_connection_callback(self, error_string):
"""
        A callback invoked by the port agent client when it loses
connectivity to the port agent.
"""
if not self._connection_lost:
log.info("_lost_connection_callback: starting thread to send " \
"CONNECTION_LOST event to instrument driver.")
self._connection_lost = True
lost_comms_thread = Thread(
target=self._connection_fsm.on_event,
args=(DriverEvent.CONNECTION_LOST, ))
lost_comms_thread.start()
else:
log.info("_lost_connection_callback: connection_lost flag true.")
def _build_protocol(self):
"""
Construct device specific single connection protocol FSM.
Overridden in device specific subclasses.
"""
pass
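# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original framework): a minimal concrete
# driver built on SingleConnectionInstrumentDriver. "ExampleProtocol" is a
# hypothetical protocol class; a real driver supplies its own implementation.
# ---------------------------------------------------------------------------
# class ExampleInstrumentDriver(SingleConnectionInstrumentDriver):
#     def _build_protocol(self):
#         # Construct the device-specific protocol FSM for this instrument.
#         self._protocol = ExampleProtocol(self._driver_event)
#
# Typical lifecycle driven by the connection FSM defined above:
#     driver = ExampleInstrumentDriver(event_callback)
#     driver.configure(config={'addr': 'localhost', 'port': 4001})
#     driver.connect()                 # builds protocol, opens port agent client
#     driver.get_resource(DriverParameter.ALL)
#     driver.disconnect()
#     driver.initialize()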
| bsd-2-clause | 7,355,743,505,821,260,000 | 41.122951 | 144 | 0.632224 | false | 4.7471 | true | false | false |
fvbock/gDBPool | gdbpool/connection_pool.py | 1 | 6127 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Florian von Bock (f at vonbock dot info)
#
# gDBPool - db connection pooling for gevent
__author__ = "Florian von Bock"
__email__ = "f at vonbock dot info"
__version__ = "0.1.3"
import gevent
from gevent import monkey; monkey.patch_all()
import psycopg2
import sys, traceback
from psyco_ge import make_psycopg_green; make_psycopg_green()
assert 'gdbpool.psyco_ge' in sys.modules.keys()
from gevent.queue import Queue, Empty as QueueEmptyException
from time import time
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_UNCOMMITTED, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ, ISOLATION_LEVEL_SERIALIZABLE
psycopg2.extensions.register_type( psycopg2.extensions.UNICODE )
psycopg2.extensions.register_type( psycopg2.extensions.UNICODEARRAY )
from psycopg2 import InterfaceError
from pool_connection import PoolConnection
from gdbpool_error import DBInteractionException, DBPoolConnectionException, PoolConnectionException, StreamEndException
class DBConnectionPool( object ):
"""
The Connection Pool
"Classic" pool of connections with connection lifecycle management
"""
def __init__( self, dsn, db_module = 'psycopg2', pool_size = 10,
conn_lifetime = 600, do_log = False ):
"""
:param string dsn: DSN for the default `class:DBConnectionPool`
:param string db_module: name of the DB-API module to use
:param int pool_size: Poolsize of the first/default `class:DBConnectionPool`
:param int conn_lifetime: Number of seconds after which a connection will be recycled when :meth:`.put` back
:param bool do_log: Log to the console or not
"""
if do_log:
import logging
logging.basicConfig( level = logging.INFO, format = "%(asctime)s %(message)s" )
self.logger = logging.getLogger()
self.do_log = do_log
self.dsn = dsn
self.db_module = db_module
self.pool_size = pool_size
self.CONN_RECYCLE_AFTER = conn_lifetime if conn_lifetime is not None else 0
self.pool = Queue( self.pool_size )
__import__( db_module )
self.connection_jobs = map( lambda x: gevent.spawn( self.create_connection ), xrange( self.pool_size ) )
try:
gevent.joinall( self.connection_jobs, timeout = 10 )
assert self.pool_size == self.pool.qsize()
if self.do_log:
self.logger.info( "$ poolsize: %i" % self.pool.qsize() )
self.ready = True
except AssertionError, e:
raise DBPoolConnectionException( "Could not get %s connections for the pool as requested. %s" % ( self.pool_size, e.message ) )
except Exception, e:
raise e
def __del__( self ):
while not self.pool.empty():
conn = self.pool.get()
conn.close()
def create_connection( self ):
"""
Try to open a new connection to the database and put it on the pool
"""
try:
self.pool.put( PoolConnection( self.db_module, self.dsn ) )
except PoolConnectionException, e:
raise e
def resize( self, new_size ):
"""
Resize the pool (nr. of connections on the pool)
        :param int new_size: nr of connections the pool should be resized to
"""
while self.qsize != new_size:
if self.qsize < new_size:
self.create_connection()
else:
conn = self.pool.get()
conn.close()
def get( self, timeout = None, iso_level = ISOLATION_LEVEL_READ_COMMITTED ):
"""
Get a connection from the pool
:param int timeout: seconds to wait for a connection or None
:param iso_level: transaction isolation level to be set on the connection. Must be one of psycopg2.extensions ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_UNCOMMITTED, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ, ISOLATION_LEVEL_SERIALIZABLE
:returns: -- a :class:`PoolConnection`
"""
try:
conn = self.pool.get( timeout = timeout )
if iso_level != ISOLATION_LEVEL_READ_COMMITTED:
conn.set_isolation_level( iso_level )
return conn
except gevent.queue.Empty, e:
raise PoolConnectionException( e )
def put( self, conn, timeout = 1, force_recycle = False ):
"""
Put a connection back onto the pool
:param conn: The :class:`PoolConnection` object to be put back onto the pool
        :param int timeout: timeout in seconds to put the connection onto the pool
:param bool force_recycle: Force connection recycling independent from the pool wide connection lifecycle
"""
if isinstance( conn, PoolConnection ):
if ( self.CONN_RECYCLE_AFTER != 0 and time() - conn.PoolConnection_initialized_at < self.CONN_RECYCLE_AFTER ) and force_recycle == False:
try:
conn.reset()
if conn.isolation_level != ISOLATION_LEVEL_READ_COMMITTED:
if self.do_log:
self.logger.info( "set ISOLATION_LEVEL_READ_COMMITTED." )
conn.set_isolation_level( ISOLATION_LEVEL_READ_COMMITTED )
conn.commit()
self.pool.put( conn, timeout = timeout )
except gevent.queue.Full, e:
raise PoolConnectionException( e )
else:
if self.do_log:
self.logger.info( "recycling conn." )
try:
conn.reset() # ?
conn.close()
except InterfaceError:
pass
del conn
gevent.spawn( self.create_connection ).join()
else:
raise PoolConnectionException( "Passed object %s is not a PoolConnection." % ( conn, ) )
@property
def qsize( self ):
return self.pool.qsize()
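# Minimal usage sketch (illustrative only; the DSN below is an assumption and
# must point at a reachable PostgreSQL instance). PoolConnection is assumed to
# proxy the underlying psycopg2 connection (cursor(), commit(), ...).
if __name__ == '__main__':
    pool = DBConnectionPool( 'host=127.0.0.1 dbname=test user=test password=test',
                             pool_size = 4, do_log = True )
    conn = pool.get( timeout = 5 )
    try:
        cur = conn.cursor()
        cur.execute( 'SELECT 1' )
        print cur.fetchone()
    finally:
        # hand the connection back so it can be reused or recycled
        pool.put( conn )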
| mit | -6,869,467,594,450,040,000 | 40.120805 | 273 | 0.610903 | false | 4.087392 | false | false | false |
nachowski/csv2sqlite | csv2sqlite.py | 1 | 3373 | # Converts a CSV file (typically exported from individual .xls sheets) to a sqlite db as a new table
#
# - The CSV filename (without .csv extension) is used as the table name.
# - The columns from the first row are used as column names in the table.
# - You can store multiple tables (from multiple csv files) in the same sqlite3 db
# - The table will contain an __id primary key and an android_metadata table to make it android-compatible.
# - Typically the .db file should be zipped and copied to <android-project>/assets
#
# Usage: python2 csv2sqlite.py my_fabulous_data.csv db.sqlite
#
# Author: Nachiket Apte <[email protected]>
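#
# Example input (illustrative values): the parser below expects ';' as the
# delimiter and the first row to hold the column names, e.g. "cities.csv":
#
#   name;country;population
#   Mumbai;India;12442373
#   Berlin;Germany;3645000
#
# becomes a table "cities" with TEXT columns name, country, population plus
# an __id INTEGER PRIMARY KEY.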
import csv, sqlite3, sys, os
try:
filename = sys.argv[1]
except IndexError:
print 'Missing argument: python csv2sqlite.py <tablename.csv>'
sys.exit(2)
try:
sqlitefilename = sys.argv[2]
except IndexError:
print 'Using default name for db: mydb.sqlite'
sqlitefilename = "mydb.sqlite"
# open our csv file for parsing. We use a standard db filename which may contain
# multiple tables from different csv files
reader = csv.reader(open(filename, 'r'), delimiter=';')
table, fileExtension = os.path.splitext(filename)
conn = sqlite3.connect(sqlitefilename)
curs = conn.cursor()
# Android-specific shizz. Remove if not relevant
curs.execute("DROP TABLE IF EXISTS android_metadata")
curs.execute("CREATE TABLE IF NOT EXISTS android_metadata (locale TEXT DEFAULT 'en_US')")
curs.execute("INSERT INTO 'android_metadata' VALUES ('en_US')")
##
counter = 0
# Screw fancy functions, I'm a python noob
tableInsertValues = "?";
tableInsertSql = "INSERT INTO " + table + " (__id"
for row in reader:
if counter == 0:
# first row of csv, create table based on columns
colNameCreateString = ""
for colName in row:
# No spaces in column names. All other formatting is preserved
colName = colName.replace(' ', '')
# All columns are strings, good luck future developers
colNameCreateString += ", " + colName + " TEXT"
# Magic here
tableInsertSql += ", " + colName
tableInsertValues += ", ?"
# make our insert statement based on the column values
tableInsertSql += ") VALUES (" + tableInsertValues + ");"
# make and execute our create statement
curs.execute("DROP TABLE IF EXISTS " + table)
print "Making table " + table + " with " + str(len(row)) + " columns"
try:
curs.execute("CREATE TABLE IF NOT EXISTS " + table + " ( __id INTEGER PRIMARY KEY" + colNameCreateString + ");")
except sqlite3.OperationalError:
# Some .xls files might be missing a title row
print "First row must contain headers! This one contains " + str(row)
sys.exit(2)
else:
# insert row as data
to_db = [counter]
for colVal in row:
colVal = colVal.strip(); # trim
if len(colVal) == 0:
# excel is dumb sometimes, convert empty strings to null values
to_db.append(None)
else:
to_db.append(unicode(colVal.strip(), "utf8"))
curs.execute(tableInsertSql, to_db)
counter += 1
conn.commit()
print "Imported " + str(counter - 1) + " rows into " + sqlitefilename | mit | -521,818,109,497,795,300 | 37.781609 | 124 | 0.636525 | false | 3.963572 | false | false | false |
razziel89/ManipulateAggregates | ManipulateAggregates/collection/gnuplot/__init__.py | 1 | 33249 | """Class definition to control Gnuplot from Python.
"""
# This file is part of ManipulateAggregates.
#
# Copyright (C) 2016 by Torsten Sachse
#
# ManipulateAggregates is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ManipulateAggregates is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ManipulateAggregates. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import itertools
from subprocess import Popen, PIPE
from shutil import move
import tempfile as tf
import distutils.spawn as ds
from . import gpcsv
from . import postprocess
# default values for gnuplot
GNUPLOT_DEFAULT = {
"dash": "AUTO",
"color": "AUTO",
"point": "AUTO",
"lines": True,
"points": False,
"title": "AUTO",
"xcol": 1,
"ycol": 2,
"zcol": 3,
"type": "UNKNOWN",
"head": False,
"linewidth": 1.0,
"pointsize": 1.0,
"solid": True,
"opacity": 1.0,
}
def _which(executable):
"""Return whether or not an executable was found.
Args:
executable: (string) the name of the executable
Returns:
whether or not the specified executable was found
"""
return ds.find_executable(executable) is not None
def _mv(src, dest):
"""Move a file from src to dest.
Args:
src: (string) source path of the file
dest: (string) destination path of the file
"""
move(src, dest)
class gnuplot:
"""Controller class for gnuplot.
Attributes:
correct_mark: (bool) whether or not marks should be slightly displaced if they
overlap
        correct_mark_dist: (float) by how much to displace overlapping marks
dict: (dictionary) holds configuration parameters
dimensions: (int, 2 or 3): the dimensionality of the plot
        f: (pipe) STDIN of GP to which the controlling information is written.
font: (string) font to use
fontsize: (int) fontsize to use
GP: (process) an instance of Gnuplot opened via Popen
rectanglecount: (int) how many rectangles were already drawn
tempfiles: list of temporary files and whether they shall be auto-deleted
xmarks: (dictionary) used to store marks in x direction - check for overlaps
xrange: (tuple of 2 floats) range for x axis
ymarks: (dictionary) used to store marks in y direction - check for overlaps
yrange: (tuple of 2 floats) range for y axis
zrange: (tuple of 2 floats) range for y axis if plot in 3 dimensions
"""
class gp_dict:
"""A dummy class to hold the description of the config dictionaries.
All members of this class are actually keys (of type string) that can
be in each config dictionary and the given type is that of the
associated value. The two letter abbreviations lc, lw and dt are
Gnuplot commands. Please see Gnuplot's documentation.
Attributes:
lines: (none) if the key is set, plot a line
points: (none) if the key is set, plot points
type: (string) type of plot: 'file' (raw data) or 'function' (analytic function)
function: (string) description of the analytic function
filename: (string) the file that contains the raw data
xcol: (int) coloumn in the file that contains x data
ycol: (int) coloumn in the file that contains y data
zcol: (int) coloumn in the file that contains z data
            border: (none) when plotting a rectangle, it will have a border if this is set
color: (int) colour as in 'lc INT'. Special value: 'AUTO'
dash: (int) dash type as in 'dt INT'. Special value: 'AUTO'
point: (int) point type as in 'dt INT'. Special value: 'AUTO'
head: (none) whether arrows shall have heads
linewidth: (float) line width as in 'lw FLOAT'
opacity: (float) opacity of the rectangle (if solid is set)
pointsize: (float) size of the points (if points is declared)
solid: (none) if set, plot a solid rectangle (not just border)
title: (string) the name of the plotted function/data
"""
def __init__(self):
"""Dummy constructor, do not use."""
raise Exception("This dummy class shall not be used directly")
self.lines = ""
self.points = ""
self.type = ""
self.function = ""
self.filename = ""
self.xcol = ""
self.ycol = ""
self.zcol = ""
self.border = ""
self.color = ""
self.dash = ""
self.point = ""
self.head = ""
self.linewidth = ""
self.opacity = ""
self.pointsize = ""
self.solid = ""
self.title = ""
def __init__(
self,
filename,
size=(20, 12),
linewidth=1,
xrange=None,
yrange=None,
correct_mark=True,
correct_mark_dist=0.001,
fontsize=10,
xlog=False,
ylog=False,
classic_colors=True,
dimensions=2,
font="Helvetica",
verbose=False,
):
"""Constructor.
Args:
filename: (string) name of the output file that will be created
size: (tuple of 2 ints) the (x,y) size of the output in cm
linewidth: (float) lines are scaled by this factor
xrange: (tuple of 2 floats) range for x axis
yrange: (tuple of 2 floats) range for y axis
correct_mark: (bool) whether or not marks should be slightly displaced if
they overlap
            correct_mark_dist: (float) by how much to displace overlapping marks
fontsize: (int) fontsize to use
xlog: (bool) whether or not the x axis is on a logarithmic scale
ylog: (bool) whether or not the y axis is on a logarithmic scale
classic_colors: (bool) whether or not to use Gnuplot's classic color scheme
dimensions: (int, 2 or 3): the dimensionality of the plot
font: (string) font to use
verbose: (bool) if True, echo everything that is being passed to Gnuplot to
stderr
"""
self.tempfiles = []
self.GP = Popen(["gnuplot"], stdin=PIPE, stdout=sys.stderr, stderr=sys.stderr)
self.cat = Popen(["cat"], stdin=PIPE, stdout=sys.stderr, stderr=sys.stderr)
self.f = self.GP.stdin
self.rectanglecount = 1
self.dimensions = dimensions
self.fontsize = fontsize
self.font = font
self.filename = filename
self.verbose = verbose
if self.dimensions not in (2, 3):
raise ValueError("Wrong number of dimensions provided.")
self._write(
'set term pdf enhanced colour font "%s,%d" size %dcm,%dcm linewidth %d\n'
% (font, fontsize, size[0], size[1], linewidth)
)
if classic_colors:
self._write("set colors classic\n")
if xlog:
self._write("set logscale x\n")
if ylog:
self._write("set logscale y\n")
self.correct_mark = correct_mark
if correct_mark:
self.xmarks = {}
self.ymarks = {}
self.correct_mark_dist = correct_mark_dist
self.xrange = xrange
if self.xrange is not None:
self._write("set xrange [%f:%f]\n" % (xrange[0], xrange[1]))
self.yrange = yrange
if self.yrange is not None:
self._write("set yrange [%f:%f]\n" % (yrange[0], yrange[1]))
self._write('set output "%s.pdf"\n' % (filename))
def _write(self, s):
"""Write something to Gnuplot but also print it to
stderr if verbose output is requested.
Args:
s (string): string to be passed to Gnuplot
"""
self.f.write(s)
if self.verbose:
self.cat.stdin.write(s)
def _set_dict(self, dict):
"""Set the dictionary holding config parameters.
Each function to be plotted has a certain set of possible config
options. See gnuplot.gp_dict for all possible options.
Args:
dict: (dictionary) holds configuration parameters in the form of
key-value pairs.
"""
self.dict = dict
def _get(self, *args, **kwargs):
"""Retrieve a value from the dictionary.
Args:
`*args`: (strings) the config options whose associated value shall be
retrieved.
`**kwargs`: (dictionary) key strict: whether or not to raise an Error
if the key cannot be found in the current dictionary or the
default one. If False then None is returned in such cases.
"""
for k in args:
result = self.dict.get(k, GNUPLOT_DEFAULT.get(k, None))
if result is not None:
break
if kwargs.get("strict", False) and result is None:
raise KeyError(
"Key %s not provided by the current dictionary and no default set."
                % (k)
)
return result
def set_title(self, title):
"""Set the title of the plot.
Args:
title: (string) the title of the generated plot
"""
self._write('set title "%s"\n' % (title))
def emptyplot(self):
"""Create an empty plot.
This is useful if only marks or arrows shall be plotted. Normally,
Gnuplot would not create a plot under such conditions.
Requires xrange and yrange to be set using set_xrange and
set_yrange.
"""
if self.xrange is None or self.yrange is None:
raise RuntimeError(
"Cannot perform emtpy plot if either xrange or yrange is not set."
)
self._write("plot NaN notitle\n")
self.postplot()
def postplot(self):
"""Unset arrows and labels.
This is required when creating a plot that contains multiple pages since
otherwise the labels and arrows/marks would be repeated on every page.
"""
self._write("unset arrow\n")
self._write("unset label\n")
def unset(self, prop, oneprop=True):
"""Send an arbitrary 'unset' command to Gnuplot.
Args:
prop: (string or iterable of strings) if oneprop is True (the
default), unset the one property given via prop. Otherwise
unset all properties in the iterable prop.
oneprop: (bool) whether or not prop is not an iterable
"""
if oneprop:
iterable = [prop]
else:
iterable = prop
for p in iterable:
self._write("unset %s\n" % (p))
def set(self, prop, oneprop=True):
"""Send an arbitrary 'set' command to Gnuplot.
Args:
prop: (string or iterable of strings) if oneprop is True (the
        default), set the one property given via prop. Otherwise set
all properties in the iterable prop.
oneprop: (bool) whether or not prop is not an iterable
"""
if oneprop:
iterable = [prop]
else:
iterable = prop
for p in iterable:
self._write("set %s\n" % (p))
def lineplot(self, data):
"""Plot one or several functions (can also be raw data).
Each function has a certain set of possible config options. See
gnuplot.gp_dict for all possible options.
Args:
data: (dictionary or list of dictionaries) each dictionary contains
a set of key-value pairs that define the function to be plotted
and how it shall be formated.
"""
if isinstance(data, dict):
dict_list = [data]
else:
try:
if False in (isinstance(d, dict) for d in data):
raise TypeError("")
else:
dict_list = data
except TypeError:
raise TypeError(
"Data for lineplot is neither a dictionary nor a list of dictionaries."
)
if len(dict_list) == 0:
print("WARNING: cannot plot since no data was passed over", file=sys.stderr)
return
breakchar = ", "
if self.dimensions == 2:
self._write("plot ")
elif self.dimensions == 3:
self._write("splot ")
count = 1
for f in dict_list:
self._set_dict(f)
if f == dict_list[-1]:
breakchar = "\n"
if not self._get("lines") and not self._get("points"):
raise ValueError(
"At least one of 'lines' or 'points' has to be declared otherwise nothing would be plotted."
)
if self._get("type") == "function":
self._write("%s " % (self._get("function")))
elif self._get("type") == "filename" or self._get("type") == "file":
self._write('"%s" u ' % (self._get("filename", "file", strict=False)))
# x coloumn
if isinstance(self._get("xcol"), int):
self._write("($%d):" % (self._get("xcol")))
else:
self._write("(%s):" % (self._get("xcol")))
# y coloumn
if isinstance(self._get("ycol"), int):
self._write("($%d)" % (self._get("ycol")))
else:
self._write("(%s)" % (self._get("ycol")))
# z coloumn, if present
if self.dimensions == 3:
if isinstance(self._get("zcol"), int):
self._write(":($%d) " % (self._get("zcol")))
else:
self._write(":(%s) " % (self._get("zcol")))
self._write(" ")
elif self._get("type") == "UNKNOWN":
raise ValueError(
"No plot type provided. Missing key 'type' from dictionary."
)
else:
raise ValueError("Unknown plot type: %s" % (f["type"]))
if self._set_style(count, "line"):
count += 1
self._write(breakchar)
if self.correct_mark:
self.xmarks = {}
self.ymarks = {}
self.postplot()
def data_to_file(self, data, formatstring=None, delete=True):
"""Convert some data (given as x-y value pairs) to a format for Gnuplot.
Args:
data: (list of pairs of floats) the data to be plotted
formatstring: (string) a printf-type string that will be used to
convert each element of data to a string. Gnuplot will plot
what's left after the conversion.
delete: (bool) whether or not the temporary file that is created
shall be deleted when finalize is called
"""
tempfile = tf.NamedTemporaryFile(delete=False)
if formatstring is None:
for datum in data:
tempfile.write("\t".join(map(str, datum)) + "\n")
else:
for datum in data:
tempfile.write(formatstring % tuple(datum))
tempfile.close()
self.tempfiles.append((delete, tempfile.name))
return tempfile.name
def _set_style(self, count, type):
"""Create s string that Gnuplot understands and that describes a plot's style.
Args:
count: (int) how many times already the automatic style generation
has been used
type: (string) what kind of thing shall be plotted. Can be:
"lines", "rectangle" or "arrow".
"""
used_count = False
style = ""
if type == "line":
style += "w "
if self._get("lines"):
style += "l"
if self._get("points"):
style += "p "
style += "ps %f " % (self._get("pointsize"))
style += " "
dash = self._get("dash")
if dash == "AUTO":
style += "dt %d " % (count)
used_count = True
else:
style += "dt %d " % (dash)
point = self._get("point")
if point == "AUTO":
style += "pt %d " % (count)
used_count = True
else:
style += "pt %d " % (point)
color = self._get("color")
if color == "AUTO":
style += "lc %d " % (count)
used_count = True
else:
style += "lc %d " % (color)
width = self._get("linewidth")
style += "lw %f " % (width)
title = self._get("title")
if title == "AUTO":
pass
elif title is None:
style += "notitle "
else:
style += 'title "%s" ' % (title)
elif type == "rectangle":
color = self._get("color")
if color == "AUTO" or color is None:
style += "fc "
else:
style += "fc %s " % (color)
style += "lt -1 lw 0 "
if self._get("solid"):
style += "fs solid %f " % (self._get("opacity"))
if self._get("border") is None:
style += "noborder "
elif type == "arrow":
dash = self._get("dash")
if dash == "AUTO":
style += "dt %d " % (count)
used_count = True
else:
style += "dt %d " % (dash)
color = self._get("color")
if color == "AUTO":
style += "lc %d " % (count)
used_count = True
else:
style += "lc %d " % (color)
width = self._get("linewidth")
style += "lw %f " % (width)
if not self._get("head"):
style += "nohead "
self._write(style)
return used_count
def set_xrange(self, start, stop):
"""Set the range of the plot in x-direction.
Args:
start: (float) start of the x range
stop: (float) end of the x range
"""
self._write("set xrange [%f:%f]\n" % (start, stop))
self.xrange = (start, stop)
def set_yrange(self, start, stop):
"""Set the range of the plot in y-direction.
Args:
start: (float) start of the y range
stop: (float) end of the y range
"""
self._write("set yrange [%f:%f]\n" % (start, stop))
self.yrange = (start, stop)
def set_zrange(self, start, stop):
"""Set the range of the plot in z-direction if the plot is 3D.
Args:
start: (float) start of the z range
stop: (float) end of the z range
"""
if self.dimensions == 3:
self._write("set zrange [%f:%f]\n" % (start, stop))
self.zrange = (start, stop)
else:
raise ValueError("Cannot set zrange for non-3d plot.")
def set_stick(self, pos, height, color, base=0.0, width=0.5):
"""Create a vertical line of a certain height (i.e., stick).
Args:
pos: (float) the x position of the stick
height: (float) the height in the y direction of the stick
            color: (int) the color of the stick as in 'lc INT'
base: (float) where the stick shall start (defaults to x axis)
width: (float) the width of the stick
"""
try:
if len(pos) != len(height):
print(
"To print several sticks, the positions list and the height list must have the same number of elements."
)
else:
gen = ((p, h) for p, h in zip(pos, height))
except TypeError:
gen = [(pos, height)]
for p, h in gen:
self.mark(
p,
"x",
width=width,
color=color,
rectangle=False,
opacity=1.0,
center=True,
extent=(base, base + h),
)
def set_background(
self, opacities, colors=None, nr_fields=None, direction="x", extent=None
):
"""Create a non-white background for the plot.
You can create backgrounds of areas with alternating colors or even
checked backgrounds. This is realized as repeating a given pattern of
opacities and colours until the entirety of the plot is filled in a
certain direction. A checked pattern can be obtained by repeating a
black-and-white pattern multiple times as black-and-white ->
white-and-black -> black-and-white etc. These backgrounds are realized
via rectangles, so they support all the properties of Gnuplot's
rectangles.
Args:
opacities: (iterable of floats) pattern of opacities
colors: (iterable of ints) pattern of colors. Defaults to black for
all pattern elements.
nr_fields: (int) the number of sections in which to partition the
plot. E.g., if given a black-and-white pattern, a value of 5
would result in black->white->black->white->black.
direction: ("x" or "y") the direction of the pattern (defaults to "x")
extent: (tuple of 2 floats) how far in the other direction (that
                not specified by direction) the pattern shall extend
"""
if direction == "x":
samerange = self.xrange
otherrange = self.yrange
elif direction == "y":
samerange = self.yrange
otherrange = self.xrange
else:
raise ValueError('Unknown direction "%s", must be x or y.' % (direction))
if self.dimensions != 2:
raise RuntimeError("Cannot create background for non-2d plot.")
if extent is None:
if otherrange is None:
raise ValueError(
"Cannot create background in %s-direction without other range being set."
% (direction)
)
else:
extent = otherrange
if samerange is None:
raise ValueError(
"Cannot create background in %s-direction without same range being set."
% (direction)
)
try:
if colors is None:
colors = [None] * len(opacities)
except TypeError:
opacities = [opacities]
colors = [None]
try:
if len(colors) != len(opacities):
raise ValueError(
"Cannot create background, colors and opacities do not have the same number of elements."
)
else:
iterable = [(c, o) for c, o in zip(colors, opacities)]
except TypeError:
            iterable = [(colors, opacities)]
if nr_fields is None:
nr_fields = len(colors)
result = []
width = 1.0 * (samerange[1] - samerange[0]) / (nr_fields)
pos = samerange[0]
count = 0
for color, opacity in itertools.cycle(iterable):
self.mark(
pos,
direction,
width=width,
color=color,
rectangle=True,
center=False,
opacity=opacity,
extent=extent,
)
result.append((pos, pos + width))
pos += width
count += 1
if count == nr_fields:
break
return result
def mark(
self,
pos,
direction,
width=0.5,
color=None,
rectangle=False,
opacity=1.0,
center=True,
extent=None,
label=None,
zpos=None,
dash=None,
):
        Create a vertical or horizontal line on the plot.
If the plot is 3D, the position in the 3rd direction is also required.
However, the plots are still in planes parallel to the x-y plane.
Args:
pos: (float) x or y position of the mark (depending on direction)
direction: ("x" or "y") the direction of the line
width: (float) the line width
color: (int) colour as in 'lc INT'
rectangle: (bool) whether the mark is not just a line but a rectangle
opacity: (float) opacity of the mark (only used if rectangle)
center: (bool) whether or not the given position is the mark's center.
Otherwise, the pos is considered to be the left border (only
used if rectangle)
extent: (tuple of 2 floats) the startpoint and endpoint in the
direction of the line (defaults to: entire plot)
label: (dictionary) an optional description of an optional label. See
description of set_label for required and optional keys.
zpos: (float) position of the mark in a 3D plot. Required if the
dimensionality of the plot is 3.
"""
if direction == "x":
hererange = self.yrange
heremarks = self.xmarks
startpos = lambda p, e: (p, e[0])
endpos = lambda p, e: (p, e[1])
elif direction == "y":
hererange = self.xrange
heremarks = self.ymarks
startpos = lambda p, e: (e[0], p)
endpos = lambda p, e: (e[1], p)
else:
raise ValueError('Unknown direction "%s", must be x or y.' % (direction))
if self.dimensions != 2:
if rectangle:
raise RuntimeError(
"Cannot set %smark as rectangle for non-2d plot." % (direction)
)
elif self.dimensions == 3:
if zpos is None:
raise RuntimeError(
"Cannot set %smark as arrow for non-2d plot without zpos defined."
% (direction)
)
else:
raise RuntimeError(
"Fatal internal error: wrong number of dimensions set. However that happened."
)
if extent is None:
if hererange is None:
raise ValueError(
"Cannot create %smark without other range being set." % (direction)
)
else:
extent = hererange
if not rectangle:
if self.correct_mark:
if pos in heremarks:
heremarks[pos] += 1
_pos = pos + self.correct_mark_dist * (heremarks[pos])
else:
heremarks[pos] = 0
_pos = pos
self._write("set arrow from %8.7E,%8.7E" % startpos(_pos, extent))
if self.dimensions == 3:
self._write(",%8.7E" % (zpos))
self._write(" to %8.7E,%8.7E" % endpos(_pos, extent))
if self.dimensions == 3:
self._write(",%8.7E" % (zpos))
self._write(" ")
self._set_dict(
{
"head": False,
"color": color if color is not None else 0,
"linewidth": width,
"dash": 1 if dash is None else dash,
}
)
self._set_style(heremarks[pos], "arrow")
if opacity != 1.0:
print(
"WARNING: opacity unequal 100% set, but is ignored for xmark that is not a rectangle.",
file=sys.stderr,
)
else:
if center:
pos -= 0.5 * width
self._write("set obj %d rect from " % (self.rectanglecount))
self._write("%f,%f to " % startpos(pos, extent))
self._write("%f,%f " % endpos(pos + width, extent))
self._set_dict({"color": color, "opacity": opacity, "border": None})
self._set_style(self.rectanglecount, "rectangle")
self.rectanglecount += 1
self._write("\n")
if label is not None:
if isinstance(label, dict):
label = [label]
for l in label:
if "where" in l:
where = l["where"]
else:
where = "tl"
if where == "tl":
labelpos = (
startpos(pos, extent)[0] + l["offset"][0],
startpos(pos, extent)[1] + l["offset"][1],
)
l["pivot"] = "left"
elif where == "bl":
labelpos = (
startpos(pos, extent)[0] + l["offset"][0],
endpos(pos, extent)[1] + l["offset"][1],
)
l["pivot"] = "left"
elif where == "tr":
labelpos = (
endpos(pos, extent)[0] + l["offset"][0],
startpos(pos, extent)[1] + l["offset"][1],
)
l["pivot"] = "right"
elif where == "br":
labelpos = (
endpos(pos, extent)[0] + l["offset"][0],
endpos(pos, extent)[1] + l["offset"][1],
)
l["pivot"] = "right"
elif where == "c":
labelpos = (
0.5 * (startpos(pos, extent)[0] + endpos(pos, extent)[0])
+ l["offset"][0],
0.5 * (startpos(pos, extent)[1] + endpos(pos, extent)[1])
+ l["offset"][1],
)
l["pivot"] = "center"
else:
raise ValueError(
'Wrong value for "where" in dictionary. Must be one of ["tl","bl","tr","br","c"] but is %s.'
% (where)
)
l["position"] = labelpos
self.set_label(l)
def set_label(self, label):
"""Set a label on a plot.
The argument label is a dictionary whose entries for "font",
"fontsize", "pivot", "rotation" and "depth" can overwrite the defaults.
Needs to have entries for "text" (the label's text) and "position"
(tuple of floats describing the position).
Args:
label: (dictionary) a description of the label. See description of
set_label for required and optional keys.
"""
if "font" in label:
font = label["font"]
else:
font = self.font
if "fontsize" in label:
fontsize = label["fontsize"]
else:
fontsize = self.fontsize
if "pivot" in label:
pivot = label["pivot"]
else:
pivot = "center"
if "rotation" in label:
rotation = label["rotation"]
else:
rotation = 0.0
if "depth" in label:
depth = label["depth"]
else:
depth = "front"
self._write(
'set label "%s" at %f,%f font "%s,%d" %s rotate by %.2f %s\n'
% (
label["text"],
label["position"][0],
label["position"][1],
font,
fontsize,
depth,
rotation,
pivot,
)
)
def finalize(self, delete=True, convert=False):
"""Finalize the plot.
This calls a set of routines that finish the plotting procedure.
Without calling this, the plot will never actually be created.
Args:
delete: (bool) whether or not temporary files that were declared as
"to-be-deleted" shall actually be deleted.
convert: (bool) whether or not to convert the eps file to a pdf
file if the required software is installed
"""
if not self.f.closed:
self.f.close()
rc = self.GP.wait()
if delete:
for d, filename in self.tempfiles:
if d:
os.remove(filename)
return rc
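# Minimal usage sketch (illustrative only): requires a working `gnuplot`
# executable on the PATH; the output name and sample data are assumptions.
if __name__ == "__main__":
    import math
    gp = gnuplot("example_plot", xrange=(0.0, 6.3), yrange=(-1.5, 1.5))
    gp.set_title("Demo plot")
    # raw data written to a temporary file that lineplot() can reference
    datafile = gp.data_to_file([(0.1 * i, math.cos(0.1 * i)) for i in range(64)])
    gp.lineplot([
        {"type": "function", "function": "sin(x)", "title": "sin(x)"},
        {"type": "file", "filename": datafile, "xcol": 1, "ycol": 2,
         "lines": False, "points": True, "title": "cos(x) samples"},
    ])
    gp.finalize()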
| gpl-3.0 | 6,274,585,563,111,448,000 | 36.48478 | 124 | 0.512045 | false | 4.428476 | false | false | false |
relekang/python-semantic-release | semantic_release/settings.py | 1 | 4061 | """Helpers to read settings from setup.cfg or pyproject.toml
"""
import configparser
import importlib
import logging
import os
from collections import UserDict
from functools import wraps
from os import getcwd
from typing import Callable, List
import tomlkit
from tomlkit.exceptions import TOMLKitError
from .errors import ImproperConfigurationError
logger = logging.getLogger(__name__)
def _config():
cwd = getcwd()
ini_paths = [
os.path.join(os.path.dirname(__file__), "defaults.cfg"),
os.path.join(cwd, "setup.cfg"),
]
ini_config = _config_from_ini(ini_paths)
toml_path = os.path.join(cwd, "pyproject.toml")
toml_config = _config_from_pyproject(toml_path)
# Cast to a UserDict so that we can mock the get() method.
return UserDict({**ini_config, **toml_config})
def _config_from_ini(paths):
parser = configparser.ConfigParser()
parser.read(paths)
flags = {
"changelog_capitalize",
"changelog_scope",
"check_build_status",
"commit_version_number",
"patch_without_tag",
"major_on_zero",
"remove_dist",
"upload_to_pypi",
"upload_to_release",
"tag_commit",
}
# Iterate through the sections so that default values are applied
# correctly. See:
# https://stackoverflow.com/questions/1773793/convert-configparser-items-to-dictionary
config = {}
for key, _ in parser.items("semantic_release"):
if key in flags:
config[key] = parser.getboolean("semantic_release", key)
else:
config[key] = parser.get("semantic_release", key)
return config
def _config_from_pyproject(path):
if os.path.isfile(path):
try:
with open(path, "r") as f:
pyproject = tomlkit.loads(f.read())
if pyproject:
return pyproject.get("tool", {}).get("semantic_release", {})
except TOMLKitError as e:
logger.warning(f"Could not decode pyproject.toml: {e}")
return {}
config = _config()
def current_commit_parser() -> Callable:
"""Get the currently-configured commit parser
:raises ImproperConfigurationError: if ImportError or AttributeError is raised
:returns: Commit parser
"""
try:
# All except the last part is the import path
parts = config.get("commit_parser").split(".")
module = ".".join(parts[:-1])
# The final part is the name of the parse function
return getattr(importlib.import_module(module), parts[-1])
except (ImportError, AttributeError) as error:
raise ImproperConfigurationError(f'Unable to import parser "{error}"')
def current_changelog_components() -> List[Callable]:
"""Get the currently-configured changelog components
:raises ImproperConfigurationError: if ImportError or AttributeError is raised
:returns: List of component functions
"""
component_paths = config.get("changelog_components").split(",")
components = list()
for path in component_paths:
try:
# All except the last part is the import path
parts = path.split(".")
module = ".".join(parts[:-1])
# The final part is the name of the component function
components.append(getattr(importlib.import_module(module), parts[-1]))
except (ImportError, AttributeError) as error:
raise ImproperConfigurationError(
f'Unable to import changelog component "{path}"'
)
return components
def overload_configuration(func):
"""This decorator gets the content of the "define" array and edits "config"
according to the pairs of key/value.
"""
@wraps(func)
def wrap(*args, **kwargs):
if "define" in kwargs:
for defined_param in kwargs["define"]:
pair = defined_param.split("=", maxsplit=1)
if len(pair) == 2:
config[str(pair[0])] = pair[1]
return func(*args, **kwargs)
return wrap
| mit | -702,771,670,662,746,500 | 28.860294 | 90 | 0.628417 | false | 4.097881 | true | false | false |
Azure/WALinuxAgent | tests/protocol/mocks.py | 1 | 9717 | # Copyright 2020 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import contextlib
import re
from azurelinuxagent.common.protocol.wire import WireProtocol
from azurelinuxagent.common.utils import restutil
from tests.tools import patch
from tests.protocol import mockwiredata
# regex used to determine whether to use the mock wireserver data
_USE_MOCK_WIRE_DATA_RE = re.compile(
r'https?://(mock-goal-state|{0}).*'.format(restutil.KNOWN_WIRESERVER_IP.replace(r'.', r'\.')), re.IGNORECASE)
@contextlib.contextmanager
def mock_wire_protocol(mock_wire_data_file, http_get_handler=None, http_post_handler=None, http_put_handler=None, fail_on_unknown_request=True):
"""
    Creates a WireProtocol object that handles requests to the WireServer and the Host GA Plugin (i.e. requests on the WireServer endpoint), plus
some requests to storage (requests on the fake server 'mock-goal-state').
The data returned by those requests is read from the files specified by 'mock_wire_data_file' (which must follow the structure of the data
files defined in tests/protocol/mockwiredata.py).
The caller can also provide handler functions for specific HTTP methods using the http_*_handler arguments. The return value of the handler
    function is interpreted similarly to the "return_value" argument of patch(): if it is an exception, the exception is raised; if it is
    any object other than None, the value is returned by the mock. If the handler function returns None, the call is handled using the mock
    wireserver data or passed to the original restutil.http_request.
    The returned protocol object maintains a list of "tracked" urls. When a handler function returns a value that is not None, the url for the
request is automatically added to the tracked list. The handler function can add other items to this list using the track_url() method on
the mock.
The return value of this function is an instance of WireProtocol augmented with these properties/methods:
* mock_wire_data - the WireProtocolData constructed from the mock_wire_data_file parameter.
* start() - starts the patchers for http_request and CryptUtil
* stop() - stops the patchers
* track_url(url) - adds the given item to the list of tracked urls.
* get_tracked_urls() - returns the list of tracked urls.
NOTE: This function patches common.utils.restutil.http_request and common.protocol.wire.CryptUtil; you need to be aware of this if your
    tests patch those methods or others in the call stack (e.g. restutil.get, restutil._http_request, etc.)
"""
tracked_urls = []
# use a helper function to keep the HTTP handlers (they need to be modified by set_http_handlers() and
# Python 2.* does not support nonlocal declarations)
def http_handlers(get, post, put):
http_handlers.get = get
http_handlers.post = post
http_handlers.put = put
del tracked_urls[:]
http_handlers(get=http_get_handler, post=http_post_handler, put=http_put_handler)
#
# function used to patch restutil.http_request
#
original_http_request = restutil.http_request
def http_request(method, url, data, **kwargs):
# if there is a handler for the request, use it
handler = None
if method == 'GET':
handler = http_handlers.get
elif method == 'POST':
handler = http_handlers.post
elif method == 'PUT':
handler = http_handlers.put
if handler is not None:
if method == 'GET':
return_value = handler(url, **kwargs)
else:
return_value = handler(url, data, **kwargs)
if return_value is not None:
tracked_urls.append(url)
if isinstance(return_value, Exception):
raise return_value
return return_value
# if the request was not handled try to use the mock wireserver data
if _USE_MOCK_WIRE_DATA_RE.match(url) is not None:
if method == 'GET':
return protocol.mock_wire_data.mock_http_get(url, **kwargs)
if method == 'POST':
return protocol.mock_wire_data.mock_http_post(url, data, **kwargs)
if method == 'PUT':
return protocol.mock_wire_data.mock_http_put(url, data, **kwargs)
# the request was not handled; fail or call the original resutil.http_request
if fail_on_unknown_request:
raise ValueError('Unknown HTTP request: {0} [{1}]'.format(url, method))
return original_http_request(method, url, data, **kwargs)
#
# functions to start/stop the mocks
#
def start():
patched = patch("azurelinuxagent.common.utils.restutil.http_request", side_effect=http_request)
patched.start()
start.http_request_patch = patched
patched = patch("azurelinuxagent.common.protocol.wire.CryptUtil", side_effect=protocol.mock_wire_data.mock_crypt_util)
patched.start()
start.crypt_util_patch = patched
    start.http_request_patch = None
    start.crypt_util_patch = None
def stop():
if start.crypt_util_patch is not None:
start.crypt_util_patch.stop()
if start.http_request_patch is not None:
start.http_request_patch.stop()
#
# create the protocol object
#
protocol = WireProtocol(restutil.KNOWN_WIRESERVER_IP)
protocol.mock_wire_data = mockwiredata.WireProtocolData(mock_wire_data_file)
protocol.start = start
protocol.stop = stop
protocol.track_url = lambda url: tracked_urls.append(url) # pylint: disable=unnecessary-lambda
protocol.get_tracked_urls = lambda: tracked_urls
protocol.set_http_handlers = lambda http_get_handler=None, http_post_handler=None, http_put_handler=None:\
http_handlers(get=http_get_handler, post=http_post_handler, put=http_put_handler)
# go do it
try:
protocol.start()
protocol.detect()
yield protocol
finally:
protocol.stop()
class HttpRequestPredicates(object):
"""
Utility functions to check the urls used by tests
"""
@staticmethod
def is_goal_state_request(url):
return url.lower() == 'http://{0}/machine/?comp=goalstate'.format(restutil.KNOWN_WIRESERVER_IP)
@staticmethod
def is_telemetry_request(url):
return url.lower() == 'http://{0}/machine?comp=telemetrydata'.format(restutil.KNOWN_WIRESERVER_IP)
@staticmethod
def is_health_service_request(url):
return url.lower() == 'http://{0}:80/healthservice'.format(restutil.KNOWN_WIRESERVER_IP)
@staticmethod
def is_in_vm_artifacts_profile_request(url):
return re.match(r'https://.+\.blob\.core\.windows\.net/\$system/.+\.(vmSettings|settings)\?.+', url) is not None
@staticmethod
def _get_host_plugin_request_artifact_location(url, request_kwargs):
if 'headers' not in request_kwargs:
raise ValueError('Host plugin request is missing HTTP headers ({0})'.format(url))
headers = request_kwargs['headers']
if 'x-ms-artifact-location' not in headers:
raise ValueError('Host plugin request is missing the x-ms-artifact-location header ({0})'.format(url))
return headers['x-ms-artifact-location']
@staticmethod
def is_host_plugin_health_request(url):
return url.lower() == 'http://{0}:{1}/health'.format(restutil.KNOWN_WIRESERVER_IP, restutil.HOST_PLUGIN_PORT)
@staticmethod
def is_host_plugin_extension_artifact_request(url):
return url.lower() == 'http://{0}:{1}/extensionartifact'.format(restutil.KNOWN_WIRESERVER_IP, restutil.HOST_PLUGIN_PORT)
@staticmethod
def is_host_plugin_status_request(url):
return url.lower() == 'http://{0}:{1}/status'.format(restutil.KNOWN_WIRESERVER_IP, restutil.HOST_PLUGIN_PORT)
@staticmethod
def is_host_plugin_extension_request(request_url, request_kwargs, extension_url):
if not HttpRequestPredicates.is_host_plugin_extension_artifact_request(request_url):
return False
artifact_location = HttpRequestPredicates._get_host_plugin_request_artifact_location(request_url, request_kwargs)
return artifact_location == extension_url
@staticmethod
def is_host_plugin_in_vm_artifacts_profile_request(url, request_kwargs):
if not HttpRequestPredicates.is_host_plugin_extension_artifact_request(url):
return False
artifact_location = HttpRequestPredicates._get_host_plugin_request_artifact_location(url, request_kwargs)
return HttpRequestPredicates.is_in_vm_artifacts_profile_request(artifact_location)
@staticmethod
def is_host_plugin_put_logs_request(url):
return url.lower() == 'http://{0}:{1}/vmagentlog'.format(restutil.KNOWN_WIRESERVER_IP,
restutil.HOST_PLUGIN_PORT)
class MockHttpResponse:
def __init__(self, status, body=''):
self.body = body
self.status = status
def read(self, *_):
return self.body
| apache-2.0 | -2,802,583,881,621,370,000 | 43.573394 | 144 | 0.678605 | false | 3.966122 | false | false | false |
Hanaasagi/sorator | orator/query/grammars/postgres_grammar.py | 1 | 4336 | # -*- coding: utf-8 -*-
from .grammar import QueryGrammar
class PostgresQueryGrammar(QueryGrammar):
_operators = [
'=', '<', '>', '<=', '>=', '<>', '!=',
'like', 'not like', 'between', 'ilike',
'&', '|', '#', '<<', '>>'
]
marker = '%s'
def _compile_lock(self, query, value):
"""
Compile the lock into SQL
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param value: The lock value
:type value: bool or str
:return: The compiled lock
:rtype: str
"""
if isinstance(value, str):
return value
if value:
return 'FOR UPDATE'
return 'FOR SHARE'
def compile_update(self, query, values):
"""
Compile an update statement into SQL
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param values: The update values
:type values: dict
:return: The compiled update
:rtype: str
"""
table = self.wrap_table(query.from__)
columns = self._compile_update_columns(values)
from_ = self._compile_update_from(query)
where = self._compile_update_wheres(query)
return ('UPDATE %s SET %s%s %s' %
(table, columns, from_, where)).strip()
def _compile_update_columns(self, values):
"""
Compile the columns for the update statement
:param values: The columns
:type values: dict
:return: The compiled columns
:rtype: str
"""
columns = []
for key, value in values.items():
columns.append('%s = %s' % (self.wrap(key), self.parameter(value)))
return ', '.join(columns)
def _compile_update_from(self, query):
"""
Compile the "from" clause for an update with a join.
:param query: A QueryBuilder instance
:type query: QueryBuilder
:return: The compiled sql
:rtype: str
"""
if not query.joins:
return ''
froms = []
for join in query.joins:
froms.append(self.wrap_table(join.table))
if len(froms):
return ' FROM %s' % ', '.join(froms)
return ''
def _compile_update_wheres(self, query):
"""
Compile the additional where clauses for updates with joins.
:param query: A QueryBuilder instance
:type query: QueryBuilder
:return: The compiled sql
:rtype: str
"""
base_where = self._compile_wheres(query)
if not query.joins:
return base_where
join_where = self._compile_update_join_wheres(query)
if not base_where.strip():
return 'WHERE %s' % self._remove_leading_boolean(join_where)
return '%s %s' % (base_where, join_where)
def _compile_update_join_wheres(self, query):
"""
Compile the "join" clauses for an update.
:param query: A QueryBuilder instance
:type query: QueryBuilder
:return: The compiled sql
:rtype: str
"""
join_wheres = []
for join in query.joins:
for clause in join.clauses:
join_wheres.append(self._compile_join_constraints(clause))
return ' '.join(join_wheres)
def compile_insert_get_id(self, query, values, sequence=None):
"""
Compile an insert and get ID statement into SQL.
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param values: The values to insert
:type values: dict
:param sequence: The id sequence
:type sequence: str
:return: The compiled statement
:rtype: str
"""
if sequence is None:
sequence = 'id'
return '%s RETURNING %s'\
% (self.compile_insert(query, values), self.wrap(sequence))
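        # Illustrative result, assuming the usual compile_insert() output shape:
        #   'INSERT INTO "users" ("email") VALUES (%s) RETURNING "id"'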
def compile_truncate(self, query):
"""
Compile a truncate table statement into SQL.
:param query: A QueryBuilder instance
:type query: QueryBuilder
:return: The compiled statement
:rtype: str
"""
return {
'TRUNCATE %s RESTART IDENTITY' % self.wrap_table(query.from__): {}
}
| mit | 5,993,220,880,501,831,000 | 23.777143 | 79 | 0.545895 | false | 4.271921 | false | false | false |
jianghuaw/nova | nova/tests/unit/api/openstack/compute/test_block_device_mapping.py | 1 | 17244 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
from six.moves import range
from webob import exc
from nova.api.openstack.compute import block_device_mapping
from nova.api.openstack.compute import servers as servers_v21
from nova import block_device
from nova.compute import api as compute_api
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.image import fake
from nova.tests.unit import matchers
CONF = cfg.CONF
class BlockDeviceMappingTestV21(test.TestCase):
validation_error = exception.ValidationError
def _setup_controller(self):
self.controller = servers_v21.ServersController()
def setUp(self):
super(BlockDeviceMappingTestV21, self).setUp()
fakes.stub_out_nw_api(self)
self._setup_controller()
fake.stub_out_image_service(self)
self.bdm = [{
'no_device': None,
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake',
'device_name': 'vdb',
'delete_on_termination': False,
}]
def _get_servers_body(self, no_image=False):
body = {
'server': {
'name': 'server_test',
'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
'flavorRef': 'http://localhost/123/flavors/3',
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
if no_image:
del body['server']['imageRef']
return body
def _test_create(self, params, no_image=False):
body = self._get_servers_body(no_image)
body['server'].update(params)
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.headers['content-type'] = 'application/json'
req.body = jsonutils.dump_as_bytes(body)
self.controller.create(req, body=body).obj['server']
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exc.HTTPBadRequest,
self._test_create, {}, no_image=True)
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_no_image(
self, mock_bdm_image_metadata, mock_validate_bdm):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, mock.ANY)
mock_bdm_image_metadata.assert_called_once_with(
mock.ANY, mock.ANY, False)
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_empty_imageRef(
self, mock_bdm_image_metadata, mock_validate_bdm):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm,
'imageRef': ''}
self._test_create(params)
def test_create_instance_with_imageRef_as_full_url(self):
bdm = [{'device_name': 'foo'}]
image_href = ('http://localhost/v2/fake/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
params = {block_device_mapping.ATTRIBUTE_NAME: bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_non_uuid_imageRef(self):
bdm = [{'device_name': 'foo'}]
params = {block_device_mapping.ATTRIBUTE_NAME: bdm,
'imageRef': '123123abcd'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_invalid_bdm_in_2nd_dict(self):
bdm_1st = {"source_type": "image", "delete_on_termination": True,
"boot_index": 0,
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}
bdm_2nd = {"source_type": "volume",
"uuid": "99d92140-3d0c-4ea5-a49c-f94c38c607f0",
"destination_type": "invalid"}
bdm = [bdm_1st, bdm_2nd]
params = {block_device_mapping.ATTRIBUTE_NAME: bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_boot_index_none_ok(self):
"""Tests creating a server with two block devices. One is the boot
device and the other is a non-bootable device.
"""
# From the docs:
# To disable a device from booting, set the boot index to a negative
# value or use the default boot index value, which is None. The
# simplest usage is, set the boot index of the boot device to 0 and use
# the default boot index value, None, for any other devices.
bdms = [
# This is the bootable device that would create a 20GB cinder
# volume from the given image.
{
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
'volume_size': 20
},
# This is the non-bootable 10GB ext4 ephemeral block device.
{
'source_type': 'blank',
'destination_type': 'local',
'boot_index': None,
# If 'guest_format' is 'swap' then a swap device is created.
'guest_format': 'ext4'
}
]
params = {block_device_mapping.ATTRIBUTE_NAME: bdms}
self._test_create(params, no_image=True)
def test_create_instance_with_boot_index_none_image_local_fails(self):
"""Tests creating a server with a local image-based block device which
has a boot_index of None which is invalid.
"""
bdms = [{
'source_type': 'image',
'destination_type': 'local',
'boot_index': None,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6'
}]
params = {block_device_mapping.ATTRIBUTE_NAME: bdms}
self.assertRaises(exc.HTTPBadRequest, self._test_create,
params, no_image=True)
def test_create_instance_with_invalid_boot_index(self):
bdm = [{"source_type": "image", "delete_on_termination": True,
"boot_index": 'invalid',
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}]
params = {block_device_mapping.ATTRIBUTE_NAME: bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_device_name_not_string(self):
self.bdm[0]['device_name'] = 123
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(self.validation_error,
self._test_create, self.params)
def test_create_instance_with_device_name_empty(self):
self.bdm[0]['device_name'] = ''
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_device_name_too_long(self):
self.bdm[0]['device_name'] = 'a' * 256
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_space_in_device_name(self):
self.bdm[0]['device_name'] = 'v da'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_invalid_size(self):
self.bdm[0]['volume_size'] = 'hello world'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def _test_create_instance_with_destination_type_error(self,
destination_type):
self.bdm[0]['destination_type'] = destination_type
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_destination_type_empty_string(self):
self._test_create_instance_with_destination_type_error('')
def test_create_instance_with_invalid_destination_type(self):
self._test_create_instance_with_destination_type_error('fake')
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm(self, mock_validate_bdm):
bdm = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'uuid': 'fake_vol'
}]
bdm_expected = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'volume_id': 'fake_vol'
}]
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
for expected, received in zip(bdm_expected,
kwargs['block_device_mapping']):
self.assertThat(block_device.BlockDeviceDict(expected),
matchers.DictMatches(received))
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: bdm}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY)
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm_missing_device_name(self, mock_validate_bdm):
del self.bdm[0]['device_name']
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
self.assertNotIn(None,
kwargs['block_device_mapping'][0]['device_name'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY)
@mock.patch.object(
block_device.BlockDeviceDict, '_validate',
side_effect=exception.InvalidBDMFormat(details='Wrong BDM'))
def test_create_instance_bdm_validation_error(self, mock_validate):
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(exc.HTTPBadRequest,
self._test_create, params, no_image=True)
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails(self, fake_bdm_meta):
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(exc.HTTPBadRequest, self._test_create, params,
no_image=True)
def test_create_instance_bdm_api_validation_fails(self):
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
bdm_exceptions = ((exception.InvalidBDMSnapshot, {'id': 'fake'}),
(exception.InvalidBDMVolume, {'id': 'fake'}),
(exception.InvalidBDMImage, {'id': 'fake'}),
(exception.InvalidBDMBootSequence, {}),
(exception.InvalidBDMLocalsLimit, {}))
ex_iter = iter(bdm_exceptions)
def _validate_bdm(*args, **kwargs):
self.validation_fail_test_validate_called = True
ex, kargs = next(ex_iter)
raise ex(**kargs)
def _instance_destroy(*args, **kwargs):
self.validation_fail_instance_destroy_called = True
self.stub_out('nova.compute.api.API._validate_bdm', _validate_bdm)
self.stub_out('nova.objects.Instance.destroy', _instance_destroy)
for _unused in range(len(bdm_exceptions)):
params = {block_device_mapping.ATTRIBUTE_NAME:
[self.bdm[0].copy()]}
self.assertRaises(exc.HTTPBadRequest,
self._test_create, params)
self.assertTrue(self.validation_fail_test_validate_called)
self.assertFalse(self.validation_fail_instance_destroy_called)
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
| apache-2.0 | -440,329,227,146,488,260 | 39.478873 | 79 | 0.587799 | false | 3.903124 | true | false | false |
spadae22/odoo | addons/stock_account/wizard/stock_invoice_onshipping.py | 1 | 6916 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
JOURNAL_TYPE_MAP = {
('outgoing', 'customer'): ['sale'],
('outgoing', 'supplier'): ['purchase_refund'],
('outgoing', 'transit'): ['sale', 'purchase_refund'],
('incoming', 'supplier'): ['purchase'],
('incoming', 'customer'): ['sale_refund'],
('incoming', 'transit'): ['purchase', 'sale_refund'],
}
class stock_invoice_onshipping(osv.osv_memory):
def _get_journal(self, cr, uid, context=None):
journal_obj = self.pool.get('account.journal')
journal_type = self._get_journal_type(cr, uid, context=context)
journals = journal_obj.search(cr, uid, [('type', '=', journal_type)])
return journals and journals[0] or False
def _get_journal_type(self, cr, uid, context=None):
if context is None:
context = {}
res_ids = context and context.get('active_ids', [])
pick_obj = self.pool.get('stock.picking')
pickings = pick_obj.browse(cr, uid, res_ids, context=context)
pick = pickings and pickings[0]
if not pick or not pick.move_lines:
return 'sale'
type = pick.picking_type_id.code
usage = pick.move_lines[0].location_id.usage if type == 'incoming' else pick.move_lines[0].location_dest_id.usage
return JOURNAL_TYPE_MAP.get((type, usage), ['sale'])[0]
_name = "stock.invoice.onshipping"
_description = "Stock Invoice Onshipping"
_columns = {
'journal_id': fields.many2one('account.journal', 'Destination Journal', required=True),
'journal_type': fields.selection([('purchase_refund', 'Refund Purchase'), ('purchase', 'Create Vendor Invoice'),
('sale_refund', 'Refund Sale'), ('sale', 'Create Customer Invoice')], 'Journal Type', readonly=True),
'group': fields.boolean("Group by partner"),
'invoice_date': fields.date('Invoice Date'),
}
_defaults = {
'journal_type': _get_journal_type,
'journal_id' : _get_journal,
}
def onchange_journal_id(self, cr, uid, ids, journal_id, context=None):
if context is None:
context = {}
domain = {}
value = {}
active_id = context.get('active_id')
if active_id:
picking = self.pool['stock.picking'].browse(cr, uid, active_id, context=context)
type = picking.picking_type_id.code
usage = picking.move_lines[0].location_id.usage if type == 'incoming' else picking.move_lines[0].location_dest_id.usage
journal_types = JOURNAL_TYPE_MAP.get((type, usage), ['sale', 'purchase', 'sale_refund', 'purchase_refund'])
domain['journal_id'] = [('type', 'in', journal_types)]
if journal_id:
journal = self.pool['account.journal'].browse(cr, uid, journal_id, context=context)
value['journal_type'] = journal.type
return {'value': value, 'domain': domain}
def view_init(self, cr, uid, fields_list, context=None):
if context is None:
context = {}
res = super(stock_invoice_onshipping, self).view_init(cr, uid, fields_list, context=context)
pick_obj = self.pool.get('stock.picking')
count = 0
active_ids = context.get('active_ids',[])
for pick in pick_obj.browse(cr, uid, active_ids, context=context):
if pick.invoice_state != '2binvoiced':
count += 1
if len(active_ids) == count:
raise osv.except_osv(_('Warning!'), _('None of these picking lists require invoicing.'))
return res
def open_invoice(self, cr, uid, ids, context=None):
if context is None:
context = {}
invoice_ids = self.create_invoice(cr, uid, ids, context=context)
if not invoice_ids:
raise osv.except_osv(_('Error!'), _('No invoice created!'))
data = self.browse(cr, uid, ids[0], context=context)
action_model = False
action = {}
journal2type = {'sale':'out_invoice', 'purchase':'in_invoice' , 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
inv_type = journal2type.get(data.journal_type) or 'out_invoice'
data_pool = self.pool.get('ir.model.data')
if inv_type == "out_invoice":
action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree1')
elif inv_type == "in_invoice":
action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree2')
elif inv_type == "out_refund":
action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree3')
elif inv_type == "in_refund":
action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree4')
if action_id:
action_pool = self.pool['ir.actions.act_window']
action = action_pool.read(cr, uid, action_id, context=context)
action['domain'] = "[('id','in', ["+','.join(map(str,invoice_ids))+"])]"
return action
return True
def create_invoice(self, cr, uid, ids, context=None):
context = dict(context or {})
picking_pool = self.pool.get('stock.picking')
data = self.browse(cr, uid, ids[0], context=context)
journal2type = {'sale':'out_invoice', 'purchase':'in_invoice', 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
context['date_inv'] = data.invoice_date
acc_journal = self.pool.get("account.journal")
inv_type = journal2type.get(data.journal_type) or 'out_invoice'
context['inv_type'] = inv_type
active_ids = context.get('active_ids', [])
res = picking_pool.action_invoice_create(cr, uid, active_ids,
journal_id = data.journal_id.id,
group = data.group,
type = inv_type,
context=context)
return res
| agpl-3.0 | -8,529,741,307,361,504,000 | 45.106667 | 143 | 0.590081 | false | 3.754615 | false | false | false |
Ecogenomics/GTDBNCBI | scripts_dev/lpsn_scrape/lpsn_to_database.py | 1 | 4370 | #!/usr/bin/env python
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__prog_name__ = 'update_database_from_ftp.py'
__prog_desc__ = ('Update the LPSN tables in GTDB. ' +
                 'LPSN tables are independent of the metadata GTDB information')
__author__ = 'Pierre Chaumeil'
__copyright__ = 'Copyright 2016'
__credits__ = ['Pierre Chaumeil']
__license__ = 'GPL3'
__version__ = '0.0.1'
__maintainer__ = 'Pierre Chaumeil'
__email__ = '[email protected]'
__status__ = 'Development'
import os
import argparse
import sys
from database_configuration import GenomeDatabaseConnectionLPSNUpdate
class UpdateLPSNDatabase(object):
def __init__(self, path):
self.path = path
self.lpsn_genera_file = os.path.join(path, 'lpsn_genera.tsv')
self.lpsn_strains_file = os.path.join(path, 'lpsn_strains.tsv')
self.lpsn_species_file = os.path.join(path, 'lpsn_species.tsv')
self.temp_con = GenomeDatabaseConnectionLPSNUpdate.GenomeDatabaseConnectionLPSNUpdate()
self.temp_con.MakePostgresConnection()
self.temp_cur = self.temp_con.cursor()
def runUpdate(self):
# Check if the files exist:
if os.path.isfile(self.lpsn_genera_file) and os.path.isfile(self.lpsn_strains_file) and os.path.isfile(self.lpsn_species_file):
self.temp_cur.execute('TRUNCATE lpsn_genera;')
print "Deletion lpsn_genera done"
fr = open(self.lpsn_genera_file)
fr.readline()
self.temp_cur.copy_from(fr, 'lpsn_genera')
print 'Copy lpsn_genera done'
self.temp_con.commit()
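            # Note (psycopg2 behaviour): copy_from() expects the TSV columns to match the
            # target table's column order; the readline() call above skips the header row
            # (the same pattern is repeated for the other files below).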
self.temp_cur.execute('TRUNCATE lpsn_species;')
print "Deletion lpsn_species done"
fr = open(self.lpsn_species_file)
fr.readline()
self.temp_cur.copy_from(fr, 'lpsn_species')
print 'Copy lpsn_species done'
self.temp_con.commit()
fr = open(self.lpsn_strains_file)
fr.readline()
self.temp_cur.execute('TRUNCATE lpsn_strains;')
print "Deletion lpsn_strains done"
self.temp_cur.copy_from(fr, 'lpsn_strains')
print 'Copy lpsn_strains done'
self.temp_con.commit()
else:
print 'Some files are missing in {0}'.format(self.path)
self.temp_con.ClosePostgresConnection()
if __name__ == "__main__":
print __prog_name__ + ' v' + __version__ + ': ' + __prog_desc__
print ' by ' + __author__ + ' (' + __email__ + ')' + '\n'
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--lpsn_dir', dest="lpsn_dir",
required=True, help='Directory to the LPSN files lpsn_genera.tsv lpsn_species.tsv lpsn_strains.tsv')
args = parser.parse_args()
try:
update_lpsn_mngr = UpdateLPSNDatabase(args.lpsn_dir)
update_lpsn_mngr.runUpdate()
except SystemExit:
print "\nControlled exit resulting from an unrecoverable error or warning."
except:
print "\nUnexpected error:", sys.exc_info()[0]
raise
| gpl-3.0 | 6,039,277,418,561,814,000 | 41.019231 | 135 | 0.545538 | false | 3.770492 | false | false | false |
openvenues/libpostal | scripts/geodata/osm/definitions.py | 1 | 2511 | import os
import re
import six
from collections import defaultdict
from geodata.graph.topsort import topsort
this_dir = os.path.realpath(os.path.dirname(__file__))
DEFAULT_SCRIPT_PATH = os.path.join(this_dir, 'fetch_osm_address_data.sh')
valid_key_regex = re.compile('VALID_(.*?)_KEYS="(.*)"')
variable_regex = re.compile(r'\$VALID_(.*?)_KEYS(?=\b)')
kv_regex = re.compile('([^\s]*)=([^\s]*)')
class OSMDefinitions(object):
ALL = '*'
ADMIN_BORDER = 'admin_border'
ADMIN_NODE = 'admin_node'
AEROWAY = 'aeroway'
AMENITY = 'amenity'
BUILDING = 'building'
HISTORIC = 'historic'
LANDUSE = 'landuse'
NATURAL = 'natural'
LOCALITY = 'locality'
NEIGHBORHOOD = 'neighborhood'
EXTENDED_NEIGHBORHOOD = 'extended_neighborhood'
OFFICE = 'office'
PLACE = 'place'
POPULATED_PLACE = 'populated_place'
SHOP = 'shop'
TOURISM = 'tourism'
VENUE = 'venue'
WATERWAY = 'waterway'
def __init__(self, filename=DEFAULT_SCRIPT_PATH):
script = open(filename).read()
dependencies = defaultdict(list)
definitions = {}
matches = valid_key_regex.findall(script)
match_text = {d.lower(): t for d, t in matches}
for definition, text in matches:
variables = variable_regex.findall(text)
if not variables:
dependencies[definition.lower()] = []
for v in variables:
dependencies[definition.lower()].append(v.lower())
for definition in topsort(dependencies):
definition = definition.lower()
text = match_text[definition]
variables = variable_regex.findall(text)
for v in variables:
v = v.lower()
text = text.replace('$VALID_{}_KEYS'.format(v.upper()), match_text[v])
kvs = defaultdict(set)
for k, v in kv_regex.findall(text):
if v != '':
kvs[k].add(v.lower())
else:
kvs[k].add(self.ALL)
definitions[definition] = kvs
self.definitions = definitions
def meets_definition(self, props, category):
defs = self.definitions.get(category, {})
if not defs:
return False
elif self.ALL in defs:
return True
for k, v in six.iteritems(props):
if v.lower() in defs.get(k.lower(), set()):
return True
return False
osm_definitions = OSMDefinitions()
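# Usage sketch (the tag pair is illustrative):
# osm_definitions.meets_definition({'amenity': 'restaurant'}, OSMDefinitions.VENUE)
# returns True only if amenity=restaurant is listed under VALID_VENUE_KEYS in the parsed script.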
| mit | -6,333,495,864,088,283,000 | 27.213483 | 86 | 0.571485 | false | 3.70901 | false | false | false |
allanlei/django-saas | saas/multidb/models.py | 1 | 4469 | from django.db import models
from django.conf import settings
from django.utils import simplejson as json
from django.db import connections
from django.core.exceptions import ValidationError
from django.db.utils import ConnectionDoesNotExist
import managers
from signals import db_pre_load, db_post_load, db_pre_unload, db_post_unload
DEFAULT = settings.DATABASES['default']
def validate_json(value):
try:
json.loads(value)
except ValueError:
raise ValidationError('Database extra is not JSON serializable')
class Database(models.Model):
ENGINES = (
('django.db.backends.postgresql_psycopg2', 'django.db.backends.postgresql_psycopg2'),
('django.db.backends.postgresql', 'django.db.backends.postgresql'),
('django.db.backends.mysql', 'django.db.backends.mysql'),
('django.db.backends.sqlite3', 'django.db.backends.sqlite3'),
('django.db.backends.oracle', 'django.db.backends.oracle'),
)
db = models.CharField(max_length=256, unique=True, help_text='The database name that goes into Django settings')
engine = models.CharField(max_length=48, default=DEFAULT['ENGINE'], choices=ENGINES, help_text='Django database engine type')
name = models.CharField(max_length=256, null=False, blank=False, help_text='The name of the database')
user = models.CharField(max_length=24, blank=True, help_text='The database user')
password = models.CharField(max_length=512, blank=True, help_text='The password for the database user. Encrypted')
host = models.CharField(max_length=96, blank=True, default=DEFAULT['HOST'], help_text='The hostname of the database server')
port = models.CharField(max_length=24, blank=True, default=DEFAULT['PORT'], help_text='The port of the database server')
extra = models.TextField(default='{}', validators=[validate_json])
objects = managers.DatabaseManager()
def __unicode__(self):
return u'%s(%s)' % (self.db, self.engine.split('.')[-1])
@property
def settings(self):
return {
'ENGINE': self.engine,
'NAME': self.name,
'USER': self.user,
'PASSWORD': self.password,
'HOST': self.host,
'PORT': self.port,
'OPTIONS': self.options,
}
@property
def options(self):
try:
return json.loads(self.extra)
except json.JSONDecodeError:
self.extra = '{}'
return json.loads(self.extra)
@options.setter
def options(self, value):
self.extra = json.dumps(value)
def is_loaded(self):
return self.db in settings.DATABASES
def load(self):
db_pre_load.send(sender=self.__class__, instance=self)
loaded = False
if not self.is_loaded():
settings.DATABASES[self.db] = self.settings
loaded = True
db_post_load.send(sender=self.__class__, instance=self, loaded=loaded)
def unload(self):
db_pre_unload.send(sender=self.__class__, instance=self)
if self.is_loaded():
del settings.DATABASES[self.db]
self.disconnect()
db_post_unload.send(sender=self.__class__, instance=self)
def disconnect(self):
try:
connections[self.db].close()
except ConnectionDoesNotExist:
pass
if not self.is_loaded() and self.db in connections._connections:
del connections._connections[self.db]
def __enter__(self):
self.load()
def __exit__(self, *args, **exceptions):
self.unload()
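    # Usage sketch (assumes a saved Database row): ``with Database.objects.get(db='tenant1'):``
    # loads the connection into settings.DATABASES for the duration of the block and
    # unloads it afterwards, so queries can run via .using('tenant1') inside the block.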
from signals import create_db, drop_db, unload_db, startup_db
from django.db.backends.signals import connection_created
#if getattr(settings, 'SAAS_MULTIDB_STARTUP', True): connection_created.connect(startup_db, dispatch_uid='db_autoload')
if getattr(settings, 'SAAS_MULTIDB_AUTOCREATE', True): models.signals.post_save.connect(create_db, sender=Database)
if getattr(settings, 'SAAS_MULTIDB_AUTODROP', True): models.signals.post_delete.connect(drop_db, sender=Database)
if getattr(settings, 'SAAS_MULTIDB_AUTOUNLOAD', True): models.signals.post_delete.connect(unload_db, sender=Database)
def conn_created(sender, connection, **kwargs):
for key, conn in connections._connections.items():
if conn == connection: print 'Connected to %s' % key
#connection_created.connect(conn_created)
| bsd-3-clause | 302,155,965,839,929,400 | 34.188976 | 129 | 0.653166 | false | 3.906469 | false | false | false |
opencorato/sayit | speeches/south_migrations/0054__rename_popolospeaker.py | 1 | 19144 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.rename_table('speeches_popolospeaker', 'speeches_speaker')
db.alter_column('speeches_recordingtimestamp', 'speaker_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['speeches.Speaker'], null=True, on_delete=models.SET_NULL))
db.alter_column('speeches_speech', 'speaker_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['speeches.Speaker'], null=True, on_delete=models.SET_NULL))
if not db.dry_run:
orm['contenttypes.contenttype'].objects.filter(app_label='speeches', model='speaker').update(model='oldspeaker')
orm['contenttypes.contenttype'].objects.filter(app_label='speeches', model='popolospeaker').update(model='speaker')
def backwards(self, orm):
db.rename_table('speeches_speaker', 'speeches_popolospeaker')
db.alter_column('speeches_recordingtimestamp', 'speaker_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['speeches.PopoloSpeaker'], null=True, on_delete=models.SET_NULL))
db.alter_column('speeches_speech', 'speaker_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['speeches.PopoloSpeaker'], null=True, on_delete=models.SET_NULL))
if not db.dry_run:
orm['contenttypes.contenttype'].objects.filter(app_label='speeches', model='speaker').update(model='popolospeaker')
orm['contenttypes.contenttype'].objects.filter(app_label='speeches', model='oldspeaker').update(model='speaker')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'instances.instance': {
'Meta': {'object_name': 'Instance'},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_instances'", 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('instances.fields.DNSLabelField', [], {'unique': 'True', 'max_length': '63', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'instances'", 'blank': 'True', 'to': "orm['auth.User']"})
},
'popolo.contactdetail': {
'Meta': {'object_name': 'ContactDetail'},
'contact_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created_at': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'end_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'start_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'updated_at': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'popolo.identifier': {
'Meta': {'object_name': 'Identifier'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'scheme': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
},
'popolo.link': {
'Meta': {'object_name': 'Link'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'popolo.othername': {
'Meta': {'object_name': 'OtherName'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'end_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'start_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'})
},
'popolo.person': {
'Meta': {'object_name': 'Person'},
'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birth_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'created_at': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'death_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'family_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'given_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'patronymic_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'sort_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'start_date': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'updated_at': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
'popolo.source': {
'Meta': {'object_name': 'Source'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'speeches.recording': {
'Meta': {'object_name': 'Recording'},
'audio': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}),
'audio_duration': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'speeches.recordingtimestamp': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'RecordingTimestamp'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'recording': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'related_name': "'timestamps'", 'to': "orm['speeches.Recording']"}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['speeches.Speaker']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'speech': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['speeches.Speech']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'speeches.section': {
'Meta': {'ordering': "('id',)", 'unique_together': "(('parent', 'slug', 'instance'),)", 'object_name': 'Section'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['speeches.Section']"}),
'slug': ('sluggable.fields.SluggableField', [], {'unique_with': "('parent', 'instance')", 'max_length': '50', 'populate_from': "'title'"}),
'source_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'speeches.slug': {
'Meta': {'object_name': 'Slug'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'redirect': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'speeches.speaker': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('instance', 'slug'),)", 'object_name': 'Speaker', '_ormbases': ['popolo.Person']},
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'person_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['popolo.Person']", 'unique': 'True', 'primary_key': 'True'}),
'slug': ('sluggable.fields.SluggableField', [], {'unique_with': "('instance',)", 'max_length': '50', 'populate_from': "'name'"})
},
'speeches.speech': {
'Meta': {'ordering': "('start_date', 'start_time', 'id')", 'object_name': 'Speech'},
'audio': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'blank': 'True'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.TextField', [], {'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'location': ('django.db.models.fields.TextField', [], {'db_index': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['speeches.Section']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'source_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['speeches.Speaker']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'speaker_display': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['speeches.Tag']", 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'speeches.tag': {
'Meta': {'object_name': 'Tag'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['instances.Instance']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['speeches']
| agpl-3.0 | 8,611,772,012,662,599,000 | 84.464286 | 195 | 0.558765 | false | 3.613439 | false | false | false |
hcs/mailman | src/mailman/core/switchboard.py | 1 | 11246 | # Copyright (C) 2001-2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Queuing and dequeuing message/metadata pickle files.
Messages are represented as email.message.Message objects (or an instance of a
subclass). Metadata is represented as a Python dictionary. For every
message/metadata pair in a queue, a single file containing two pickles is
written. First, the message is written to the pickle, then the metadata
dictionary is written.
"""
from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
__all__ = [
'Switchboard',
'handle_ConfigurationUpdatedEvent',
]
import os
import time
import email
import pickle
import cPickle
import hashlib
import logging
from zope.interface import implementer
from mailman.config import config
from mailman.email.message import Message
from mailman.interfaces.configuration import ConfigurationUpdatedEvent
from mailman.interfaces.switchboard import ISwitchboard
from mailman.utilities.filesystem import makedirs
from mailman.utilities.string import expand
# 20 bytes of all bits set, maximum hashlib.sha.digest() value.
shamax = 0xffffffffffffffffffffffffffffffffffffffffL
# Small increment to add to time in case two entries have the same time. This
# prevents skipping one of two entries with the same time until the next pass.
DELTA = .0001
# We count the number of times a file has been moved to .bak and recovered.
# In order to prevent loops and a message flood, when the count reaches this
# value, we move the file to the bad queue as a .psv.
MAX_BAK_COUNT = 3
elog = logging.getLogger('mailman.error')
@implementer(ISwitchboard)
class Switchboard:
"""See `ISwitchboard`."""
def __init__(self, name, queue_directory,
slice=None, numslices=1, recover=False):
"""Create a switchboard object.
:param name: The queue name.
:type name: str
:param queue_directory: The queue directory.
:type queue_directory: str
:param slice: The slice number for this switchboard, or None. If not
None, it must be [0..`numslices`).
:type slice: int or None
:param numslices: The total number of slices to split this queue
directory into. It must be a power of 2.
:type numslices: int
:param recover: True if backup files should be recovered.
:type recover: bool
"""
assert (numslices & (numslices - 1)) == 0, (
'Not a power of 2: {0}'.format(numslices))
self.name = name
self.queue_directory = queue_directory
# If configured to, create the directory if it doesn't yet exist.
if config.create_paths:
makedirs(self.queue_directory, 0770)
# Fast track for no slices
self._lower = None
self._upper = None
# BAW: test performance and end-cases of this algorithm
if numslices <> 1:
self._lower = ((shamax + 1) * slice) / numslices
self._upper = (((shamax + 1) * (slice + 1)) / numslices) - 1
if recover:
self.recover_backup_files()
def enqueue(self, _msg, _metadata=None, **_kws):
"""See `ISwitchboard`."""
if _metadata is None:
_metadata = {}
# Calculate the SHA hexdigest of the message to get a unique base
# filename. We're also going to use the digest as a hash into the set
# of parallel runner processes.
data = _metadata.copy()
data.update(_kws)
listname = data.get('listname', '--nolist--')
# Get some data for the input to the sha hash.
now = time.time()
if data.get('_plaintext'):
protocol = 0
msgsave = cPickle.dumps(str(_msg), protocol)
else:
protocol = pickle.HIGHEST_PROTOCOL
msgsave = cPickle.dumps(_msg, protocol)
# listname is unicode but the input to the hash function must be an
# 8-bit string (eventually, a bytes object).
hashfood = msgsave + listname.encode('utf-8') + repr(now)
# Encode the current time into the file name for FIFO sorting. The
# file name consists of two parts separated by a '+': the received
# time for this message (i.e. when it first showed up on this system)
# and the sha hex digest.
filebase = repr(now) + '+' + hashlib.sha1(hashfood).hexdigest()
filename = os.path.join(self.queue_directory, filebase + '.pck')
tmpfile = filename + '.tmp'
# Always add the metadata schema version number
data['version'] = config.QFILE_SCHEMA_VERSION
# Filter out volatile entries. Use .keys() so that we can mutate the
# dictionary during the iteration.
for k in data.keys():
if k.startswith('_'):
del data[k]
# We have to tell the dequeue() method whether to parse the message
# object or not.
data['_parsemsg'] = (protocol == 0)
# Write to the pickle file the message object and metadata.
with open(tmpfile, 'w') as fp:
fp.write(msgsave)
cPickle.dump(data, fp, protocol)
fp.flush()
os.fsync(fp.fileno())
os.rename(tmpfile, filename)
return filebase
def dequeue(self, filebase):
"""See `ISwitchboard`."""
# Calculate the filename from the given filebase.
filename = os.path.join(self.queue_directory, filebase + '.pck')
backfile = os.path.join(self.queue_directory, filebase + '.bak')
# Read the message object and metadata.
with open(filename) as fp:
# Move the file to the backup file name for processing. If this
# process crashes uncleanly the .bak file will be used to
# re-instate the .pck file in order to try again.
os.rename(filename, backfile)
msg = cPickle.load(fp)
data = cPickle.load(fp)
if data.get('_parsemsg'):
# Calculate the original size of the text now so that we won't
# have to generate the message later when we do size restriction
# checking.
original_size = len(msg)
msg = email.message_from_string(msg, Message)
msg.original_size = original_size
data['original_size'] = original_size
return msg, data
def finish(self, filebase, preserve=False):
"""See `ISwitchboard`."""
bakfile = os.path.join(self.queue_directory, filebase + '.bak')
try:
if preserve:
bad_dir = config.switchboards['bad'].queue_directory
psvfile = os.path.join(bad_dir, filebase + '.psv')
os.rename(bakfile, psvfile)
else:
os.unlink(bakfile)
except EnvironmentError:
elog.exception(
'Failed to unlink/preserve backup file: %s', bakfile)
@property
def files(self):
"""See `ISwitchboard`."""
return self.get_files()
def get_files(self, extension='.pck'):
"""See `ISwitchboard`."""
times = {}
lower = self._lower
upper = self._upper
for f in os.listdir(self.queue_directory):
# By ignoring anything that doesn't end in .pck, we ignore
# tempfiles and avoid a race condition.
filebase, ext = os.path.splitext(f)
if ext <> extension:
continue
when, digest = filebase.split('+', 1)
# Throw out any files which don't match our bitrange. BAW: test
# performance and end-cases of this algorithm. MAS: both
# comparisons need to be <= to get complete range.
if lower is None or (lower <= long(digest, 16) <= upper):
key = float(when)
while key in times:
key += DELTA
times[key] = filebase
# FIFO sort
return [times[key] for key in sorted(times)]
def recover_backup_files(self):
"""See `ISwitchboard`."""
# Move all .bak files in our slice to .pck. It's impossible for both
# to exist at the same time, so the move is enough to ensure that our
# normal dequeuing process will handle them. We keep count in
# _bak_count in the metadata of the number of times we recover this
# file. When the count reaches MAX_BAK_COUNT, we move the .bak file
# to a .psv file in the bad queue.
for filebase in self.get_files('.bak'):
src = os.path.join(self.queue_directory, filebase + '.bak')
dst = os.path.join(self.queue_directory, filebase + '.pck')
with open(src, 'rb+') as fp:
try:
msg = cPickle.load(fp)
data_pos = fp.tell()
data = cPickle.load(fp)
except Exception as error:
# If unpickling throws any exception, just log and
# preserve this entry
elog.error('Unpickling .bak exception: %s\n'
'Preserving file: %s', error, filebase)
self.finish(filebase, preserve=True)
else:
data['_bak_count'] = data.get('_bak_count', 0) + 1
fp.seek(data_pos)
if data.get('_parsemsg'):
protocol = 0
else:
protocol = 1
cPickle.dump(data, fp, protocol)
fp.truncate()
fp.flush()
os.fsync(fp.fileno())
if data['_bak_count'] >= MAX_BAK_COUNT:
elog.error('.bak file max count, preserving file: %s',
filebase)
self.finish(filebase, preserve=True)
else:
os.rename(src, dst)
def handle_ConfigurationUpdatedEvent(event):
"""Initialize the global switchboards for input/output."""
if not isinstance(event, ConfigurationUpdatedEvent):
return
config = event.config
for conf in config.runner_configs:
name = conf.name.split('.')[-1]
assert name not in config.switchboards, (
'Duplicate runner name: {0}'.format(name))
substitutions = config.paths
substitutions['name'] = name
path = expand(conf.path, substitutions)
config.switchboards[name] = Switchboard(name, path)
| gpl-3.0 | -6,313,387,617,212,159,000 | 40.043796 | 78 | 0.599057 | false | 4.234187 | true | false | false |
all-of-us/raw-data-repository | rdr_service/services/google_sheets_client.py | 1 | 13026 | import backoff
from googleapiclient import discovery
from googleapiclient.errors import HttpError
from oauth2client.service_account import ServiceAccountCredentials
import socket
from rdr_service.services.gcp_utils import gcp_get_iam_service_key_info
class GoogleSheetsClient:
"""
Allows for interacting with a spreadsheet in google drive. This class is designed to be used as a context manager
and requires that:
- A service account (with a json keyfile) is authenticated
- The service account has the correct permissions to edit the google spreadsheet
Please carefully verify that this works for your purpose if you re-use this. There are some things that don't
currently work (such as formula manipulation and making new tabs).
"""
def __init__(self, spreadsheet_id, service_key_id, tab_offsets=None):
"""
:param spreadsheet_id: Google Drive id of the spreadsheet.
:param service_key_id: Key id for the service account used.
:type tab_offsets: Dictionary specifying tab names and offsets for them (defined in Google Sheet cell
notation such as B4). Giving a cell value will specify that any changes for that tab use that cell
as the origin. So with an origin of B4 an update to C5 would be given as row 1 and column 1.
Used to prevent updating headers in the target spreadsheet.
WARNING: Does not support columns past Z
"""
# Load credentials from service key file
self.service_key_id = service_key_id
self._spreadsheet_id = spreadsheet_id
self._default_tab_id = None
self._tabs = None
self._empty_cell_value = ''
self._tab_offsets = {tab_name: {
            'row': int(offset[1:]) - 1,  # convert the 1-based row number to a 0-based index
'col': ord(offset[:1].upper()) - ord('A'), # Get column number (A = 0, B = 1, ...)
'offset_str': offset
} for tab_name, offset in tab_offsets.items()} if tab_offsets else {}
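        # For example (illustrative values, not from the original code): an
        # offset string of 'B4' is parsed into
        # {'row': 3, 'col': 1, 'offset_str': 'B4'}, i.e. three rows down and
        # one column right of cell A1.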
def _build_service(self):
service_key_info = gcp_get_iam_service_key_info(self.service_key_id)
api_credentials = ServiceAccountCredentials.from_json_keyfile_name(service_key_info['key_path'])
# The Google API client uses sockets, and the requests can take longer than the default timeout.
# The proposed solution is to increase the default timeout manually
# https://github.com/googleapis/google-api-python-client/issues/632
# The socket seems to be created when calling discover.build, so this temporarily increases the timeout for
# new sockets when the Google service creates its socket.
default_socket_timeout = socket.getdefaulttimeout()
num_seconds_in_five_minutes = 300
socket.setdefaulttimeout(num_seconds_in_five_minutes)
# Set up for being able to interact with the sheet in Drive
sheets_api_service = discovery.build('sheets', 'v4', credentials=api_credentials)
# Set the timeout back for anything else in the code that would use sockets
socket.setdefaulttimeout(default_socket_timeout)
return sheets_api_service
def __enter__(self):
self.download_values()
return self
def __exit__(self, *_):
self.upload_values()
@classmethod
def _initialize_empty_tab(cls):
return []
def _get_offset_row_col(self, tab_id):
tab_offset_data = self._tab_offsets.get(tab_id, {
'row': 0,
'col': 0
})
return tab_offset_data['row'], tab_offset_data['col']
def _get_offset_string(self, tab_id):
tab_offset_data = self._tab_offsets.get(tab_id, {
'offset_str': 'A1'
})
return tab_offset_data['offset_str']
@backoff.on_exception(backoff.constant, HttpError, max_tries=4, jitter=None, interval=30)
def download_values(self):
"""
Retrieve the values as they currently are in google drive.
Note: this will overwrite any changes that have been made this instance of the document using `update_cell`.
:return: None
"""
self._tabs = {}
# API call documented at https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/get
request = self._build_service().spreadsheets().get(spreadsheetId=self._spreadsheet_id, includeGridData=True)
response = request.execute()
# Parse the retrieved spreadsheet
tab_data = response['sheets']
for tab in tab_data:
tab_id = tab['properties'].get('title')
# Set the default tab to the first tab
if self._default_tab_id is None:
self._default_tab_id = tab_id
# Initialize the internal tab structure and parse the values from the response
self._tabs[tab_id] = self._initialize_empty_tab()
tab_grid_data = tab['data'][0].get('rowData', [])
for row_number, row_data in enumerate(tab_grid_data):
row_values = row_data.get('values')
if row_values:
for col_number, cell_data in enumerate(row_values):
row_offset, col_offset = self._get_offset_row_col(tab_id)
if row_number >= row_offset and col_number >= col_offset:
cell_value = cell_data.get('formattedValue', self._empty_cell_value)
self.update_cell(row_number - row_offset, col_number - col_offset, cell_value, tab_id)
def set_current_tab(self, tab_id):
"""
Change the default tab. Used to make updating multiple fields on one tab cleaner
(so the tab id doesn't need to be given with the location for each cell value).
:param tab_id: Name of the tab to use as the default.
:return: None
"""
self._default_tab_id = tab_id
def update_cell(self, row: int, col: int, value: str, tab_id=None):
"""
Change the value of a cell.
Any changes made will be stored locally until the next call to `upload_values`
(or when the context ends).
:param row: row number of the cell, starting from 0 at the top of the spreadsheet
:param col: column number of the cell, starting from 0 at the left of the spreadsheet
:param value: value to store
:param tab_id: Name of the tab to modify. The default tab is used if this parameter isn't provided.
:return: None
"""
if not isinstance(col, int):
col = int(col)
values_grid = self._tabs.get(tab_id or self._default_tab_id)
# Increase the number of rows we have if the caller is setting a cell on a
# row farther out than what is initialized
while row >= len(values_grid):
values_grid.append([self._empty_cell_value])
row_for_update = values_grid[row]
# Increase the number of columns we have in the row if the caller is setting a
        # cell on a column farther out than what is initialized in the row
while col >= len(row_for_update):
row_for_update.append(self._empty_cell_value)
row_for_update[col] = value
def truncate_tab_at_row(self, row, tab_id=None):
"""
Clears all values from the sheet at and below the given row (setting their cells equal to an empty string).
:param row: Row to start clearing, starting from 0 at the top of the document
:param tab_id: Tab to clear values from, defaults to the current tab if not provided
"""
values_grid = self._tabs.get(tab_id or self._default_tab_id)
current_row = row
while current_row < len(values_grid): # Iterate through the rows
# Replace everything in the row with empty strings
values_grid[current_row] = [self._empty_cell_value] * len(values_grid[current_row])
current_row += 1
def insert_new_row_at(self, row_index, tab_id=None):
"""
Creates a new, empty row at the given row index. The current row at the given index will be moved down.
:param row_index: Index, counting from 0, for the new row
:param tab_id: Tab to add the new row to, defaults to the current tab if not provided
"""
values_grid = self._tabs.get(tab_id or self._default_tab_id)
values_grid.insert(row_index, [self._empty_cell_value])
# All the following rows will be moved down.
# Any row in front of a row that moves down will be uploaded to the document in the same position as the one
# that moved down. Any row in front of one that moves down needs to have as many cells as the one it's
# replacing, so that it will overwrite all the values left over from the row that it pushed down.
while row_index < len(values_grid) - 1: # The last row isn't replacing anything, so doesn't need to be checked
row_to_expand = values_grid[row_index]
number_of_cells_to_replace = len(values_grid[row_index + 1])
while number_of_cells_to_replace > len(row_to_expand):
row_to_expand.append(self._empty_cell_value)
row_index += 1
def remove_row_at(self, row_index, tab_id=None):
"""
Removes a row from the sheet.
:param row_index: Index, counting from 0, for the row to remove
:param tab_id: Tab to remove the row from, defaults to the current tab if not provided
"""
values_grid = self._tabs.get(tab_id or self._default_tab_id)
number_of_cells_replaced = len(values_grid[row_index])
del values_grid[row_index]
# Removing a row in the document means every row moves up, including the last one.
# So we need to insert a row at the end to overwrite the values left from when the original last row moves up.
# (The number of cells is expanded later in this method).
values_grid.append([self._empty_cell_value])
# All following rows will be moved up.
# Any rows after a row that moves up will be uploaded to the document in the same position as the one before it.
# If the following row doesn't have as many cells as the row it's replacing, then it wouldn't
# overwrite all the cells and some trailing values could be left over. All rows might need to have
# extra cells added so they will overwrite all the cells left from the row they're replacing.
while row_index < len(values_grid):
next_row = values_grid[row_index]
while number_of_cells_replaced > len(next_row):
next_row.append(self._empty_cell_value)
            # Get the number of cells in this row; the row after it will be taking its place in the document
number_of_cells_replaced = len(next_row)
row_index += 1
def get_row_at(self, row_index, tab_id=None):
"""
Retrieves the list of values at the given row. If the indexed row doesn't already exist, this method will
expand the grid until it does.
:param row_index: Index, counting from 0, for the row to retrieve
:param tab_id: Tab to read the row from, defaults to the current tab if not provided
:return: List of values that make up the given row
"""
values_grid = self._tabs.get(tab_id or self._default_tab_id)
while row_index >= len(values_grid):
values_grid.append([self._empty_cell_value])
return list(values_grid[row_index])
@backoff.on_exception(backoff.constant, HttpError, max_tries=4, jitter=None, interval=30)
def upload_values(self):
"""
Upload the local data to the google drive spreadsheet.
Note: any changes made to the target spreadsheet since the last call to `download_values` will be overwritten.
"""
request = self._build_service().spreadsheets().values().batchUpdate(
spreadsheetId=self._spreadsheet_id,
body={
'valueInputOption': 'RAW',
'data': [{
'range': f"'{tab_id}'!{self._get_offset_string(tab_id)}",
'values': tab_data
} for tab_id, tab_data in self._tabs.items()]
}
)
request.execute()
def get_tab_values(self, tab_id=None):
"""
Returns the values of the specified tab (or the current tab if no tab was specified).
Empty cells are represented by empty strings.
:param tab_id: Identifier of the tab to retrieve values from.
:return: A two dimensional list of strings that represent the cell values, organized by
rows (from the top down) and then columns (from left to right).
"""
if tab_id is None:
tab_id = self._default_tab_id
value_grid = self._tabs.get(tab_id)
return [[value for value in row] for row in value_grid]
| bsd-3-clause | -8,798,723,942,164,269,000 | 45.355872 | 120 | 0.63189 | false | 4.120848 | false | false | false |
w0rp/w0rpzone | blog/urls.py | 1 | 2639 | from django.conf.urls import re_path
from django.contrib.auth.decorators import login_required
from w0rplib.url import redir
from .feed import LatestArticleFeed
from .views import (
ArticleBanCommenterView,
ArticleDeleteCommentView,
ArticleDetailView,
ArticleEditPageView,
ArticleMonthArchiveView,
ArticlePageView,
ArticleUnbanCommenterView,
DeleteArticleView,
EditArticleView,
NewArticleView,
article_bounce_view,
upload_file_view,
)
urlpatterns = [
# Loading the main site gets you page 1.
re_path(
r"^$",
ArticlePageView.as_view(),
{"page": "1"},
name="blog-home",
),
# Redirect the first page back to the blog main page, for SEO.
redir(r"^page/0*1/$", "/blog"),
# Redirect appending "login" to the blog URL to the right login URL,
# which will redirect back to the blog.
redir(r"^login/$", "/login/?next=/blog"),
re_path(
r"^page/(?P<page>[\d]+)/$",
ArticlePageView.as_view(),
name="article-page"
),
re_path(
r"^delete/(?P<slug>[\w-]+)/$",
login_required(DeleteArticleView.as_view()),
name="delete-article"
),
re_path(
r"^edit-page/(?P<page>[\d]+)/$",
login_required(ArticleEditPageView.as_view()),
name="article-edit-list"
),
re_path(
r"^post/(?P<slug>[\w-]+)/$",
ArticleDetailView.as_view(),
name="article-detail"
),
re_path(
r"^post/(?P<slug>[\w-]+)/comment-bounce/$",
article_bounce_view,
name="article-comment-bounce"
),
re_path(
r"^post/(?P<slug>[\w-]+)/delete-comment/(?P<pk>\d+)/$",
ArticleDeleteCommentView.as_view(),
name="delete-comment"
),
re_path(
r"^post/(?P<slug>[\w-]+)/ban-comment/(?P<pk>\d+)/$",
ArticleBanCommenterView.as_view(),
name="ban-commenter"
),
re_path(
r"^post/(?P<slug>[\w-]+)/unban-comment/(?P<pk>\d+)/$",
ArticleUnbanCommenterView.as_view(),
name="unban-commenter"
),
re_path(
r"^date/(?P<year>\d{4})/(?P<month>1[0-2]|0[1-9])/$",
ArticleMonthArchiveView.as_view(month_format="%m"),
name="article-archive"
),
re_path(
r"^latest/feed/$",
LatestArticleFeed(),
name="article-feed"
),
re_path(
r"^new/$",
NewArticleView.as_view(),
name="new-article",
),
re_path(
r"^edit/(?P<slug>[\w-]+)/$",
EditArticleView.as_view(),
name="edit-article"
),
re_path(r"^upload/$", upload_file_view, name="upload-file"),
]
| bsd-2-clause | -3,337,851,615,868,272,000 | 26.489583 | 72 | 0.558924 | false | 3.418394 | false | false | false |
hbenarab/mt-iebkg | run_on_ollie_dataset.py | 1 | 8580 | __author__ = 'heni'
import datetime
import math
import numpy
import pickle
from utils.tools import get_accuracy
from ollie_comparison.utils.training_tools import create_word2ind,create_network,get_labeled_data
from utils.tools import shuffle
def run_on_ollie_dataset(iob_ollie_dataset_path,use_cross_validation):
settings = {'partial_training': 0.8,
'partial_testing': 0.2,
                'fold': 10,  # number of cross-validation folds
'lr': 0.05,
'verbose': 1,
'decay': False, # decay on the learning rate if improvement stops
'win': 7, # number of words in the context window
'bs': 9, # number of backprop through time steps
'nhidden': 100, # number of hidden units
'seed': 345,
'emb_dimension': 100, # dimension of word embedding
'nepochs': 50}
# iob_ollie_dataset_file=open(iob_ollie_dataset_path,'r')
indices=create_word2ind(iob_ollie_dataset_path)
words_index=indices['wordIndex']
labels_index=indices['labelIndex']
word2index = words_index.getCurrentIndex()
index2word = words_index.getIndex2Word()
label2index = labels_index.getCurrentIndex()
index2label = labels_index.getIndex2Word()
vocsize=len(word2index)
nclasses=len(label2index)
new_network_folder = datetime.datetime.now().strftime('%Y-%m-%d_%Hh%M')
rnn,model_folder=create_network(settings,nclasses,vocsize,new_network_folder)
print('RNN model created and saved under %s' % model_folder)
[labeled_data,labeled_data_size]=get_labeled_data(iob_ollie_dataset_path)
print('Labeled data size for articles: ',labeled_data_size)
sentences_list, labels_list = labeled_data.getData()
while [] in sentences_list:
print('Empty sentences were found. They will be removed')
empty=sentences_list.index([])
sentences_list.pop(empty)
labels_list.pop(empty)
assert len(sentences_list)==len(labels_list)
number_labeled_sentences = len(sentences_list)
print('The training phase of the RNN model on the Ollie dataset will begin now')
rnn=rnn.load(model_folder)
#########################################################
# training with consideration to parameters in settings #
#########################################################
if not use_cross_validation:
print('No cross-validation techniques will be used in this training process')
shuffle([sentences_list, labels_list], settings['seed'])
training_size = int(math.floor(settings['partial_training'] * number_labeled_sentences))
testing_size = int(math.floor(settings['partial_testing'] * number_labeled_sentences))
print('Training size: [0:{0}] = {0}'.format(training_size))
train_sentences = sentences_list[0:training_size]
train_labels = labels_list[0:training_size]
print('Testing size: [{0}:{1}] = {2}'.format(training_size, training_size + testing_size, testing_size))
test_sentences = sentences_list[training_size:training_size + testing_size]
test_labels = labels_list[training_size:training_size + testing_size]
else:
print('Cross validation will be used')
####################
# training process #
####################
# number_train_sentences = len(train_sentences)
# number_train_labels_toGuess = sum([len(x) for x in test_labels])
# print('Starting training with {0} labeled sentences in total for {1} epochs.'.
# format(number_train_sentences, settings['nepochs']))
best_accuracy = -numpy.inf
current_learning_rate = settings['lr']
best_epoch = 0
f1_of_best_acc=0
conf_mat_of_best_acc=None
for e in range(0, settings['nepochs']):
print('Epoch {0}'.format(e))
print('----------------------------------------------')
if use_cross_validation:
####################
# validation phase #
####################
print('Validation phase in process')
shuffle([sentences_list, labels_list], settings['seed'])
divide_in_folds=lambda lst,sz:[lst[i:i+sz] for i in range(0,len(lst),sz)]
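            # Sketch of what the helper above produces (illustrative values):
            # divide_in_folds([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]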
if len(sentences_list)%settings['fold']==0:
size_of_fold=math.floor(len(sentences_list)/settings['fold'])
else:
size_of_fold=(math.floor(len(sentences_list)/settings['fold']))+1
sentences_in_folds=divide_in_folds(sentences_list,size_of_fold)
labels_in_folds=divide_in_folds(labels_list,size_of_fold)
assert len(sentences_in_folds)==settings['fold']
assert len(sentences_in_folds)==len(labels_in_folds)
all_validation_accuracies=[]
for j in range(0,len(sentences_in_folds)):
ex_tr_sent=sentences_in_folds[:]
ex_tr_labels=labels_in_folds[:]
# val_sent=sentences_in_folds[j]
# val_labels=labels_in_folds[j]
# assert len(val_sent)==len(val_labels)
val_sent=ex_tr_sent.pop(j)
val_labels=ex_tr_labels.pop(j)
assert len(val_sent)==len(val_labels)
assert len(ex_tr_sent)==len(ex_tr_labels)
tr_sent=[]
tr_labels=[]
for c in range(0,len(ex_tr_sent)):
tr_sent.extend(ex_tr_sent[c])
tr_labels.extend(ex_tr_labels[c])
assert len(tr_sent)==len(tr_labels)
train_dict={'sentences':tr_sent,'labels':tr_labels}
validation_dict={'sentences':val_sent,'labels':val_labels}
print('Training the fold number %i will begin now' % (j+1))
[current_validation_accuracy,f1,conf_mat]=get_accuracy(rnn,train_dict,validation_dict,word2index,label2index,settings,
current_learning_rate,e,index2word,is_validation=True)
all_validation_accuracies.append(current_validation_accuracy)
assert len(all_validation_accuracies)==settings['fold']
mean_validation=sum(all_validation_accuracies)/len(all_validation_accuracies)
if mean_validation>best_accuracy:
best_accuracy=mean_validation
f1_of_best_acc=f1
conf_mat_of_best_acc=conf_mat
print('New best validation accuracy: %2.2f%%' % best_accuracy)
# rnn.save(model_folder)
print('A new RNN has been saved.')
else:
                print('Validation phase did not come up with a better accuracy (only %2.2f%%).'
                      ' A new epoch will begin' % mean_validation)
# rnn=rnn.load(model_folder)
#continue
##################
# Training phase #
##################
else:
shuffle([train_sentences, train_labels], settings['seed'])
print('Training in progress')
# rnn=rnn.load(model_folder)
# print('RNN saved during the validation phase has been loaded')
training_dict={'sentences':train_sentences,'labels':train_labels}
testing_dict={'sentences':test_sentences,'labels':test_labels}
[testing_accuracy,f1,conf_mat]=get_accuracy(rnn,training_dict,testing_dict,word2index,label2index,settings,
current_learning_rate,e,index2word,is_validation=False)
            print('Accuracy during the testing phase (number of correctly guessed labels) at %2.2f%%.' % testing_accuracy)
# check if current epoch is the best
if testing_accuracy> best_accuracy:
best_accuracy = testing_accuracy
best_epoch = e
f1_of_best_acc=f1
conf_mat_of_best_acc=conf_mat
rnn.save(model_folder)
print('Better testing accuracy !!')
else:
rnn=rnn.load(model_folder)
if abs(best_epoch-e)>=5:
current_learning_rate*=0.5
if current_learning_rate<1e-5: break
print('BEST RESULT: epoch ', best_epoch, 'with best accuracy: ', best_accuracy, '.',)
# iob_ollie_dataset_file.close()
pickle.dump([best_accuracy,f1_of_best_acc,conf_mat_of_best_acc],open('perf.pck','wb'))
# import sys
# sys.path.append('/home/heni/git/masterThesisKG/mt-iebkg')
run_on_ollie_dataset('data/ollie-scored.iob.txt',use_cross_validation=False) | mit | -4,446,617,290,510,862,000 | 44.163158 | 134 | 0.583217 | false | 3.864865 | true | false | false |
righetz/pyurl | pyurl.py | 1 | 4354 | #!/usr/bin/python3
"""Simple CUrl porting for Python3
"""
import urllib.request, re
import sys
import argparse
from urllib.parse import urlencode
import gettext
import locale
def main():
""""main method"""
language_set()
parser = argparse.ArgumentParser() #setting possible arguments
parser.add_argument('-o', metavar='output_file', help=_('Write output to file'))
parser.add_argument('-i', action='store_true', help=_('Include request headers'))
parser.add_argument('url', help=_('Define target URL'))
parser.add_argument('-d', metavar='DATA', help=_('Http POST data between quotation marks'))
parser.add_argument('-c', action='store_true', help=_('Show Http code'))
parser.add_argument('-a', metavar='user_agent', help=_('Set custom user agent'))
parser.add_argument('-k', action='store_true', help=_('headers only'))
check_args_and_exec(parser.parse_args())
def language_set():
"""read from UNIX or windows locale informations and set language"""
ita = gettext.translation('pyurl', localedir='locale', languages=['it'])
eng = gettext.translation('pyurl', localedir='locale', languages=['en'])
if locale.getlocale()[0] == 'it_IT' or locale.getlocale()[0] == 'ita':
ita.install()
else:
eng.install()
def check_args_and_exec(args):
"""arguments control and functions invoke"""
headers = ""
post_data = None
url = str(args.url)
if args.d is not None:
post_data = data_post_format(args.d)
if not re.match("http://", url):
url = "http://" + url
text = get_source(url, post_data, args.c, args.a)
if args.i or args.k:
if args.i and not args.k:
args.c = None
headers = get_headers(url, args.a, args.c)
if args.k is True:
text = ""
if args.o is not None:
save_to_file(text, args.o, headers)
else:
if headers:
print(headers)
print(text)
def connect(url, post_data, user_agent):
"""connection method"""
try:
if user_agent == None:
user_agent = "PyUrl V1.0"
req = urllib.request.Request(
url,
headers={"User-Agent" : user_agent
}
)
if post_data != None:
req.data = post_data.encode('utf-8')
src = urllib.request.urlopen(req)
except urllib.error.HTTPError as err:
sys.exit(err)
except urllib.error.URLError:
sys.exit(_("Could not resolve host %s\nCheck your connection") % url)
return src
def data_post_format(data_string):
"""format input data to be handled by urllib.request"""
data_list = data_string.split("&")
data_map = {}
for dato in data_list:
temp = dato.split("=")
try:
data_map[temp[0]] = temp[1] #check if user input is correct
except IndexError:
sys.exit(_("Specify every POST input as \"key=value\" "))
return urlencode(data_map)
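# For example (illustrative, not part of the original script):
# data_post_format("q=hello world") returns "q=hello+world", which connect()
# then attaches to the request body.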
def get_source(url, post_data, http_code, user_agent):
"""set connection to url and extract source"""
src = connect(url, post_data, user_agent)
charset = src.headers.get_param('charset')
if not charset:
charset = 'utf-8' # workaround for missing charset header data
content = []
if http_code:
content.append(_("Http code: %d\n\n ")% src.getcode())
while True:
line = src.readline()
if line:
content.append(line.decode(charset))
else:
src.close()
break
return "".join(content)
def get_headers(url, user_agent, http_code):
"""return URL headers"""
src = connect(url, None, user_agent)
if http_code:
return (_("Http code: %d\n\n ") % src.getcode()) + str(src.headers)
else:
return str(src.headers)
def save_to_file(text, outfile, headers):
"""write to file"""
try:
file_writer = open(outfile, 'w')
except FileNotFoundError:
sys.exit(_("Specified directory does not exists"))
except IsADirectoryError:
sys.exit(_("Target path is a directory, include file name"))
except IOError:
sys.exit(_("Input/Output error\nMaybe you don't have enough privileges?"))
if headers:
file_writer.write(headers)
file_writer.write(text)
file_writer.close()
if __name__ == "__main__":
main()
| gpl-2.0 | -7,379,587,351,273,032,000 | 32.492308 | 95 | 0.602205 | false | 3.705532 | false | false | false |
mstone/vscan | cpplint.py | 1 | 123523 | #!/usr/bin/python2.4
#
# Copyright (c) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Here are some issues that I've had people identify in my code during reviews,
# that I think are possible to flag automatically in a lint tool. If these were
# caught by lint, it would save time both for myself and for my reviewers.
# Most likely, some of these are beyond the scope of the current lint framework,
# but I think it is valuable to retain these wish-list items even if they cannot
# be immediately implemented.
#
# Suggestions
# -----------
# - Check for no 'explicit' for multi-arg ctor
# - Check for boolean assign RHS in parens
# - Check for ctor initializer-list colon position and spacing
# - Check that if there's a ctor, there should be a dtor
# - Check accessors that return non-pointer member variables are
# declared const
# - Check accessors that return non-const pointer member vars are
# *not* declared const
# - Check for using public includes for testing
# - Check for spaces between brackets in one-line inline method
# - Check for no assert()
# - Check for spaces surrounding operators
# - Check for 0 in pointer context (should be NULL)
# - Check for 0 in char context (should be '\0')
# - Check for camel-case method name conventions for methods
# that are not simple inline getters and setters
# - Check that base classes have virtual destructors
# put " // namespace" after } that closes a namespace, with
# namespace's name after 'namespace' if it is named.
# - Do not indent namespace contents
# - Avoid inlining non-trivial constructors in header files
# include base/basictypes.h if DISALLOW_EVIL_CONSTRUCTORS is used
# - Check for old-school (void) cast for call-sites of functions
# ignored return value
# - Check gUnit usage of anonymous namespace
# - Check for class declaration order (typedefs, consts, enums,
# ctor(s?), dtor, friend declarations, methods, member vars)
#
"""Does google-lint on c++ files.
The goal of this script is to identify places in the code that *may*
be in non-compliance with google style. It does not attempt to fix
up these problems -- the point is to educate. It also does not
attempt to find all problems, or to ensure that everything it does
find is legitimately a problem.
In particular, we can get very confused by /* and // inside strings!
We do a small hack, which is to ignore //'s with "'s after them on the
same line, but it is far from perfect (in either direction).
"""
import codecs
import getopt
import math # for log
import os
import re
import sre_compile
import string
import sys
import unicodedata
_USAGE = """
Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
[--counting=total|toplevel|detailed]
<file> [file] ...
The style guidelines this tries to follow are those in
http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
Every problem is given a confidence score from 1-5, with 5 meaning we are
certain of the problem, and 1 meaning it could be a legitimate construct.
This will miss some errors, and is not a substitute for a code review.
To suppress false-positive errors of a certain category, add a
'NOLINT(category)' comment to the line. NOLINT or NOLINT(*)
suppresses errors of all categories on that line.
The files passed in will be linted; at least one file must be provided.
Linted extensions are .cc, .cpp, and .h. Other file types will be ignored.
Flags:
output=vs7
By default, the output is formatted to ease emacs parsing. Visual Studio
compatible output (vs7) may also be used. Other formats are unsupported.
verbose=#
Specify a number 0-5 to restrict errors to certain verbosity levels.
filter=-x,+y,...
Specify a comma-separated list of category-filters to apply: only
error messages whose category names pass the filters will be printed.
(Category names are printed with the message and look like
"[whitespace/indent]".) Filters are evaluated left to right.
"-FOO" and "FOO" means "do not print categories that start with FOO".
"+FOO" means "do print categories that start with FOO".
Examples: --filter=-whitespace,+whitespace/braces
--filter=whitespace,runtime/printf,+runtime/printf_format
--filter=-,+build/include_what_you_use
To see a list of all the categories used in cpplint, pass no arg:
--filter=
counting=total|toplevel|detailed
The total number of errors found is always printed. If
'toplevel' is provided, then the count of errors in each of
the top-level categories like 'build' and 'whitespace' will
also be printed. If 'detailed' is provided, then a count
is provided for each category like 'build/class'.
"""
# We categorize each error message we print. Here are the categories.
# We want an explicit list so we can list them all in cpplint --filter=.
# If you add a new error message with a new category, add it to the list
# here! cpplint_unittest.py should tell you if you forget to do this.
# \ used for clearer layout -- pylint: disable-msg=C6013
_ERROR_CATEGORIES = [
'build/class',
'build/deprecated',
'build/endif_comment',
'build/forward_decl',
'build/header_guard',
'build/include',
'build/include_alpha',
'build/include_order',
'build/include_what_you_use',
'build/namespaces',
'build/printf_format',
'build/storage_class',
'legal/copyright',
'readability/braces',
'readability/casting',
'readability/check',
'readability/constructors',
'readability/fn_size',
'readability/function',
'readability/multiline_comment',
'readability/multiline_string',
'readability/nolint',
'readability/streams',
'readability/todo',
'readability/utf8',
'runtime/arrays',
'runtime/casting',
'runtime/explicit',
'runtime/int',
'runtime/init',
'runtime/invalid_increment',
'runtime/member_string_references',
'runtime/memset',
'runtime/operator',
'runtime/printf',
'runtime/printf_format',
'runtime/references',
'runtime/rtti',
'runtime/sizeof',
'runtime/string',
'runtime/threadsafe_fn',
'runtime/virtual',
'whitespace/blank_line',
'whitespace/braces',
'whitespace/comma',
'whitespace/comments',
'whitespace/end_of_line',
'whitespace/ending_newline',
'whitespace/indent',
'whitespace/labels',
'whitespace/line_length',
'whitespace/newline',
'whitespace/operators',
'whitespace/parens',
'whitespace/semicolon',
'whitespace/tab',
'whitespace/todo'
]
# The default state of the category filter. This is overrided by the --filter=
# flag. By default all errors are on, so only add here categories that should be
# off by default (i.e., categories that must be enabled by the --filter= flags).
# All entries here should start with a '-' or '+', as in the --filter= flag.
_DEFAULT_FILTERS = [ '-build/include_alpha' ]
# We used to check for high-bit characters, but after much discussion we
# decided those were OK, as long as they were in UTF-8 and didn't represent
# hard-coded international strings, which belong in a separate i18n file.
# Headers that we consider STL headers.
_STL_HEADERS = frozenset([
'algobase.h', 'algorithm', 'alloc.h', 'bitset', 'deque', 'exception',
'function.h', 'functional', 'hash_map', 'hash_map.h', 'hash_set',
'hash_set.h', 'iterator', 'list', 'list.h', 'map', 'memory', 'new',
'pair.h', 'pthread_alloc', 'queue', 'set', 'set.h', 'sstream', 'stack',
'stl_alloc.h', 'stl_relops.h', 'type_traits.h',
'utility', 'vector', 'vector.h',
])
# Non-STL C++ system headers.
_CPP_HEADERS = frozenset([
'algo.h', 'builtinbuf.h', 'bvector.h', 'cassert', 'cctype',
'cerrno', 'cfloat', 'ciso646', 'climits', 'clocale', 'cmath',
'complex', 'complex.h', 'csetjmp', 'csignal', 'cstdarg', 'cstddef',
'cstdio', 'cstdlib', 'cstring', 'ctime', 'cwchar', 'cwctype',
'defalloc.h', 'deque.h', 'editbuf.h', 'exception', 'fstream',
'fstream.h', 'hashtable.h', 'heap.h', 'indstream.h', 'iomanip',
'iomanip.h', 'ios', 'iosfwd', 'iostream', 'iostream.h', 'istream.h',
'iterator.h', 'limits', 'map.h', 'multimap.h', 'multiset.h',
'numeric', 'ostream.h', 'parsestream.h', 'pfstream.h', 'PlotFile.h',
'procbuf.h', 'pthread_alloc.h', 'rope', 'rope.h', 'ropeimpl.h',
'SFile.h', 'slist', 'slist.h', 'stack.h', 'stdexcept',
'stdiostream.h', 'streambuf.h', 'stream.h', 'strfile.h', 'string',
'strstream', 'strstream.h', 'tempbuf.h', 'tree.h', 'typeinfo', 'valarray',
])
# Assertion macros. These are defined in base/logging.h and
# testing/base/gunit.h. Note that the _M versions need to come first
# for substring matching to work.
_CHECK_MACROS = [
'DCHECK', 'CHECK',
'EXPECT_TRUE_M', 'EXPECT_TRUE',
'ASSERT_TRUE_M', 'ASSERT_TRUE',
'EXPECT_FALSE_M', 'EXPECT_FALSE',
'ASSERT_FALSE_M', 'ASSERT_FALSE',
]
# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
('>=', 'GE'), ('>', 'GT'),
('<=', 'LE'), ('<', 'LT')]:
_CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
_CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
_CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
_CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
_CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_%s_M' % replacement
_CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_%s_M' % replacement
for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
('>=', 'LT'), ('>', 'LE'),
('<=', 'GT'), ('<', 'GE')]:
_CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
_CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
_CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_%s_M' % inv_replacement
_CHECK_REPLACEMENT['ASSERT_FALSE_M'][op] = 'ASSERT_%s_M' % inv_replacement
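# For example (illustrative): _CHECK_REPLACEMENT['CHECK']['=='] is 'CHECK_EQ',
# while _CHECK_REPLACEMENT['EXPECT_FALSE']['=='] is 'EXPECT_NE', since a
# negated check maps to the inverse comparison macro.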
# These constants define types of headers for use with
# _IncludeState.CheckNextIncludeOrder().
_C_SYS_HEADER = 1
_CPP_SYS_HEADER = 2
_LIKELY_MY_HEADER = 3
_POSSIBLE_MY_HEADER = 4
_OTHER_HEADER = 5
_regexp_compile_cache = {}
# Finds occurrences of NOLINT or NOLINT(...).
_RE_SUPPRESSION = re.compile(r'\bNOLINT\b(\([^)]*\))?')
# {str, set(int)}: a map from error categories to sets of linenumbers
# on which those errors are expected and should be suppressed.
_error_suppressions = {}
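# For example (illustrative): a C++ source line such as
#   long counter;  // NOLINT(runtime/int)
# records that line number under _error_suppressions['runtime/int'], so the
# corresponding warning is not emitted for it.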
def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
# FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
m = _RE_SUPPRESSION.search(raw_line)
if m:
category = m.group(1)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(linenum)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(linenum)
else:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category)
def ResetNolintSuppressions():
"Resets the set of NOLINT suppressions to empty."
_error_suppressions.clear()
def IsErrorSuppressedByNolint(category, linenum):
"""Returns true if the specified error category is suppressed on this line.
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error should be suppressed due to a NOLINT comment.
"""
return (linenum in _error_suppressions.get(category, set()) or
linenum in _error_suppressions.get(None, set()))
def Match(pattern, s):
"""Matches the string with the pattern, caching the compiled regexp."""
# The regexp compilation caching is inlined in both Match and Search for
# performance reasons; factoring it out into a separate function turns out
# to be noticeably expensive.
if not pattern in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].match(s)
def Search(pattern, s):
"""Searches the string for the pattern, caching the compiled regexp."""
if not pattern in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].search(s)
class _IncludeState(dict):
"""Tracks line numbers for includes, and the order in which includes appear.
As a dict, an _IncludeState object serves as a mapping between include
filename and line number on which that file was included.
Call CheckNextIncludeOrder() once for each header in the file, passing
in the type constants defined above. Calls in an illegal order will
raise an _IncludeError with an appropriate error message.
"""
# self._section will move monotonically through this set. If it ever
# needs to move backwards, CheckNextIncludeOrder will raise an error.
_INITIAL_SECTION = 0
_MY_H_SECTION = 1
_C_SECTION = 2
_CPP_SECTION = 3
_OTHER_H_SECTION = 4
_TYPE_NAMES = {
_C_SYS_HEADER: 'C system header',
_CPP_SYS_HEADER: 'C++ system header',
_LIKELY_MY_HEADER: 'header this file implements',
_POSSIBLE_MY_HEADER: 'header this file may implement',
_OTHER_HEADER: 'other header',
}
_SECTION_NAMES = {
_INITIAL_SECTION: "... nothing. (This can't be an error.)",
_MY_H_SECTION: 'a header this file implements',
_C_SECTION: 'C system header',
_CPP_SECTION: 'C++ system header',
_OTHER_H_SECTION: 'other header',
}
def __init__(self):
dict.__init__(self)
# The name of the current section.
self._section = self._INITIAL_SECTION
# The path of last found header.
self._last_header = ''
def CanonicalizeAlphabeticalOrder(self, header_path):
"""Returns a path canonicalized for alphabetical comparisson.
- replaces "-" with "_" so they both cmp the same.
- removes '-inl' since we don't require them to be after the main header.
- lowercase everything, just in case.
Args:
header_path: Path to be canonicalized.
Returns:
Canonicalized path.
"""
return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
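  # For example (illustrative): 'Foo-Bar-inl.h' canonicalizes to 'foo_bar.h',
  # so an -inl header sorts together with the header it corresponds to.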
def IsInAlphabeticalOrder(self, header_path):
"""Check if a header is in alphabetical order with the previous header.
Args:
header_path: Header to be checked.
Returns:
Returns true if the header is in alphabetical order.
"""
canonical_header = self.CanonicalizeAlphabeticalOrder(header_path)
if self._last_header > canonical_header:
return False
self._last_header = canonical_header
return True
def CheckNextIncludeOrder(self, header_type):
"""Returns a non-empty error message if the next header is out of order.
This function also updates the internal state to be ready to check
the next include.
Args:
header_type: One of the _XXX_HEADER constants defined above.
Returns:
The empty string if the header is in the right order, or an
error message describing what's wrong.
"""
error_message = ('Found %s after %s' %
(self._TYPE_NAMES[header_type],
self._SECTION_NAMES[self._section]))
last_section = self._section
if header_type == _C_SYS_HEADER:
if self._section <= self._C_SECTION:
self._section = self._C_SECTION
else:
self._last_header = ''
return error_message
elif header_type == _CPP_SYS_HEADER:
if self._section <= self._CPP_SECTION:
self._section = self._CPP_SECTION
else:
self._last_header = ''
return error_message
elif header_type == _LIKELY_MY_HEADER:
if self._section <= self._MY_H_SECTION:
self._section = self._MY_H_SECTION
else:
self._section = self._OTHER_H_SECTION
elif header_type == _POSSIBLE_MY_HEADER:
if self._section <= self._MY_H_SECTION:
self._section = self._MY_H_SECTION
else:
# This will always be the fallback because we're not sure
# enough that the header is associated with this file.
self._section = self._OTHER_H_SECTION
else:
assert header_type == _OTHER_HEADER
self._section = self._OTHER_H_SECTION
if last_section != self._section:
self._last_header = ''
return ''
class _CppLintState(object):
"""Maintains module-wide state.."""
def __init__(self):
self.verbose_level = 1 # global setting.
self.error_count = 0 # global count of reported errors
# filters to apply when emitting error messages
self.filters = _DEFAULT_FILTERS[:]
self.counting = 'total' # In what way are we counting errors?
self.errors_by_category = {} # string to int dict storing error counts
# output format:
# "emacs" - format that emacs can parse (default)
# "vs7" - format that Microsoft Visual Studio 7 can parse
self.output_format = 'emacs'
def SetOutputFormat(self, output_format):
"""Sets the output format for errors."""
self.output_format = output_format
def SetVerboseLevel(self, level):
"""Sets the module's verbosity, and returns the previous setting."""
last_verbose_level = self.verbose_level
self.verbose_level = level
return last_verbose_level
def SetCountingStyle(self, counting_style):
"""Sets the module's counting options."""
self.counting = counting_style
def SetFilters(self, filters):
"""Sets the error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "+whitespace/indent").
Each filter should start with + or -; else we die.
Raises:
ValueError: The comma-separated filters did not all start with '+' or '-'.
E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
"""
# Default filters always have less priority than the flag ones.
self.filters = _DEFAULT_FILTERS[:]
for filt in filters.split(','):
clean_filt = filt.strip()
if clean_filt:
self.filters.append(clean_filt)
for filt in self.filters:
if not (filt.startswith('+') or filt.startswith('-')):
raise ValueError('Every filter in --filters must start with + or -'
' (%s does not)' % filt)
def ResetErrorCounts(self):
"""Sets the module's error statistic back to zero."""
self.error_count = 0
self.errors_by_category = {}
def IncrementErrorCount(self, category):
"""Bumps the module's error statistic."""
self.error_count += 1
if self.counting in ('toplevel', 'detailed'):
if self.counting != 'detailed':
category = category.split('/')[0]
if category not in self.errors_by_category:
self.errors_by_category[category] = 0
self.errors_by_category[category] += 1
def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
for category, count in self.errors_by_category.iteritems():
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count)
_cpplint_state = _CppLintState()
def _OutputFormat():
"""Gets the module's output format."""
return _cpplint_state.output_format
def _SetOutputFormat(output_format):
"""Sets the module's output format."""
_cpplint_state.SetOutputFormat(output_format)
def _VerboseLevel():
"""Returns the module's verbosity setting."""
return _cpplint_state.verbose_level
def _SetVerboseLevel(level):
"""Sets the module's verbosity, and returns the previous setting."""
return _cpplint_state.SetVerboseLevel(level)
def _SetCountingStyle(level):
"""Sets the module's counting options."""
_cpplint_state.SetCountingStyle(level)
def _Filters():
"""Returns the module's list of output filters, as a list."""
return _cpplint_state.filters
def _SetFilters(filters):
"""Sets the module's error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "whitespace/indent").
Each filter should start with + or -; else we die.
"""
_cpplint_state.SetFilters(filters)
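# Illustrative example (hedged, assuming the default filters do not already
# match the categories involved): after _SetFilters('-whitespace,+whitespace/braces')
# a 'whitespace/tab' error is suppressed while 'whitespace/braces' is still
# reported, because _ShouldPrintError applies the filters in order and a later
# '+' filter overrides an earlier '-' filter for the categories it matches.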
class _FunctionState(object):
"""Tracks current function name and the number of lines in its body."""
_NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc.
_TEST_TRIGGER = 400 # about 50% more than _NORMAL_TRIGGER.
def __init__(self):
self.in_a_function = False
self.lines_in_function = 0
self.current_function = ''
def Begin(self, function_name):
"""Start analyzing function body.
Args:
function_name: The name of the function being tracked.
"""
self.in_a_function = True
self.lines_in_function = 0
self.current_function = function_name
def Count(self):
"""Count line in current function body."""
if self.in_a_function:
self.lines_in_function += 1
def Check(self, error, filename, linenum):
"""Report if too many lines in function body.
Args:
error: The function to call with any errors found.
filename: The name of the current file.
linenum: The number of the line to check.
"""
if Match(r'T(EST|est)', self.current_function):
base_trigger = self._TEST_TRIGGER
else:
base_trigger = self._NORMAL_TRIGGER
trigger = base_trigger * 2**_VerboseLevel()
if self.lines_in_function > trigger:
error_level = int(math.log(self.lines_in_function / base_trigger, 2))
# e.g. with base_trigger 250: 500 => 1, 1000 => 2, 2000 => 3, 4000 => 4, ...
if error_level > 5:
error_level = 5
error(filename, linenum, 'readability/fn_size', error_level,
'Small and focused functions are preferred:'
' %s has %d non-comment lines'
' (error triggered by exceeding %d lines).' % (
self.current_function, self.lines_in_function, trigger))
def End(self):
"""Stop analizing function body."""
self.in_a_function = False
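# Illustrative example: at the default verbosity a non-test function is flagged
# once its body exceeds _NORMAL_TRIGGER (250) non-comment lines; every extra
# verbosity level doubles that trigger (500, 1000, ...), and the reported error
# level grows roughly as log2(lines / 250), capped at 5.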
class _IncludeError(Exception):
"""Indicates a problem with the include order in a file."""
pass
class FileInfo:
"""Provides utility functions for filenames.
FileInfo provides easy access to the components of a file's path
relative to the project root.
"""
def __init__(self, filename):
self._filename = filename
def FullName(self):
"""Make Windows paths like Unix."""
return os.path.abspath(self._filename).replace('\\', '/')
def RepositoryName(self):
"""FullName after removing the local path to the repository.
If we have a real absolute path name here we can try to do something smart:
detecting the root of the checkout and truncating /path/to/checkout from
the name so that we get header guards that don't include things like
"C:\Documents and Settings\..." or "/home/username/..." in them and thus
people on different computers who have checked the source out to different
locations won't see bogus errors.
"""
fullname = self.FullName()
if os.path.exists(fullname):
project_dir = os.path.dirname(fullname)
if os.path.exists(os.path.join(project_dir, ".svn")):
# If there's a .svn file in the current directory, we recursively look
# up the directory tree for the top of the SVN checkout
root_dir = project_dir
one_up_dir = os.path.dirname(root_dir)
while os.path.exists(os.path.join(one_up_dir, ".svn")):
root_dir = os.path.dirname(root_dir)
one_up_dir = os.path.dirname(one_up_dir)
prefix = os.path.commonprefix([root_dir, project_dir])
return fullname[len(prefix) + 1:]
# Not SVN? Try to find a git or hg top level directory by searching up
# from the current path.
root_dir = os.path.dirname(fullname)
while (root_dir != os.path.dirname(root_dir) and
not os.path.exists(os.path.join(root_dir, ".git")) and
not os.path.exists(os.path.join(root_dir, ".hg"))):
root_dir = os.path.dirname(root_dir)
if (os.path.exists(os.path.join(root_dir, ".git")) or
os.path.exists(os.path.join(root_dir, ".hg"))):
prefix = os.path.commonprefix([root_dir, project_dir])
return fullname[len(prefix) + 1:]
# Don't know what to do; header guard warnings may be wrong...
return fullname
def Split(self):
"""Splits the file into the directory, basename, and extension.
For 'chrome/browser/browser.cc', Split() would
return ('chrome/browser', 'browser', '.cc')
Returns:
A tuple of (directory, basename, extension).
"""
googlename = self.RepositoryName()
project, rest = os.path.split(googlename)
return (project,) + os.path.splitext(rest)
def BaseName(self):
"""File base name - text after the final slash, before the final period."""
return self.Split()[1]
def Extension(self):
"""File extension - text following the final period."""
return self.Split()[2]
def NoExtension(self):
"""File has no source file extension."""
return '/'.join(self.Split()[0:2])
def IsSource(self):
"""File has a source file extension."""
return self.Extension()[1:] in ('c', 'cc', 'cpp', 'cxx')
def _ShouldPrintError(category, confidence, linenum):
"""Returns true iff confidence >= verbose, category passes
filter and is not NOLINT-suppressed."""
# There are three ways we might decide not to print an error message:
# a "NOLINT(category)" comment appears in the source,
# the verbosity level isn't high enough, or the filters filter it out.
if IsErrorSuppressedByNolint(category, linenum):
return False
if confidence < _cpplint_state.verbose_level:
return False
is_filtered = False
for one_filter in _Filters():
if one_filter.startswith('-'):
if category.startswith(one_filter[1:]):
is_filtered = True
elif one_filter.startswith('+'):
if category.startswith(one_filter[1:]):
is_filtered = False
else:
assert False # should have been checked for in SetFilter.
if is_filtered:
return False
return True
def Error(filename, linenum, category, confidence, message):
"""Logs the fact we've found a lint error.
We log where the error was found, and also our confidence in the error,
that is, how certain we are this is a legitimate style regression, and
not a misidentification or a use that's sometimes justified.
False positives can be suppressed by the use of
"cpplint(category)" comments on the offending line. These are
parsed into _error_suppressions.
Args:
filename: The name of the file containing the error.
linenum: The number of the line containing the error.
category: A string used to describe the "category" this bug
falls under: "whitespace", say, or "runtime". Categories
may have a hierarchy separated by slashes: "whitespace/indent".
confidence: A number from 1-5 representing a confidence score for
the error, with 5 meaning that we are certain of the problem,
and 1 meaning that it could be a legitimate construct.
message: The error message.
"""
if _ShouldPrintError(category, confidence, linenum):
_cpplint_state.IncrementErrorCount(category)
if _cpplint_state.output_format == 'vs7':
sys.stderr.write('%s(%s): %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
else:
sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
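# Illustrative example of the two output formats above, for a hypothetical
# error in foo.cc at line 42 with category 'whitespace/braces' and confidence 5:
# emacs: foo.cc:42: Missing space before { [whitespace/braces] [5]
# vs7: foo.cc(42): Missing space before { [whitespace/braces] [5]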
# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
# Matches strings. Escape codes should already be removed by ESCAPES.
_RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES = re.compile(r'"[^"]*"')
# Matches characters. Escape codes should already be removed by ESCAPES.
_RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES = re.compile(r"'.'")
# Matches multi-line C++ comments.
# This RE is a little bit more complicated than one might expect, because we
# have to take care of removing spaces so we can handle comments inside
# statements better.
# The current rule is: We only clear spaces from both sides when we're at the
# end of the line. Otherwise, we try to remove spaces from the right side,
# if this doesn't work we try on left side but only if there's a non-character
# on the right.
_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
r"""(\s*/\*.*\*/\s*$|
/\*.*\*/\s+|
\s+/\*.*\*/(?=\W)|
/\*.*\*/)""", re.VERBOSE)
def IsCppString(line):
"""Does line terminate so, that the next symbol is in string constant.
This function does not consider single-line nor multi-line comments.
Args:
line: A partial line of code, from the start of the line up to some position n.
Returns:
True, if next character appended to 'line' is inside a
string constant.
"""
line = line.replace(r'\\', 'XX') # after this, \\" does not match to \"
return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
def FindNextMultiLineCommentStart(lines, lineix):
"""Find the beginning marker for a multiline comment."""
while lineix < len(lines):
if lines[lineix].strip().startswith('/*'):
# Only return this marker if the comment goes beyond this line
if lines[lineix].strip().find('*/', 2) < 0:
return lineix
lineix += 1
return len(lines)
def FindNextMultiLineCommentEnd(lines, lineix):
"""We are inside a comment, find the end marker."""
while lineix < len(lines):
if lines[lineix].strip().endswith('*/'):
return lineix
lineix += 1
return len(lines)
def RemoveMultiLineCommentsFromRange(lines, begin, end):
"""Clears a range of lines for multi-line comments."""
# Having // dummy comments makes the lines non-empty, so we will not get
# unnecessary blank line warnings later in the code.
for i in range(begin, end):
lines[i] = '// dummy'
def RemoveMultiLineComments(filename, lines, error):
"""Removes multiline (c-style) comments from lines."""
lineix = 0
while lineix < len(lines):
lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
if lineix_begin >= len(lines):
return
lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
if lineix_end >= len(lines):
error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
'Could not find end of multi-line comment')
return
RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
lineix = lineix_end + 1
def CleanseComments(line):
"""Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed.
"""
commentpos = line.find('//')
if commentpos != -1 and not IsCppString(line[:commentpos]):
line = line[:commentpos]
# get rid of /* ... */
return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
class CleansedLines(object):
"""Holds 3 copies of all lines with different preprocessing applied to them.
1) elided member contains lines without strings and comments,
2) lines member contains lines without comments, and
3) raw member contains all the lines without processing.
All these three members are of <type 'list'>, and of the same length.
"""
def __init__(self, lines):
self.elided = []
self.lines = []
self.raw_lines = lines
self.num_lines = len(lines)
for linenum in range(len(lines)):
self.lines.append(CleanseComments(lines[linenum]))
elided = self._CollapseStrings(lines[linenum])
self.elided.append(CleanseComments(elided))
def NumLines(self):
"""Returns the number of lines represented."""
return self.num_lines
@staticmethod
def _CollapseStrings(elided):
"""Collapses strings and chars on a line to simple "" or '' blocks.
We nix strings first so we're not fooled by text like '"http://"'
Args:
elided: The line being processed.
Returns:
The line with collapsed strings.
"""
if not _RE_PATTERN_INCLUDE.match(elided):
# Remove escaped characters first to make quote/single quote collapsing
# basic. Things that look like escaped characters shouldn't occur
# outside of strings and chars.
elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
elided = _RE_PATTERN_CLEANSE_LINE_SINGLE_QUOTES.sub("''", elided)
elided = _RE_PATTERN_CLEANSE_LINE_DOUBLE_QUOTES.sub('""', elided)
return elided
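# Illustrative example of the three views kept by CleansedLines for the source
# line: const char* s = "a;b"; // note
# raw_lines keeps the line verbatim, lines drops the trailing // comment, and
# elided additionally collapses the string literal, yielding: const char* s = "";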
def CloseExpression(clean_lines, linenum, pos):
"""If input points to ( or { or [, finds the position that closes it.
If lines[linenum][pos] points to a '(' or '{' or '[', finds the
linenum/pos that correspond to the closing of the expression.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
pos: A position on the line.
Returns:
A tuple (line, linenum, pos) pointer *past* the closing brace, or
(line, len(lines), -1) if we never find a close. Note we ignore
strings and comments when matching; and the line we return is the
'cleansed' line at linenum.
"""
line = clean_lines.elided[linenum]
startchar = line[pos]
if startchar not in '({[':
return (line, clean_lines.NumLines(), -1)
if startchar == '(': endchar = ')'
if startchar == '[': endchar = ']'
if startchar == '{': endchar = '}'
num_open = line.count(startchar) - line.count(endchar)
while linenum < clean_lines.NumLines() and num_open > 0:
linenum += 1
line = clean_lines.elided[linenum]
num_open += line.count(startchar) - line.count(endchar)
# OK, now find the endchar that actually got us back to even
endpos = len(line)
while num_open >= 0:
endpos = line.rfind(endchar, 0, endpos)
num_open -= 1 # chopped off another closing character
return (line, linenum, endpos + 1)
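# Illustrative example: if the elided line is 'foo(a, (b + c), d);' and pos
# points at the first '(', CloseExpression returns a position just past the
# matching ')' (here, the index of the trailing ';').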
def CheckForCopyright(filename, lines, error):
"""Logs an error if no Copyright message appears at the top of the file."""
# We'll say it should occur by line 10. Don't forget there's a
# dummy line at the front.
for line in xrange(1, min(len(lines), 11)):
if re.search(r'Copyright', lines[line], re.I): break
else: # means no copyright line was found
error(filename, 0, 'legal/copyright', 5,
'No copyright message found. '
'You should have a line: "Copyright [year] <Copyright Owner>"')
def GetHeaderGuardCPPVariable(filename):
"""Returns the CPP variable that should be used as a header guard.
Args:
filename: The name of a C++ header file.
Returns:
The CPP variable that should be used as a header guard in the
named file.
"""
# Restores original filename in case that cpplint is invoked from Emacs's
# flymake.
filename = re.sub(r'_flymake\.h$', '.h', filename)
fileinfo = FileInfo(filename)
return re.sub(r'[-./\s]', '_', fileinfo.RepositoryName()).upper() + '_'
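# Illustrative example: for a header whose RepositoryName() is
# 'chrome/browser/ui/browser.h' (a hypothetical path), the expected guard
# variable is CHROME_BROWSER_UI_BROWSER_H_.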
def CheckForHeaderGuard(filename, lines, error):
"""Checks that the file contains a header guard.
Logs an error if no #ifndef header guard is present. For other
headers, checks that the full pathname is used.
Args:
filename: The name of the C++ header file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found.
"""
cppvar = GetHeaderGuardCPPVariable(filename)
ifndef = None
ifndef_linenum = 0
define = None
endif = None
endif_linenum = 0
for linenum, line in enumerate(lines):
linesplit = line.split()
if len(linesplit) >= 2:
# find the first occurrence of #ifndef and #define, save arg
if not ifndef and linesplit[0] == '#ifndef':
# set ifndef to the header guard presented on the #ifndef line.
ifndef = linesplit[1]
ifndef_linenum = linenum
if not define and linesplit[0] == '#define':
define = linesplit[1]
# find the last occurrence of #endif, save entire line
if line.startswith('#endif'):
endif = line
endif_linenum = linenum
if not ifndef or not define or ifndef != define:
error(filename, 0, 'build/header_guard', 5,
'No #ifndef header guard found, suggested CPP variable is: %s' %
cppvar)
return
# The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
# for backward compatibility.
if ifndef != cppvar:
error_level = 0
if ifndef != cppvar + '_':
error_level = 5
ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum,
error)
error(filename, ifndef_linenum, 'build/header_guard', error_level,
'#ifndef header guard has wrong style, please use: %s' % cppvar)
if endif != ('#endif // %s' % cppvar):
error_level = 0
if endif != ('#endif // %s' % (cppvar + '_')):
error_level = 5
ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum,
error)
error(filename, endif_linenum, 'build/header_guard', error_level,
'#endif line should be "#endif // %s"' % cppvar)
def CheckForUnicodeReplacementCharacters(filename, lines, error):
"""Logs an error for each line containing Unicode replacement characters.
These indicate that either the file contained invalid UTF-8 (likely)
or Unicode replacement characters (which it shouldn't). Note that
it's possible for this to throw off line numbering if the invalid
UTF-8 occurred adjacent to a newline.
Args:
filename: The name of the current file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found.
"""
for linenum, line in enumerate(lines):
if u'\ufffd' in line:
error(filename, linenum, 'readability/utf8', 5,
'Line contains invalid UTF-8 (or Unicode replacement character).')
def CheckForNewlineAtEOF(filename, lines, error):
"""Logs an error if there is no newline char at the end of the file.
Args:
filename: The name of the current file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found.
"""
# The array lines() was created by adding two newlines to the
# original file (go figure), then splitting on \n.
# To verify that the file ends in \n, we just have to make sure the
# last-but-two element of lines() exists and is empty.
if len(lines) < 3 or lines[-2]:
error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
'Could not find a newline character at the end of the file.')
def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
"""Logs an error if we see /* ... */ or "..." that extend past one line.
/* ... */ comments are legit inside macros, for one line.
Otherwise, we prefer // comments, so it's ok to warn about the
other. Likewise, it's ok for strings to extend across multiple
lines, as long as a line continuation character (backslash)
terminates each line. Although not currently prohibited by the C++
style guide, it's ugly and unnecessary. We don't do well with either
in this lint program, so we warn about both.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Remove all \\ (escaped backslashes) from the line. They are OK, and the
# second (escaped) slash may trigger later \" detection erroneously.
line = line.replace('\\\\', '')
if line.count('/*') > line.count('*/'):
error(filename, linenum, 'readability/multiline_comment', 5,
'Complex multi-line /*...*/-style comment found. '
'Lint may give bogus warnings. '
'Consider replacing these with //-style comments, '
'with #if 0...#endif, '
'or with more clearly structured multi-line comments.')
if (line.count('"') - line.count('\\"')) % 2:
error(filename, linenum, 'readability/multiline_string', 5,
'Multi-line string ("...") found. This lint script doesn\'t '
'do well with such strings, and may give bogus warnings. They\'re '
'ugly and unnecessary, and you should use concatenation instead.')
threading_list = (
('asctime(', 'asctime_r('),
('ctime(', 'ctime_r('),
('getgrgid(', 'getgrgid_r('),
('getgrnam(', 'getgrnam_r('),
('getlogin(', 'getlogin_r('),
('getpwnam(', 'getpwnam_r('),
('getpwuid(', 'getpwuid_r('),
('gmtime(', 'gmtime_r('),
('localtime(', 'localtime_r('),
('rand(', 'rand_r('),
('readdir(', 'readdir_r('),
('strtok(', 'strtok_r('),
('ttyname(', 'ttyname_r('),
)
def CheckPosixThreading(filename, clean_lines, linenum, error):
"""Checks for calls to thread-unsafe functions.
Much code has been originally written without consideration of
multi-threading. Also, engineers are relying on their old experience;
they have learned posix before threading extensions were added. These
tests guide the engineers to use thread-safe functions (when using
posix directly).
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
for single_thread_function, multithread_safe_function in threading_list:
ix = line.find(single_thread_function)
# Comparisons made explicit for clarity -- pylint: disable-msg=C6403
if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and
line[ix - 1] not in ('_', '.', '>'))):
error(filename, linenum, 'runtime/threadsafe_fn', 2,
'Consider using ' + multithread_safe_function +
'...) instead of ' + single_thread_function +
'...) for improved thread safety.')
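# Illustrative example: a line such as 'int r = rand();' is reported under
# 'runtime/threadsafe_fn' with the suggestion to use rand_r(...) instead, while
# 'my_rand();' is not flagged because the character before 'rand(' belongs to
# an identifier.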
# Matches invalid increment: *count++, which moves pointer instead of
# incrementing a value.
_RE_PATTERN_INVALID_INCREMENT = re.compile(
r'^\s*\*\w+(\+\+|--);')
def CheckInvalidIncrement(filename, clean_lines, linenum, error):
"""Checks for invalid increment *count++.
For example following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
if _RE_PATTERN_INVALID_INCREMENT.match(line):
error(filename, linenum, 'runtime/invalid_increment', 5,
'Changing pointer instead of value (or unused value of operator*).')
class _ClassInfo(object):
"""Stores information about a class."""
def __init__(self, name, linenum):
self.name = name
self.linenum = linenum
self.seen_open_brace = False
self.is_derived = False
self.virtual_method_linenumber = None
self.has_virtual_destructor = False
self.brace_depth = 0
class _ClassState(object):
"""Holds the current state of the parse relating to class declarations.
It maintains a stack of _ClassInfos representing the parser's guess
as to the current nesting of class declarations. The innermost class
is at the top (back) of the stack. Typically, the stack will either
be empty or have exactly one entry.
"""
def __init__(self):
self.classinfo_stack = []
def CheckFinished(self, filename, error):
"""Checks that all classes have been completely parsed.
Call this when all lines in a file have been processed.
Args:
filename: The name of the current file.
error: The function to call with any errors found.
"""
if self.classinfo_stack:
# Note: This test can result in false positives if #ifdef constructs
# get in the way of brace matching. See the testBuildClass test in
# cpplint_unittest.py for an example of this.
error(filename, self.classinfo_stack[0].linenum, 'build/class', 5,
'Failed to find complete declaration of class %s' %
self.classinfo_stack[0].name)
def CheckForNonStandardConstructs(filename, clean_lines, linenum,
class_state, error):
"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
Complain about several constructs which gcc-2 accepts, but which are
not standard C++. Warning about these in lint is one way to ease the
transition to new compilers.
- put storage class first (e.g. "static const" instead of "const static").
- "%lld" instead of %qd" in printf-type functions.
- "%1$d" is non-standard in printf-type functions.
- "\%" is an undefined character escape sequence.
- text after #endif is not allowed.
- invalid inner-style forward declaration.
- >? and <? operators, and their >?= and <?= cousins.
- classes with virtual methods need virtual destructors (compiler warning
available, but not turned on yet.)
Additionally, check for constructor/destructor style violations and reference
members, as it is very convenient to do so while checking for
gcc-2 compliance.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
class_state: A _ClassState instance which maintains information about
the current stack of nested class declarations being parsed.
error: A callable to which errors are reported, which takes 4 arguments:
filename, line number, error level, and message
"""
# Remove comments from the line, but leave in strings for now.
line = clean_lines.lines[linenum]
if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
error(filename, linenum, 'runtime/printf_format', 3,
'%q in format strings is deprecated. Use %ll instead.')
if Search(r'printf\s*\(.*".*%\d+\$', line):
error(filename, linenum, 'runtime/printf_format', 2,
'%N$ formats are unconventional. Try rewriting to avoid them.')
# Remove escaped backslashes before looking for undefined escapes.
line = line.replace('\\\\', '')
if Search(r'("|\').*\\(%|\[|\(|{)', line):
error(filename, linenum, 'build/printf_format', 3,
'%, [, (, and { are undefined character escapes. Unescape them.')
# For the rest, work with both comments and strings removed.
line = clean_lines.elided[linenum]
if Search(r'\b(const|volatile|void|char|short|int|long'
r'|float|double|signed|unsigned'
r'|schar|u?int8|u?int16|u?int32|u?int64)'
r'\s+(auto|register|static|extern|typedef)\b',
line):
error(filename, linenum, 'build/storage_class', 5,
'Storage class (static, extern, typedef, etc) should be first.')
if Match(r'\s*#\s*endif\s*[^/\s]+', line):
error(filename, linenum, 'build/endif_comment', 5,
'Uncommented text after #endif is non-standard. Use a comment.')
if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
error(filename, linenum, 'build/forward_decl', 5,
'Inner-style forward declarations are invalid. Remove this line.')
if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
line):
error(filename, linenum, 'build/deprecated', 3,
'>? and <? (max and min) operators are non-standard and deprecated.')
if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
# TODO(unknown): Could it be expanded safely to arbitrary references,
# without triggering too many false positives? The first
# attempt triggered 5 warnings for mostly benign code in the regtest, hence
# the restriction.
# Here's the original regexp, for the reference:
# type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
# r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
error(filename, linenum, 'runtime/member_string_references', 2,
'const string& members are dangerous. It is much better to use '
'alternatives, such as pointers or simple constants.')
# Track class entry and exit, and attempt to find cases within the
# class declaration that don't meet the C++ style
# guidelines. Tracking is very dependent on the code matching Google
# style guidelines, but it seems to perform well enough in testing
# to be a worthwhile addition to the checks.
classinfo_stack = class_state.classinfo_stack
# Look for a class declaration
class_decl_match = Match(
r'\s*(template\s*<[\w\s<>,:]*>\s*)?(class|struct)\s+(\w+(::\w+)*)', line)
if class_decl_match:
classinfo_stack.append(_ClassInfo(class_decl_match.group(3), linenum))
# Everything else in this function uses the top of the stack if it's
# not empty.
if not classinfo_stack:
return
classinfo = classinfo_stack[-1]
# If the opening brace hasn't been seen look for it and also
# parent class declarations.
if not classinfo.seen_open_brace:
# If the line has a ';' in it, assume it's a forward declaration or
# a single-line class declaration, which we won't process.
if line.find(';') != -1:
classinfo_stack.pop()
return
classinfo.seen_open_brace = (line.find('{') != -1)
# Look for a bare ':'
if Search('(^|[^:]):($|[^:])', line):
classinfo.is_derived = True
if not classinfo.seen_open_brace:
return # Everything else in this function is for after open brace
# The class may have been declared with namespace or classname qualifiers.
# The constructor and destructor will not have those qualifiers.
base_classname = classinfo.name.split('::')[-1]
# Look for single-argument constructors that aren't marked explicit.
# Technically a valid construct, but against style.
args = Match(r'(?<!explicit)\s+%s\s*\(([^,()]+)\)'
% re.escape(base_classname),
line)
if (args and
args.group(1) != 'void' and
not Match(r'(const\s+)?%s\s*&' % re.escape(base_classname),
args.group(1).strip())):
error(filename, linenum, 'runtime/explicit', 5,
'Single-argument constructors should be marked explicit.')
# Look for methods declared virtual.
if Search(r'\bvirtual\b', line):
classinfo.virtual_method_linenumber = linenum
# Only look for a destructor declaration on the same line. It would
# be extremely unlikely for the destructor declaration to occupy
# more than one line.
if Search(r'~%s\s*\(' % base_classname, line):
classinfo.has_virtual_destructor = True
# Look for class end.
brace_depth = classinfo.brace_depth
brace_depth = brace_depth + line.count('{') - line.count('}')
if brace_depth <= 0:
classinfo = classinfo_stack.pop()
# Try to detect missing virtual destructor declarations.
# For now, only warn if a non-derived class with virtual methods lacks
# a virtual destructor. This is to make it less likely that people will
# declare derived virtual destructors without declaring the base
# destructor virtual.
if ((classinfo.virtual_method_linenumber is not None) and
(not classinfo.has_virtual_destructor) and
(not classinfo.is_derived)): # Only warn for base classes
error(filename, classinfo.linenum, 'runtime/virtual', 4,
'The class %s probably needs a virtual destructor due to '
'having virtual method(s), one declared at line %d.'
% (classinfo.name, classinfo.virtual_method_linenumber))
else:
classinfo.brace_depth = brace_depth
def CheckSpacingForFunctionCall(filename, line, linenum, error):
"""Checks for the correctness of various spacing around function calls.
Args:
filename: The name of the current file.
line: The text of the line to check.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
# Since function calls often occur inside if/for/while/switch
# expressions - which have their own, more liberal conventions - we
# first see if we should be looking inside such an expression for a
# function call, to which we can apply more strict standards.
fncall = line # if there's no control flow construct, look at whole line
for pattern in (r'\bif\s*\((.*)\)\s*{',
r'\bfor\s*\((.*)\)\s*{',
r'\bwhile\s*\((.*)\)\s*[{;]',
r'\bswitch\s*\((.*)\)\s*{'):
match = Search(pattern, line)
if match:
fncall = match.group(1) # look inside the parens for function calls
break
# Except in if/for/while/switch, there should never be space
# immediately inside parens (eg "f( 3, 4 )"). We make an exception
# for nested parens ( (a+b) + c ). Likewise, there should never be
# a space before a ( when it's a function argument. I assume it's a
# function argument when the char before the whitespace is legal in
# a function name (alnum + _) and we're not starting a macro. Also ignore
# pointers and references to arrays and functions coz they're too tricky:
# we use a very simple way to recognize these:
# " (something)(maybe-something)" or
# " (something)(maybe-something," or
# " (something)[something]"
# Note that we assume the contents of [] to be short enough that
# they'll never need to wrap.
if ( # Ignore control structures.
not Search(r'\b(if|for|while|switch|return|delete)\b', fncall) and
# Ignore pointers/references to functions.
not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
# Ignore pointers/references to arrays.
not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
if Search(r'\w\s*\(\s(?!\s*\\$)', fncall): # a ( used for a fn call
error(filename, linenum, 'whitespace/parens', 4,
'Extra space after ( in function call')
elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
error(filename, linenum, 'whitespace/parens', 2,
'Extra space after (')
if (Search(r'\w\s+\(', fncall) and
not Search(r'#\s*define|typedef', fncall)):
error(filename, linenum, 'whitespace/parens', 4,
'Extra space before ( in function call')
# If the ) is followed only by a newline or a { + newline, assume it's
# part of a control statement (if/while/etc), and don't complain
if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
error(filename, linenum, 'whitespace/parens', 2,
'Extra space before )')
def IsBlankLine(line):
"""Returns true if the given line is blank.
We consider a line to be blank if the line is empty or consists of
only white spaces.
Args:
line: A line of a string.
Returns:
True, if the given line is blank.
"""
return not line or line.isspace()
def CheckForFunctionLengths(filename, clean_lines, linenum,
function_state, error):
"""Reports for long function bodies.
For an overview why this is done, see:
http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
Uses a simplistic algorithm assuming other style guidelines
(especially spacing) are followed.
Only checks unindented functions, so class members are unchecked.
Trivial bodies are unchecked, so constructors with huge initializer lists
may be missed.
Blank/comment lines are not counted so as to avoid encouraging the removal
of vertical space and comments just to get through a lint check.
NOLINT *on the last line of a function* disables this check.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
function_state: Current function name and lines in body so far.
error: The function to call with any errors found.
"""
lines = clean_lines.lines
line = lines[linenum]
raw = clean_lines.raw_lines
raw_line = raw[linenum]
joined_line = ''
starting_func = False
regexp = r'(\w(\w|::|\*|\&|\s)*)\(' # decls * & space::name( ...
match_result = Match(regexp, line)
if match_result:
# If the name is all caps and underscores, figure it's a macro and
# ignore it, unless it's TEST or TEST_F.
function_name = match_result.group(1).split()[-1]
if function_name == 'TEST' or function_name == 'TEST_F' or (
not Match(r'[A-Z_]+$', function_name)):
starting_func = True
if starting_func:
body_found = False
for start_linenum in xrange(linenum, clean_lines.NumLines()):
start_line = lines[start_linenum]
joined_line += ' ' + start_line.lstrip()
if Search(r'(;|})', start_line): # Declarations and trivial functions
body_found = True
break # ... ignore
elif Search(r'{', start_line):
body_found = True
function = Search(r'((\w|:)*)\(', line).group(1)
if Match(r'TEST', function): # Handle TEST... macros
parameter_regexp = Search(r'(\(.*\))', joined_line)
if parameter_regexp: # Ignore bad syntax
function += parameter_regexp.group(1)
else:
function += '()'
function_state.Begin(function)
break
if not body_found:
# No body for the function (or evidence of a non-function) was found.
error(filename, linenum, 'readability/fn_size', 5,
'Lint failed to find start of function body.')
elif Match(r'^\}\s*$', line): # function end
function_state.Check(error, filename, linenum)
function_state.End()
elif not Match(r'^\s*$', line):
function_state.Count() # Count non-blank/non-comment lines.
_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
def CheckComment(comment, filename, linenum, error):
"""Checks for common mistakes in TODO comments.
Args:
comment: The text of the comment from the line in question.
filename: The name of the current file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
match = _RE_PATTERN_TODO.match(comment)
if match:
# One whitespace is correct; zero whitespace is handled elsewhere.
leading_whitespace = match.group(1)
if len(leading_whitespace) > 1:
error(filename, linenum, 'whitespace/todo', 2,
'Too many spaces before TODO')
username = match.group(2)
if not username:
error(filename, linenum, 'readability/todo', 2,
'Missing username in TODO; it should look like '
'"// TODO(my_username): Stuff."')
middle_whitespace = match.group(3)
# Comparisons made explicit for correctness -- pylint: disable-msg=C6403
if middle_whitespace != ' ' and middle_whitespace != '':
error(filename, linenum, 'whitespace/todo', 2,
'TODO(my_username) should be followed by a space')
def CheckSpacing(filename, clean_lines, linenum, error):
"""Checks for the correctness of various spacing issues in the code.
Things we check for: spaces around operators, spaces after
if/for/while/switch, no spaces around parens in function calls, two
spaces between code and comment, don't start a block with a blank
line, don't end a function with a blank line, don't have too many
blank lines in a row.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
raw = clean_lines.raw_lines
line = raw[linenum]
# Before nixing comments, check if the line is blank for no good
# reason. This includes the first line after a block is opened, and
# blank lines at the end of a function (ie, right before a line like '}'
if IsBlankLine(line):
elided = clean_lines.elided
prev_line = elided[linenum - 1]
prevbrace = prev_line.rfind('{')
# TODO(unknown): Don't complain if line before blank line, and line after,
# both start with alnums and are indented the same amount.
# This ignores whitespace at the start of a namespace block
# because those are not usually indented.
if (prevbrace != -1 and prev_line[prevbrace:].find('}') == -1
and prev_line[:prevbrace].find('namespace') == -1):
# OK, we have a blank line at the start of a code block. Before we
# complain, we check if it is an exception to the rule: The previous
# non-empty line has the parameters of a function header that are indented
# 4 spaces (because they did not fit in an 80 column line when placed on
# the same line as the function name). We also check for the case where
# the previous line is indented 6 spaces, which may happen when the
# initializers of a constructor do not fit into an 80 column line.
exception = False
if Match(r' {6}\w', prev_line): # Initializer list?
# We are looking for the opening column of initializer list, which
# should be indented 4 spaces to cause 6 space indentation afterwards.
search_position = linenum-2
while (search_position >= 0
and Match(r' {6}\w', elided[search_position])):
search_position -= 1
exception = (search_position >= 0
and elided[search_position][:5] == ' :')
else:
# Search for the function arguments or an initializer list. We use a
# simple heuristic here: If the line is indented 4 spaces; and we have a
# closing paren, without the opening paren, followed by an opening brace
# or colon (for initializer lists) we assume that it is the last line of
# a function header. If we have a colon indented 4 spaces, it is an
# initializer list.
exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
prev_line)
or Match(r' {4}:', prev_line))
if not exception:
error(filename, linenum, 'whitespace/blank_line', 2,
'Blank line at the start of a code block. Is this needed?')
# This doesn't ignore whitespace at the end of a namespace block
# because that is too hard without pairing open/close braces;
# however, a special exception is made for namespace closing
# brackets which have a comment containing "namespace".
#
# Also, ignore blank lines at the end of a block in a long if-else
# chain, like this:
# if (condition1) {
# // Something followed by a blank line
#
# } else if (condition2) {
# // Something else
# }
if linenum + 1 < clean_lines.NumLines():
next_line = raw[linenum + 1]
if (next_line
and Match(r'\s*}', next_line)
and next_line.find('namespace') == -1
and next_line.find('} else ') == -1):
error(filename, linenum, 'whitespace/blank_line', 3,
'Blank line at the end of a code block. Is this needed?')
# Next, we complain if there's a comment too near the text
commentpos = line.find('//')
if commentpos != -1:
# Check if the // may be in quotes. If so, ignore it
# Comparisons made explicit for clarity -- pylint: disable-msg=C6403
if (line.count('"', 0, commentpos) -
line.count('\\"', 0, commentpos)) % 2 == 0: # not in quotes
# Allow one space for new scopes, two spaces otherwise:
if (not Match(r'^\s*{ //', line) and
((commentpos >= 1 and
line[commentpos-1] not in string.whitespace) or
(commentpos >= 2 and
line[commentpos-2] not in string.whitespace))):
error(filename, linenum, 'whitespace/comments', 2,
'At least two spaces is best between code and comments')
# There should always be a space between the // and the comment
commentend = commentpos + 2
if commentend < len(line) and not line[commentend] == ' ':
# but some lines are exceptions -- e.g. if they're big
# comment delimiters like:
# //----------------------------------------------------------
# or are an empty C++ style Doxygen comment, like:
# ///
# or they begin with multiple slashes followed by a space:
# //////// Header comment
match = (Search(r'[=/-]{4,}\s*$', line[commentend:]) or
Search(r'^/$', line[commentend:]) or
Search(r'^/+ ', line[commentend:]))
if not match:
error(filename, linenum, 'whitespace/comments', 4,
'Should have a space between // and comment')
CheckComment(line[commentpos:], filename, linenum, error)
line = clean_lines.elided[linenum] # get rid of comments and strings
# Don't try to do spacing checks for operator methods
line = re.sub(r'operator(==|!=|<|<<|<=|>=|>>|>)\(', 'operator\(', line)
# We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
# Otherwise not. Note we only check for non-spaces on *both* sides;
# sometimes people put non-spaces on one side when aligning ='s among
# many lines (not that this is behavior that I approve of...)
if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
error(filename, linenum, 'whitespace/operators', 4,
'Missing spaces around =')
# It's ok not to have spaces around binary operators like + - * /, but if
# there's too little whitespace, we get concerned. It's hard to tell,
# though, so we punt on this one for now. TODO.
# You should always have whitespace around binary operators.
# Alas, we can't test < or > because they're legitimately used sans spaces
# (a->b, vector<int> a). The only time we can tell is a < with no >, and
# only if it's not template params list spilling into the next line.
match = Search(r'[^<>=!\s](==|!=|<=|>=)[^<>=!\s]', line)
if not match:
# Note that while it seems that the '<[^<]*' term in the following
# regexp could be simplified to '<.*', which would indeed match
# the same class of strings, the [^<] means that searching for the
# regexp takes linear rather than quadratic time.
if not Search(r'<[^<]*,\s*$', line): # template params spill
match = Search(r'[^<>=!\s](<)[^<>=!\s]([^>]|->)*$', line)
if match:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around %s' % match.group(1))
# We allow no-spaces around << and >> when used like this: 10<<20, but
# not otherwise (particularly, not when used as streams)
match = Search(r'[^0-9\s](<<|>>)[^0-9\s]', line)
if match:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around %s' % match.group(1))
# There shouldn't be space around unary operators
match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
if match:
error(filename, linenum, 'whitespace/operators', 4,
'Extra space for operator %s' % match.group(1))
# A pet peeve of mine: no spaces after an if, while, switch, or for
match = Search(r' (if\(|for\(|while\(|switch\()', line)
if match:
error(filename, linenum, 'whitespace/parens', 5,
'Missing space before ( in %s' % match.group(1))
# For if/for/while/switch, the left and right parens should be
# consistent about how many spaces are inside the parens, and
# there should either be zero or one spaces inside the parens.
# We don't want: "if ( foo)" or "if ( foo )".
# Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
match = Search(r'\b(if|for|while|switch)\s*'
r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
line)
if match:
if len(match.group(2)) != len(match.group(4)):
if not (match.group(3) == ';' and
len(match.group(2)) == 1 + len(match.group(4)) or
not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
error(filename, linenum, 'whitespace/parens', 5,
'Mismatching spaces inside () in %s' % match.group(1))
if not len(match.group(2)) in [0, 1]:
error(filename, linenum, 'whitespace/parens', 5,
'Should have zero or one spaces inside ( and ) in %s' %
match.group(1))
# You should always have a space after a comma (either as fn arg or operator)
if Search(r',[^\s]', line):
error(filename, linenum, 'whitespace/comma', 3,
'Missing space after ,')
# Next we will look for issues with function calls.
CheckSpacingForFunctionCall(filename, line, linenum, error)
# Except after an opening paren, you should have spaces before your braces.
# And since you should never have braces at the beginning of a line, this is
# an easy test.
if Search(r'[^ (]{', line):
error(filename, linenum, 'whitespace/braces', 5,
'Missing space before {')
# Make sure '} else {' has spaces.
if Search(r'}else', line):
error(filename, linenum, 'whitespace/braces', 5,
'Missing space before else')
# You shouldn't have spaces before your brackets, except maybe after
# 'delete []' or 'new char * []'.
if Search(r'\w\s+\[', line) and not Search(r'delete\s+\[', line):
error(filename, linenum, 'whitespace/braces', 5,
'Extra space before [')
# You shouldn't have a space before a semicolon at the end of the line.
# There's a special case for "for" since the style guide allows space before
# the semicolon there.
if Search(r':\s*;\s*$', line):
error(filename, linenum, 'whitespace/semicolon', 5,
'Semicolon defining empty statement. Use { } instead.')
elif Search(r'^\s*;\s*$', line):
error(filename, linenum, 'whitespace/semicolon', 5,
'Line contains only semicolon. If this should be an empty statement, '
'use { } instead.')
elif (Search(r'\s+;\s*$', line) and
not Search(r'\bfor\b', line)):
error(filename, linenum, 'whitespace/semicolon', 5,
'Extra space before last semicolon. If this should be an empty '
'statement, use { } instead.')
def GetPreviousNonBlankLine(clean_lines, linenum):
"""Return the most recent non-blank line and its line number.
Args:
clean_lines: A CleansedLines instance containing the file contents.
linenum: The number of the line to check.
Returns:
A tuple with two elements. The first element is the contents of the last
non-blank line before the current line, or the empty string if this is the
first non-blank line. The second is the line number of that line, or -1
if this is the first non-blank line.
"""
prevlinenum = linenum - 1
while prevlinenum >= 0:
prevline = clean_lines.elided[prevlinenum]
if not IsBlankLine(prevline): # if not a blank line...
return (prevline, prevlinenum)
prevlinenum -= 1
return ('', -1)
def CheckBraces(filename, clean_lines, linenum, error):
"""Looks for misplaced braces (e.g. at the end of line).
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum] # get rid of comments and strings
if Match(r'\s*{\s*$', line):
# We allow an open brace to start a line in the case where someone
# is using braces in a block to explicitly create a new scope,
# which is commonly used to control the lifetime of
# stack-allocated variables. We don't detect this perfectly: we
# just don't complain if the last non-whitespace character on the
# previous non-blank line is ';', ':', '{', or '}'.
prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
if not Search(r'[;:}{]\s*$', prevline):
error(filename, linenum, 'whitespace/braces', 4,
'{ should almost always be at the end of the previous line')
# An else clause should be on the same line as the preceding closing brace.
if Match(r'\s*else\s*', line):
prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
if Match(r'\s*}\s*$', prevline):
error(filename, linenum, 'whitespace/newline', 4,
'An else should appear on the same line as the preceding }')
# If braces come on one side of an else, they should be on both.
# However, we have to worry about "else if" that spans multiple lines!
if Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
if Search(r'}\s*else if([^{]*)$', line): # could be multi-line if
# find the ( after the if
pos = line.find('else if')
pos = line.find('(', pos)
if pos > 0:
(endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
if endline[endpos:].find('{') == -1: # must be brace after if
error(filename, linenum, 'readability/braces', 5,
'If an else has a brace on one side, it should have it on both')
else: # common case: else not followed by a multi-line if
error(filename, linenum, 'readability/braces', 5,
'If an else has a brace on one side, it should have it on both')
# Likewise, an else should never have the else clause on the same line
if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
error(filename, linenum, 'whitespace/newline', 4,
'Else clause should never be on same line as else (use 2 lines)')
# In the same way, a do/while should never be on one line
if Match(r'\s*do [^\s{]', line):
error(filename, linenum, 'whitespace/newline', 4,
'do/while clauses should not be on a single line')
# Braces shouldn't be followed by a ; unless they're defining a struct
# or initializing an array.
# We can't tell in general, but we can for some common cases.
prevlinenum = linenum
while True:
(prevline, prevlinenum) = GetPreviousNonBlankLine(clean_lines, prevlinenum)
if Match(r'\s+{.*}\s*;', line) and not prevline.count(';'):
line = prevline + line
else:
break
if (Search(r'{.*}\s*;', line) and
line.count('{') == line.count('}') and
not Search(r'struct|class|enum|\s*=\s*{', line)):
error(filename, linenum, 'readability/braces', 4,
"You don't need a ; after a }")
def ReplaceableCheck(operator, macro, line):
"""Determine whether a basic CHECK can be replaced with a more specific one.
For example suggest using CHECK_EQ instead of CHECK(a == b) and
similarly for CHECK_GE, CHECK_GT, CHECK_LE, CHECK_LT, CHECK_NE.
Args:
operator: The C++ operator used in the CHECK.
macro: The CHECK or EXPECT macro being called.
line: The current source line.
Returns:
True if the CHECK can be replaced with a more specific one.
"""
# This matches decimal and hex integers, strings, and chars (in that order).
match_constant = r'([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')'
# Expression to match two sides of the operator with something that
# looks like a literal, since CHECK(x == iterator) won't compile.
# This means we can't catch all the cases where a more specific
# CHECK is possible, but it's less annoying than dealing with
# extraneous warnings.
match_this = (r'\s*' + macro + r'\((\s*' +
match_constant + r'\s*' + operator + r'[^<>].*|'
r'.*[^<>]' + operator + r'\s*' + match_constant +
r'\s*\))')
# Don't complain about CHECK(x == NULL) or similar because
# CHECK_EQ(x, NULL) won't compile (requires a cast).
# Also, don't complain about more complex boolean expressions
# involving && or || such as CHECK(a == b || c == d).
return Match(match_this, line) and not Search(r'NULL|&&|\|\|', line)
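# Illustrative example: CHECK(x == 42) is flagged (suggesting the CHECK_EQ form)
# because one side of the operator is a literal; CHECK(x == iter) is left alone
# since neither side matches the literal pattern, and CHECK(p == NULL) is
# skipped because CHECK_EQ(p, NULL) would not compile without a cast.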
def CheckCheck(filename, clean_lines, linenum, error):
"""Checks the use of CHECK and EXPECT macros.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
# Decide the set of replacement macros that should be suggested
raw_lines = clean_lines.raw_lines
current_macro = ''
for macro in _CHECK_MACROS:
if raw_lines[linenum].find(macro) >= 0:
current_macro = macro
break
if not current_macro:
# Don't waste time here if line doesn't contain 'CHECK' or 'EXPECT'
return
line = clean_lines.elided[linenum] # get rid of comments and strings
# Encourage replacing plain CHECKs with CHECK_EQ/CHECK_NE/etc.
for operator in ['==', '!=', '>=', '>', '<=', '<']:
if ReplaceableCheck(operator, current_macro, line):
error(filename, linenum, 'readability/check', 2,
'Consider using %s instead of %s(a %s b)' % (
_CHECK_REPLACEMENT[current_macro][operator],
current_macro, operator))
break
def GetLineWidth(line):
"""Determines the width of the line in column positions.
Args:
line: A string, which may be a Unicode string.
Returns:
The width of the line in column positions, accounting for Unicode
combining characters and wide characters.
"""
if isinstance(line, unicode):
width = 0
for c in unicodedata.normalize('NFC', line):
if unicodedata.east_asian_width(c) in ('W', 'F'):
width += 2
elif not unicodedata.combining(c):
width += 1
return width
else:
return len(line)
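# Illustrative example: a unicode line of 45 fullwidth CJK characters reports a
# width of 90 columns (each counts as 2), so it can trip the 80-character limit
# even though len() of the string is only 45.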
def CheckStyle(filename, clean_lines, linenum, file_extension, error):
"""Checks rules from the 'C++ style rules' section of cppguide.html.
Most of these rules are hard to test (naming, comment style), but we
do what we can. In particular we check for 2-space indents, line lengths,
tab usage, spaces inside code, etc.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
file_extension: The extension (without the dot) of the filename.
error: The function to call with any errors found.
"""
raw_lines = clean_lines.raw_lines
line = raw_lines[linenum]
if line.find('\t') != -1:
error(filename, linenum, 'whitespace/tab', 1,
'Tab found; better to use spaces')
# One or three blank spaces at the beginning of the line is weird; it's
# hard to reconcile that with 2-space indents.
# NOTE: here are the conditions rob pike used for his tests. Mine aren't
# as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces
# if(RLENGTH > 20) complain = 0;
# if(match($0, " +(error|private|public|protected):")) complain = 0;
# if(match(prev, "&& *$")) complain = 0;
# if(match(prev, "\\|\\| *$")) complain = 0;
# if(match(prev, "[\",=><] *$")) complain = 0;
# if(match($0, " <<")) complain = 0;
# if(match(prev, " +for \\(")) complain = 0;
# if(prevodd && match(prevprev, " +for \\(")) complain = 0;
initial_spaces = 0
cleansed_line = clean_lines.elided[linenum]
while initial_spaces < len(line) and line[initial_spaces] == ' ':
initial_spaces += 1
if line and line[-1].isspace():
error(filename, linenum, 'whitespace/end_of_line', 4,
'Line ends in whitespace. Consider deleting these extra spaces.')
# There are certain situations we allow one space, notably for labels
elif ((initial_spaces == 1 or initial_spaces == 3) and
not Match(r'\s*\w+\s*:\s*$', cleansed_line)):
error(filename, linenum, 'whitespace/indent', 3,
'Weird number of spaces at line-start. '
'Are you using a 2-space indent?')
# Labels should always be indented at least one space.
elif not initial_spaces and line[:2] != '//' and Search(r'[^:]:\s*$',
line):
error(filename, linenum, 'whitespace/labels', 4,
'Labels should always be indented at least one space. '
'If this is a member-initializer list in a constructor or '
'the base class list in a class definition, the colon should '
'be on the following line.')
# Check if the line is a header guard.
is_header_guard = False
if file_extension == 'h':
cppvar = GetHeaderGuardCPPVariable(filename)
if (line.startswith('#ifndef %s' % cppvar) or
line.startswith('#define %s' % cppvar) or
line.startswith('#endif // %s' % cppvar)):
is_header_guard = True
# #include lines and header guards can be long, since there's no clean way to
# split them.
#
# URLs can be long too. It's possible to split these, but it makes them
# harder to cut&paste.
if (not line.startswith('#include') and not is_header_guard and
not Match(r'^\s*//.*http(s?)://\S*$', line)):
line_width = GetLineWidth(line)
if line_width > 100:
error(filename, linenum, 'whitespace/line_length', 4,
'Lines should very rarely be longer than 100 characters')
elif line_width > 80:
error(filename, linenum, 'whitespace/line_length', 2,
'Lines should be <= 80 characters long')
if (cleansed_line.count(';') > 1 and
# for loops are allowed two ;'s (and may run over two lines).
cleansed_line.find('for') == -1 and
(GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
# It's ok to have many commands in a switch case that fits in 1 line
not ((cleansed_line.find('case ') != -1 or
cleansed_line.find('default:') != -1) and
cleansed_line.find('break;') != -1)):
error(filename, linenum, 'whitespace/newline', 4,
'More than one command on the same line')
# Some more style checks
CheckBraces(filename, clean_lines, linenum, error)
CheckSpacing(filename, clean_lines, linenum, error)
CheckCheck(filename, clean_lines, linenum, error)
_RE_PATTERN_INCLUDE_NEW_STYLE = re.compile(r'#include +"[^/]+\.h"')
_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
# Matches the first component of a filename delimited by -s and _s. That is:
# _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
# _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
# _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
# _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
def _DropCommonSuffixes(filename):
"""Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed.
"""
for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
'inl.h', 'impl.h', 'internal.h'):
if (filename.endswith(suffix) and len(filename) > len(suffix) and
filename[-len(suffix) - 1] in ('-', '_')):
return filename[:-len(suffix) - 1]
return os.path.splitext(filename)[0]
def _IsTestFilename(filename):
"""Determines if the given filename has a suffix that identifies it as a test.
Args:
filename: The input filename.
Returns:
True if 'filename' looks like a test, False otherwise.
"""
if (filename.endswith('_test.cc') or
filename.endswith('_unittest.cc') or
filename.endswith('_regtest.cc')):
return True
else:
return False
def _ClassifyInclude(fileinfo, include, is_system):
"""Figures out what kind of header 'include' is.
Args:
fileinfo: The current file cpplint is running over. A FileInfo instance.
include: The path to a #included file.
is_system: True if the #include used <> rather than "".
Returns:
One of the _XXX_HEADER constants.
For example:
>>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
_C_SYS_HEADER
>>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
_CPP_SYS_HEADER
>>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
_LIKELY_MY_HEADER
>>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
... 'bar/foo_other_ext.h', False)
_POSSIBLE_MY_HEADER
>>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
_OTHER_HEADER
"""
# This is a list of all standard c++ header files, except
# those already checked for above.
is_stl_h = include in _STL_HEADERS
is_cpp_h = is_stl_h or include in _CPP_HEADERS
if is_system:
if is_cpp_h:
return _CPP_SYS_HEADER
else:
return _C_SYS_HEADER
# If the target file and the include we're checking share a
# basename when we drop common extensions, and the include
# lives in . , then it's likely to be owned by the target file.
target_dir, target_base = (
os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
if target_base == include_base and (
include_dir == target_dir or
include_dir == os.path.normpath(target_dir + '/../public')):
return _LIKELY_MY_HEADER
# If the target and include share some initial basename
# component, it's possible the target is implementing the
# include, so it's allowed to be first, but we'll never
# complain if it's not there.
target_first_component = _RE_FIRST_COMPONENT.match(target_base)
include_first_component = _RE_FIRST_COMPONENT.match(include_base)
if (target_first_component and include_first_component and
target_first_component.group(0) ==
include_first_component.group(0)):
return _POSSIBLE_MY_HEADER
return _OTHER_HEADER
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
"""Check rules that are applicable to #include lines.
Strings on #include lines are NOT removed from elided line, to make
certain tasks easier. However, to prevent false positives, checks
applicable to #include lines in CheckLanguage must be put here.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
"""
fileinfo = FileInfo(filename)
line = clean_lines.lines[linenum]
# "include" should use the new style "foo/bar.h" instead of just "bar.h"
if _RE_PATTERN_INCLUDE_NEW_STYLE.search(line):
error(filename, linenum, 'build/include', 4,
'Include the directory when naming .h files')
# we shouldn't include a file more than once. actually, there are a
# handful of instances where doing so is okay, but in general it's
# not.
match = _RE_PATTERN_INCLUDE.search(line)
if match:
include = match.group(2)
is_system = (match.group(1) == '<')
if include in include_state:
error(filename, linenum, 'build/include', 4,
'"%s" already included at %s:%s' %
(include, filename, include_state[include]))
else:
include_state[include] = linenum
# We want to ensure that headers appear in the right order:
# 1) for foo.cc, foo.h (preferred location)
# 2) c system files
# 3) cpp system files
# 4) for foo.cc, foo.h (deprecated location)
# 5) other google headers
#
# We classify each include statement as one of those 5 types
# using a number of techniques. The include_state object keeps
# track of the highest type seen, and complains if we see a
# lower type after that.
error_message = include_state.CheckNextIncludeOrder(
_ClassifyInclude(fileinfo, include, is_system))
if error_message:
error(filename, linenum, 'build/include_order', 4,
'%s. Should be: %s.h, c system, c++ system, other.' %
(error_message, fileinfo.BaseName()))
if not include_state.IsInAlphabeticalOrder(include):
error(filename, linenum, 'build/include_alpha', 4,
'Include "%s" not in alphabetical order' % include)
# Look for any of the stream classes that are part of standard C++.
match = _RE_PATTERN_INCLUDE.match(line)
if match:
include = match.group(2)
if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
# Many unit tests use cout, so we exempt them.
if not _IsTestFilename(filename):
error(filename, linenum, 'readability/streams', 3,
'Streams are highly discouraged.')
def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state,
error):
"""Checks rules from the 'C++ language rules' section of cppguide.html.
Some of these rules are hard to test (function overloading, using
uint32 inappropriately), but we do the best we can.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
file_extension: The extension (without the dot) of the filename.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
"""
# If the line is empty or consists of entirely a comment, no need to
# check it.
line = clean_lines.elided[linenum]
if not line:
return
match = _RE_PATTERN_INCLUDE.search(line)
if match:
CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
return
# Create an extended_line, which is the concatenation of the current and
# next lines, for more effective checking of code that may span more than one
# line.
if linenum + 1 < clean_lines.NumLines():
extended_line = line + clean_lines.elided[linenum + 1]
else:
extended_line = line
# Make Windows paths like Unix.
fullname = os.path.abspath(filename).replace('\\', '/')
# TODO(unknown): figure out if they're using default arguments in fn proto.
# Check for non-const references in functions. This is tricky because &
# is also used to take the address of something. We allow <> for templates,
# (ignoring whatever is between the braces) and : for classes.
# These are complicated re's. They try to capture the following:
# paren (for fn-prototype start), typename, &, varname. For the const
  # version, we're willing for const to be before typename or after it.
  # Don't check the implementation on the same line.
fnline = line.split('{', 1)[0]
if (len(re.findall(r'\([^()]*\b(?:[\w:]|<[^()]*>)+(\s?&|&\s?)\w+', fnline)) >
len(re.findall(r'\([^()]*\bconst\s+(?:typename\s+)?(?:struct\s+)?'
r'(?:[\w:]|<[^()]*>)+(\s?&|&\s?)\w+', fnline)) +
len(re.findall(r'\([^()]*\b(?:[\w:]|<[^()]*>)+\s+const(\s?&|&\s?)[\w]+',
fnline))):
# We allow non-const references in a few standard places, like functions
# called "swap()" or iostream operators like "<<" or ">>".
if not Search(
r'(swap|Swap|operator[<>][<>])\s*\(\s*(?:[\w:]|<.*>)+\s*&',
fnline):
error(filename, linenum, 'runtime/references', 2,
'Is this a non-const reference? '
'If so, make const or use a pointer.')
  # Check to see if they're using a conversion function cast.
# I just try to capture the most common basic types, though there are more.
# Parameterless conversion functions, such as bool(), are allowed as they are
# probably a member operator declaration or default constructor.
match = Search(
r'(\bnew\s+)?\b' # Grab 'new' operator, if it's there
r'(int|float|double|bool|char|int32|uint32|int64|uint64)\([^)]', line)
if match:
# gMock methods are defined using some variant of MOCK_METHODx(name, type)
# where type may be float(), int(string), etc. Without context they are
# virtually indistinguishable from int(x) casts.
if (match.group(1) is None and # If new operator, then this isn't a cast
not Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line)):
error(filename, linenum, 'readability/casting', 4,
'Using deprecated casting style. '
'Use static_cast<%s>(...) instead' %
match.group(2))
CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
'static_cast',
r'\((int|float|double|bool|char|u?int(16|32|64))\)',
error)
# This doesn't catch all cases. Consider (const char * const)"hello".
CheckCStyleCast(filename, linenum, line, clean_lines.raw_lines[linenum],
'reinterpret_cast', r'\((\w+\s?\*+\s?)\)', error)
# In addition, we look for people taking the address of a cast. This
# is dangerous -- casts can assign to temporaries, so the pointer doesn't
# point where you think.
if Search(
r'(&\([^)]+\)[\w(])|(&(static|dynamic|reinterpret)_cast\b)', line):
error(filename, linenum, 'runtime/casting', 4,
('Are you taking an address of a cast? '
'This is dangerous: could be a temp var. '
'Take the address before doing the cast, rather than after'))
# Check for people declaring static/global STL strings at the top level.
# This is dangerous because the C++ language does not guarantee that
# globals with constructors are initialized before the first access.
match = Match(
r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)',
line)
# Make sure it's not a function.
# Function template specialization looks like: "string foo<Type>(...".
# Class template definitions look like: "string Foo<Type>::Method(...".
if match and not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)?\s*\(([^"]|$)',
match.group(3)):
error(filename, linenum, 'runtime/string', 4,
'For a static/global string constant, use a C style string instead: '
'"%schar %s[]".' %
(match.group(1), match.group(2)))
# Check that we're not using RTTI outside of testing code.
if Search(r'\bdynamic_cast<', line) and not _IsTestFilename(filename):
error(filename, linenum, 'runtime/rtti', 5,
'Do not use dynamic_cast<>. If you need to cast within a class '
"hierarchy, use static_cast<> to upcast. Google doesn't support "
'RTTI.')
if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line):
error(filename, linenum, 'runtime/init', 4,
'You seem to be initializing a member variable with itself.')
if file_extension == 'h':
# TODO(unknown): check that 1-arg constructors are explicit.
# How to tell it's a constructor?
# (handled in CheckForNonStandardConstructs for now)
# TODO(unknown): check that classes have DISALLOW_EVIL_CONSTRUCTORS
# (level 1 error)
pass
# Check if people are using the verboten C basic types. The only exception
# we regularly allow is "unsigned short port" for port.
if Search(r'\bshort port\b', line):
if not Search(r'\bunsigned short port\b', line):
error(filename, linenum, 'runtime/int', 4,
'Use "unsigned short" for ports, not "short"')
else:
match = Search(r'\b(short|long(?! +double)|long long)\b', line)
if match:
error(filename, linenum, 'runtime/int', 4,
'Use int16/int64/etc, rather than the C type %s' % match.group(1))
# When snprintf is used, the second argument shouldn't be a literal.
match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
if match and match.group(2) != '0':
# If 2nd arg is zero, snprintf is used to calculate size.
error(filename, linenum, 'runtime/printf', 3,
'If you can, use sizeof(%s) instead of %s as the 2nd arg '
'to snprintf.' % (match.group(1), match.group(2)))
# Check if some verboten C functions are being used.
if Search(r'\bsprintf\b', line):
error(filename, linenum, 'runtime/printf', 5,
'Never use sprintf. Use snprintf instead.')
match = Search(r'\b(strcpy|strcat)\b', line)
if match:
error(filename, linenum, 'runtime/printf', 4,
'Almost always, snprintf is better than %s' % match.group(1))
if Search(r'\bsscanf\b', line):
error(filename, linenum, 'runtime/printf', 1,
'sscanf can be ok, but is slow and can overflow buffers.')
# Check if some verboten operator overloading is going on
# TODO(unknown): catch out-of-line unary operator&:
# class X {};
# int operator&(const X& x) { return 42; } // unary operator&
# The trick is it's hard to tell apart from binary operator&:
# class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
if Search(r'\boperator\s*&\s*\(\s*\)', line):
error(filename, linenum, 'runtime/operator', 4,
'Unary operator& is dangerous. Do not use it.')
# Check for suspicious usage of "if" like
# } if (a == b) {
if Search(r'\}\s*if\s*\(', line):
error(filename, linenum, 'readability/braces', 4,
'Did you mean "else if"? If not, start a new line for "if".')
# Check for potential format string bugs like printf(foo).
# We constrain the pattern not to pick things like DocidForPrintf(foo).
# Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
match = re.search(r'\b((?:string)?printf)\s*\(([\w.\->()]+)\)', line, re.I)
if match:
error(filename, linenum, 'runtime/printf', 4,
'Potential format string bug. Do %s("%%s", %s) instead.'
% (match.group(1), match.group(2)))
# Check for potential memset bugs like memset(buf, sizeof(buf), 0).
match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
error(filename, linenum, 'runtime/memset', 4,
'Did you mean "memset(%s, 0, %s)"?'
% (match.group(1), match.group(2)))
if Search(r'\busing namespace\b', line):
error(filename, linenum, 'build/namespaces', 5,
'Do not use namespace using-directives. '
'Use using-declarations instead.')
# Detect variable-length arrays.
match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
match.group(3).find(']') == -1):
# Split the size using space and arithmetic operators as delimiters.
# If any of the resulting tokens are not compile time constants then
# report the error.
tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3))
is_const = True
skip_next = False
for tok in tokens:
if skip_next:
skip_next = False
continue
if Search(r'sizeof\(.+\)', tok): continue
if Search(r'arraysize\(\w+\)', tok): continue
tok = tok.lstrip('(')
tok = tok.rstrip(')')
if not tok: continue
if Match(r'\d+', tok): continue
if Match(r'0[xX][0-9a-fA-F]+', tok): continue
if Match(r'k[A-Z0-9]\w*', tok): continue
if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
# A catch all for tricky sizeof cases, including 'sizeof expression',
# 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'
      # requires skipping the next token because we split on ' ' and '*'.
if tok.startswith('sizeof'):
skip_next = True
continue
is_const = False
break
if not is_const:
error(filename, linenum, 'runtime/arrays', 1,
'Do not use variable-length arrays. Use an appropriately named '
"('k' followed by CamelCase) compile-time constant for the size.")
# If DISALLOW_EVIL_CONSTRUCTORS, DISALLOW_COPY_AND_ASSIGN, or
# DISALLOW_IMPLICIT_CONSTRUCTORS is present, then it should be the last thing
# in the class declaration.
match = Match(
(r'\s*'
r'(DISALLOW_(EVIL_CONSTRUCTORS|COPY_AND_ASSIGN|IMPLICIT_CONSTRUCTORS))'
r'\(.*\);$'),
line)
if match and linenum + 1 < clean_lines.NumLines():
next_line = clean_lines.elided[linenum + 1]
if not Search(r'^\s*};', next_line):
error(filename, linenum, 'readability/constructors', 3,
match.group(1) + ' should be the last thing in the class')
# Check for use of unnamed namespaces in header files. Registration
# macros are typically OK, so we allow use of "namespace {" on lines
# that end with backslashes.
if (file_extension == 'h'
and Search(r'\bnamespace\s*{', line)
and line[-1] != '\\'):
error(filename, linenum, 'build/namespaces', 4,
'Do not use unnamed namespaces in header files. See '
'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
' for more information.')
def CheckCStyleCast(filename, linenum, line, raw_line, cast_type, pattern,
error):
"""Checks for a C-style cast by looking for the pattern.
This also handles sizeof(type) warnings, due to similarity of content.
Args:
filename: The name of the current file.
linenum: The number of the line to check.
line: The line of code to check.
raw_line: The raw line of code to check, with comments.
cast_type: The string for the C++ cast to recommend. This is either
reinterpret_cast or static_cast, depending.
pattern: The regular expression used to find C-style casts.
error: The function to call with any errors found.
"""
match = Search(pattern, line)
if not match:
return
# e.g., sizeof(int)
sizeof_match = Match(r'.*sizeof\s*$', line[0:match.start(1) - 1])
if sizeof_match:
error(filename, linenum, 'runtime/sizeof', 1,
'Using sizeof(type). Use sizeof(varname) instead if possible')
return
remainder = line[match.end(0):]
# The close paren is for function pointers as arguments to a function.
# eg, void foo(void (*bar)(int));
# The semicolon check is a more basic function check; also possibly a
# function pointer typedef.
# eg, void foo(int); or void foo(int) const;
# The equals check is for function pointer assignment.
# eg, void *(*foo)(int) = ...
#
# Right now, this will only catch cases where there's a single argument, and
# it's unnamed. It should probably be expanded to check for multiple
# arguments with some unnamed.
function_match = Match(r'\s*(\)|=|(const)?\s*(;|\{|throw\(\)))', remainder)
if function_match:
if (not function_match.group(3) or
function_match.group(3) == ';' or
raw_line.find('/*') < 0):
error(filename, linenum, 'readability/function', 3,
'All parameters should be named in a function')
return
# At this point, all that should be left is actual casts.
error(filename, linenum, 'readability/casting', 4,
'Using C-style cast. Use %s<%s>(...) instead' %
(cast_type, match.group(1)))
_HEADERS_CONTAINING_TEMPLATES = (
('<deque>', ('deque',)),
('<functional>', ('unary_function', 'binary_function',
'plus', 'minus', 'multiplies', 'divides', 'modulus',
'negate',
'equal_to', 'not_equal_to', 'greater', 'less',
'greater_equal', 'less_equal',
'logical_and', 'logical_or', 'logical_not',
'unary_negate', 'not1', 'binary_negate', 'not2',
'bind1st', 'bind2nd',
'pointer_to_unary_function',
'pointer_to_binary_function',
'ptr_fun',
'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
'mem_fun_ref_t',
'const_mem_fun_t', 'const_mem_fun1_t',
'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
'mem_fun_ref',
)),
('<limits>', ('numeric_limits',)),
('<list>', ('list',)),
('<map>', ('map', 'multimap',)),
('<memory>', ('allocator',)),
('<queue>', ('queue', 'priority_queue',)),
('<set>', ('set', 'multiset',)),
('<stack>', ('stack',)),
('<string>', ('char_traits', 'basic_string',)),
('<utility>', ('pair',)),
('<vector>', ('vector',)),
# gcc extensions.
# Note: std::hash is their hash, ::hash is our hash
('<hash_map>', ('hash_map', 'hash_multimap',)),
('<hash_set>', ('hash_set', 'hash_multiset',)),
('<slist>', ('slist',)),
)
_HEADERS_ACCEPTED_BUT_NOT_PROMOTED = {
# We can trust with reasonable confidence that map gives us pair<>, too.
'pair<>': ('map', 'multimap', 'hash_map', 'hash_multimap')
}
_RE_PATTERN_STRING = re.compile(r'\bstring\b')
_re_pattern_algorithm_header = []
for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap',
'transform'):
# Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
# type::max().
_re_pattern_algorithm_header.append(
(re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
_template,
'<algorithm>'))
_re_pattern_templates = []
for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
for _template in _templates:
_re_pattern_templates.append(
(re.compile(r'(\<|\b)' + _template + r'\s*\<'),
_template + '<>',
_header))
def FilesBelongToSameModule(filename_cc, filename_h):
"""Check if these two filenames belong to the same module.
  The concept of a 'module' here is as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the .cc file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file.
"""
if not filename_cc.endswith('.cc'):
return (False, '')
filename_cc = filename_cc[:-len('.cc')]
if filename_cc.endswith('_unittest'):
filename_cc = filename_cc[:-len('_unittest')]
elif filename_cc.endswith('_test'):
filename_cc = filename_cc[:-len('_test')]
filename_cc = filename_cc.replace('/public/', '/')
filename_cc = filename_cc.replace('/internal/', '/')
if not filename_h.endswith('.h'):
return (False, '')
filename_h = filename_h[:-len('.h')]
if filename_h.endswith('-inl'):
filename_h = filename_h[:-len('-inl')]
filename_h = filename_h.replace('/public/', '/')
filename_h = filename_h.replace('/internal/', '/')
files_belong_to_same_module = filename_cc.endswith(filename_h)
common_path = ''
if files_belong_to_same_module:
common_path = filename_cc[:-len(filename_h)]
return files_belong_to_same_module, common_path
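# Illustrative sketch (hypothetical paths, mirroring the docstring above):
# FilesBelongToSameModule('/absolute/path/to/base/sysinfo.cc', 'base/sysinfo.h')
# returns (True, '/absolute/path/to/'), so the caller can open
# common_path + header to read that header's own includes.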
def UpdateIncludeState(filename, include_state, io=codecs):
"""Fill up the include_state with new includes found from the file.
Args:
filename: the name of the header to read.
include_state: an _IncludeState instance in which the headers are inserted.
io: The io factory to use to read the file. Provided for testability.
Returns:
    True if a header was successfully added. False otherwise.
"""
headerfile = None
try:
headerfile = io.open(filename, 'r', 'utf8', 'replace')
except IOError:
return False
linenum = 0
for line in headerfile:
linenum += 1
clean_line = CleanseComments(line)
match = _RE_PATTERN_INCLUDE.search(clean_line)
if match:
include = match.group(2)
# The value formatting is cute, but not really used right now.
# What matters here is that the key is in include_state.
include_state.setdefault(include, '%s:%d' % (filename, linenum))
return True
def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
io=codecs):
"""Reports for missing stl includes.
This function will output warnings to make sure you are including the headers
necessary for the stl containers and functions that you use. We only give one
reason to include a header. For example, if you use both equal_to<> and
less<> in a .h file, only one (the latter in the file) of these will be
reported as a reason to include the <functional>.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
include_state: An _IncludeState instance.
error: The function to call with any errors found.
io: The IO factory to use to read the header file. Provided for unittest
injection.
"""
required = {} # A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
for linenum in xrange(clean_lines.NumLines()):
line = clean_lines.elided[linenum]
if not line or line[0] == '#':
continue
# String is special -- it is a non-templatized type in STL.
m = _RE_PATTERN_STRING.search(line)
if m:
# Don't warn about strings in non-STL namespaces:
# (We check only the first match per line; good enough.)
prefix = line[:m.start()]
if prefix.endswith('std::') or not prefix.endswith('::'):
required['<string>'] = (linenum, 'string')
for pattern, template, header in _re_pattern_algorithm_header:
if pattern.search(line):
required[header] = (linenum, template)
# The following function is just a speed up, no semantics are changed.
if not '<' in line: # Reduces the cpu time usage by skipping lines.
continue
for pattern, template, header in _re_pattern_templates:
if pattern.search(line):
required[header] = (linenum, template)
# The policy is that if you #include something in foo.h you don't need to
# include it again in foo.cc. Here, we will look at possible includes.
# Let's copy the include_state so it is only messed up within this function.
include_state = include_state.copy()
  # Did we find the header for this file (if any) and successfully load it?
header_found = False
# Use the absolute path so that matching works properly.
abs_filename = os.path.abspath(filename)
# For Emacs's flymake.
# If cpplint is invoked from Emacs's flymake, a temporary file is generated
# by flymake and that file name might end with '_flymake.cc'. In that case,
# restore original file name here so that the corresponding header file can be
# found.
# e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
# instead of 'foo_flymake.h'
abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
# include_state is modified during iteration, so we iterate over a copy of
# the keys.
for header in include_state.keys(): #NOLINT
(same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
fullpath = common_path + header
if same_module and UpdateIncludeState(fullpath, include_state, io):
header_found = True
# If we can't find the header file for a .cc, assume it's because we don't
# know where to look. In that case we'll give up as we're not sure they
# didn't include it in the .h file.
# TODO(unknown): Do a better job of finding .h files so we are confident that
# not having the .h file means there isn't one.
if filename.endswith('.cc') and not header_found:
return
# All the lines have been processed, report the errors found.
for required_header_unstripped in required:
template = required[required_header_unstripped][1]
if template in _HEADERS_ACCEPTED_BUT_NOT_PROMOTED:
headers = _HEADERS_ACCEPTED_BUT_NOT_PROMOTED[template]
if [True for header in headers if header in include_state]:
continue
if required_header_unstripped.strip('<>"') not in include_state:
error(filename, required[required_header_unstripped][0],
'build/include_what_you_use', 4,
'Add #include ' + required_header_unstripped + ' for ' + template)
def ProcessLine(filename, file_extension,
clean_lines, line, include_state, function_state,
class_state, error):
"""Processes a single line in the file.
Args:
filename: Filename of the file that is being processed.
file_extension: The extension (dot not included) of the file.
clean_lines: An array of strings, each representing a line of the file,
with comments stripped.
line: Number of line being processed.
include_state: An _IncludeState instance in which the headers are inserted.
function_state: A _FunctionState instance which counts function lines, etc.
class_state: A _ClassState instance which maintains information about
the current stack of nested class declarations being parsed.
error: A callable to which errors are reported, which takes 4 arguments:
filename, line number, error level, and message
"""
raw_lines = clean_lines.raw_lines
ParseNolintSuppressions(filename, raw_lines[line], line, error)
CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
CheckStyle(filename, clean_lines, line, file_extension, error)
CheckLanguage(filename, clean_lines, line, file_extension, include_state,
error)
CheckForNonStandardConstructs(filename, clean_lines, line,
class_state, error)
CheckPosixThreading(filename, clean_lines, line, error)
CheckInvalidIncrement(filename, clean_lines, line, error)
def ProcessFileData(filename, file_extension, lines, error):
"""Performs lint checks and reports any errors to the given error function.
Args:
filename: Filename of the file that is being processed.
file_extension: The extension (dot not included) of the file.
lines: An array of strings, each representing a line of the file, with the
      last element being empty if the file is terminated with a newline.
    error: A callable to which errors are reported, which takes 4 arguments:
           filename, line number, error level, and message
  """
lines = (['// marker so line numbers and indices both start at 1'] + lines +
['// marker so line numbers end in a known way'])
include_state = _IncludeState()
function_state = _FunctionState()
class_state = _ClassState()
ResetNolintSuppressions()
CheckForCopyright(filename, lines, error)
if file_extension == 'h':
CheckForHeaderGuard(filename, lines, error)
RemoveMultiLineComments(filename, lines, error)
clean_lines = CleansedLines(lines)
for line in xrange(clean_lines.NumLines()):
ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, class_state, error)
class_state.CheckFinished(filename, error)
CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
# We check here rather than inside ProcessLine so that we see raw
# lines rather than "cleaned" lines.
CheckForUnicodeReplacementCharacters(filename, lines, error)
CheckForNewlineAtEOF(filename, lines, error)
def ProcessFile(filename, vlevel):
"""Does google-lint on a single file.
Args:
filename: The name of the file to parse.
vlevel: The level of errors to report. Every error of confidence
>= verbose_level will be reported. 0 is a good default.
"""
_SetVerboseLevel(vlevel)
try:
# Support the UNIX convention of using "-" for stdin. Note that
# we are not opening the file with universal newline support
# (which codecs doesn't support anyway), so the resulting lines do
# contain trailing '\r' characters if we are reading a file that
# has CRLF endings.
# If after the split a trailing '\r' is present, it is removed
# below. If it is not expected to be present (i.e. os.linesep !=
# '\r\n' as in Windows), a warning is issued below if this file
# is processed.
if filename == '-':
lines = codecs.StreamReaderWriter(sys.stdin,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace').read().split('\n')
else:
lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
carriage_return_found = False
# Remove trailing '\r'.
for linenum in range(len(lines)):
if lines[linenum].endswith('\r'):
lines[linenum] = lines[linenum].rstrip('\r')
carriage_return_found = True
except IOError:
sys.stderr.write(
"Skipping input '%s': Can't open for reading\n" % filename)
return
# Note, if no dot is found, this will give the entire filename as the ext.
file_extension = filename[filename.rfind('.') + 1:]
# When reading from stdin, the extension is unknown, so no cpplint tests
# should rely on the extension.
if (filename != '-' and file_extension != 'cc' and file_extension != 'h'
and file_extension != 'cpp'):
sys.stderr.write('Ignoring %s; not a .cc or .h file\n' % filename)
else:
ProcessFileData(filename, file_extension, lines, Error)
if carriage_return_found and os.linesep != '\r\n':
      # Use 0 for linenum since outputting only one error for potentially
# several lines.
Error(filename, 0, 'whitespace/newline', 1,
            'One or more unexpected \\r (^M) found; '
'better to use only a \\n')
sys.stderr.write('Done processing %s\n' % filename)
def PrintUsage(message):
"""Prints a brief usage string and exits, optionally with an error message.
Args:
message: The optional error message.
"""
sys.stderr.write(_USAGE)
if message:
sys.exit('\nFATAL ERROR: ' + message)
else:
sys.exit(1)
def PrintCategories():
"""Prints a list of all the error-categories used by error messages.
These are the categories used to filter messages via --filter.
"""
sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES))
sys.exit(0)
def ParseArguments(args):
"""Parses the command line arguments.
This may set the output format and verbosity level as side-effects.
Args:
args: The command line arguments:
Returns:
The list of filenames to lint.
"""
try:
(opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
'counting=',
'filter='])
except getopt.GetoptError:
PrintUsage('Invalid arguments.')
verbosity = _VerboseLevel()
output_format = _OutputFormat()
filters = ''
counting_style = ''
for (opt, val) in opts:
if opt == '--help':
PrintUsage(None)
elif opt == '--output':
if not val in ('emacs', 'vs7'):
PrintUsage('The only allowed output formats are emacs and vs7.')
output_format = val
elif opt == '--verbose':
verbosity = int(val)
elif opt == '--filter':
filters = val
if not filters:
PrintCategories()
elif opt == '--counting':
if val not in ('total', 'toplevel', 'detailed'):
PrintUsage('Valid counting options are total, toplevel, and detailed')
counting_style = val
if not filenames:
PrintUsage('No files were specified.')
_SetOutputFormat(output_format)
_SetVerboseLevel(verbosity)
_SetFilters(filters)
_SetCountingStyle(counting_style)
return filenames
def main():
filenames = ParseArguments(sys.argv[1:])
# Change stderr to write with replacement characters so we don't die
# if we try to print something containing non-ASCII characters.
sys.stderr = codecs.StreamReaderWriter(sys.stderr,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace')
_cpplint_state.ResetErrorCounts()
for filename in filenames:
ProcessFile(filename, _cpplint_state.verbose_level)
_cpplint_state.PrintErrorCounts()
sys.exit(_cpplint_state.error_count > 0)
if __name__ == '__main__':
main()
| bsd-3-clause | -166,617,796,872,889,120 | 38.502079 | 86 | 0.64915 | false | 3.735763 | false | false | false |
idaholab/raven | plugins/ExamplePlugin/src/SumOfExponential.py | 1 | 5349 | """
Author: A. Alfonsi
Date : 11/17/2017
"""
import numpy as np
import math
from PluginBaseClasses.ExternalModelPluginBase import ExternalModelPluginBase
class SumOfExponential(ExternalModelPluginBase):
# Example External Model plugin class
#################################
#### RAVEN API methods BEGIN ####
#################################
def _readMoreXML(self, container, xmlNode):
"""
Method to read the portion of the XML that belongs to this plugin
@ In, container, object, self-like object where all the variables can be stored
@ In, xmlNode, xml.etree.ElementTree.Element, XML node that needs to be read
@ Out, None
"""
container.coefficients = {}
container.startValue = None
container.endValue = None
container.numberPoints = 10
outputVarNode = xmlNode.find("outputVariable")
if outputVarNode is None:
raise IOError("ExamplePlugin: <outputVariable> XML block must be inputted!")
container.outputVariable = outputVarNode.text.strip()
monotonicVarNode = xmlNode.find("monotonicVariable")
if monotonicVarNode is None:
raise IOError("ExamplePlugin: <monotonicVariable> XML block must be inputted!")
container.monotonicVariableName = monotonicVarNode.text.strip()
for child in xmlNode:
if child.tag.strip() == "variables":
        # parse the comma-separated list of variable names
container.variables = [var.strip() for var in child.text.split(",")]
if container.outputVariable not in container.variables:
raise IOError("ExamplePlugin: "+container.outputVariable+" variable MUST be present in the <variables> definition!")
if container.monotonicVariableName not in container.variables:
raise IOError("ExamplePlugin: "+container.monotonicVariableName+" variable MUST be present in the <variables> definition!")
if len(container.variables) < 2:
raise IOError("ExamplePlugin: at least 1 input and 1 output variable ("+container.outputVariable+") must be listed in the <variables> definition!!")
if child.tag.strip() == "coefficient":
if "varName" not in child.attrib:
raise IOError("ExamplePlugin: attribute varName must be present in <coefficient> XML node!")
container.coefficients[child.attrib['varName']] = float(child.text)
if child.tag.strip() == "startMonotonicVariableValue":
container.startValue = float(child.text)
if child.tag.strip() == "endMonotonicVariableValue":
container.endValue = float(child.text)
if child.tag.strip() == "numberCalculationPoints":
container.numberPoints = int(child.text)
if container.startValue is None:
raise IOError("ExamplePlugin: <startMonotonicVariableValue> XML has not been inputted!")
if container.endValue is None:
raise IOError("ExamplePlugin: <endMonotonicVariableValue> XML has not been inputted!")
    container.variables.pop(container.variables.index(container.outputVariable))
    container.variables.pop(container.variables.index(container.monotonicVariableName))
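  # A minimal sketch of the XML block this reader expects (the child node names
  # match the parsing above; the wrapper tag, variable names and values are
  # hypothetical, not taken from a real RAVEN input file):
  #
  #   <ExternalModel name="sum" subType="ExamplePlugin.SumOfExponential">
  #     <variables>x1, x2, Xi, monotonicVariable</variables>
  #     <outputVariable>Xi</outputVariable>
  #     <monotonicVariable>monotonicVariable</monotonicVariable>
  #     <coefficient varName="x1">1.5</coefficient>
  #     <startMonotonicVariableValue>0.0</startMonotonicVariableValue>
  #     <endMonotonicVariableValue>1.0</endMonotonicVariableValue>
  #     <numberCalculationPoints>100</numberCalculationPoints>
  #   </ExternalModel>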
def initialize(self, container,runInfoDict,inputFiles):
"""
Method to initialize this plugin
@ In, container, object, self-like object where all the variables can be stored
@ In, runInfoDict, dict, dictionary containing all the RunInfo parameters (XML node <RunInfo>)
@ In, inputFiles, list, list of input files (if any)
@ Out, None
"""
for var in container.variables:
if var not in container.coefficients:
container.coefficients[var] = 1.0
print("ExamplePlugin: not found coefficient for variable "+var+". Default value is 1.0!")
container.stepSize = (container.endValue - container.startValue)/float(container.numberPoints)
def run(self, container, Inputs):
"""
This is a simple example of the run method in a plugin.
This method takes the variables in input and computes
oneOutputOfThisPlugin(t) = var1Coefficient*exp(var1*t)+var2Coefficient*exp(var2*t) ...
@ In, container, object, self-like object where all the variables can be stored
@ In, Inputs, dict, dictionary of inputs from RAVEN
"""
Xi = np.zeros(container.numberPoints+1)
monotonicVariable = np.zeros(container.numberPoints+1)
monoVarVal = container.startValue
monotonicVariable[0] = container.startValue
varCoeff = np.asarray([container.coefficients[var] for var in container.variables])
varExponents = np.asarray([Inputs[var]*monoVarVal for var in container.variables])
Xi[0] = np.sum(varCoeff*np.exp(varExponents))
for step in range(container.numberPoints):
monoVarVal+=container.stepSize
monotonicVariable[step+1] = monoVarVal
varExponents = np.asarray([Inputs[var]*(monoVarVal-monotonicVariable[step]) for var in container.variables])
if np.max(varExponents) >= np.finfo(varExponents.dtype).maxexp:
print("ExamplePlugin: the exponents of the exponential cause overflow. Increase the number of <numberCalculationPoints>!")
Xi[step+1] = np.sum(varCoeff*np.exp(varExponents))
Xi[step+1]+=Xi[step]
container.__dict__[container.outputVariable] = Xi
container.__dict__[container.monotonicVariableName] = monotonicVariable
###############################
#### RAVEN API methods END ####
###############################
| apache-2.0 | -1,037,211,517,340,434,800 | 49.462264 | 158 | 0.689849 | false | 4.120955 | false | false | false |
jtrain/django-cloud-media | cloud_media/forms.py | 1 | 7970 | """
A collection of forms for adding a new resource in the admin.
"""
from django import forms
from django.conf import settings
from django.contrib.admin.helpers import AdminForm
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from django.utils.importlib import import_module
from django.utils.encoding import force_unicode
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from cloud_media.models import Resource
from cloud_media.wizard import FormWizard
import cloud_media.settings as backup_settings
BACKENDS = getattr(
settings,
'CLOUD_MEDIA_HOSTING_BACKENDS',
backup_settings.CLOUD_MEDIA_HOSTING_BACKENDS)
HOSTING_PROVIDERS = getattr(
settings,
'CLOUD_MEDIA_HOSTING_PROVIDERS',
backup_settings.CLOUD_MEDIA_HOSTING_PROVIDERS)
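# Rough sketch of the shapes these settings are assumed to take (the backend
# paths and labels below are illustrative, not shipped defaults): BACKENDS maps
# a resource type to a dotted backend path, while HOSTING_PROVIDERS is a
# standard Django choices sequence.
#
#   CLOUD_MEDIA_HOSTING_BACKENDS = {
#       'youtube': 'cloud_media.backends.youtube.YoutubeBackend',
#       'default': 'cloud_media.backends.youtube.YoutubeBackend',
#   }
#   CLOUD_MEDIA_HOSTING_PROVIDERS = (
#       ('youtube', 'Youtube'),
#   )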
#----------------------------------------------------------------------
# Mixins.
class AdminFormMixin(forms.Form):
"""
    Provides some admin-form-like features to ease the pain of having
    non-ModelAdmin forms in the admin.
Idea inspired by the formadmin project.
"""
fieldsets = ()
prepopulated_fields = {}
readonly_fields = None
model_admin = None
def adminform(self):
if not self.fieldsets:
self.fieldsets = [
(None,
{'fields':
self.fields.keys()})
]
adminform = AdminForm(self, self.fieldsets, self.prepopulated_fields,
self.readonly_fields, self.model_admin)
return adminform
#--------------------------------------------------------------------------
# wizard.
class RemoteMediaWizard(FormWizard):
"""
User fills in generic title + description on page 1.
Page 2 is dynamic. The form shown depends on the remote host chosen
for the file. It could be a BlipForm or a YoutubeForm etc..
"""
_mixins = (AdminFormMixin,)
@property
def mixins(self):
return self._mixins
@property
def __name__(self):
return self.__class__.__name__
def get_template(self, step):
return 'cloud_media/forms/wizard.html'
def done(self, request, form_list):
"""
The first form should specify the title, description and resource_type.
The final form should provide the resource_id.
"""
data = {}
resource_id = None
for form in form_list:
try:
resource_id = form.get_resource_id(request, self.backend)
except AttributeError:
pass
data.update(form.cleaned_data)
if not resource_id:
raise forms.ValidationError("Backend failed to provide resource id")
data['resource_id'] = resource_id
# remove data that is extra to that required by Resource model.
required_fields = set(f.name for f in Resource._meta.fields)
provided_fields = set(data)
data_to_remove = provided_fields - required_fields
map(data.pop, data_to_remove)
resource = Resource.objects.create(**data)
# redirect or remove popup window.
return self._model_admin.response_add(request, resource)
def process_step(self, request, form, step):
"""
        Dynamically set the final form_list depending on the selected resource_type.
"""
super(RemoteMediaWizard, self).process_step(request, form, step)
resource_type = form.cleaned_data.get('resource_type')
if not resource_type:
return
# user can override default backend form in settings.
try:
NextForm = settings.CLOUD_MEDIA_HOSTING_UPLOAD_FORM[resource_type]
except (AttributeError, KeyError):
# not overridden select form based on backend.
backendname = BACKENDS.get(resource_type, BACKENDS.get('default'))
self.backend = _load_backend(backendname)()
NextForm = self.backend.get_form()
self.form_list[1] = NextForm
def add_mixins(self, form, mixins):
"""
Add a new set of base classes to the form's class for dynamic
inheritance of Mixins.
"""
form.__class__.__bases__ = mixins
def render_template(self, request, form, previous_fields, step,
context=None):
"""
Renders the template for the given step, returning an HttpResponse
object.
Override this method if you want to add a custom context, return a
different MIME type, etc. If you only need to override the template
name, use get_template() instead.
The template will be rendered with the following context:
step_field -- The name of the hidden field containing the step.
step0 -- The current step (zero-based).
step -- The current step (one-based).
step_count -- The total number of steps.
form -- The Form instance for the current step (either empty
or with errors).
previous_fields -- A string representing every previous data field,
plus hashes for completed forms, all in the form of
hidden fields. Note that you'll need to run this
through the "safe" template filter, to prevent
auto-escaping, because it's raw HTML.
"""
context = context or {}
context.update(self.extra_context)
# allow dynamic mixins to be added to the form.
self.add_mixins(form, self.mixins)
return render_to_response(self.get_template(step), dict(context,
step_field=self.step_field_name,
step0=step,
step=step + 1,
step_count=self.num_steps(),
form=form,
is_popup='_popup' in request.REQUEST,
previous_fields=previous_fields
), context_instance=RequestContext(request))
def parse_params(self, request, admin=None, *args, **kwargs):
self._model_admin = admin
opts = admin.model._meta
self.extra_context.update({
'title': u'Add %s' % force_unicode(opts.verbose_name),
'current_app': admin.admin_site.name,
'has_change_permission': admin.has_change_permission(request),
'add': True,
'opts': opts,
'root_path': admin.admin_site.root_path,
'app_label': opts.app_label,
})
#--------------------------------------------------------------------------
# Forms.
class RemoteMediaBasicForm(forms.Form):
"""
A basic form to capture title, description and resource_type.
"""
title = forms.CharField(max_length=255)
description = forms.CharField(widget=forms.Textarea)
resource_type = forms.ChoiceField(
choices=HOSTING_PROVIDERS,
help_text=_("Where would you like to upload to?")
)
remote_media_wizard = RemoteMediaWizard([RemoteMediaBasicForm, 0])
#----------------------------------------------------------------------------
# Helpers.
_backends_cache = {}
def _load_backend(backend):
if not backend:
        raise ImproperlyConfigured(
            "No hosting backend configured: the resource type is missing from "
            "CLOUD_MEDIA_HOSTING_BACKENDS and there is no 'default' entry.")
if backend not in _backends_cache:
module_name, func_name = backend.rsplit('.', 1)
_backends_cache[backend] = getattr(import_module(module_name),
func_name)
return _backends_cache[backend]
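# Hypothetical usage sketch (the backend path is illustrative): _load_backend
# imports the module, resolves and caches the named attribute, and returns the
# class so callers can instantiate it, e.g.
#
#   backend_cls = _load_backend('cloud_media.backends.blip.BlipBackend')
#   backend = backend_cls()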
| bsd-3-clause | 5,641,450,394,097,979,000 | 33.353448 | 80 | 0.572146 | false | 4.62029 | false | false | false |
twitter/pants | src/python/pants/task/task.py | 1 | 28963 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
from abc import abstractmethod
from builtins import filter, map, object, set, str, zip
from contextlib import contextmanager
from hashlib import sha1
from itertools import repeat
from future.utils import PY3
from pants.base.exceptions import TaskError
from pants.base.worker_pool import Work
from pants.build_graph.target_filter_subsystem import TargetFilter
from pants.cache.artifact_cache import UnreadableArtifact, call_insert, call_use_cached_files
from pants.cache.cache_setup import CacheSetup
from pants.invalidation.build_invalidator import (BuildInvalidator, CacheKeyGenerator,
UncacheableCacheKeyGenerator)
from pants.invalidation.cache_manager import InvalidationCacheManager, InvalidationCheck
from pants.option.optionable import Optionable
from pants.option.options_fingerprinter import OptionsFingerprinter
from pants.option.scope import ScopeInfo
from pants.reporting.reporting_utils import items_to_report_element
from pants.source.source_root import SourceRootConfig
from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin
from pants.util.dirutil import safe_mkdir, safe_rm_oldest_items_in_dir
from pants.util.memo import memoized_method, memoized_property
from pants.util.meta import AbstractClass, classproperty
class TaskBase(SubsystemClientMixin, Optionable, AbstractClass):
"""Defines a lifecycle that prepares a task for execution and provides the base machinery
needed to execute it.
Provides the base lifecycle methods that allow a task to interact with the command line, other
tasks and the user. The lifecycle is linear and run via the following sequence:
1. register_options - declare options configurable via cmd-line flag or config file.
2. product_types - declare the product types your task is capable of producing.
3. alternate_target_roots - propose a different set of target roots to use than those specified
via the CLI for the active pants run.
4. prepare - request any products needed from other tasks.
5. __init__ - distill configuration into the information needed to execute.
Provides access to the current run context for scoping work.
Also provides the basic facilities for doing work efficiently including providing a work directory
for scratch space on disk, an invalidator for checking which targets need work done on, and an
artifact cache for re-using previously cached work.
#TODO(John Sirois): Lifecycle is currently split between TaskBase and Task and lifecycle
(interface) and helpers (utility) are currently conflated. Tease these apart and narrow the scope
of the helpers. Ideally console tasks don't inherit a workdir, invalidator or build cache for
example.
"""
options_scope_category = ScopeInfo.TASK
# We set this explicitly on the synthetic subclass, so that it shares a stable name with
# its superclass, which is not necessary for regular use, but can be convenient in tests.
_stable_name = None
@classmethod
def implementation_version(cls):
"""
:API: public
"""
return [('TaskBase', 2)]
@classmethod
@memoized_method
def implementation_version_str(cls):
return '.'.join(['_'.join(map(str, x)) for x in cls.implementation_version()])
@classmethod
@memoized_method
def implementation_version_slug(cls):
return sha1(cls.implementation_version_str().encode('utf-8')).hexdigest()[:12]
@classmethod
def stable_name(cls):
"""The stable name of this task type.
We synthesize subclasses of the task types at runtime, and these synthesized subclasses
may have random names (e.g., in tests), so this gives us a stable name to use across runs,
e.g., in artifact cache references.
"""
return cls._stable_name or cls._compute_stable_name()
@classmethod
def _compute_stable_name(cls):
return '{}_{}'.format(cls.__module__, cls.__name__).replace('.', '_')
@classmethod
def subsystem_dependencies(cls):
return (super(TaskBase, cls).subsystem_dependencies() +
(CacheSetup.scoped(cls), BuildInvalidator.Factory, SourceRootConfig) +
((TargetFilter.scoped(cls),) if cls.target_filtering_enabled else tuple()))
@classmethod
def product_types(cls):
"""The list of products this Task produces. Set the product type(s) for this
    task, i.e. the product type(s) this task creates, e.g. ['classes'].
By default, each task is considered as creating a unique product type(s).
Subclasses that create products, should override this to specify their unique product type(s).
:API: public
"""
return []
@classmethod
def supports_passthru_args(cls):
"""Subclasses may override to indicate that they can use passthru args.
:API: public
"""
return False
@classmethod
def _scoped_options(cls, options):
return options[cls.options_scope]
@classmethod
def get_alternate_target_roots(cls, options, address_mapper, build_graph):
# Subclasses should not generally need to override this method.
return cls.alternate_target_roots(cls._scoped_options(options), address_mapper, build_graph)
@classmethod
def alternate_target_roots(cls, options, address_mapper, build_graph):
"""Allows a Task to propose alternate target roots from those specified on the CLI.
At most 1 unique proposal is allowed amongst all tasks involved in the run. If more than 1
unique list of target roots is proposed an error is raised during task scheduling.
:API: public
:returns list: The new target roots to use or None to accept the CLI specified target roots.
"""
@classmethod
def invoke_prepare(cls, options, round_manager):
# Subclasses should not generally need to override this method.
return cls.prepare(cls._scoped_options(options), round_manager)
@classmethod
def prepare(cls, options, round_manager):
"""Prepares a task for execution.
Called before execution and prior to any tasks that may be (indirectly) depended upon.
Typically a task that requires products from other goals would register interest in those
products here and then retrieve the requested product mappings when executed.
:API: public
"""
def __init__(self, context, workdir):
"""Subclass __init__ methods, if defined, *must* follow this idiom:
class MyTask(Task):
def __init__(self, *args, **kwargs):
super(MyTask, self).__init__(*args, **kwargs)
...
This allows us to change Task.__init__()'s arguments without
changing every subclass. If the subclass does not need its own
initialization, this method can (and should) be omitted entirely.
:API: public
"""
super(TaskBase, self).__init__()
self.context = context
self._workdir = workdir
self._task_name = type(self).__name__
self._cache_key_errors = set()
self._cache_factory = CacheSetup.create_cache_factory_for_task(self)
self._force_invalidated = False
@memoized_property
def _build_invalidator(self):
return BuildInvalidator.Factory.create(build_task=self.fingerprint)
def get_options(self):
"""Returns the option values for this task's scope.
:API: public
"""
return self.context.options.for_scope(self.options_scope)
def get_passthru_args(self):
"""Returns the passthru args for this task, if it supports them.
:API: public
"""
if not self.supports_passthru_args():
raise TaskError('{0} Does not support passthru args.'.format(self.stable_name()))
else:
return self.context.options.passthru_args_for_scope(self.options_scope)
@property
def skip_execution(self):
"""Whether this task should be skipped.
Tasks can override to specify skipping behavior (e.g., based on an option).
:API: public
"""
return False
@property
def act_transitively(self):
"""Whether this task should act on the transitive closure of the target roots.
Tasks can override to specify transitivity behavior (e.g., based on an option).
Note that this property is consulted by get_targets(), but tasks that bypass that
method must make their own decision on whether to act transitively or not.
:API: public
"""
return True
@classproperty
def target_filtering_enabled(cls):
"""Whether this task should apply configured filters against targets.
Tasks can override to enable target filtering (e.g. based on tags) and must
access targets via get_targets()
:API: public
"""
return False
def get_targets(self, predicate=None):
"""Returns the candidate targets this task should act on.
This method is a convenience for processing optional transitivity. Tasks may bypass it
and make their own decisions on which targets to act on.
NOTE: This method was introduced in 2018, so at the time of writing few tasks consult it.
Instead, they query self.context.targets directly.
TODO: Fix up existing targets to consult this method, for uniformity.
Note that returned targets have not been checked for invalidation. The caller should do
so as needed, typically by calling self.invalidated().
:API: public
"""
initial_targets = (self.context.targets(predicate) if self.act_transitively
else list(filter(predicate, self.context.target_roots)))
if not self.target_filtering_enabled:
return initial_targets
else:
return self._filter_targets(initial_targets)
def _filter_targets(self, targets):
included_targets = TargetFilter.scoped_instance(self).apply(targets)
excluded_targets = set(targets).difference(included_targets)
if excluded_targets:
self.context.log.info("{} target(s) excluded".format(len(excluded_targets)))
for target in excluded_targets:
self.context.log.debug("{} excluded".format(target.address.spec))
return included_targets
@memoized_property
def workdir(self):
"""A scratch-space for this task that will be deleted by `clean-all`.
It's guaranteed that no other task has been given this workdir path to use and that the workdir
exists.
:API: public
"""
safe_mkdir(self._workdir)
return self._workdir
@memoized_property
def versioned_workdir(self):
"""The Task.workdir suffixed with a fingerprint of the Task implementation version.
When choosing whether to store values directly in `self.workdir` or below it in
the directory returned by this property, you should generally prefer this value.
:API: public
"""
versioned_workdir = os.path.join(self.workdir, self.implementation_version_slug())
safe_mkdir(versioned_workdir)
return versioned_workdir
def _options_fingerprint(self, scope):
options_hasher = sha1()
options_hasher.update(scope.encode('utf-8'))
options_fp = OptionsFingerprinter.combined_options_fingerprint_for_scope(
scope,
self.context.options,
build_graph=self.context.build_graph,
include_passthru=self.supports_passthru_args(),
)
options_hasher.update(options_fp.encode('utf-8'))
return options_hasher.hexdigest() if PY3 else options_hasher.hexdigest().decode('utf-8')
@memoized_property
def fingerprint(self):
"""Returns a fingerprint for the identity of the task.
A task fingerprint is composed of the options the task is currently running under.
Useful for invalidating unchanging targets being executed beneath changing task
options that affect outputted artifacts.
A task's fingerprint is only valid after the task has been fully initialized.
"""
hasher = sha1()
hasher.update(self.stable_name().encode('utf-8'))
hasher.update(self._options_fingerprint(self.options_scope).encode('utf-8'))
hasher.update(self.implementation_version_str().encode('utf-8'))
for dep in self.subsystem_closure_iter():
hasher.update(self._options_fingerprint(dep.options_scope).encode('utf-8'))
return hasher.hexdigest() if PY3 else hasher.hexdigest().decode('utf-8')
def artifact_cache_reads_enabled(self):
return self._cache_factory.read_cache_available()
def artifact_cache_writes_enabled(self):
return self._cache_factory.write_cache_available()
def invalidate(self):
"""Invalidates all targets for this task."""
self._build_invalidator.force_invalidate_all()
@property
def create_target_dirs(self):
"""Whether to create a results_dir per VersionedTarget in the workdir of the Task.
This defaults to the value of `self.cache_target_dirs` (as caching them requires
creating them), but may be overridden independently to create the dirs without caching
them.
:API: public
"""
return self.cache_target_dirs
@property
def cache_target_dirs(self):
"""Whether to cache files in VersionedTarget's results_dir after exiting an invalidated block.
Subclasses may override this method to return True if they wish to use this style
of "automated" caching, where each VersionedTarget is given an associated results directory,
which will automatically be uploaded to the cache. Tasks should place the output files
for each VersionedTarget in said results directory. It is highly suggested to follow this
schema for caching, rather than manually making updates to the artifact cache.
:API: public
"""
return False
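# Editor's note: illustrative sketch, not part of the original file. A subclass opting into the
# "automated" caching style described above only needs to flip this property and then write its
# per-target outputs into each VersionedTarget's results_dir:
#   @property
#   def cache_target_dirs(self):
#     return True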
@property
def incremental(self):
"""Whether this Task implements incremental building of individual targets.
Incremental tasks with `cache_target_dirs` set will have the results_dir of the previous build
for a target cloned into the results_dir for the current build (where possible). This
copy-on-write behaviour allows for immutability of the results_dir once a target has been
marked valid.
:API: public
"""
return False
@property
def cache_incremental(self):
"""For incremental tasks, indicates whether the results of incremental builds should be cached.
Deterministic per-target incremental compilation is a relatively difficult thing to implement,
so this property provides an escape hatch to avoid caching things in that riskier case.
:API: public
"""
return False
@contextmanager
def invalidated(self,
targets,
invalidate_dependents=False,
silent=False,
fingerprint_strategy=None,
topological_order=False):
"""Checks targets for invalidation, first checking the artifact cache.
Subclasses call this to figure out what to work on.
:API: public
:param targets: The targets to check for changes.
:param invalidate_dependents: If True then any targets depending on changed targets are
invalidated.
:param silent: If true, suppress logging information about target invalidation.
:param fingerprint_strategy: A FingerprintStrategy instance, which can do per task,
finer grained fingerprinting of a given Target.
:param topological_order: Whether to invalidate in dependency order.
If no exceptions are thrown by work in the block, the build cache is updated for the targets.
Note: the artifact cache is not updated. That must be done manually.
:returns: Yields an InvalidationCheck object reflecting the targets.
:rtype: InvalidationCheck
"""
invalidation_check = self._do_invalidation_check(fingerprint_strategy,
invalidate_dependents,
targets,
topological_order)
self._maybe_create_results_dirs(invalidation_check.all_vts)
if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
with self.context.new_workunit('cache'):
cached_vts, uncached_vts, uncached_causes = \
self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
if cached_vts:
cached_targets = [vt.target for vt in cached_vts]
self.context.run_tracker.artifact_cache_stats.add_hits(self._task_name, cached_targets)
if not silent:
self._report_targets('Using cached artifacts for ', cached_targets, '.')
if uncached_vts:
uncached_targets = [vt.target for vt in uncached_vts]
self.context.run_tracker.artifact_cache_stats.add_misses(self._task_name,
uncached_targets,
uncached_causes)
if not silent:
self._report_targets('No cached artifacts for ', uncached_targets, '.')
# Now that we've checked the cache, re-partition whatever is still invalid.
invalidation_check = InvalidationCheck(invalidation_check.all_vts, uncached_vts)
if not silent:
targets = []
for vt in invalidation_check.invalid_vts:
targets.extend(vt.targets)
if len(targets):
target_address_references = [t.address.reference() for t in targets]
msg_elements = [
'Invalidated ',
items_to_report_element(target_address_references, 'target'),
'.',
]
self.context.log.info(*msg_elements)
self._update_invalidation_report(invalidation_check, 'pre-check')
# Cache has been checked to create the full list of invalid VTs.
# Only copy previous_results for this subset of VTs.
if self.incremental:
for vts in invalidation_check.invalid_vts:
vts.copy_previous_results()
# This may seem odd: why would we need to invalidate a VersionedTargetSet that is already
# invalid? But the name force_invalidate() is slightly misleading in this context - what it
# actually does is delete the key file created at the end of the last successful task run.
# This is necessary to avoid the following scenario:
#
# 1) In state A: Task succeeds and writes some output. Key is recorded by the invalidator.
# 2) In state B: Task fails, but writes some output. Key is not recorded.
# 3) After reverting back to state A: The current key is the same as the one recorded at the
# end of step 1), so it looks like no work needs to be done, but actually the task
# must re-run, to overwrite the output written in step 2.
#
# Deleting the file ensures that if a task fails, there is no key for which we might think
# we're in a valid state.
for vts in invalidation_check.invalid_vts:
vts.force_invalidate()
# Yield the result, and then mark the targets as up to date.
yield invalidation_check
self._update_invalidation_report(invalidation_check, 'post-check')
for vt in invalidation_check.invalid_vts:
vt.update()
# Background work to clean up previous builds.
if self.context.options.for_global_scope().workdir_max_build_entries is not None:
self._launch_background_workdir_cleanup(invalidation_check.all_vts)
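# Editor's note: illustrative sketch, not part of the original file, showing the typical shape of
# a caller of invalidated() as described in its docstring. compile_target is an assumed helper;
# with cache_target_dirs enabled, outputs written into each vt.results_dir are uploaded to the
# artifact cache automatically.
#   with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
#     for vt in invalidation_check.invalid_vts:
#       compile_target(vt.target, output_dir=vt.results_dir)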
def _update_invalidation_report(self, invalidation_check, phase):
invalidation_report = self.context.invalidation_report
if invalidation_report:
for vts in invalidation_check.all_vts:
invalidation_report.add_vts(self._task_name, vts.targets, vts.cache_key, vts.valid,
phase=phase)
def _do_invalidation_check(self,
fingerprint_strategy,
invalidate_dependents,
targets,
topological_order):
if self._cache_factory.ignore:
cache_key_generator = UncacheableCacheKeyGenerator()
else:
cache_key_generator = CacheKeyGenerator(
self.context.options.for_global_scope().cache_key_gen_version,
self.fingerprint)
cache_manager = InvalidationCacheManager(self.workdir,
cache_key_generator,
self._build_invalidator,
invalidate_dependents,
fingerprint_strategy=fingerprint_strategy,
invalidation_report=self.context.invalidation_report,
task_name=self._task_name,
task_version_slug=self.implementation_version_slug(),
artifact_write_callback=self.maybe_write_artifact)
# If this Task's execution has been forced, invalidate all our target fingerprints.
if self._cache_factory.ignore and not self._force_invalidated:
self.invalidate()
self._force_invalidated = True
return cache_manager.check(targets, topological_order=topological_order)
def maybe_write_artifact(self, vt):
if self._should_cache_target_dir(vt):
self.update_artifact_cache([(vt, [vt.current_results_dir])])
def _launch_background_workdir_cleanup(self, vts):
workdir_build_cleanup_job = Work(self._cleanup_workdir_stale_builds,
[(vts,)],
'workdir_build_cleanup')
self.context.submit_background_work_chain([workdir_build_cleanup_job])
def _cleanup_workdir_stale_builds(self, vts):
# workdir_max_build_entries is guaranteed to be non-None before this method is invoked.
workdir_max_build_entries = self.context.options.for_global_scope().workdir_max_build_entries
max_entries_per_target = max(2, workdir_max_build_entries)
for vt in vts:
live_dirs = list(vt.live_dirs())
if not live_dirs:
continue
root_dir = os.path.dirname(vt.results_dir)
safe_rm_oldest_items_in_dir(root_dir, max_entries_per_target, excludes=live_dirs)
def _should_cache_target_dir(self, vt):
"""Return true if the given vt should be written to a cache (if configured)."""
return (
self.cache_target_dirs and
vt.cacheable and
(not vt.is_incremental or self.cache_incremental) and
self.artifact_cache_writes_enabled()
)
def _maybe_create_results_dirs(self, vts):
"""If `cache_target_dirs`, create results_dirs for the given versioned targets."""
if self.create_target_dirs:
for vt in vts:
vt.create_results_dir()
def check_artifact_cache_for(self, invalidation_check):
"""Decides which VTS to check the artifact cache for.
By default we check for each invalid target. Can be overridden, e.g., to
instead check only for a single artifact for the entire target set.
"""
return invalidation_check.invalid_vts
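# Editor's note: illustrative sketch, not part of the original file, of the override mentioned in
# the docstring above — checking the cache once for a single artifact covering the entire target
# set. VersionedTargetSet.from_versioned_targets is assumed to be available from the invalidation
# machinery, as in upstream Pants.
#   def check_artifact_cache_for(self, invalidation_check):
#     return [VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)]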
def check_artifact_cache(self, vts):
"""Checks the artifact cache for the specified list of VersionedTargetSets.
Returns a tuple (cached, uncached, uncached_causes) of VersionedTargets that were
satisfied/unsatisfied from the cache. Uncached VTS are also attached with their
causes for the miss: `False` indicates a legit miss while `UnreadableArtifact`
is due to either local or remote cache failures.
"""
return self.do_check_artifact_cache(vts)
def do_check_artifact_cache(self, vts, post_process_cached_vts=None):
"""Checks the artifact cache for the specified list of VersionedTargetSets.
Returns a tuple (cached, uncached, uncached_causes) of VersionedTargets that were
satisfied/unsatisfied from the cache.
"""
if not vts:
return [], [], []
read_cache = self._cache_factory.get_read_cache()
items = [(read_cache, vt.cache_key, vt.current_results_dir if self.cache_target_dirs else None)
for vt in vts]
res = self.context.subproc_map(call_use_cached_files, items)
cached_vts = []
uncached_vts = []
uncached_causes = []
# Note that while the input vts may represent multiple targets (for tasks that override
# check_artifact_cache_for), the ones we return must represent single targets.
# Once flattened, cached/uncached vts are in separate lists. Each uncached vts is paired
# with the reason it was missed, for stat reporting purposes.
for vt, was_in_cache in zip(vts, res):
if was_in_cache:
cached_vts.extend(vt.versioned_targets)
else:
uncached_vts.extend(vt.versioned_targets)
uncached_causes.extend(repeat(was_in_cache, len(vt.versioned_targets)))
if isinstance(was_in_cache, UnreadableArtifact):
self._cache_key_errors.update(was_in_cache.key)
if post_process_cached_vts:
post_process_cached_vts(cached_vts)
for vt in cached_vts:
vt.update()
return cached_vts, uncached_vts, uncached_causes
def update_artifact_cache(self, vts_artifactfiles_pairs):
"""Write to the artifact cache, if we're configured to.
vts_artifactfiles_pairs - a list of pairs (vts, artifactfiles) where
- vts is a single VersionedTargetSet.
- artifactfiles is a list of absolute paths to artifacts for the VersionedTargetSet.
"""
update_artifact_cache_work = self._get_update_artifact_cache_work(vts_artifactfiles_pairs)
if update_artifact_cache_work:
self.context.submit_background_work_chain([update_artifact_cache_work],
parent_workunit_name='cache')
def _get_update_artifact_cache_work(self, vts_artifactfiles_pairs):
"""Create a Work instance to update an artifact cache, if we're configured to.
vts_artifactfiles_pairs - a list of pairs (vts, artifactfiles) where
- vts is a single VersionedTargetSet.
- artifactfiles is a list of paths to artifacts for the VersionedTargetSet.
"""
cache = self._cache_factory.get_write_cache()
if cache:
if len(vts_artifactfiles_pairs) == 0:
return None
# Do some reporting.
targets = set()
for vts, _ in vts_artifactfiles_pairs:
targets.update(vts.targets)
self._report_targets(
'Caching artifacts for ',
list(targets),
'.',
logger=self.context.log.debug,
)
always_overwrite = self._cache_factory.overwrite()
# Cache the artifacts.
args_tuples = []
for vts, artifactfiles in vts_artifactfiles_pairs:
overwrite = always_overwrite or vts.cache_key in self._cache_key_errors
args_tuples.append((cache, vts.cache_key, artifactfiles, overwrite))
return Work(lambda x: self.context.subproc_map(call_insert, x), [(args_tuples,)], 'insert')
else:
return None
def _report_targets(self, prefix, targets, suffix, logger=None):
target_address_references = [t.address.reference() for t in targets]
msg_elements = [
prefix,
items_to_report_element(target_address_references, 'target'),
suffix,
]
logger = logger or self.context.log.info
logger(*msg_elements)
def require_single_root_target(self):
"""If a single target was specified on the cmd line, returns that target.
Otherwise throws TaskError.
:API: public
"""
target_roots = self.context.target_roots
if len(target_roots) == 0:
raise TaskError('No target specified.')
elif len(target_roots) > 1:
raise TaskError('Multiple targets specified: {}'
.format(', '.join([repr(t) for t in target_roots])))
return target_roots[0]
def determine_target_roots(self, goal_name):
"""Helper for tasks that scan for default target roots.
:param string goal_name: The goal name to use for any warning emissions.
"""
if not self.context.target_roots:
print('WARNING: No targets were matched in goal `{}`.'.format(goal_name), file=sys.stderr)
# For the v2 path, e.g. `./pants list` is a functional no-op. This matches the v2 mode behavior
# of e.g. `./pants --changed-parent=HEAD list` (w/ no changes) returning an empty result.
return self.context.target_roots
class Task(TaskBase):
"""An executable task.
Tasks form the atoms of work done by pants and, when executed, generally produce artifacts as a
side effect, whether these be files on disk (for example, compilation outputs) or characters
output to the terminal (for example, dependency graph metadata).
:API: public
"""
def __init__(self, context, workdir):
"""
Add pass-thru Task Constructor for public API visibility.
:API: public
"""
super(Task, self).__init__(context, workdir)
@abstractmethod
def execute(self):
"""Executes this task.
:API: public
"""
class QuietTaskMixin(object):
"""A mixin to signal that pants shouldn't print verbose progress information for this task."""
pass
| apache-2.0 | 8,560,133,421,249,656,000 | 38.621067 | 100 | 0.686497 | false | 4.163744 | false | false | false |
coderbone/SickRage-alt | setup.py | 1 | 2381 | # -*- coding: utf-8 -*
"""
Use setuptools to install SickChill
"""
import os
from setuptools import find_packages, setup
from requirements.sort import file_to_dict
try:
from babel.messages import frontend as babel
except ImportError:
babel = None
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
with open(os.path.join(ROOT, 'readme.md'), 'r') as r:
long_description = r.read()
def get_requirements(rel_file_path):
file_path = os.path.join(ROOT, rel_file_path)
data = file_to_dict(file_path)
if data is False:
print('get_requirements failed')
return []
return [pkg['install'] for pkg in data
if pkg['active'] and pkg['install']]
requirements = get_requirements('requirements/requirements.txt')
commands = {}
if babel:
commands.update({
'compile_catalog': babel.compile_catalog,
'extract_messages': babel.extract_messages,
'init_catalog': babel.init_catalog,
'update_catalog': babel.update_catalog
})
setup(
name="sickchill",
version="0.0.1",
description="Automatic Video Library Manager for TV Shows",
long_description=long_description,
url='https://sickchill.github.io',
download_url='https://github.com/SickChill/SickChill.git',
author='miigotu',
author_email='[email protected]',
license='GPLv2',
packages=find_packages(),
# install_requires=requirements, # Commented-out for now
install_requires=[
'pytz',
'requests',
'mako',
'configobj'
],
test_suite="tests",
tests_require=[
'coveralls',
'nose',
'rednose',
'mock',
'vcrpy-unittest',
'babel',
'flake8-coding',
'isort'
],
python_requires='>=2.7, <3',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Multimedia :: Video',
],
cmdclass=commands,
message_extractors={
'gui': [
('**/views/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('**/js/*.min.js', 'ignore', None),
('**/js/*.js', 'javascript', {'input_encoding': 'utf-8'})
],
'sickchill': [('**.py', 'python', None)],
'sickbeard': [('**.py', 'python', None)],
},
)
| gpl-3.0 | 890,928,035,765,774,600 | 24.329787 | 70 | 0.582948 | false | 3.580451 | false | false | false |
alphagov/notifications-api | tests/app/v2/notifications/test_post_notifications.py | 1 | 48523 | import uuid
from unittest import mock
from unittest.mock import call
import pytest
from boto.exception import SQSError
from flask import current_app, json
from app.dao import templates_dao
from app.dao.service_sms_sender_dao import dao_update_service_sms_sender
from app.models import (
EMAIL_TYPE,
INTERNATIONAL_SMS_TYPE,
NOTIFICATION_CREATED,
SMS_TYPE,
Notification,
)
from app.schema_validation import validate
from app.v2.errors import RateLimitError
from app.v2.notifications.notification_schemas import (
post_email_response,
post_sms_response,
)
from tests import create_authorization_header
from tests.app.db import (
create_api_key,
create_reply_to_email,
create_service,
create_service_sms_sender,
create_service_with_inbound_number,
create_template,
)
from tests.conftest import set_config_values
@pytest.mark.parametrize("reference", [None, "reference_from_client"])
def test_post_sms_notification_returns_201(client, sample_template_with_placeholders, mocker, reference):
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template_with_placeholders.id),
'personalisation': {' Name': 'Jo'}
}
if reference:
data.update({"reference": reference})
auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_sms_response) == resp_json
notifications = Notification.query.all()
assert len(notifications) == 1
assert notifications[0].status == NOTIFICATION_CREATED
notification_id = notifications[0].id
assert notifications[0].postage is None
assert notifications[0].document_download_count is None
assert resp_json['id'] == str(notification_id)
assert resp_json['reference'] == reference
assert resp_json['content']['body'] == sample_template_with_placeholders.content.replace("(( Name))", "Jo")
assert resp_json['content']['from_number'] == current_app.config['FROM_NUMBER']
assert 'v2/notifications/{}'.format(notification_id) in resp_json['uri']
assert resp_json['template']['id'] == str(sample_template_with_placeholders.id)
assert resp_json['template']['version'] == sample_template_with_placeholders.version
assert 'services/{}/templates/{}'.format(sample_template_with_placeholders.service_id,
sample_template_with_placeholders.id) \
in resp_json['template']['uri']
assert not resp_json["scheduled_for"]
assert mocked.called
def test_post_sms_notification_uses_inbound_number_as_sender(client, notify_db_session, mocker):
service = create_service_with_inbound_number(inbound_number='1')
template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon")
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(template.id),
'personalisation': {' Name': 'Jo'}
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_sms_response) == resp_json
notifications = Notification.query.all()
assert len(notifications) == 1
notification_id = notifications[0].id
assert resp_json['id'] == str(notification_id)
assert resp_json['content']['from_number'] == '1'
assert notifications[0].reply_to_text == '1'
mocked.assert_called_once_with([str(notification_id)], queue='send-sms-tasks')
def test_post_sms_notification_uses_inbound_number_reply_to_as_sender(client, notify_db_session, mocker):
service = create_service_with_inbound_number(inbound_number='07123123123')
template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon")
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(template.id),
'personalisation': {' Name': 'Jo'}
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_sms_response) == resp_json
notifications = Notification.query.all()
assert len(notifications) == 1
notification_id = notifications[0].id
assert resp_json['id'] == str(notification_id)
assert resp_json['content']['from_number'] == '447123123123'
assert notifications[0].reply_to_text == '447123123123'
mocked.assert_called_once_with([str(notification_id)], queue='send-sms-tasks')
def test_post_sms_notification_returns_201_with_sms_sender_id(
client, sample_template_with_placeholders, mocker
):
sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender='123456')
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template_with_placeholders.id),
'personalisation': {' Name': 'Jo'},
'sms_sender_id': str(sms_sender.id)
}
auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_sms_response) == resp_json
assert resp_json['content']['from_number'] == sms_sender.sms_sender
notifications = Notification.query.all()
assert len(notifications) == 1
assert notifications[0].reply_to_text == sms_sender.sms_sender
mocked.assert_called_once_with([resp_json['id']], queue='send-sms-tasks')
def test_post_sms_notification_uses_sms_sender_id_reply_to(
client, sample_template_with_placeholders, mocker
):
sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender='07123123123')
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template_with_placeholders.id),
'personalisation': {' Name': 'Jo'},
'sms_sender_id': str(sms_sender.id)
}
auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_sms_response) == resp_json
assert resp_json['content']['from_number'] == '447123123123'
notifications = Notification.query.all()
assert len(notifications) == 1
assert notifications[0].reply_to_text == '447123123123'
mocked.assert_called_once_with([resp_json['id']], queue='send-sms-tasks')
def test_notification_reply_to_text_is_original_value_if_sender_is_changed_after_post_notification(
client, sample_template, mocker
):
sms_sender = create_service_sms_sender(service=sample_template.service, sms_sender='123456', is_default=False)
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template.id),
'sms_sender_id': str(sms_sender.id)
}
auth_header = create_authorization_header(service_id=sample_template.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
dao_update_service_sms_sender(service_id=sample_template.service_id,
service_sms_sender_id=sms_sender.id,
is_default=sms_sender.is_default,
sms_sender='updated')
assert response.status_code == 201
notifications = Notification.query.all()
assert len(notifications) == 1
assert notifications[0].reply_to_text == '123456'
def test_should_cache_template_lookups_in_memory(mocker, client, sample_template):
mock_get_template = mocker.patch(
'app.dao.templates_dao.dao_get_template_by_id_and_service_id',
wraps=templates_dao.dao_get_template_by_id_and_service_id,
)
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template.id),
}
for _ in range(5):
auth_header = create_authorization_header(service_id=sample_template.service_id)
client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
assert mock_get_template.call_count == 1
assert mock_get_template.call_args_list == [
call(service_id=str(sample_template.service_id), template_id=str(sample_template.id), version=None)
]
assert Notification.query.count() == 5
def test_should_cache_template_and_service_in_redis(mocker, client, sample_template):
from app.schemas import service_schema, template_schema
mock_redis_get = mocker.patch(
'app.redis_store.get',
return_value=None,
)
mock_redis_set = mocker.patch(
'app.redis_store.set',
)
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template.id),
}
auth_header = create_authorization_header(service_id=sample_template.service_id)
client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
expected_service_key = f'service-{sample_template.service_id}'
expected_templates_key = f'service-{sample_template.service_id}-template-{sample_template.id}-version-None'
assert mock_redis_get.call_args_list == [
call(expected_service_key),
call(expected_templates_key),
]
service_dict = service_schema.dump(sample_template.service).data
template_dict = template_schema.dump(sample_template).data
assert len(mock_redis_set.call_args_list) == 2
service_call, templates_call = mock_redis_set.call_args_list
assert service_call[0][0] == expected_service_key
assert json.loads(service_call[0][1]) == {'data': service_dict}
assert service_call[1]['ex'] == 604_800
assert templates_call[0][0] == expected_templates_key
assert json.loads(templates_call[0][1]) == {'data': template_dict}
assert templates_call[1]['ex'] == 604_800
def test_should_return_template_if_found_in_redis(mocker, client, sample_template):
from app.schemas import service_schema, template_schema
service_dict = service_schema.dump(sample_template.service).data
template_dict = template_schema.dump(sample_template).data
mocker.patch(
'app.redis_store.get',
side_effect=[
json.dumps({'data': service_dict}).encode('utf-8'),
json.dumps({'data': template_dict}).encode('utf-8'),
],
)
mock_get_template = mocker.patch(
'app.dao.templates_dao.dao_get_template_by_id_and_service_id'
)
mock_get_service = mocker.patch(
'app.dao.services_dao.dao_fetch_service_by_id'
)
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+447700900855',
'template_id': str(sample_template.id),
}
auth_header = create_authorization_header(service_id=sample_template.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
assert response.status_code == 201
assert mock_get_template.called is False
assert mock_get_service.called is False
@pytest.mark.parametrize("notification_type, key_send_to, send_to",
[("sms", "phone_number", "+447700900855"),
("email", "email_address", "[email protected]")])
def test_post_notification_returns_400_and_missing_template(client, sample_service,
notification_type, key_send_to, send_to):
data = {
key_send_to: send_to,
'template_id': str(uuid.uuid4())
}
auth_header = create_authorization_header(service_id=sample_service.id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
assert response.headers['Content-type'] == 'application/json'
error_json = json.loads(response.get_data(as_text=True))
assert error_json['status_code'] == 400
assert error_json['errors'] == [{"error": "BadRequestError",
"message": 'Template not found'}]
@pytest.mark.parametrize("notification_type, key_send_to, send_to", [
("sms", "phone_number", "+447700900855"),
("email", "email_address", "[email protected]"),
("letter", "personalisation", {"address_line_1": "The queen", "postcode": "SW1 1AA"})
])
def test_post_notification_returns_401_and_well_formed_auth_error(client, sample_template,
notification_type, key_send_to, send_to):
data = {
key_send_to: send_to,
'template_id': str(sample_template.id)
}
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json')])
assert response.status_code == 401
assert response.headers['Content-type'] == 'application/json'
error_resp = json.loads(response.get_data(as_text=True))
assert error_resp['status_code'] == 401
assert error_resp['errors'] == [{'error': "AuthError",
'message': 'Unauthorized: authentication token must be provided'}]
@pytest.mark.parametrize("notification_type, key_send_to, send_to",
[("sms", "phone_number", "+447700900855"),
("email", "email_address", "[email protected]")])
def test_notification_returns_400_and_for_schema_problems(client, sample_template, notification_type, key_send_to,
send_to):
data = {
key_send_to: send_to,
'template': str(sample_template.id)
}
auth_header = create_authorization_header(service_id=sample_template.service_id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
assert response.headers['Content-type'] == 'application/json'
error_resp = json.loads(response.get_data(as_text=True))
assert error_resp['status_code'] == 400
assert {'error': 'ValidationError',
'message': "template_id is a required property"
} in error_resp['errors']
assert {'error': 'ValidationError',
'message':
'Additional properties are not allowed (template was unexpected)'
} in error_resp['errors']
@pytest.mark.parametrize("reference", [None, "reference_from_client"])
def test_post_email_notification_returns_201(client, sample_email_template_with_placeholders, mocker, reference):
mocked = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
data = {
"email_address": sample_email_template_with_placeholders.service.users[0].email_address,
"template_id": sample_email_template_with_placeholders.id,
"personalisation": {"name": "Bob"}
}
if reference:
data.update({"reference": reference})
auth_header = create_authorization_header(service_id=sample_email_template_with_placeholders.service_id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_email_response) == resp_json
notification = Notification.query.one()
assert notification.status == NOTIFICATION_CREATED
assert notification.postage is None
assert resp_json['id'] == str(notification.id)
assert resp_json['reference'] == reference
assert notification.reference is None
assert notification.reply_to_text is None
assert notification.document_download_count is None
assert resp_json['content']['body'] == sample_email_template_with_placeholders.content \
.replace('((name))', 'Bob')
assert resp_json['content']['subject'] == sample_email_template_with_placeholders.subject \
.replace('((name))', 'Bob')
assert resp_json['content']['from_email'] == "{}@{}".format(
sample_email_template_with_placeholders.service.email_from, current_app.config['NOTIFY_EMAIL_DOMAIN'])
assert 'v2/notifications/{}'.format(notification.id) in resp_json['uri']
assert resp_json['template']['id'] == str(sample_email_template_with_placeholders.id)
assert resp_json['template']['version'] == sample_email_template_with_placeholders.version
assert 'services/{}/templates/{}'.format(str(sample_email_template_with_placeholders.service_id),
str(sample_email_template_with_placeholders.id)) \
in resp_json['template']['uri']
assert not resp_json["scheduled_for"]
assert mocked.called
@pytest.mark.parametrize('recipient, notification_type', [
('[email protected]', EMAIL_TYPE),
('[email protected]', EMAIL_TYPE),
('[email protected]', EMAIL_TYPE),
('07700 900000', 'sms'),
('07700 900111', 'sms'),
('07700 900222', 'sms')
])
def test_should_not_persist_or_send_notification_if_simulated_recipient(
client,
recipient,
notification_type,
sample_email_template,
sample_template,
mocker):
apply_async = mocker.patch('app.celery.provider_tasks.deliver_{}.apply_async'.format(notification_type))
if notification_type == 'sms':
data = {
'phone_number': recipient,
'template_id': str(sample_template.id)
}
else:
data = {
'email_address': recipient,
'template_id': str(sample_email_template.id)
}
auth_header = create_authorization_header(service_id=sample_email_template.service_id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
apply_async.assert_not_called()
assert json.loads(response.get_data(as_text=True))["id"]
assert Notification.query.count() == 0
@pytest.mark.parametrize("notification_type, key_send_to, send_to",
[("sms", "phone_number", "07700 900 855"),
("email", "email_address", "[email protected]")])
def test_send_notification_uses_priority_queue_when_template_is_marked_as_priority(
client,
sample_service,
mocker,
notification_type,
key_send_to,
send_to
):
mocker.patch('app.celery.provider_tasks.deliver_{}.apply_async'.format(notification_type))
sample = create_template(
service=sample_service,
template_type=notification_type,
process_type='priority'
)
mocked = mocker.patch('app.celery.provider_tasks.deliver_{}.apply_async'.format(notification_type))
data = {
key_send_to: send_to,
'template_id': str(sample.id)
}
auth_header = create_authorization_header(service_id=sample.service_id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
notification_id = json.loads(response.data)['id']
assert response.status_code == 201
mocked.assert_called_once_with([notification_id], queue='priority-tasks')
@pytest.mark.parametrize(
"notification_type, key_send_to, send_to",
[("sms", "phone_number", "07700 900 855"), ("email", "email_address", "[email protected]")]
)
def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded(
client,
sample_service,
mocker,
notification_type,
key_send_to,
send_to
):
sample = create_template(service=sample_service, template_type=notification_type)
persist_mock = mocker.patch('app.v2.notifications.post_notifications.persist_notification')
deliver_mock = mocker.patch('app.v2.notifications.post_notifications.send_notification_to_queue_detached')
mocker.patch(
'app.v2.notifications.post_notifications.check_rate_limiting',
side_effect=RateLimitError("LIMIT", "INTERVAL", "TYPE"))
data = {
key_send_to: send_to,
'template_id': str(sample.id)
}
auth_header = create_authorization_header(service_id=sample.service_id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
error = json.loads(response.data)['errors'][0]['error']
message = json.loads(response.data)['errors'][0]['message']
status_code = json.loads(response.data)['status_code']
assert response.status_code == 429
assert error == 'RateLimitError'
assert message == 'Exceeded rate limit for key type TYPE of LIMIT requests per INTERVAL seconds'
assert status_code == 429
assert not persist_mock.called
assert not deliver_mock.called
def test_post_sms_notification_returns_400_if_not_allowed_to_send_int_sms(
client,
notify_db_session,
):
service = create_service(service_permissions=[SMS_TYPE])
template = create_template(service=service)
data = {
'phone_number': '20-12-1234-1234',
'template_id': template.id
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
assert response.status_code == 400
assert response.headers['Content-type'] == 'application/json'
error_json = json.loads(response.get_data(as_text=True))
assert error_json['status_code'] == 400
assert error_json['errors'] == [
{"error": "BadRequestError", "message": 'Cannot send to international mobile numbers'}
]
def test_post_sms_notification_with_archived_reply_to_id_returns_400(client, sample_template, mocker):
archived_sender = create_service_sms_sender(
sample_template.service,
'12345',
is_default=False,
archived=True)
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
data = {
"phone_number": '+447700900855',
"template_id": sample_template.id,
'sms_sender_id': archived_sender.id
}
auth_header = create_authorization_header(service_id=sample_template.service_id)
response = client.post(
path="v2/notifications/sms",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
resp_json = json.loads(response.get_data(as_text=True))
assert 'sms_sender_id {} does not exist in database for service id {}'. \
format(archived_sender.id, sample_template.service_id) in resp_json['errors'][0]['message']
assert 'BadRequestError' in resp_json['errors'][0]['error']
@pytest.mark.parametrize('recipient,label,permission_type, notification_type,expected_error', [
('07700 900000', 'phone_number', 'email', 'sms', 'text messages'),
('[email protected]', 'email_address', 'sms', 'email', 'emails')])
def test_post_sms_notification_returns_400_if_not_allowed_to_send_notification(
notify_db_session, client, recipient, label, permission_type, notification_type, expected_error
):
service = create_service(service_permissions=[permission_type])
sample_template_without_permission = create_template(service=service, template_type=notification_type)
data = {
label: recipient,
'template_id': sample_template_without_permission.id
}
auth_header = create_authorization_header(service_id=sample_template_without_permission.service.id)
response = client.post(
path='/v2/notifications/{}'.format(sample_template_without_permission.template_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
assert response.headers['Content-type'] == 'application/json'
error_json = json.loads(response.get_data(as_text=True))
assert error_json['status_code'] == 400
assert error_json['errors'] == [
{"error": "BadRequestError", "message": "Service is not allowed to send {}".format(expected_error)}
]
@pytest.mark.parametrize('restricted', [True, False])
def test_post_sms_notification_returns_400_if_number_not_in_guest_list(
notify_db_session, client, restricted
):
service = create_service(restricted=restricted, service_permissions=[SMS_TYPE, INTERNATIONAL_SMS_TYPE])
template = create_template(service=service)
create_api_key(service=service, key_type='team')
data = {
"phone_number": '+327700900855',
"template_id": template.id,
}
auth_header = create_authorization_header(service_id=service.id, key_type='team')
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
error_json = json.loads(response.get_data(as_text=True))
assert error_json['status_code'] == 400
assert error_json['errors'] == [
{"error": "BadRequestError", "message": 'Can’t send to this recipient using a team-only API key'}
]
def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms(
sample_service,
sample_template,
client,
mocker,
):
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '20-12-1234-1234',
'template_id': sample_template.id
}
auth_header = create_authorization_header(service_id=sample_service.id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
assert response.headers['Content-type'] == 'application/json'
def test_post_sms_should_persist_supplied_sms_number(client, sample_template_with_placeholders, mocker):
mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
data = {
'phone_number': '+(44) 77009-00855',
'template_id': str(sample_template_with_placeholders.id),
'personalisation': {' Name': 'Jo'}
}
auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id)
response = client.post(
path='/v2/notifications/sms',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
notifications = Notification.query.all()
assert len(notifications) == 1
notification_id = notifications[0].id
assert '+(44) 77009-00855' == notifications[0].to
assert resp_json['id'] == str(notification_id)
assert mocked.called
def test_post_notification_raises_bad_request_if_not_valid_notification_type(client, sample_service):
auth_header = create_authorization_header(service_id=sample_service.id)
response = client.post(
'/v2/notifications/foo',
data='{}',
headers=[('Content-Type', 'application/json'), auth_header]
)
assert response.status_code == 404
error_json = json.loads(response.get_data(as_text=True))
assert 'The requested URL was not found on the server.' in error_json['message']
@pytest.mark.parametrize("notification_type",
['sms', 'email'])
def test_post_notification_with_wrong_type_of_sender(
client,
sample_template,
sample_email_template,
notification_type,
fake_uuid):
if notification_type == EMAIL_TYPE:
template = sample_email_template
form_label = 'sms_sender_id'
data = {
'email_address': '[email protected]',
'template_id': str(sample_email_template.id),
form_label: fake_uuid
}
elif notification_type == SMS_TYPE:
template = sample_template
form_label = 'email_reply_to_id'
data = {
'phone_number': '+447700900855',
'template_id': str(template.id),
form_label: fake_uuid
}
auth_header = create_authorization_header(service_id=template.service_id)
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
resp_json = json.loads(response.get_data(as_text=True))
assert 'Additional properties are not allowed ({} was unexpected)'.format(form_label) \
in resp_json['errors'][0]['message']
assert 'ValidationError' in resp_json['errors'][0]['error']
def test_post_email_notification_with_valid_reply_to_id_returns_201(client, sample_email_template, mocker):
reply_to_email = create_reply_to_email(sample_email_template.service, '[email protected]')
mocked = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
data = {
"email_address": sample_email_template.service.users[0].email_address,
"template_id": sample_email_template.id,
'email_reply_to_id': reply_to_email.id
}
auth_header = create_authorization_header(service_id=sample_email_template.service_id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_email_response) == resp_json
notification = Notification.query.first()
assert notification.reply_to_text == '[email protected]'
assert resp_json['id'] == str(notification.id)
assert mocked.called
assert notification.reply_to_text == reply_to_email.email_address
def test_post_email_notification_with_invalid_reply_to_id_returns_400(client, sample_email_template, mocker, fake_uuid):
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
data = {
"email_address": sample_email_template.service.users[0].email_address,
"template_id": sample_email_template.id,
'email_reply_to_id': fake_uuid
}
auth_header = create_authorization_header(service_id=sample_email_template.service_id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
resp_json = json.loads(response.get_data(as_text=True))
assert 'email_reply_to_id {} does not exist in database for service id {}'. \
format(fake_uuid, sample_email_template.service_id) in resp_json['errors'][0]['message']
assert 'BadRequestError' in resp_json['errors'][0]['error']
def test_post_email_notification_with_archived_reply_to_id_returns_400(client, sample_email_template, mocker):
archived_reply_to = create_reply_to_email(
sample_email_template.service,
'[email protected]',
is_default=False,
archived=True)
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
data = {
"email_address": '[email protected]',
"template_id": sample_email_template.id,
'email_reply_to_id': archived_reply_to.id
}
auth_header = create_authorization_header(service_id=sample_email_template.service_id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
resp_json = json.loads(response.get_data(as_text=True))
assert 'email_reply_to_id {} does not exist in database for service id {}'. \
format(archived_reply_to.id, sample_email_template.service_id) in resp_json['errors'][0]['message']
assert 'BadRequestError' in resp_json['errors'][0]['error']
@pytest.mark.parametrize(
'csv_param',
(
{'is_csv': None},
{'is_csv': False},
{'is_csv': True},
{},
)
)
def test_post_notification_with_document_upload(client, notify_db_session, mocker, csv_param):
service = create_service(service_permissions=[EMAIL_TYPE])
service.contact_link = '[email protected]'
template = create_template(
service=service,
template_type='email',
content="Document 1: ((first_link)). Document 2: ((second_link))"
)
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
document_download_mock = mocker.patch('app.v2.notifications.post_notifications.document_download_client')
document_download_mock.upload_document.side_effect = lambda service_id, content, is_csv: f'{content}-link'
data = {
"email_address": service.users[0].email_address,
"template_id": template.id,
"personalisation": {
"first_link": {"file": "abababab", **csv_param},
"second_link": {"file": "cdcdcdcd", **csv_param}
}
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201, response.get_data(as_text=True)
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_email_response) == resp_json
assert document_download_mock.upload_document.call_args_list == [
call(str(service.id), 'abababab', csv_param.get('is_csv')),
call(str(service.id), 'cdcdcdcd', csv_param.get('is_csv'))
]
notification = Notification.query.one()
assert notification.status == NOTIFICATION_CREATED
assert notification.personalisation == {
'first_link': 'abababab-link',
'second_link': 'cdcdcdcd-link'
}
assert notification.document_download_count == 2
assert resp_json['content']['body'] == 'Document 1: abababab-link. Document 2: cdcdcdcd-link'
def test_post_notification_with_document_upload_simulated(client, notify_db_session, mocker):
service = create_service(service_permissions=[EMAIL_TYPE])
service.contact_link = '[email protected]'
template = create_template(
service=service,
template_type='email',
content="Document: ((document))"
)
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
document_download_mock = mocker.patch('app.v2.notifications.post_notifications.document_download_client')
document_download_mock.get_upload_url.return_value = 'https://document-url'
data = {
"email_address": '[email protected]',
"template_id": template.id,
"personalisation": {"document": {"file": "abababab"}}
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 201, response.get_data(as_text=True)
resp_json = json.loads(response.get_data(as_text=True))
assert validate(resp_json, post_email_response) == resp_json
assert resp_json['content']['body'] == 'Document: https://document-url/test-document'
def test_post_notification_without_document_upload_permission(client, notify_db_session, mocker):
service = create_service(service_permissions=[EMAIL_TYPE])
template = create_template(
service=service,
template_type='email',
content="Document: ((document))"
)
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
document_download_mock = mocker.patch('app.v2.notifications.post_notifications.document_download_client')
document_download_mock.upload_document.return_value = 'https://document-url/'
data = {
"email_address": service.users[0].email_address,
"template_id": template.id,
"personalisation": {"document": {"file": "abababab"}}
}
auth_header = create_authorization_header(service_id=service.id)
response = client.post(
path="v2/notifications/email",
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400, response.get_data(as_text=True)
def test_post_notification_returns_400_when_get_json_throws_exception(client, sample_email_template):
auth_header = create_authorization_header(service_id=sample_email_template.service_id)
response = client.post(
path="v2/notifications/email",
data="[",
headers=[('Content-Type', 'application/json'), auth_header])
assert response.status_code == 400
@pytest.mark.parametrize('notification_type, content_type',
[('email', 'application/json'),
('email', 'application/text'),
('sms', 'application/json'),
('sms', 'application/text')]
)
def test_post_notification_when_payload_is_invalid_json_returns_400(
client, sample_service, notification_type, content_type):
auth_header = create_authorization_header(service_id=sample_service.id)
payload_not_json = {
"template_id": "dont-convert-to-json",
}
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=payload_not_json,
headers=[('Content-Type', content_type), auth_header],
)
assert response.status_code == 400
error_msg = json.loads(response.get_data(as_text=True))["errors"][0]["message"]
assert error_msg == 'Invalid JSON supplied in POST data'
@pytest.mark.parametrize('notification_type', ['email', 'sms'])
def test_post_notification_returns_201_when_content_type_is_missing_but_payload_is_valid_json(
client, sample_service, notification_type, mocker):
template = create_template(service=sample_service, template_type=notification_type)
mocker.patch('app.celery.provider_tasks.deliver_{}.apply_async'.format(notification_type))
auth_header = create_authorization_header(service_id=sample_service.id)
valid_json = {
"template_id": str(template.id),
}
if notification_type == 'email':
valid_json.update({"email_address": sample_service.users[0].email_address})
else:
valid_json.update({"phone_number": "+447700900855"})
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(valid_json),
headers=[auth_header],
)
assert response.status_code == 201
@pytest.mark.parametrize('notification_type', ['email', 'sms'])
def test_post_email_notification_when_data_is_empty_returns_400(client, sample_service, notification_type):
auth_header = create_authorization_header(service_id=sample_service.id)
data = None
response = client.post(
path='/v2/notifications/{}'.format(notification_type),
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header],
)
error_msg = json.loads(response.get_data(as_text=True))["errors"][0]["message"]
assert response.status_code == 400
if notification_type == 'sms':
assert error_msg == 'phone_number is a required property'
else:
assert error_msg == 'email_address is a required property'
@pytest.mark.parametrize("notification_type", ("email", "sms"))
def test_post_notifications_saves_email_or_sms_to_queue(client, notify_db_session, mocker, notification_type):
save_task = mocker.patch(f"app.celery.tasks.save_api_{notification_type}.apply_async")
mock_send_task = mocker.patch(f'app.celery.provider_tasks.deliver_{notification_type}.apply_async')
service = create_service(
service_name='high volume service',
)
with set_config_values(current_app, {
'HIGH_VOLUME_SERVICE': [str(service.id)],
}):
template = create_template(service=service, content='((message))', template_type=notification_type)
data = {
"template_id": template.id,
"personalisation": {"message": "Dear citizen, have a nice day"}
}
data.update({"email_address": "[email protected]"}) if notification_type == EMAIL_TYPE \
else data.update({"phone_number": "+447700900855"})
response = client.post(
path=f'/v2/notifications/{notification_type}',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), create_authorization_header(service_id=service.id)]
)
json_resp = response.get_json()
assert response.status_code == 201
assert json_resp['id']
assert json_resp['content']['body'] == "Dear citizen, have a nice day"
assert json_resp['template']['id'] == str(template.id)
save_task.assert_called_once_with([mock.ANY], queue=f'save-api-{notification_type}-tasks')
assert not mock_send_task.called
assert len(Notification.query.all()) == 0
@pytest.mark.parametrize("notification_type", ("email", "sms"))
def test_post_notifications_saves_email_or_sms_normally_if_saving_to_queue_fails(
client, notify_db_session, mocker, notification_type
):
save_task = mocker.patch(
f"app.celery.tasks.save_api_{notification_type}.apply_async",
side_effect=SQSError({'some': 'json'}, 'some opname')
)
mock_send_task = mocker.patch(f'app.celery.provider_tasks.deliver_{notification_type}.apply_async')
service = create_service(
service_name='high volume service',
)
with set_config_values(current_app, {
'HIGH_VOLUME_SERVICE': [str(service.id)],
}):
template = create_template(service=service, content='((message))', template_type=notification_type)
data = {
"template_id": template.id,
"personalisation": {"message": "Dear citizen, have a nice day"}
}
data.update({"email_address": "[email protected]"}) if notification_type == EMAIL_TYPE \
else data.update({"phone_number": "+447700900855"})
response = client.post(
path=f'/v2/notifications/{notification_type}',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), create_authorization_header(service_id=service.id)]
)
json_resp = response.get_json()
assert response.status_code == 201
assert json_resp['id']
assert json_resp['content']['body'] == "Dear citizen, have a nice day"
assert json_resp['template']['id'] == str(template.id)
save_task.assert_called_once_with([mock.ANY], queue=f'save-api-{notification_type}-tasks')
mock_send_task.assert_called_once_with([json_resp['id']], queue=f'send-{notification_type}-tasks')
assert Notification.query.count() == 1
@pytest.mark.parametrize("notification_type", ("email", "sms"))
def test_post_notifications_doesnt_use_save_queue_for_test_notifications(
client, notify_db_session, mocker, notification_type
):
save_task = mocker.patch(f"app.celery.tasks.save_api_{notification_type}.apply_async")
mock_send_task = mocker.patch(f'app.celery.provider_tasks.deliver_{notification_type}.apply_async')
service = create_service(
service_name='high volume service',
)
with set_config_values(current_app, {
'HIGH_VOLUME_SERVICE': [str(service.id)],
}):
template = create_template(service=service, content='((message))', template_type=notification_type)
data = {
"template_id": template.id,
"personalisation": {"message": "Dear citizen, have a nice day"}
}
data.update({"email_address": "[email protected]"}) if notification_type == EMAIL_TYPE \
else data.update({"phone_number": "+447700900855"})
response = client.post(
path=f'/v2/notifications/{notification_type}',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'),
create_authorization_header(service_id=service.id, key_type='test')]
)
json_resp = response.get_json()
assert response.status_code == 201
assert json_resp['id']
assert json_resp['content']['body'] == "Dear citizen, have a nice day"
assert json_resp['template']['id'] == str(template.id)
assert mock_send_task.called
assert not save_task.called
assert len(Notification.query.all()) == 1
def test_post_notification_does_not_use_save_queue_for_letters(client, sample_letter_template, mocker):
mock_save = mocker.patch("app.v2.notifications.post_notifications.save_email_or_sms_to_queue")
mock_create_pdf_task = mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async')
with set_config_values(current_app, {
'HIGH_VOLUME_SERVICE': [str(sample_letter_template.service_id)],
}):
data = {
'template_id': str(sample_letter_template.id),
'personalisation': {
'address_line_1': 'Her Royal Highness Queen Elizabeth II',
'address_line_2': 'Buckingham Palace',
'address_line_3': 'London',
'postcode': 'SW1 1AA',
}
}
response = client.post(
path='/v2/notifications/letter',
data=json.dumps(data),
headers=[('Content-Type', 'application/json'),
create_authorization_header(service_id=sample_letter_template.service_id)]
)
assert response.status_code == 201
json_resp = response.get_json()
assert not mock_save.called
mock_create_pdf_task.assert_called_once_with([str(json_resp['id'])], queue='create-letters-pdf-tasks')
| mit | 2,010,469,411,737,156,000 | 40.224299 | 120 | 0.655139 | false | 3.707006 | true | false | false |
CnPaMeng/WeiboMsgBackupGUI | sina/loginsinacom.py | 1 | 22729 | #!/usr/bin/python
#-*- encoding: utf-8 -*-
# This file is part of the Pameng,
# Pameng website: http://www.cnpameng.com/,
# Sina weibo: http://weibo.com/cnpameng.
# This file is part of WeiboMsgBackup.
# Copyright (C) 2013 Pameng.
# Pameng <[email protected]>, 2013.
# WeiboMsgBackup is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# WeiboMsgBackup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with WeiboMsgBackup; see the file COPYING3. If not see
# <http://www.gnu.org/licenses/>.
import urllib2
import cookielib
import time
import datetime
import json
import re
import random
import urllib
import base64
import StringIO
import gzip
from model.log4py import Log4py
import sys
from model import syscontext
import os
import wx
import rsa
from rsa import transform
logger = Log4py().getLogger("run")
class LoginSinaCom():
def __init__(self, **kwargs):
#INIT cookie load object
self.cj = cookielib.LWPCookieJar()
self.cookie_support = urllib2.HTTPCookieProcessor(self.cj)
self.opener = urllib2.build_opener(self.cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(self.opener)
self.soft_path = kwargs.get("soft_path", "")
self.cookiefile = os.path.join(self.soft_path, "cookie.dat")
self.proxyip = kwargs.get("proxyip", "")
self.pcid = ""
self.servertime = ""
self.nonce = ""
self.pubkey = ''
self.rsakv = ''
def __get_millitime(self):
""" get mill times """
pre = str(int(time.time()))
pos = str(datetime.datetime.now().microsecond)[:3]
p = pre + pos
return p
def get_servertime(self, login_un):
""" get sine server time """
url = 'http://login.sina.com.cn/sso/prelogin.php?entry=account&callback=sinaSSOController.preloginCallBack&su=&rsakt=mod&client=ssologin.js(v1.4.2)&_=%s' % self.__get_millitime()
result = {}
servertime = None
nonce = None
headers = self.__get_headers()
headers['Host'] = 'login.sina.com.cn'
headers['Accept'] = '*/*'
headers['Referer'] = 'http://weibo.com/'
del headers['Accept-encoding']
for i in range(3): #@UnusedVariable
req = self.pack_request(url, headers)
data = urllib2.urlopen(req).read()
p = re.compile('\((.*)\)')
try:
json_data = p.search(data).group(1)
data = json.loads(json_data)
servertime = str(data['servertime'])
nonce = data['nonce']
result["servertime"] = servertime
result["nonce"] = nonce
result["rsakv"] = str(data['rsakv'])
result["pubkey"] = str(data['pubkey'])
self.pcid = str(data['pcid'])
break
except:
                msg = u'Get servertime error!'
logger.error(msg)
continue
return result
def get_global_id(self):
""" get sina session id """
time = self.__get_millitime()
url = "http://beacon.sina.com.cn/a.gif"
headers = self.__get_headers()
headers['Host'] = 'beacon.sina.com.cn'
headers['Accept'] = 'image/png,image/*;q=0.8,*/*;q=0.5'
headers['Referer'] = 'http://weibo.com/'
req = self.pack_request(url, headers)
urllib2.urlopen(req)
def get_random_nonce(self, range_num=6):
""" get random nonce key """
nonce = ""
for i in range(range_num): #@UnusedVariable
nonce += random.choice('QWERTYUIOPASDFGHJKLZXCVBNM1234567890')
return nonce
def dec2hex(self, string_num):
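        """ convert a decimal number (or numeric string) to an uppercase hex string """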
base = [str(x) for x in range(10)] + [chr(x) for x in range(ord('A'), ord('A')+6)]
num = int(string_num)
mid = []
while True:
if num == 0: break
num, rem = divmod(num, 16)
mid.append(base[rem])
return ''.join([str(x) for x in mid[::-1]])
def get_pwd(self, pwd, servertime, nonce):
#pwd1 = hashlib.sha1(pwd).hexdigest()
#pwd2 = hashlib.sha1(pwd1).hexdigest()
#pwd3_ = pwd2 + servertime + nonce
#pwd3 = hashlib.sha1(pwd3_).hexdigest()
#return pwd3
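        # rsa2 scheme: RSA-encrypt "<servertime>\t<nonce>\n<password>" with Sina's public key, then hex-encode the result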
p = int(self.pubkey, 16)
pub_key = rsa.PublicKey(p, int('10001', 16))
pwd = '%s\t%s\n%s' % (servertime, nonce, pwd)
pwd = (self.dec2hex(transform.bytes2int(rsa.encrypt(pwd.encode('utf-8'), pub_key))))
return pwd
def get_user(self, username):
username_ = urllib.quote(username)
username = base64.encodestring(username_)[:-1]
return username
def save_verifycode(self, url):
try:
cookiestr = ""
for cookie in self.cj.as_lwp_str(True, True).split("\n"):
cookie = cookie.split(";")[0]
cookie = cookie.replace("\"", "").replace("Set-Cookie3: ", " ").strip() + ";"
cookiestr += cookie
headers = {'Host': 'login.sina.com.cn',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:13.0) Gecko/20100101 Firefox/13.0.1',
'Accept': 'image/png,image/*;q=0.8,*/*;q=0.5',
#'Accept-encoding': 'gzip, deflate',
'Accept-Language': 'zh-cn,zh;q=0.8,en-us;q=0.5,en;q=0.3',
'Connection': 'keep-alive',
'Referer' : 'http://weibo.com/',
'Cookie' : cookiestr,
}
req = self.pack_request(url, headers)
response = urllib2.urlopen(req, timeout=10)
content = response.read()
f = open(os.path.join(self.soft_path, "pin.png"), "wb")
f.write(content)
f.flush()
f.close()
except:
logger.error(u"save verify code error.")
def login(self, login_un, login_pw):
loginFalg = False
try:
try:
stObj = self.get_servertime(login_un)
self.servertime = stObj.get("servertime")
self.nonce = stObj.get("nonce")
self.pubkey = stObj.get("pubkey")
self.rsakv = stObj.get("rsakv")
except:
return False
            # get the session ID
self.get_global_id()
loginHtml = self.do_login(login_un, login_pw)
loginHtml = loginHtml.replace('"', "'")
#print loginHtml
#p = re.compile('location\.replace\(\'(.*?)\'\)')
try:
p = re.compile('location\.replace\(\'(.*?)\'\)')
login_url = p.search(loginHtml).group(1)
#print login_url
if "retcode=0" in loginHtml:
return self.redo_login(login_url)
                # whether manual verify code input is allowed
if syscontext.VERIFY_INPUT_FLAG:
logger.info(u"Allow user type verify code.")
pass
else:
logger.error(u"Enable input verify code,return failure.")
return False
                # a verify code may be required
if "retcode=5" in loginHtml:
logger.error(u"password or account error.")
return False
if "retcode=4040" in loginHtml:
logger.error(u"do login too much times.")
return False
                # this time a verify code is really required: retcode 4049
if "retcode=4049" in login_url:
for i in range(3):
logger.info(u"need verify code.")
verifycode_url = 'http://login.sina.com.cn/cgi/pin.php?r=%s&s=0&p=%s' % (random.randint(20000000,99999999), self.pcid)
self.save_verifycode(verifycode_url)
syscontext.VERIFY_CODE = ""
codeimg = os.path.join(os.path.join(syscontext.userentity.get("path", ""), syscontext.FILE_PATH_DEFAULT), "pin.png")
logger.info(u"verify code img path:%s." % codeimg)
try:
window = syscontext.MAIN_WINDOW
genthread = syscontext.MAIN_GENTHREAD
wx.CallAfter(window.EnableMainWin, False)
wx.CallAfter(window.ShowVerifyCode, codeimg)
#print "before self.acquire"
genthread.lock.acquire()
genthread.lockcondition.wait()
genthread.lock.release()
#print "after self.release"
#veroifyFrame = VerifyCodeFrame(window, filename=codeimg)
#veroifyFrame.Center()
#veroifyFrame.Show(True)
#app.MainLoop()
except:
s = sys.exc_info()
msg = (u"app error %s happened on line %d" % (s[1], s[2].tb_lineno))
logger.error(msg)
door = syscontext.VERIFY_CODE
logger.error(u"get input verify code:%s" % door)
                        # retry login with the verify code attached
self.nonce = self.get_random_nonce()
loginHtml = self.do_login(login_un, login_pw, door=door)
loginHtml = loginHtml.replace('"', "'")
p = re.compile('location\.replace\(\'(.*?)\'\)')
if p.search(loginHtml):
login_url = p.search(loginHtml).group(1)
return self.redo_login(login_url)
else:
if "retcode=2070" in loginHtml:
                                    # wrong verify code, retry
logger.error(u"verify code:%s error." % door)
continue
else:
break
except:
s = sys.exc_info()
msg = (u"do login %s happened on line %d" % (s[1], s[2].tb_lineno))
logger.error(msg)
loginFalg = False
except Exception:
s = sys.exc_info()
msg = (u"login: %s happened on line %d" % (s[1], s[2].tb_lineno))
logger.error(msg)
return loginFalg
def redo_login(self, login_url):
try:
headers = self.__get_headers()
headers['Referer'] = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.2)'
req = self.pack_request(login_url, headers)
urllib2.urlopen(req)
#self.cj.clear(name="Apache", domain=".sina.com.cn", path="/")
#self.cj.clear(name="SINAGLOBAL", domain=".sina.com.cn", path="/")
self.cj.save(self.cookiefile, True, True)
msg = u'login success'
logger.info(msg)
loginFalg = True
except:
s = sys.exc_info()
msg = (u"redo_login %s happened on line %d" % (s[1], s[2].tb_lineno))
logger.error(msg)
loginFalg = False
return loginFalg
def do_login(self, login_un, login_pw, door=""):
try:
            loginFalg = False  # login status
            username = login_un  # weibo account
            pwd = login_pw  # weibo password
url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.2)'
#POST DATA for login
postdata = {
# 'entry': 'weibo',
# 'gateway': '1',
# 'from': '',
# 'savestate': '7',
# 'userticket': '1',
# 'ssosimplelogin': '1',
# 'vsnf': '1',
# 'vsnval': '',
# 'su': '',
# 'service': 'miniblog',
# 'servertime': '',
# 'nonce': '',
# 'pwencode': 'wsse',
# 'sp': '',
# 'encoding': 'UTF-8',
# 'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
# 'returntype': 'META'
'entry': 'weibo',
'gateway': '1',
'from': '',
'savestate': '7',
'userticket': '1',
'pagerefer' : '',
'ssosimplelogin': '1',
'vsnf': '1',
'vsnval': '',
'service': 'miniblog',
'pwencode': 'rsa2',
'rsakv' : self.rsakv,
'encoding': 'UTF-8',
'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
'returntype': 'META',
'prelt' : '26',
}
postdata['servertime'] = self.servertime
postdata['nonce'] = self.nonce
postdata['su'] = self.get_user(username)
postdata['sp'] = self.get_pwd(pwd, self.servertime, self.nonce).lower()
            # when a verify code is required for login
if door:
postdata['pcid'] = self.pcid
postdata['door'] = door.lower()
#headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; rv:13.0) Gecko/20100101 Firefox/13.0.1'}
headers = {'Host': 'login.sina.com.cn',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:17.0) Gecko/20100101 Firefox/17.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-encoding': 'gzip, deflate',
'Accept-Language': 'zh-cn,zh;q=0.8,en-us;q=0.5,en;q=0.3',
#'Accept-Charset': 'GB2312,utf-8;q=0.7,*;q=0.7',
'Connection': 'keep-alive',
'Referer' : 'http://weibo.com/',
'Content-Type': 'application/x-www-form-urlencoded',
}
req = self.pack_request(url, headers, postdata)
result = urllib2.urlopen(req)
#cj.save(cookiefile, True, True)
if result.info().get("Content-Encoding") == 'gzip':
text = self.gzip_data(result.read())
else:
text = result.read()
return text
except:
s = sys.exc_info()
msg = (u"do_login: %s happened on line %d" % (s[1], s[2].tb_lineno))
logger.error(msg)
return loginFalg
def check_cookie(self, un, pw, softPath):
loginFalg = True
self.cookiefile = os.path.join(softPath, "cookie.dat")
if os.path.exists(self.cookiefile):
msg = u"cookie dat exist."
logger.info(msg)
if "Set-Cookie" not in open(self.cookiefile,'r').read():
msg = u"but does not contain a valid cookie."
logger.info(msg)
loginFalg = self.login(un, pw)
else:
loginFalg = self.login(un, pw)
if loginFalg:
return self.valid_cookie()
else:
return False
'''
    # when the html argument is empty
    # must be called after login()
    # returns cookiestr or False
'''
def valid_cookie(self, html=""):
#http://weibo.com/signup/signup
html = str(html)
if not html:
headers = self.__get_headers()
html = self.get_content_head(url="http://weibo.com/kaifulee", headers=headers)
if not html:
msg = u"need relogin."
logger.error(msg)
self.clear_cookiedat(self.cookiefile) #clear cookie file
return False
html = str(html)
html = html.replace('"', "'")
if "sinaSSOController" in html:
p = re.compile('location\.replace\(\'(.*?)\'\)')
#p = re.compile('location\.replace\("(.*?)"\)')
try:
login_url = p.search(html).group(1)
headers = self.__get_headers()
headers['Host'] = 'account.weibo.com'
req = self.pack_request(url=login_url, headers=headers)
result = urllib2.urlopen(req)
#self.cj.clear(name="Apache", domain=".sina.com.cn", path="/")
#self.cj.clear(name="SINAGLOBAL", domain=".sina.com.cn", path="/")
self.cj.save(self.cookiefile, True, True)
if result.info().get("Content-Encoding") == 'gzip':
                    html = self.gzip_data(result.read())
else:
html = result.read()
except:
msg = u"relogin failure."
logger.error(msg)
self.clear_cookiedat(self.cookiefile)
return False
if "违反了新浪微博的安全检测规则" in html:
msg = u"cookie failure."
logger.error(msg)
self.clear_cookiedat(self.cookiefile) #clear cookie file
return False
elif "您的帐号存在异常" in html and "解除限制" in html:
msg = u"账号被限制."
logger.error(msg)
self.clear_cookiedat(self.cookiefile)#clear cookie file
return False
elif "$CONFIG['islogin'] = '0'" in html:
msg = u"登录失败."
logger.error(msg)
self.clear_cookiedat(self.cookiefile)#clear cookie file
return False
elif "$CONFIG['islogin']='1'" in html:
#print "cookie success."
msg = u"cookie success."
logger.info(msg)
#print cj.as_lwp_str(True, True).replace("\n", ";").replace("Set-Cookie3: ", " ").strip()
#cokiestr = ""
#for cookie in self.cj.as_lwp_str(True, True).split("\n"):
# if "Apache" in cookie or "SINAGLOBAL" in cookie:
# continue
# cookie = cookie.split(";")[0]
# cookie = cookie.replace("\"", "").replace("Set-Cookie3: ", " ").strip() + ";"
# cokiestr += cookie
self.cj.save(self.cookiefile, True, True)
return True
else:
msg = u"登录失败."
self.clear_cookiedat(self.cookiefile) #clear cookie file
logger.error(msg)
return False
def get_content_head(self, url, headers={}, data=None):
content = ""
try:
if os.path.exists(self.cookiefile):
self.cj.revert(self.cookiefile, True, True)
self.cookie_support = urllib2.HTTPCookieProcessor(self.cj)
self.opener = urllib2.build_opener(self.cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(self.opener)
else:
return ""
req = self.pack_request(url=url, headers=headers, data=data)
#response = urllib2.urlopen(req, timeout=15)
response = self.opener.open(req, timeout=10)
if response.info().get("Content-Encoding") == 'gzip':
content = self.gzip_data(response.read())
else:
content = response.read()
#time.sleep(0.1*random.randint(10, 20))
except urllib2.HTTPError, e:
return e.code
except:
s=sys.exc_info()
msg = u"get_content Error %s happened on line %d" % (s[1], s[2].tb_lineno)
logger.error(msg)
content = ""
return content
def get_content_cookie(self, url, headers={}, data=None):
content = ""
try:
req = self.pack_request(url=url, headers=headers, data=data)
opener = urllib2.build_opener(self.cookie_support)
response = opener.open(req, timeout=10)
if response.info().get("Content-Encoding") == 'gzip':
content = self.gzip_data(response.read())
else:
content = response.read()
#time.sleep(0.1*random.randint(10, 20))
except:
s=sys.exc_info()
msg = u"get_content Error %s happened on line %d" % (s[1], s[2].tb_lineno)
logger.error(msg)
content = ""
return content
def clear_cookiedat(self, datpath):
try:
os.remove(datpath)
#f = file(datpath, 'w')
#f.truncate()
#f.close()
except:
pass
def pack_request(self, url="", headers={}, data=None):
if data:
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
data = urllib.urlencode(data)
req = urllib2.Request(
url=url,
data=data,
headers=headers
)
proxyip = self.proxyip
if proxyip and "127.0.0.1" not in proxyip:
if proxyip.startswith("http"):
proxyip = proxyip.replace("http://", "")
req.set_proxy(proxyip, "http")
return req
def gzip_data(self, spider_data):
""" get data from gzip """
if 0 == len(spider_data):
return spider_data
spiderDataStream = StringIO.StringIO(spider_data)
spider_data = gzip.GzipFile(fileobj=spiderDataStream).read()
return spider_data
def __get_headers(self):
headers = {'Host': 'weibo.com',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:13.0) Gecko/20100101 Firefox/13.0.1',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-encoding': 'gzip, deflate',
'Accept-Language': 'zh-cn,zh;q=0.8,en-us;q=0.5,en;q=0.3',
'Connection': 'keep-alive',
}
return headers | gpl-2.0 | -5,311,268,990,533,459,000 | 39.214669 | 186 | 0.492904 | false | 3.766588 | false | false | false |
FrodeSolheim/fs-uae-launcher | system/tools/filescanner.py | 1 | 9278 | from typing import Optional
import fsui
from fsgamesys.amiga.amiga import Amiga
from fsgamesys.context import fsgs
from fsgamesys.product import Product
from fswidgets.panel import Panel
from fswidgets.widget import Widget
from launcher.i18n import gettext
from launcher.launcher_signal import LauncherSignal
from launcher.scanner import Scanner
from launcher.settings.scan_paths_group import ScanPathsGroup
from system.classes.window import Window
TIMER_INTERVAL = 100
# FIXME: TODO: When clicking the Stop button, old (existing) data may be purged
class FileScannerWindow(Window):
@classmethod
def refresh_game_database(cls, window: Widget):
return cls(
window, minimal=True, interactive=False, scan_for_files=False
)
def __init__(
self,
parent: Optional[Widget] = None,
minimal: bool = False,
interactive: bool = True,
scan_for_files: bool = True,
):
title = gettext("File scanner")
super().__init__(parent, title=title, maximizable=False)
buttons, layout = fsui.DialogButtons.create_with_layout(
self, create_parent_layout=False
)
buttons.create_close_button()
self.layout.add_spacer(640, 0)
self.interactive = interactive
self.scan_for_files = scan_for_files
self.update_game_database = False
if not minimal:
if Product.includes_amiga():
self.scan_kickstart_group = ScanKickstartGroup(self)
layout.add(self.scan_kickstart_group, fill=True)
layout.add_spacer(20)
heading = gettext(
"Scan for Kickstarts, Files and Configurations"
)
else:
heading = gettext("Scan for ROMs, media and config files")
label = fsui.HeadingLabel(self, heading)
layout.add(label, margin_bottom=10)
self.scan_paths_group = ScanPathsGroup(self)
layout.add(self.scan_paths_group, fill=True, margin=0)
layout.add_spacer(20)
self.scan_progress_group = ScanProgressGroup(self)
layout.add(self.scan_progress_group, fill=True)
if interactive:
self.scan_button = buttons.add_button(
fsui.Button(buttons, gettext("Scan"))
)
self.scan_button.activated.connect(self.on_scan_button)
else:
self.scan_button = None
self.stop_button = buttons.add_button(
fsui.Button(buttons, gettext("Stop"))
)
self.stop_button.activated.connect(self.on_stop_button)
self.old_title = ""
self.old_status = ""
self.has_started_scan = False
self.start_timer(TIMER_INTERVAL)
if not self.interactive:
self.start_scan()
self.destroyed.connect(Scanner.stop)
def set_scan_title(self, text: str):
if not text:
return
if text == self.old_title:
return
self.old_title = text
self.scan_progress_group.title_label.set_text(text)
def set_scan_status(self, text: str):
if not text:
return
if text == self.old_status:
return
self.old_status = text
self.scan_progress_group.status_label.set_text(text)
def on_timer(self):
if not Scanner.running:
if self.has_started_scan:
if Scanner.error:
self.set_scan_title(gettext("Scan error"))
self.set_scan_status(Scanner.error)
else:
if not self.interactive:
self.end_modal(True)
return
self.set_scan_title(gettext("Scan complete"))
self.set_scan_status(
gettext("Click 'Scan' button if you want to re-scan")
)
else:
self.set_scan_title(gettext("No scan in progress"))
self.set_scan_status(
gettext("Click 'Scan' button to start scan")
)
if self.scan_button is not None:
self.scan_button.set_enabled()
self.stop_button.set_enabled(False)
return
status = Scanner.status
self.set_scan_title(status[0])
self.set_scan_status(status[1])
def on_scan_button(self):
self.start_scan()
def start_scan(self):
if self.scan_button is not None:
self.scan_button.set_enabled(False)
self.has_started_scan = True
self.set_scan_title(gettext("Starting scan"))
self.set_scan_status(gettext("Please wait..."))
paths = ScanPathsGroup.get_search_path()
self.stop_button.set_enabled()
Scanner.start(
paths,
scan_for_files=self.scan_for_files,
update_game_database=self.update_game_database,
purge_other_dirs=True,
)
# noinspection PyMethodMayBeStatic
def on_stop_button(self):
Scanner.stop_flag = True
class KickstartStatusGroup(fsui.Panel):
def __init__(self, parent: Widget, title: str, model):
self.model = model
super().__init__(parent)
self.layout = fsui.HorizontalLayout()
self.ok_image = fsui.Image("launcher:/data/ok_emblem.png")
self.na_image = fsui.Image("launcher:/data/na_emblem.png")
self.icon = fsui.ImageView(self, self.na_image)
self.layout.add(self.icon)
self.layout.add_spacer(10)
self.label = fsui.Label(self, title)
self.layout.add(self.label)
self.update()
LauncherSignal.add_listener("scan_done", self)
def onDestroy(self):
LauncherSignal.remove_listener("scan_done", self)
super().onDestroy()
def on_scan_done_signal(self):
self.update()
def update(self):
amiga = Amiga.get_model_config(self.model)
for sha1 in amiga["kickstarts"]:
if fsgs.file.find_by_sha1(sha1):
self.icon.set_image(self.ok_image)
return
self.icon.set_image(self.na_image)
class ScanKickstartGroup(Panel):
def __init__(self, parent: Widget):
super().__init__(parent)
self.layout = fsui.VerticalLayout()
label = fsui.HeadingLabel(
self, gettext("Available Kickstart Versions")
)
self.layout.add(label, margin_bottom=10)
icon_layout = fsui.HorizontalLayout()
self.layout.add(icon_layout, fill=True)
icon_layout.add_spacer(20)
image = fsui.Image("launcher:/data/kickstart.png")
self.image_view = fsui.ImageView(self, image)
icon_layout.add(self.image_view, valign=0.0, margin_right=10)
vert_layout = fsui.VerticalLayout()
icon_layout.add(vert_layout, fill=True, expand=True)
vert_layout.add_spacer(0)
label = fsui.Label(
self,
gettext(
"You should have kickstart files for "
"each Amiga model you want to use:"
),
)
vert_layout.add(label, margin_bottom=0)
hori_layout = fsui.HorizontalLayout()
vert_layout.add(hori_layout, fill=True)
self.kickstart_groups = []
column_layout = fsui.VerticalLayout()
hori_layout.add(column_layout, expand=True, fill=True, margin=10)
self.add_kickstart_group(column_layout, "Amiga 1000", "A1000")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Amiga 500", "A500")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Amiga 500+", "A500+")
column_layout = fsui.VerticalLayout()
hori_layout.add(column_layout, expand=True, fill=True, margin=10)
self.add_kickstart_group(column_layout, "Amiga 600", "A600")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Amiga 1200", "A1200")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Amiga 3000", "A3000")
column_layout = fsui.VerticalLayout()
hori_layout.add(column_layout, expand=True, fill=True, margin=10)
self.add_kickstart_group(column_layout, "Amiga 4000", "A4000/040")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Amiga CD32", "CD32")
column_layout.add_spacer(10)
self.add_kickstart_group(column_layout, "Commodore CDTV", "CDTV")
def add_kickstart_group(self, layout, title, model):
group = KickstartStatusGroup(self, title, model)
self.kickstart_groups.append(group)
layout.add(group, fill=True)
class ScanProgressGroup(Panel):
def __init__(self, parent: Widget):
super().__init__(parent)
self.layout = fsui.HorizontalLayout()
self.layout2 = fsui.VerticalLayout()
self.layout.add(self.layout2, fill=True, expand=True)
self.title_label = fsui.HeadingLabel(self, "")
self.layout2.add(self.title_label, fill=True)
self.layout2.add_spacer(10)
self.status_label = fsui.Label(self, "")
self.layout2.add(self.status_label, fill=True)
| gpl-2.0 | -2,840,898,757,939,423,000 | 32.135714 | 78 | 0.598513 | false | 3.74566 | false | false | false |
freenas/py-bsd | bsd/pty.py | 1 | 5200 | """Pseudo terminal utilities. (Slightly modified to work on FreeBSD)"""
# Bugs: No signal handling. Doesn't set slave termios and window size.
# Only tested on Linux.
# See: W. Richard Stevens. 1992. Advanced Programming in the
# UNIX Environment. Chapter 19.
# Author: Steen Lumholt -- with additions by Guido.
from select import select
import os
import tty
import traceback
__all__ = ["openpty","fork","spawn"]
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
CHILD = 0
def openpty():
"""openpty() -> (master_fd, slave_fd)
Open a pty master/slave pair, using os.openpty() if possible."""
try:
return os.openpty()
except (AttributeError, OSError):
pass
master_fd, slave_name = _open_terminal()
slave_fd = slave_open(slave_name)
return master_fd, slave_fd
def master_open():
"""master_open() -> (master_fd, slave_name)
Open a pty master and return the fd, and the filename of the slave end.
Deprecated, use openpty() instead."""
try:
master_fd, slave_fd = os.openpty()
except (AttributeError, OSError):
pass
else:
slave_name = os.ttyname(slave_fd)
os.close(slave_fd)
return master_fd, slave_name
return _open_terminal()
def _open_terminal():
"""Open pty master and return (master_fd, tty_name)."""
for x in 'pqrstuvwxyzPQRST':
for y in '0123456789abcdef':
pty_name = '/dev/pty' + x + y
try:
fd = os.open(pty_name, os.O_RDWR)
except OSError:
continue
return (fd, '/dev/tty' + x + y)
raise OSError('out of pty devices')
def slave_open(tty_name):
"""slave_open(tty_name) -> slave_fd
Open the pty slave and acquire the controlling terminal, returning
opened filedescriptor.
Deprecated, use openpty() instead."""
result = os.open(tty_name, os.O_RDWR)
try:
from fcntl import ioctl, I_PUSH
except ImportError:
return result
try:
ioctl(result, I_PUSH, "ptem")
ioctl(result, I_PUSH, "ldterm")
except OSError:
pass
return result
def fork():
"""fork() -> (pid, master_fd)
Fork and make the child a session leader with a controlling terminal."""
try:
pid, fd = os.forkpty()
except (AttributeError, OSError):
pass
else:
if pid == CHILD:
try:
os.setsid()
except OSError:
# os.forkpty() already set us session leader
pass
return pid, fd
master_fd, slave_fd = openpty()
pid = os.fork()
if pid == CHILD:
# Establish a new session.
os.setsid()
os.close(master_fd)
# Slave becomes stdin/stdout/stderr of child.
os.dup2(slave_fd, STDIN_FILENO)
os.dup2(slave_fd, STDOUT_FILENO)
os.dup2(slave_fd, STDERR_FILENO)
if (slave_fd > STDERR_FILENO):
os.close (slave_fd)
# Explicitly open the tty to make it become a controlling tty.
tmp_fd = os.open(os.ttyname(STDOUT_FILENO), os.O_RDWR)
os.close(tmp_fd)
else:
os.close(slave_fd)
# Parent and child process.
return pid, master_fd
def _writen(fd, data):
"""Write all the data to a descriptor."""
while data:
n = os.write(fd, data)
data = data[n:]
def _read(fd):
"""Default read function."""
return os.read(fd, 1024)
def _copy(master_fd, master_read=_read, stdin_read=_read):
"""Parent copy loop.
Copies
pty master -> standard output (master_read)
standard input -> pty master (stdin_read)"""
fds = [master_fd, STDIN_FILENO]
while True:
rfds, wfds, xfds = select(fds, [], [])
if master_fd in rfds:
data = master_read(master_fd)
if not data: # Reached EOF.
return
else:
os.write(STDOUT_FILENO, data)
if STDIN_FILENO in rfds:
data = stdin_read(STDIN_FILENO)
if not data:
fds.remove(STDIN_FILENO)
else:
_writen(master_fd, data)
def spawn(argv, master_read=_read, stdin_read=_read):
"""Create a spawned process."""
if type(argv) == type(''):
argv = (argv,)
pid, master_fd = fork()
if pid == CHILD:
try:
os.execlp(argv[0], *argv)
except:
# If we wanted to be really clever, we would use
# the same method as subprocess() to pass the error
# back to the parent. For now just dump stack trace.
traceback.print_exc()
finally:
os._exit(1)
try:
mode = tty.tcgetattr(STDIN_FILENO)
tty.setraw(STDIN_FILENO)
restore = 1
except tty.error: # This is the same as termios.error
restore = 0
try:
_copy(master_fd, master_read, stdin_read)
except OSError:
# Some OSes never return an EOF on pty, just raise
# an error instead.
pass
finally:
if restore:
tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
os.close(master_fd)
return os.waitpid(pid, 0)[1]
| bsd-3-clause | 6,431,780,181,907,985,000 | 27.571429 | 76 | 0.57 | false | 3.586207 | false | false | false |
arrti/proxypooler | tests/test_db.py | 1 | 1559 | import pytest
from proxypooler.errors import ProxyPoolerEmptyError
def test_db(conn):
conn.put('127.0.0.1:80', 15)
conn.put('127.0.0.1:81', 14)
conn.put('127.0.0.1:82', 210)
conn.put('127.0.0.1:83', 2)
conn.put('127.0.0.1:84', 100)
assert conn.size == 5
ip = conn.get()[0].decode('utf-8')
assert ip == '127.0.0.1:83'
ip = conn.get()[0].decode('utf-8')
assert ip == '127.0.0.1:81'
assert conn.size == 3
ips = conn.get_list(30)
assert len(ips) == 3
ip = ips[0][0].decode('utf-8')
assert ip == '127.0.0.1:80'
ip = ips[1][0].decode('utf-8')
assert ip == '127.0.0.1:84'
ip = ips[2][0].decode('utf-8')
assert ip == '127.0.0.1:82'
assert conn.size == 0
conn.put('127.0.0.1:83', 2)
conn.put('127.0.0.1:83', 20)
assert conn.size == 1
ip, expire = conn.get()
assert ip.decode('utf-8') == '127.0.0.1:83'
assert expire == 20
conn.put('127.0.0.1:83', 20)
conn.put_list([('127.0.0.1:84', 100), ('127.0.0.1:81', 14), ('127.0.0.1:82', 210)])
assert conn.size == 4
ip = conn.get()[0].decode('utf-8')
assert ip == '127.0.0.1:81'
ip = conn.get()[0].decode('utf-8')
assert ip == '127.0.0.1:83'
ips = conn.get_list(2, rev=True)
assert len(ips) == 2
assert ips[0][0].decode('utf-8') == '127.0.0.1:82'
assert ips[0][1] == 210
assert ips[1][0].decode('utf-8') == '127.0.0.1:84'
assert ips[1][1] == 100
assert conn.size == 0
def test_db_empty(conn):
with pytest.raises(ProxyPoolerEmptyError):
conn.get()
| apache-2.0 | -7,760,928,809,844,205,000 | 27.87037 | 87 | 0.546504 | false | 2.435938 | false | false | false |
xsleonard/wsgisubdomain | wsgisubdomain.py | 1 | 2974 | import socket
from threading import Lock
from __about__ import __version__, __title__, __description__
__all__ = ['__version__', '__title__', '__description__',
'SubdomainDispatcher']
class SubdomainDispatcher(object):
""" A WSGI application that gets or creates other WSGI applications
based on the subdomain.
Adapted from:
http://flask.pocoo.org/docs/patterns/appdispatch/#dispatch-by-subdomain
:param create_application: A function that accepts 'subdomain' as a
keyword argument and returns a WSGI application. Subdomain will be
either an empty string for the bare domain, `None` if the request is
for an IP address, or a full subdomain (e.g. 'www' or 'en.dl')
"""
def __init__(self, create_application):
self.create_application = create_application
self.lock = Lock()
self.instances = {}
def __call__(self, environ, start_response):
""" WSGI application interface
:param environ: WSGI environ
:param start_response: WSGI start_response
"""
app = self.get_application(environ)
return app(environ, start_response)
def get_application(self, environ):
""" Retrieve an application for a wsgi environ
:param environ: The environ object sent by wsgi to an application
"""
host = self._get_host(environ)
subdomain = self._extract_subdomain(host)
return self._get_application(subdomain)
def _get_application(self, subdomain):
""" Return a WSGI application for subdomain. The subdomain is
passed to the create_application constructor as a keyword argument.
:param subdomain: Subdomain to get or create an application with
"""
with self.lock:
app = self.instances.get(subdomain)
if app is None:
app = self.create_application(subdomain=subdomain)
self.instances[subdomain] = app
return app
@staticmethod
def _extract_subdomain(host):
""" Returns a subdomain from a host. This host is typically the
HTTP_HOST request envvar. If the host is an IP address, `None` is
returned
:param host: Request's target host
"""
host = host.split(':')[0]
# If the host is an IP address, there is no subdomain to extract
try:
# Check if the host is an ip address
socket.inet_aton(host)
except socket.error:
# It isn't an IP address, return the subdomain
return '.'.join(host.split('.')[:-2])
@staticmethod
def _get_host(environ):
""" Returns the true host from the request's environ.
:param environ: environ variable passed to a wsgi app by wsgi
"""
# HTTP_HOST is preferred to SERVER_NAME, but only SERVER_NAME is
# guaranteed to exist
return environ.get('HTTP_HOST', environ['SERVER_NAME'])
| mit | 1,412,791,263,099,934,500 | 34.404762 | 76 | 0.62273 | false | 4.589506 | false | false | false |
olysonek/tuned | tests/unit/hardware/test_device_matcher_udev.py | 1 | 1221 | import unittest2
import pyudev
from tuned.hardware.device_matcher_udev import DeviceMatcherUdev
class DeviceMatcherUdevTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.udev_context = pyudev.Context()
cls.matcher = DeviceMatcherUdev()
def test_simple_search(self):
try:
device = pyudev.Devices.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty0")
except AttributeError:
device = pyudev.Device.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty0")
self.assertTrue(self.matcher.match("tty0", device))
try:
device = pyudev.Devices.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty1")
except AttributeError:
device = pyudev.Device.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty1")
self.assertFalse(self.matcher.match("tty0", device))
def test_regex_search(self):
try:
device = pyudev.Devices.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty0")
except AttributeError:
device = pyudev.Device.from_sys_path(self.udev_context,
"/sys/devices/virtual/tty/tty0")
self.assertTrue(self.matcher.match("tty.", device))
self.assertFalse(self.matcher.match("tty[1-9]", device))
| gpl-2.0 | -4,724,523,557,095,025,000 | 32.916667 | 64 | 0.736282 | false | 2.907143 | true | false | false |
TheDSCPL/SSRE_2017-2018_group8 | Projeto/Python/cryptopy/crypto/cipher/arc4.py | 1 | 2710 | # -*- coding: utf-8 -*-
""" crypto.cipher.arc4
A Stream Cipher Encryption Algorithm 'Arcfour'
A few lines of code/ideas borrowed from [PING]
[PING] CipherSaber implementation by Ka-Ping Yee <[email protected]>, 5 May 2000.
Some documentation text and test vectors taken from [IDARC4]
[IDARCH4] K.Kaukonen, R.Thayer, "A Stream Cipher Encryption Algorithm 'Arcfour'",
ftp://ietf.org/draft-kaukonen-cipher-arcfour-03.txt
Generally munged to map to crypto.cipher calling conventions
Copyright © (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
November 5, 2002
"""
class ARC4:
""" ARC4 Stream Cipher Algorithm
"""
def __init__(self,key=None):
""" key -> octet string for key """
self.name = 'ARC4'
self.strength = None # depends on keySize
self.blockSize = 1 # blockSize is in bytes
if key != None:
self.setKey(key)
def setKey(self, key):
""" Set initial state from key. Never use the same key more than once!
"""
self.keySize = len(key)
self.strength = self.keySize # this does not include subtracting IV size :-(
i, j, self.state = 0, 0, range(256)
for i in range(256):
j = (j + self.state[i] + ord(key[i % len(key)])) % 256
self.state[i], self.state[j] = self.state[j], self.state[i]
self.keyReady = 1 # Ready
def encrypt(self, plainText, more = None):
""" Encrypt a string and return a binary string
multiple sequential calls can be made using more =1,
this continues the encryption
New sessions of encrypt can NOT be called twice with the same key!!!!
"""
        if self.keyReady != 1: raise Exception('Error, ARC4 key already used once!')
if more != 1:
self.keyReady = None
cipherText = arcfourBlock(self.state, plainText)
return cipherText
def decrypt(self, cipherText, more = None):
""" Decrypt a string and return a string """
if self.keyReady != 1 :
            raise Exception('set for decryption required')
if more != 1:
self.keyReady = None
plainText = arcfourBlock(self.state, cipherText)
return plainText
def arcfourBlock(state, input):
""" Use state to encrypt input string, returns string """
i, j, output = 0, 0, []
for byte in input:
i = (i + 1) % 256
j = (j + state[i]) % 256
state[i], state[j] = state[j], state[i]
n = (state[i] + state[j]) % 256
output.append(chr(ord(byte) ^ state[n]))
output = ''.join(output) # convert to string
return output
| mit | -4,618,640,495,810,670,000 | 34.657895 | 91 | 0.586716 | false | 3.697135 | false | false | false |
lochiiconnectivity/exabgp | lib/exabgp/application/cli.py | 1 | 4426 | #!/usr/bin/env python
# encoding: utf-8
"""
cli.py
Created by Thomas Mangin on 2014-12-22.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
import sys
from exabgp.dep.cmd2 import cmd
from exabgp.version import version
class Completed (cmd.Cmd):
# use_rawinput = False
# prompt = ''
# doc_header = 'doc_header'
# misc_header = 'misc_header'
# undoc_header = 'undoc_header'
ruler = '-'
def __init__ (self,intro):
self.prompt = '%s> ' % intro
cmd.Cmd.__init__(self)
def completedefault (self, text, line, begidx, endidx):
commands = line.split()
local = self.completion
for command in commands:
if command in local:
local = local[command]
continue
break
return [_ for _ in local.keys() if _.startswith(text)]
def default (self,line):
print 'unrecognised syntax: ', line
def do_EOF (self):
return True
class SubMenu (Completed):
def do_exit (self,line):
return True
do_x = do_exit
class Attribute (SubMenu):
chars = ''.join(chr(_) for _ in range(ord('a'),ord('z')+1) + range(ord('0'),ord('9')+1) + [ord ('-')])
attribute = None
completion = {
'origin' : {
'igp': {
},
'egp': {
},
'incomplete': {
},
},
}
def __init__ (self,name):
self.name = name
SubMenu.__init__(self,'attribute %s' % name)
def do_origin (self,line):
if line in ('igp','egp','incomplete'):
self.attribute['origin'] = line
else:
print 'invalid origin'
def do_as_path (self,line):
pass
# next-hop
def do_med (self,line):
if not line.isdigit():
print 'invalid med, %s is not a number' % line
return
med = int(line)
		if not 0 <= med < 65536:
			print 'invalid med, %s is not a valid number' % line
			return
		self.attribute['med'] = line
# local-preference
# atomic-aggregate
# aggregator
# community
# originator-id
# cluster-list
# extended-community
# psmi
# aigp
def do_show (self,line):
print 'attribute %s ' % self.name + ' '.join('%s %s' % (key,value) for key,value in self.attribute.iteritems())
class ExaBGP (Completed):
completion = {
'announce' : {
'route' : {
},
'l2vpn' : {
},
},
'neighbor': {
'include': {
},
'exclude': {
},
'reset': {
},
'list': {
},
},
'attribute' : {
},
'show': {
'routes' : {
'extensive': {
},
'minimal': {
},
},
},
'reload': {
},
'restart': {
},
}
def _update_prompt (self):
if self._neighbors:
self.prompt = '\n# neighbor ' + ', '.join(self._neighbors) + '\n> '
else:
self.prompt = '\n> '
##
## repeat last command
##
last = 'help'
def do_last (self, line):
"Print the input, replacing '$out' with the output of the last shell command"
# Obviously not robust
if hasattr(self, 'last_output'):
print line.replace('$out', self.last_output)
##
##
##
_neighbors = set()
def do_neighbor (self,line):
try:
action,ip = line.split()
except ValueError:
if line == 'reset':
print 'removed neighbors', ', '.join(self._neighbors)
self._neighbors = set()
self._update_prompt()
else:
print 'invalid syntax'
self.help_neighbor()
return
if action == 'include':
# check ip is an IP
# check ip is a known IP
self._neighbors.add(ip)
self._update_prompt()
elif action == 'exclude':
if ip in self._neighbors:
self._neighbors.remove(ip)
print 'neighbor excluded'
self._update_prompt()
else:
print 'invalid neighbor'
elif action == 'list':
			print 'neighbors', ', '.join(self._neighbors)
else:
print 'invalid syntax'
self.help_neighbor()
def help_neighbor (self):
print "neighbor include <ip> : limit the action to the defined neighbors"
print "neighbor exclude <ip> : remove a particular neighbor"
print "neighbor reset : clear the neighbor previous set "
_attribute = {}
def do_attribute (self,name):
if not name:
self.help_attribute()
return
invalid = ''.join([_ for _ in name if _ not in Attribute.chars])
if invalid:
print 'invalid character(s) in attribute name: %s' % invalid
return
cli = Attribute(name)
cli.attribute = self._attribute.get(name,{})
cli.cmdloop()
def help_attribute (self):
print 'attribute <name>'
def do_quit (self,line):
return True
do_q = do_quit
if __name__ == '__main__':
if len(sys.argv) > 1:
ExaBGP().onecmd(' '.join(sys.argv[1:]))
else:
print "ExaBGP %s CLI" % version
ExaBGP('').cmdloop()
| bsd-3-clause | 2,480,145,758,677,052,400 | 17.596639 | 113 | 0.604383 | false | 2.874026 | false | false | false |
taiwenko/python | acs/acs_cold_start.py | 1 | 7587 | #!/usr/bin/env python
from time import sleep
import twk_utils
import math
import sys
import xpf6020
import tools.utils as tools
import watlowf4
from tools import shell
from blessings import Terminal
t = Terminal()
franz_num = raw_input('How many Franz are you testing? [1,2,3,or 4]: ').strip()
cycle_num = raw_input('How many temp cycles would you like to run?: ').strip()
utils = twk_utils.Twk_utils()
print "Accessing the XPF6020 Power Supplies"
ps1_path = '/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller_D-if00-port0'
ps2_path = '/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_A703PO3I-if00-port0'
pfc1_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_1a-if01-port0'
pfc2_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_2a-if01-port0'
pfc3_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_1b-if01-port0'
pfc4_path = '/dev/serial/by-id/usb-loon_onboard_half_stack_hv_pfc_2b-if01-port0'
print "Accessing the Temperature Chamber"
tchamber_path = '/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_A603R0MG-if00-port0'
chamber = watlowf4.WatlowF4(tchamber_path)
chamber.conditioning_on(True)
def ps_measure_check(ch, current_min, current_max, voltage_min, voltage_max, tolerance, max_cycle):
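    # average max_cycle voltage/current readings from the selected power supply channel, then check them against the limits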
cycle = 0
avg_volt = 0
avg_current = 0
while cycle != max_cycle:
if ch == '1':
[r_mppt_v, r_mppt_i] = ps1.measure('1')
elif ch == '2':
[r_mppt_v, r_mppt_i] = ps1.measure('2')
elif ch == '3':
[r_mppt_v, r_mppt_i] = ps2.measure('1')
elif ch == '4':
[r_mppt_v, r_mppt_i] = ps2.measure('2')
else:
print 'Unknown Input Channel'
volt = float(r_mppt_v.split("V")[0])
curr = float(r_mppt_i.split("A")[0])
avg_volt = avg_volt + volt
avg_current = avg_current + curr
cycle = cycle + 1
sleep(1)
r_mppt_v = avg_volt / cycle;
r_mppt_i = avg_current / cycle;
if float(r_mppt_i) > float(current_max):
result = t.bold_red('FAILED')
result_count = 1
elif float(r_mppt_i) < float(current_min):
result = t.bold_red('FAILED')
result_count = 1
elif float(r_mppt_v) > float(voltage_max):
result = t.bold_red('FAILED')
result_count = 1
elif float(r_mppt_v) < float(voltage_min):
result = t.bold_red('FAILED')
result_count = 1
else:
result = t.bold_green('PASSED')
result_count = 0
print 'Franz CH%s @ %sV, %sA....[%s]' %(ch, r_mppt_v, r_mppt_i, result)
print ''
return result_count
def config_acs(pfc_path):
sleep(5)
tom = shell.Shell(pfc_path)
sleep(1)
sb = shell.Scoreboard(tom,'acs')
sleep(1)
tom.sendline('power on acs')
sleep(3)
print sb.query('power_acs_enabled')
sleep(1)
tom.close()
def clean_acs(pfc_path):
sleep(5)
tom = shell.Shell(pfc_path)
sleep(1)
sb = shell.Scoreboard(tom,'acs')
sleep(1)
tom.sendline('power off acs')
sleep(3)
print sb.query('power_acs_enabled')
sleep(1)
tom.close()
# Test starts here
offtime = 1 #15 #mins
offtime_sec = offtime * 60
run_count = 0
max_run_count = cycle_num
ch1result = 0
ch2result = 0
ch3result = 0
ch4result = 0
ts = utils.get_timestamp()
print '*** Franz test started @ %s***' % ts
batt_vin = 48
batt_iin = 20
ps1 = xpf6020.Xpf6020(ps1_path)
ps1.reset_ps()
ps2 = xpf6020.Xpf6020(ps2_path)
ps2.reset_ps()
ps1.set_voltage(1, batt_vin)
ps1.set_currentlimit(1, batt_iin)
if franz_num == '2':
ps1.set_voltage(2, batt_vin)
ps1.set_currentlimit(2, batt_iin)
elif franz_num == '3':
ps1.set_voltage(2, batt_vin)
ps1.set_currentlimit(2, batt_iin)
ps2.set_voltage(1,batt_vin)
ps2.set_currentlimit(1,batt_iin)
elif franz_num == '4':
ps1.set_voltage(2, batt_vin)
ps1.set_currentlimit(2, batt_iin)
ps2.set_voltage(1,batt_vin)
ps2.set_currentlimit(1,batt_iin)
ps2.set_voltage(2,batt_vin)
ps2.set_currentlimit(2,batt_iin)
else:
if franz_num != '1':
print 'Unknown franz amount. Can only test up to 4 franz at a time.'
sys.exit()
# Setup chamber
cold_temp = 20 #-60
soak_time = 1 #45 # min
chamber.ramp_down(cold_temp)
chamber.soak_time(soak_time)
while True:
# Turn on power supplies
ps1.ind_output('1','on')
if franz_num == '2':
ps1.ind_output('2','on')
elif franz_num == '3':
ps1.ind_output('2','on')
ps2.ind_output('1','on')
elif franz_num == '4':
ps1.ind_output('2','on')
ps2.ind_output('1','on')
ps2.ind_output('2','on')
else:
if franz_num != '1':
print 'Unknown Channel'
sleep(5)
# Turn on ACS using PFC
config_acs(pfc1_path)
if franz_num == '2':
config_acs(pfc2_path)
elif franz_num == '3':
config_acs(pfc2_path)
config_acs(pfc3_path)
elif franz_num == '4':
config_acs(pfc2_path)
config_acs(pfc3_path)
config_acs(pfc4_path)
else:
if franz_num != '1':
print 'Unknown Channel'
sleep(5)
# Measure current draw from PS
measurement_count = 5
print 'Averaging %d measurement...' % measurement_count
current = 0.12
voltage = 48
tolerance = 0.05
current_max = float(current) * (1 + tolerance)
current_min = float(current) * (1 - tolerance)
voltage_max = float(voltage) * (1 + tolerance)
voltage_min = float(voltage) * (1 - tolerance)
print 'Voltage Limits should be within %f to %fV' %(voltage_min, voltage_max)
print 'Current Limits should be within %f to %fA' %(current_min, current_max)
print ''
rc1 = ps_measure_check('1', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch1result = ch1result + rc1
if franz_num == '2':
rc2 = ps_measure_check('2', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch2result = ch2result + rc2
elif franz_num == '3':
rc2 = ps_measure_check('2', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch2result = ch2result + rc2
rc3 = ps_measure_check('3', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch3result = ch3result + rc3
elif franz_num == '4':
rc2 = ps_measure_check('2', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch2result = ch2result + rc2
rc3 = ps_measure_check('3', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch3result = ch3result + rc3
rc4 = ps_measure_check('4', current_min, current_max, voltage_min, voltage_max, tolerance, measurement_count)
ch4result = ch4result + rc4
else:
if franz_num != '1':
print 'Unknown franz amount.'
# Turn off ACS using PFC
clean_acs(pfc1_path)
if franz_num == '2':
clean_acs(pfc2_path)
elif franz_num == '3':
clean_acs(pfc2_path)
clean_acs(pfc3_path)
elif franz_num == '4':
clean_acs(pfc2_path)
clean_acs(pfc3_path)
clean_acs(pfc4_path)
else:
if franz_num != '1':
print 'Unknown Channel'
sleep(5)
# Turn off power supplies
ps1.all_output('off')
ps2.all_output('off')
run_count = run_count + 1
if run_count == int(max_run_count):
break;
ts = utils.get_timestamp()
print 'Off for %s min started @ %s' % (offtime, ts)
sleep(offtime_sec)
hot_temp = 24
print 'Ramping up to 24C'
chamber.ramp_up(hot_temp)
ts = utils.get_timestamp()
msg = '*** ACS test completed @ %s***' % ts
msg = msg + ', CH1 failed %s out of %s cycles' % (ch1result, max_run_count)
msg = msg + ', CH2 failed %s out of %s cycles' % (ch2result, max_run_count)
msg = msg + ', CH3 failed %s out of %s cycles' % (ch3result, max_run_count)
msg = msg + ', CH4 failed %s out of %s cycles' % (ch4result, max_run_count)
print msg
utils.send_email('ACS Cold-Start', msg)
| mit | 3,195,352,635,429,763,000 | 26.791209 | 113 | 0.652827 | false | 2.580612 | true | false | false |
SmileyJames/shopify_python_api | setup.py | 1 | 1764 | from setuptools import setup
NAME='ShopifyAPI'
exec(open('shopify/version.py').read())
DESCRIPTION='Shopify API for Python'
LONG_DESCRIPTION="""\
The ShopifyAPI library allows python developers to programmatically
access the admin section of stores using an ActiveResource like
interface similar the ruby Shopify API gem. The library makes HTTP
requests to Shopify in order to list, create, update, or delete
resources (e.g. Order, Product, Collection)."""
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author='Shopify',
author_email='[email protected]',
url='https://github.com/Shopify/shopify_python_api',
packages=['shopify', 'shopify/resources'],
scripts=['scripts/shopify_api.py'],
license='MIT License',
install_requires=[
'pyactiveresource>=2.1.1',
'PyYAML',
'six',
],
test_suite='test',
tests_require=[
'mock>=1.0.1',
],
platforms='Any',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
)
| mit | -7,996,432,185,524,814,000 | 38.2 | 82 | 0.594671 | false | 4.59375 | false | false | false |
MDAnalysis/pyQuteMol | python/OctaMap.py | 1 | 5260 |
import numpy
def Area(a, b, c):
v = numpy.cross(b-a,c-a)
return numpy.sqrt(numpy.dot(v,v))*0.5
class OctaMapSamp:
def __init__(self):
self.size = 0
self.dir = None
self.dirrot = None
self.weight = None
def nsamp(self):
return len(self.dir)
def DuplicateTexels(self, t, s, tx, ty):
e=self.size - 1
# four corners
k0=(tx+ (ty )*s)*3
k1=(tx+e+(ty )*s)*3
k2=(tx+e+(ty+e)*s)*3
k3=(tx+ (ty+e)*s)*3
t[k0 ]=t[k1 ]=t[k2 ]=t[k3 ]
t[k0+1]=t[k1+1]=t[k2+1]=t[k3+1]
t[k0+2]=t[k1+2]=t[k2+2]=t[k3+2]
# sides
        for i in range(1, self.size // 2):
k0a=(tx + (ty +i )*s)*3
k0b=(tx + (ty +e-i)*s)*3
k1a=(tx+e + (ty +i )*s)*3
k1b=(tx+e + (ty +e-i)*s)*3
k2a=(tx+i + (ty )*s)*3
k2b=(tx+e-i+ (ty )*s)*3
k3a=(tx+i + (ty +e )*s)*3
k3b=(tx+e-i+ (ty +e )*s)*3
t[k0a+0]=t[k0b+0]; t[k1a+0]=t[k1b+0]; t[k2a+0]=t[k2b+0]; t[k3a+0]=t[k3b+0];
t[k0a+1]=t[k0b+1]; t[k1a+1]=t[k1b+1]; t[k2a+1]=t[k2b+1]; t[k3a+1]=t[k3b+1];
t[k0a+2]=t[k0b+2]; t[k1a+2]=t[k1b+2]; t[k2a+2]=t[k2b+2]; t[k3a+2]=t[k3b+2];
def FillTexture(self, t, s, tx, ty, cr, cg, cb):
for y in range(self.size):
for x in range(self.size):
k=(x+tx+(y+ty)*s)*3
                p = self.dir[ self.Index( x , y ) ]
                q = (p + numpy.array((1, 1, 1))) / 2.0 * 255.0
t[k]= q[0]
t[k+1]= q[1]
t[k+2]= q[2]
def Index(self,x, y):
return x+y*self.size
def Smooth(self,t, s, tx, ty):
size = self.size
oldvalue = numpy.zeros(size*size*6)
# copy old values
for y in range(0,size*2):
for x in range(0,size*3):
k=(x+tx+(y+ty)*s)*3
                i = self.Index( x , y )
oldvalue[i]=t[k]
        dy, dx = size, 1
e=size-1;
# smooth old values
for y in range(size):
for x in range(size):
                i = self.Index( x , y )
TH=2
sum=oldvalue[i]
ddiv=1
w=0
if (y!=0): w=oldvalue[i-dy]
                else: w=oldvalue[ self.Index( e-x , 1 ) ]
if(w>TH):
sum+=w
ddiv+=1
if (x!=0): w=oldvalue[i-dx]
                else: w=oldvalue[ self.Index( 1 , e-y ) ]
if(w>TH):
sum+=w
ddiv+=1
if (y!=e): w=oldvalue[i+dy]
                else: w=oldvalue[ self.Index( e-x ,e-1 ) ]
if(w>TH):
sum+=w
ddiv+=1
if (x!=e): w=oldvalue[i+dx]
                else: w=oldvalue[ self.Index( e-1 , e-y ) ]
if(w>TH):
sum+=w
ddiv+=1
sum=(sum+ddiv/2)/ddiv
k=(x+tx+(y+ty)*s)*3
t[k]=t[k+1]=t[k+2]=sum
def SetSize(self,_size):
self.size=_size
self.initMap()
self.ComputeWeight()
def getDir(self, x, y):
fs=float(self.size)-1
#create point -
p = numpy.array((x*2./fs-1.,y*2./fs-1,0))
ax=numpy.abs(p[0]); ay=numpy.abs(p[1]); az=1
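        # octahedral mapping: points outside the |x|+|y| <= 1 diamond are folded onto the lower (z < 0) hemisphere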
if (ax+ay>1.0):
p = numpy.array((numpy.sign(p[0])*(1-ay),numpy.sign(p[1])*(1-ax), 0))
az=-1
p[2]=(1-ax-ay)*az
# Normalize
p /= numpy.linalg.norm(p)
return p
def initMap(self):
size = self.size
dir = self.dir = numpy.zeros((size*size, 3))
for y in range(size):
for x in range(size):
dir[self.Index(x,y)]=self.getDir(x,y)
def ComputeWeight(self):
size = self.size
getDir = self.getDir
weight = self.weight = numpy.zeros((size*size))
k = 0
for y in range(size):
for x in range(size):
h=0.5
p00=getDir(x-h,y-h)
p01=getDir(x-h,y+0)
p02=getDir(x-h,y+h)
p10=getDir(x+0,y-h)
p11=getDir(x+0,y+0)
p12=getDir(x+0,y+h)
p20=getDir(x+h,y-h)
p21=getDir(x+h,y+0)
p22=getDir(x+h,y+h)
tota=0; c=0; e=size-1
if ( (x!=0) and (y!=0) ):
tota+=Area( p00, p10, p01 )
tota+=Area( p10, p11, p01 )
c+=1
if ( (x!=0) and (y!=e) ):
tota+=Area( p01, p11, p12 )
tota+=Area( p01, p12, p02 )
c+=1
if ( (x!=e) and (y!=0) ):
tota+=Area( p10, p20, p21 )
tota+=Area( p21, p11, p10 )
c+=1
if ( (x!=e) and (y!=e) ):
tota+=Area( p11, p21, p12 )
tota+=Area( p21, p22, p12 )
c+=1
weight[k]=1.0/(tota*4/c)
k+=1
def TotTexSizeX(self): return self.size
def TotTexSizeY(self): return self.size
octamap = OctaMapSamp()
| gpl-2.0 | -3,011,985,873,142,060,000 | 28.886364 | 87 | 0.384221 | false | 2.748171 | false | false | false |
3ev0/android-whitelist | blackswan/core/modularity.py | 1 | 1286 | __author__ = 'ivo'
import logging
import argparse
from blackswan import config
_log = logging.getLogger(__name__)
class ModuleBase():
argparser = None
def __init__(self):
self.config = {}
@classmethod
def register(cls):
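        # build this module's argument parser and record the class in the global module registry (config.modules)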
cls.argparser = argparse.ArgumentParser(description=cls.description, prog=cls.modname, add_help=False)
cls.argparser.add_argument("-b", "--db", default=config.def_db, help="The blackswan db file. Default: {}".format(config.def_db))
cls.add_args()
config.modules[cls.modname] = cls
_log.debug("Module %s registered", cls.modname)
return
@classmethod
def add_args(cls):
raise NotImplementedError
def work(self):
raise NotImplementedError
def __repr__(self):
return "<{}({})>".format(self.modname, repr(self.config))
def parse_args(self, modargs):
args = self.argparser.parse_args(args=modargs)
self.config.update(**vars(args))
def run(self):
_log.info("Module %s started", self.modname)
self.work()
_log.info("Module %s finished", self.modname)
def configure(self, **kwargs):
self.config.update(kwargs)
_log.info("Module %s configured: \n%s", self.modname, repr(self.config))
| apache-2.0 | -8,348,951,146,452,041,000 | 26.956522 | 136 | 0.624417 | false | 3.716763 | true | false | false |
rbuffat/pyidf | tests/test_surfacecontaminantsourceandsinkgenericboundarylayerdiffusion.py | 1 | 2309 | import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.internal_gains import SurfaceContaminantSourceAndSinkGenericBoundaryLayerDiffusion
log = logging.getLogger(__name__)
class TestSurfaceContaminantSourceAndSinkGenericBoundaryLayerDiffusion(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_surfacecontaminantsourceandsinkgenericboundarylayerdiffusion(self):
pyidf.validation_level = ValidationLevel.error
obj = SurfaceContaminantSourceAndSinkGenericBoundaryLayerDiffusion()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_surface_name = "object-list|Surface Name"
obj.surface_name = var_surface_name
# real
var_mass_transfer_coefficient = 0.0
obj.mass_transfer_coefficient = var_mass_transfer_coefficient
# object-list
var_schedule_name = "object-list|Schedule Name"
obj.schedule_name = var_schedule_name
# real
var_henry_adsorption_constant_or_partition_coefficient = 0.0001
obj.henry_adsorption_constant_or_partition_coefficient = var_henry_adsorption_constant_or_partition_coefficient
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.surfacecontaminantsourceandsinkgenericboundarylayerdiffusions[0].name, var_name)
self.assertEqual(idf2.surfacecontaminantsourceandsinkgenericboundarylayerdiffusions[0].surface_name, var_surface_name)
self.assertAlmostEqual(idf2.surfacecontaminantsourceandsinkgenericboundarylayerdiffusions[0].mass_transfer_coefficient, var_mass_transfer_coefficient)
self.assertEqual(idf2.surfacecontaminantsourceandsinkgenericboundarylayerdiffusions[0].schedule_name, var_schedule_name)
self.assertAlmostEqual(idf2.surfacecontaminantsourceandsinkgenericboundarylayerdiffusions[0].henry_adsorption_constant_or_partition_coefficient, var_henry_adsorption_constant_or_partition_coefficient) | apache-2.0 | 6,754,138,350,835,265,000 | 41.777778 | 208 | 0.737982 | false | 3.816529 | false | false | false |
drammock/pyeparse | pyeparse/utils.py | 1 | 3374 | # Authors: Denis Engemann <[email protected]>
#
# License: BSD (3-clause)
import numpy as np
from os import path as op
import glob
import tempfile
from shutil import rmtree
import atexit
def create_chunks(sequence, size):
"""Generate chunks from a sequence
Note. copied from MNE-Python
Parameters
----------
sequence : iterable
Any iterable object
size : int
The chunksize to be returned
"""
return (sequence[p:p + size] for p in range(0, len(sequence), size))
def fwhm_kernel_2d(size, fwhm, center=None):
""" Make a square gaussian kernel.
Note: adapted from https://gist.github.com/andrewgiessel/4635563
Parameters
----------
size : int
The length of the square matrix to create.
    fwhm : int
        The full width at half maximum value.
"""
x = np.arange(0, size, 1, np.float64)
y = x[:, np.newaxis]
# center
x0 = y0 = size // 2
return np.exp(-4 * np.log(2) * ((x - x0) ** 2 + (y - y0) ** 2) / fwhm ** 2)
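# Illustrative usage (added for clarity, not part of the original module): a
# 65 x 65 Gaussian kernel whose full width at half maximum spans 20 pixels.
def _example_fwhm_kernel():
    kernel = fwhm_kernel_2d(65, 20)
    # Values 10 px (fwhm / 2) away from the centre are half of the peak value.
    return kernel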
def pupil_kernel(fs, dur=4.0, t_max=0.930, n=10.1, s=1.):
"""Generate pupil response kernel modeled as an Erlang gamma function.
Parameters
----------
fs : int
Sampling frequency (samples/second) to use in generating the kernel.
dur : float
Length (in seconds) of the generated kernel.
t_max : float
Time (in seconds) where the response maximum is stipulated to occur.
n : float
        Number of negative-exponential layers in the cascade defining the
        gamma function.
s : float | None
Desired value for the area under the kernel. If `None`, no scaling is
performed.
"""
n_samp = int(np.round(fs * dur))
t = np.arange(n_samp, dtype=float) / fs
h = (t ** n) * np.exp(- n * t / t_max)
scal = 1. if s is None else float(s) / (np.sum(h) * (t[1] - t[0]))
h = scal * h
return h
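# Illustrative usage (added for clarity, not part of the original module):
# build a one-second pupil-response kernel sampled at 250 Hz; the numbers are
# arbitrary assumptions, not recommended defaults.
def _example_pupil_kernel():
    fs = 250
    kernel = pupil_kernel(fs, dur=1.0, t_max=0.930, n=10.1, s=1.0)
    # The kernel peaks near t_max seconds and is scaled so its area equals s.
    return kernel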
def _get_test_fnames():
"""Get usable test files (omit EDF if no edf2asc)"""
path = op.join(op.dirname(__file__), 'tests', 'data')
fnames = glob.glob(op.join(path, '*.edf'))
return fnames
class _TempDir(str):
"""Class for creating and auto-destroying temp dir
This is designed to be used with testing modules.
We cannot simply use __del__() method for cleanup here because the rmtree
function may be cleaned up before this object, so we use the atexit module
instead.
"""
def __new__(self):
new = str.__new__(self, tempfile.mkdtemp())
return new
def __init__(self):
self._path = self.__str__()
atexit.register(self.cleanup)
def cleanup(self):
rmtree(self._path, ignore_errors=True)
def _has_joblib():
"""Helper to determine if joblib is installed"""
try:
import joblib # noqa
except Exception:
return False
else:
return True
def _has_h5py():
"""Helper to determine if joblib is installed"""
try:
import h5py # noqa
except Exception:
return False
else:
return True
def _has_edfapi():
"""Helper to determine if a user has edfapi installed"""
from .edf._raw import has_edfapi
return has_edfapi
_requires_h5py = np.testing.dec.skipif(not _has_h5py(),
'Requires h5py')
_requires_edfapi = np.testing.dec.skipif(not _has_edfapi(), 'Requires edfapi')
| bsd-3-clause | -3,761,224,465,408,458,000 | 24.560606 | 79 | 0.603734 | false | 3.536688 | true | false | false |
gzorin/RSXGL | extsrc/mesa/src/mapi/glapi/gen/gl_XML.py | 1 | 25960 | #!/usr/bin/env python
# (C) Copyright IBM Corporation 2004, 2005
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# on the rights to use, copy, modify, merge, publish, distribute, sub
# license, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Ian Romanick <[email protected]>
import libxml2
import re, sys, string
import typeexpr
def parse_GL_API( file_name, factory = None ):
doc = libxml2.readFile( file_name, None, libxml2.XML_PARSE_XINCLUDE + libxml2.XML_PARSE_NOBLANKS + libxml2.XML_PARSE_DTDVALID + libxml2.XML_PARSE_DTDATTR + libxml2.XML_PARSE_DTDLOAD + libxml2.XML_PARSE_NOENT )
ret = doc.xincludeProcess()
if not factory:
factory = gl_item_factory()
api = factory.create_item( "api", None, None )
api.process_element( doc )
# After the XML has been processed, we need to go back and assign
# dispatch offsets to the functions that request that their offsets
# be assigned by the scripts. Typically this means all functions
# that are not part of the ABI.
for func in api.functionIterateByCategory():
if func.assign_offset:
func.offset = api.next_offset;
api.next_offset += 1
doc.freeDoc()
return api
def is_attr_true( element, name ):
"""Read a name value from an element's attributes.
The value read from the attribute list must be either 'true' or
'false'. If the value is 'false', zero will be returned. If the
value is 'true', non-zero will be returned. An exception will be
raised for any other value."""
value = element.nsProp( name, None )
if value == "true":
return 1
elif value == "false":
return 0
else:
raise RuntimeError('Invalid value "%s" for boolean "%s".' % (value, name))
class gl_print_base:
"""Base class of all API pretty-printers.
In the model-view-controller pattern, this is the view. Any derived
class will want to over-ride the printBody, printRealHader, and
printRealFooter methods. Some derived classes may want to over-ride
printHeader and printFooter, or even Print (though this is unlikely).
"""
def __init__(self):
# Name of the script that is generating the output file.
# Every derived class should set this to the name of its
# source file.
self.name = "a"
# License on the *generated* source file. This may differ
# from the license on the script that is generating the file.
# Every derived class should set this to some reasonable
# value.
#
# See license.py for an example of a reasonable value.
self.license = "The license for this file is unspecified."
# The header_tag is the name of the C preprocessor define
# used to prevent multiple inclusion. Typically only
# generated C header files need this to be set. Setting it
# causes code to be generated automatically in printHeader
# and printFooter.
self.header_tag = None
# List of file-private defines that must be undefined at the
# end of the file. This can be used in header files to define
# names for use in the file, then undefine them at the end of
# the header file.
self.undef_list = []
return
def Print(self, api):
self.printHeader()
self.printBody(api)
self.printFooter()
return
def printHeader(self):
"""Print the header associated with all files and call the printRealHeader method."""
print '/* DO NOT EDIT - This file generated automatically by %s script */' \
% (self.name)
print ''
print '/*'
print ' * ' + self.license.replace('\n', '\n * ')
print ' */'
print ''
if self.header_tag:
print '#if !defined( %s )' % (self.header_tag)
print '# define %s' % (self.header_tag)
print ''
self.printRealHeader();
return
def printFooter(self):
"""Print the header associated with all files and call the printRealFooter method."""
self.printRealFooter()
if self.undef_list:
print ''
for u in self.undef_list:
print "# undef %s" % (u)
if self.header_tag:
print ''
print '#endif /* !defined( %s ) */' % (self.header_tag)
def printRealHeader(self):
"""Print the "real" header for the created file.
In the base class, this function is empty. All derived
classes should over-ride this function."""
return
def printRealFooter(self):
"""Print the "real" footer for the created file.
In the base class, this function is empty. All derived
classes should over-ride this function."""
return
def printPure(self):
"""Conditionally define `PURE' function attribute.
Conditionally defines a preprocessor macro `PURE' that wraps
GCC's `pure' function attribute. The conditional code can be
easilly adapted to other compilers that support a similar
feature.
The name is also added to the file's undef_list.
"""
self.undef_list.append("PURE")
print """# if defined(__GNUC__) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590))
# define PURE __attribute__((pure))
# else
# define PURE
# endif"""
return
def printFastcall(self):
"""Conditionally define `FASTCALL' function attribute.
Conditionally defines a preprocessor macro `FASTCALL' that
wraps GCC's `fastcall' function attribute. The conditional
code can be easilly adapted to other compilers that support a
similar feature.
The name is also added to the file's undef_list.
"""
self.undef_list.append("FASTCALL")
print """# if defined(__i386__) && defined(__GNUC__) && !defined(__CYGWIN__) && !defined(__MINGW32__)
# define FASTCALL __attribute__((fastcall))
# else
# define FASTCALL
# endif"""
return
def printVisibility(self, S, s):
"""Conditionally define visibility function attribute.
Conditionally defines a preprocessor macro name S that wraps
GCC's visibility function attribute. The visibility used is
        the parameter s.  The conditional code can be easily adapted
to other compilers that support a similar feature.
The name is also added to the file's undef_list.
"""
self.undef_list.append(S)
print """# if (defined(__GNUC__) && !defined(__CYGWIN__) && !defined(__MINGW32__)) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590) && defined(__ELF__))
# define %s __attribute__((visibility("%s")))
# else
# define %s
# endif""" % (S, s, S)
return
def printNoinline(self):
"""Conditionally define `NOINLINE' function attribute.
Conditionally defines a preprocessor macro `NOINLINE' that
wraps GCC's `noinline' function attribute. The conditional
        code can be easily adapted to other compilers that support a
similar feature.
The name is also added to the file's undef_list.
"""
self.undef_list.append("NOINLINE")
print """# if defined(__GNUC__) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590))
# define NOINLINE __attribute__((noinline))
# else
# define NOINLINE
# endif"""
return
def real_function_name(element):
name = element.nsProp( "name", None )
alias = element.nsProp( "alias", None )
if alias:
return alias
else:
return name
def real_category_name(c):
if re.compile("[1-9][0-9]*[.][0-9]+").match(c):
return "GL_VERSION_" + c.replace(".", "_")
else:
return c
def classify_category(name, number):
"""Based on the category name and number, select a numerical class for it.
Categories are divided into four classes numbered 0 through 3. The
classes are:
0. Core GL versions, sorted by version number.
1. ARB extensions, sorted by extension number.
2. Non-ARB extensions, sorted by extension number.
3. Un-numbered extensions, sorted by extension name.
"""
try:
core_version = float(name)
except Exception,e:
core_version = 0.0
if core_version > 0.0:
cat_type = 0
key = name
elif name.startswith("GL_ARB_") or name.startswith("GLX_ARB_") or name.startswith("WGL_ARB_"):
cat_type = 1
key = int(number)
else:
if number != None:
cat_type = 2
key = int(number)
else:
cat_type = 3
key = name
return [cat_type, key]
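# Illustrative check (added for clarity, not part of the original script); the
# category names and numbers below are assumptions chosen to hit each class.
def _example_classify_category():
    assert classify_category("1.5", None) == [0, "1.5"]                          # core GL version
    assert classify_category("GL_ARB_multitexture", "1") == [1, 1]               # ARB extension
    assert classify_category("GL_NV_half_float", "283") == [2, 283]              # numbered non-ARB
    assert classify_category("GL_SGIX_example", None) == [3, "GL_SGIX_example"]  # un-numbered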
def create_parameter_string(parameters, include_names):
"""Create a parameter string from a list of gl_parameters."""
list = []
for p in parameters:
if p.is_padding:
continue
if include_names:
list.append( p.string() )
else:
list.append( p.type_string() )
if len(list) == 0: list = ["void"]
return string.join(list, ", ")
class gl_item:
def __init__(self, element, context):
self.context = context
self.name = element.nsProp( "name", None )
self.category = real_category_name( element.parent.nsProp( "name", None ) )
return
class gl_type( gl_item ):
def __init__(self, element, context):
gl_item.__init__(self, element, context)
self.size = int( element.nsProp( "size", None ), 0 )
te = typeexpr.type_expression( None )
tn = typeexpr.type_node()
tn.size = int( element.nsProp( "size", None ), 0 )
tn.integer = not is_attr_true( element, "float" )
tn.unsigned = is_attr_true( element, "unsigned" )
tn.name = "GL" + self.name
te.set_base_type_node( tn )
self.type_expr = te
return
def get_type_expression(self):
return self.type_expr
class gl_enum( gl_item ):
def __init__(self, element, context):
gl_item.__init__(self, element, context)
self.value = int( element.nsProp( "value", None ), 0 )
temp = element.nsProp( "count", None )
if not temp or temp == "?":
self.default_count = -1
else:
try:
c = int(temp)
except Exception,e:
                raise RuntimeError('Invalid count value "%s" for enum "%s" when an integer was expected.' % (temp, self.name))
self.default_count = c
return
def priority(self):
"""Calculate a 'priority' for this enum name.
When an enum is looked up by number, there may be many
possible names, but only one is the 'prefered' name. The
priority is used to select which name is the 'best'.
Highest precedence is given to core GL name. ARB extension
names have the next highest, followed by EXT extension names.
Vendor extension names are the lowest.
"""
if self.name.endswith( "_BIT" ):
bias = 1
else:
bias = 0
if self.category.startswith( "GL_VERSION_" ):
priority = 0
elif self.category.startswith( "GL_ARB_" ):
priority = 2
elif self.category.startswith( "GL_EXT_" ):
priority = 4
else:
priority = 6
return priority + bias
class gl_parameter:
def __init__(self, element, context):
self.name = element.nsProp( "name", None )
ts = element.nsProp( "type", None )
self.type_expr = typeexpr.type_expression( ts, context )
temp = element.nsProp( "variable_param", None )
if temp:
self.count_parameter_list = temp.split( ' ' )
else:
self.count_parameter_list = []
# The count tag can be either a numeric string or the name of
# a variable. If it is the name of a variable, the int(c)
# statement will throw an exception, and the except block will
# take over.
c = element.nsProp( "count", None )
try:
count = int(c)
self.count = count
self.counter = None
except Exception,e:
count = 1
self.count = 0
self.counter = c
self.count_scale = int(element.nsProp( "count_scale", None ))
elements = (count * self.count_scale)
if elements == 1:
elements = 0
#if ts == "GLdouble":
# print '/* stack size -> %s = %u (before)*/' % (self.name, self.type_expr.get_stack_size())
# print '/* # elements = %u */' % (elements)
self.type_expr.set_elements( elements )
#if ts == "GLdouble":
# print '/* stack size -> %s = %u (after) */' % (self.name, self.type_expr.get_stack_size())
self.is_client_only = is_attr_true( element, 'client_only' )
self.is_counter = is_attr_true( element, 'counter' )
self.is_output = is_attr_true( element, 'output' )
# Pixel data has special parameters.
self.width = element.nsProp('img_width', None)
self.height = element.nsProp('img_height', None)
self.depth = element.nsProp('img_depth', None)
self.extent = element.nsProp('img_extent', None)
self.img_xoff = element.nsProp('img_xoff', None)
self.img_yoff = element.nsProp('img_yoff', None)
self.img_zoff = element.nsProp('img_zoff', None)
self.img_woff = element.nsProp('img_woff', None)
self.img_format = element.nsProp('img_format', None)
self.img_type = element.nsProp('img_type', None)
self.img_target = element.nsProp('img_target', None)
self.img_pad_dimensions = is_attr_true( element, 'img_pad_dimensions' )
self.img_null_flag = is_attr_true( element, 'img_null_flag' )
self.img_send_null = is_attr_true( element, 'img_send_null' )
self.is_padding = is_attr_true( element, 'padding' )
return
def compatible(self, other):
return 1
def is_array(self):
return self.is_pointer()
def is_pointer(self):
return self.type_expr.is_pointer()
def is_image(self):
if self.width:
return 1
else:
return 0
def is_variable_length(self):
return len(self.count_parameter_list) or self.counter
def is_64_bit(self):
count = self.type_expr.get_element_count()
if count:
if (self.size() / count) == 8:
return 1
else:
if self.size() == 8:
return 1
return 0
def string(self):
return self.type_expr.original_string + " " + self.name
def type_string(self):
return self.type_expr.original_string
def get_base_type_string(self):
return self.type_expr.get_base_name()
def get_dimensions(self):
if not self.width:
return [ 0, "0", "0", "0", "0" ]
dim = 1
w = self.width
h = "1"
d = "1"
e = "1"
if self.height:
dim = 2
h = self.height
if self.depth:
dim = 3
d = self.depth
if self.extent:
dim = 4
e = self.extent
return [ dim, w, h, d, e ]
def get_stack_size(self):
return self.type_expr.get_stack_size()
def size(self):
if self.is_image():
return 0
else:
return self.type_expr.get_element_size()
def get_element_count(self):
c = self.type_expr.get_element_count()
if c == 0:
return 1
return c
def size_string(self, use_parens = 1):
s = self.size()
if self.counter or self.count_parameter_list:
list = [ "compsize" ]
if self.counter and self.count_parameter_list:
list.append( self.counter )
elif self.counter:
list = [ self.counter ]
if s > 1:
list.append( str(s) )
if len(list) > 1 and use_parens :
return "(%s)" % (string.join(list, " * "))
else:
return string.join(list, " * ")
elif self.is_image():
return "compsize"
else:
return str(s)
def format_string(self):
if self.type_expr.original_string == "GLenum":
return "0x%x"
else:
return self.type_expr.format_string()
class gl_function( gl_item ):
def __init__(self, element, context):
self.context = context
self.name = None
self.entry_points = []
self.return_type = "void"
self.parameters = []
self.offset = -1
self.initialized = 0
self.images = []
self.assign_offset = 0
self.static_entry_points = []
# Track the parameter string (for the function prototype)
# for each entry-point. This is done because some functions
# change their prototype slightly when promoted from extension
# to ARB extension to core. glTexImage3DEXT and glTexImage3D
# are good examples of this. Scripts that need to generate
# code for these differing aliases need to real prototype
# for each entry-point. Otherwise, they may generate code
# that won't compile.
self.entry_point_parameters = {}
self.process_element( element )
return
def process_element(self, element):
name = element.nsProp( "name", None )
alias = element.nsProp( "alias", None )
if is_attr_true(element, "static_dispatch"):
self.static_entry_points.append(name)
self.entry_points.append( name )
if alias:
true_name = alias
else:
true_name = name
# Only try to set the offset when a non-alias
# entry-point is being processes.
offset = element.nsProp( "offset", None )
if offset:
try:
o = int( offset )
self.offset = o
except Exception, e:
self.offset = -1
if offset == "assign":
self.assign_offset = 1
if not self.name:
self.name = true_name
elif self.name != true_name:
raise RuntimeError("Function true name redefined. Was %s, now %s." % (self.name, true_name))
# There are two possible cases. The first time an entry-point
# with data is seen, self.initialized will be 0. On that
# pass, we just fill in the data. The next time an
# entry-point with data is seen, self.initialized will be 1.
# On that pass we have to make that the new values match the
        # values from the previous entry-point.
parameters = []
return_type = "void"
child = element.children
while child:
if child.type == "element":
if child.name == "return":
return_type = child.nsProp( "type", None )
elif child.name == "param":
param = self.context.factory.create_item( "parameter", child, self.context)
parameters.append( param )
child = child.next
if self.initialized:
if self.return_type != return_type:
raise RuntimeError( "Return type changed in %s. Was %s, now %s." % (name, self.return_type, return_type))
if len(parameters) != len(self.parameters):
raise RuntimeError( "Parameter count mismatch in %s. Was %d, now %d." % (name, len(self.parameters), len(parameters)))
for j in range(0, len(parameters)):
p1 = parameters[j]
p2 = self.parameters[j]
if not p1.compatible( p2 ):
raise RuntimeError( 'Parameter type mismatch in %s. "%s" was "%s", now "%s".' % (name, p2.name, p2.type_expr.original_string, p1.type_expr.original_string))
if true_name == name or not self.initialized:
self.return_type = return_type
self.parameters = parameters
for param in self.parameters:
if param.is_image():
self.images.append( param )
if element.children:
self.initialized = 1
self.entry_point_parameters[name] = parameters
else:
self.entry_point_parameters[name] = []
return
def filter_entry_points(self, entry_point_list):
"""Filter out entry points not in entry_point_list."""
if not self.initialized:
raise RuntimeError('%s is not initialized yet' % self.name)
entry_points = []
for ent in self.entry_points:
if ent not in entry_point_list:
if ent in self.static_entry_points:
self.static_entry_points.remove(ent)
self.entry_point_parameters.pop(ent)
else:
entry_points.append(ent)
if not entry_points:
raise RuntimeError('%s has no entry point after filtering' % self.name)
self.entry_points = entry_points
if self.name not in entry_points:
# use the first remaining entry point
self.name = entry_points[0]
self.parameters = self.entry_point_parameters[entry_points[0]]
def get_images(self):
"""Return potentially empty list of input images."""
return self.images
def parameterIterator(self):
return self.parameters.__iter__();
def get_parameter_string(self, entrypoint = None):
if entrypoint:
params = self.entry_point_parameters[ entrypoint ]
else:
params = self.parameters
return create_parameter_string( params, 1 )
def get_called_parameter_string(self):
p_string = ""
comma = ""
for p in self.parameterIterator():
p_string = p_string + comma + p.name
comma = ", "
return p_string
def is_abi(self):
return (self.offset >= 0 and not self.assign_offset)
def is_static_entry_point(self, name):
return name in self.static_entry_points
def dispatch_name(self):
if self.name in self.static_entry_points:
return self.name
else:
return "_dispatch_stub_%u" % (self.offset)
def static_name(self, name):
if name in self.static_entry_points:
return name
else:
return "_dispatch_stub_%u" % (self.offset)
class gl_item_factory:
"""Factory to create objects derived from gl_item."""
def create_item(self, item_name, element, context):
if item_name == "function":
return gl_function(element, context)
if item_name == "type":
return gl_type(element, context)
elif item_name == "enum":
return gl_enum(element, context)
elif item_name == "parameter":
return gl_parameter(element, context)
elif item_name == "api":
return gl_api(self)
else:
return None
class gl_api:
def __init__(self, factory):
self.functions_by_name = {}
self.enums_by_name = {}
self.types_by_name = {}
self.category_dict = {}
self.categories = [{}, {}, {}, {}]
self.factory = factory
self.next_offset = 0
typeexpr.create_initial_types()
return
def filter_functions(self, entry_point_list):
"""Filter out entry points not in entry_point_list."""
functions_by_name = {}
for func in self.functions_by_name.itervalues():
entry_points = [ent for ent in func.entry_points if ent in entry_point_list]
if entry_points:
func.filter_entry_points(entry_points)
functions_by_name[func.name] = func
self.functions_by_name = functions_by_name
def process_element(self, doc):
element = doc.children
while element.type != "element" or element.name != "OpenGLAPI":
element = element.next
if element:
self.process_OpenGLAPI(element)
return
def process_OpenGLAPI(self, element):
child = element.children
while child:
if child.type == "element":
if child.name == "category":
self.process_category( child )
elif child.name == "OpenGLAPI":
self.process_OpenGLAPI( child )
child = child.next
return
def process_category(self, cat):
cat_name = cat.nsProp( "name", None )
cat_number = cat.nsProp( "number", None )
[cat_type, key] = classify_category(cat_name, cat_number)
self.categories[cat_type][key] = [cat_name, cat_number]
child = cat.children
while child:
if child.type == "element":
if child.name == "function":
func_name = real_function_name( child )
temp_name = child.nsProp( "name", None )
self.category_dict[ temp_name ] = [cat_name, cat_number]
if self.functions_by_name.has_key( func_name ):
func = self.functions_by_name[ func_name ]
func.process_element( child )
else:
func = self.factory.create_item( "function", child, self )
self.functions_by_name[ func_name ] = func
if func.offset >= self.next_offset:
self.next_offset = func.offset + 1
elif child.name == "enum":
enum = self.factory.create_item( "enum", child, self )
self.enums_by_name[ enum.name ] = enum
elif child.name == "type":
t = self.factory.create_item( "type", child, self )
self.types_by_name[ "GL" + t.name ] = t
child = child.next
return
def functionIterateByCategory(self, cat = None):
"""Iterate over functions by category.
If cat is None, all known functions are iterated in category
order. See classify_category for details of the ordering.
Within a category, functions are sorted by name. If cat is
not None, then only functions in that category are iterated.
"""
lists = [{}, {}, {}, {}]
for func in self.functionIterateAll():
[cat_name, cat_number] = self.category_dict[func.name]
if (cat == None) or (cat == cat_name):
[func_cat_type, key] = classify_category(cat_name, cat_number)
if not lists[func_cat_type].has_key(key):
lists[func_cat_type][key] = {}
lists[func_cat_type][key][func.name] = func
functions = []
for func_cat_type in range(0,4):
keys = lists[func_cat_type].keys()
keys.sort()
for key in keys:
names = lists[func_cat_type][key].keys()
names.sort()
for name in names:
functions.append(lists[func_cat_type][key][name])
return functions.__iter__()
def functionIterateByOffset(self):
max_offset = -1
for func in self.functions_by_name.itervalues():
if func.offset > max_offset:
max_offset = func.offset
temp = [None for i in range(0, max_offset + 1)]
for func in self.functions_by_name.itervalues():
if func.offset != -1:
temp[ func.offset ] = func
list = []
for i in range(0, max_offset + 1):
if temp[i]:
list.append(temp[i])
return list.__iter__();
def functionIterateAll(self):
return self.functions_by_name.itervalues()
def enumIterateByName(self):
keys = self.enums_by_name.keys()
keys.sort()
list = []
for enum in keys:
list.append( self.enums_by_name[ enum ] )
return list.__iter__()
def categoryIterate(self):
"""Iterate over categories.
Iterate over all known categories in the order specified by
classify_category. Each iterated value is a tuple of the
name and number (which may be None) of the category.
"""
list = []
for cat_type in range(0,4):
keys = self.categories[cat_type].keys()
keys.sort()
for key in keys:
list.append(self.categories[cat_type][key])
return list.__iter__()
def get_category_for_name( self, name ):
if self.category_dict.has_key(name):
return self.category_dict[name]
else:
return ["<unknown category>", None]
def typeIterate(self):
return self.types_by_name.itervalues()
def find_type( self, type_name ):
if type_name in self.types_by_name:
return self.types_by_name[ type_name ].type_expr
else:
print "Unable to find base type matching \"%s\"." % (type_name)
return None
| bsd-2-clause | 8,469,495,609,065,517,000 | 24.985986 | 210 | 0.667874 | false | 3.147048 | false | false | false |
wesleywerner/conspire | src/const.py | 1 | 7542 | STATE_MENU = 1
STATE_BUILD = 2
STATE_UFO = 3
STATE_FLIGHT = 4
STATE_RESULTS = 5
STATE_END = 100
# parts available for use per level number.
LEVEL_PARTS = {
1: ('tax returns', 'shopping list', 'todo list',
'ludum dare comments', 'bank accounts',
'website passwords', 'IP address scamlist',
),
2: ('human torso', 'human head',
'human left arm', 'human right arm',
'human left leg', 'human right leg',
'website passwords', 'todo list', 'alien left leg',
),
3: ('alien torso', 'alien head',
'alien left arm', 'alien right arm',
'alien left leg', 'alien right leg',
'human torso', 'human head', 'human left arm',
),
4: ('codex page I', 'codex page II', 'codex page III',
'codex page IV', 'codex page V', 'codex page VI',
'biblical references', 'book of psalms', 'book of tomas',
'todo list', 'ludum dare comments'
),
5: ('ptreodactyl torso', 'ptreodactyl skull',
'ptreodactyl right wing', 'ptreodactyl left wing',
'cyclops right arm', 'cyclops left leg',
'human left arm', 'human left leg',
),
6: ('alien torso', 'alien head',
'alien left arm', 'alien right arm',
'alien left leg', 'alien right leg',
'trex torso', 'trex head', 'trex tail', 'trex legs',
'human torso', 'human head',
'human left arm', 'human right arm',
'human left leg', 'human right leg',
),
}
#500: ('cyclops torso', 'cyclops skull',
#'cyclops right arm', 'cyclops left arm',
#'cyclops right leg', 'cyclops left leg',
#),
# parts a level may start off with as in use
DEFAULT_PARTS = {
1: ('shopping list', 'ludum dare comments'),
3: ('human torso', 'human head', 'human left arm',),
4: ('todo list', 'codex page I',),
5: ('ptreodactyl left wing', 'cyclops right arm', 'human left leg',),
}
# parts required to complete a level
COMPLETION_PARTS = {
1: ('tax returns', 'bank accounts',
'website passwords', 'IP address scamlist',
),
2: ('human torso', 'human head',
'human left arm', 'human right arm',
'human left leg', 'human right leg',
),
3: ('alien torso', 'alien head',
'alien left arm', 'alien right arm',
'alien left leg', 'alien right leg',
),
4: ('codex page I', 'codex page II', 'codex page III',
'codex page IV', 'codex page V', 'codex page VI',
),
5: ('ptreodactyl torso', 'ptreodactyl skull',
'ptreodactyl right wing', 'ptreodactyl left wing',
),
6: ('alien torso', 'alien head',
'alien left arm', 'alien right arm',
'alien left leg', 'alien right leg',
'trex torso', 'trex head', 'trex tail', 'trex legs'
),
}
LEVEL_SCENARIOS = (
"skip",
"Welcome to Conspiracy 101, agent!" \
"\n" \
"Use your arrows or mouse wheel to scroll through this briefing." \
"\n" \
"We represent certain global interests. " \
"These interests are kept hidden from the public, they are " \
"privately funded and have access to the top levels government." \
"\n" \
"To start you off, we need to forge some papers. Our goal is to " \
"discredit a high ranking official who is refusing cooperation " \
"with our fine organization. We hope this move will make him " \
"reconsider." \
"\n" \
"Compile fake papers for tax returns and bank accounts. " \
"These figures match up to a internet scamming operation this " \
"individual is apparently running, so include lists of website " \
"passwords and IP addresses." \
"\n" \
"Do this by placing the correct items in the green area below. " \
"When you are done proceed to plant the evidence, " \
"the evidence will be carried by aircraft via remote control. " \
"At a critical moment, you will force crash the craft, ensuring " \
"the evidence is discovered." \
"\n" \
"Good luck, and welcome to the team, Agent!",
"...anyway, welcome back, Agent! We have a situation..." \
"\n" \
"A problematic official is suspected of being a double agent. " \
"We are going to make him disappear from public by faking his death, " \
"while keeping him under ground for 'questioning'." \
"\n" \
"Construct a fake human body, as accurately as possbile. " \
"The body will be found at the air crash site you will coordinate. " \
"\n" \
"Report back after the mission for debriefing.",
"We are in the business of predicting the future, Agent! " \
"And the best way to predict the future, is to invent it!" \
"\n" \
"We have a situation with one of our underground testing fascilities, " \
"rumors are spreading of it's existence, and we cannot allow this. " \
"We need a distraction for the media, and conspiracy theorists love " \
"nothing more than a good alien story! " \
"\n" \
"Contruct a faux alien corpse, you will be flying it remotely in " \
"one of our top-secret super experimental aircraft. Remember to " \
"down it in the green zone for optimal mission success." \
"\n" \
"Well what are you waiting for, you are not in preschool " \
"any more!",
"A pertinent problem has arisen regarding the highest persons " \
"in state, specifically the ones involved in the secular movements. " \
"\n" \
"In exchange for invaluable information from these informants we " \
"are doing them a favor." \
"\n" \
"Construct a faux codex book to decode ancient texts. " \
"This will gain leverage over the public." \
"\n" \
"The codex will be taken from the crash site, by a fellow agent, " \
"and secured in another location for discovery. " \
"\n" \
"Remember to dot your i's and cross your t's.",
"Our friends over at the dept. of evolution made a very generous " \
"donation in our behalf, gaining notoriety with very important " \
"higher-ups. " \
"\n" \
"As a sign of good faith, you will construct a pterodactyl skeleton " \
"to be flown out to an undisclosed location." \
"\n" \
"This will serve their agenda nicely.",
"Good day, Agent!" \
"\n" \
"Today is a very special day, we are going to combine the ultimate " \
"of myths. A lot of people are unsure about the existence of " \
"extra terrestrials, and many others the same about dinosaurs. " \
"We will play on their uncertainty of both and create a story " \
"to distract everybody from what is really going on in the world! " \
"\n" \
"You must construct one alien corpse, and one T-rex skeleton. " \
"The cover story is that dinosaurs were in fact alien of origin." \
"\n" \
"Keep this up, Agent, and one day you'll be the one making up these stories!",
)
# affects the wording used in reports.
# try these:
# political, myth
SCENARIO_TYPE = (
'skip',
'political',
'political',
'myth',
'myth',
'myth',
'myth',
'',
)
# determine the type of the item to build, maps to levels.
ITEM_TYPES = (
'skip',
'documents',
'high ranking officials body',
'alien corpse',
'biblical codex',
'pterodactyl skeleton',
'alien corpse and T-rex skeleton',
'',
)
# determine the method of evidence deployment
TACTICAL_TYPE = (
0,
STATE_FLIGHT,
STATE_FLIGHT,
STATE_UFO,
STATE_FLIGHT,
STATE_FLIGHT,
STATE_UFO,
)
| gpl-3.0 | 5,350,094,556,457,263,000 | 34.408451 | 82 | 0.609122 | false | 3.386619 | false | false | false |
vIiRuS/Lagerregal | devices/migrations/0002_auto_20151105_0513.py | 1 | 5237 | from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('users', '0006_auto_20151105_0513'),
('devices', '0001_initial'),
('devicetypes', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('devicegroups', '0001_initial'),
('locations', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='template',
name='devicetype',
field=models.ForeignKey(blank=True, to='devicetypes.Type', null=True, on_delete=models.CASCADE),
),
migrations.AddField(
model_name='template',
name='manufacturer',
field=models.ForeignKey(blank=True, to='devices.Manufacturer', null=True, on_delete=models.CASCADE),
),
migrations.AddField(
model_name='room',
name='building',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, to='devices.Building', null=True),
),
migrations.AddField(
model_name='room',
name='section',
field=models.ForeignKey(related_name='rooms', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='locations.Section', null=True),
),
migrations.AddField(
model_name='picture',
name='device',
field=models.ForeignKey(related_name='pictures', to='devices.Device', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='note',
name='creator',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='note',
name='device',
field=models.ForeignKey(related_name='notes', to='devices.Device', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='lending',
name='device',
field=models.ForeignKey(blank=True, to='devices.Device', null=True, on_delete=models.CASCADE),
),
migrations.AddField(
model_name='lending',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Lent to', to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='deviceinformation',
name='device',
field=models.ForeignKey(related_name='information', to='devices.Device', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='deviceinformation',
name='infotype',
field=models.ForeignKey(to='devices.DeviceInformationType', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='device',
name='bookmarkers',
field=models.ManyToManyField(related_name='bookmarks', null=True, through='devices.Bookmark', to=settings.AUTH_USER_MODEL, blank=True),
),
migrations.AddField(
model_name='device',
name='creator',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True),
),
migrations.AddField(
model_name='device',
name='currentlending',
field=models.ForeignKey(related_name='currentdevice', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='devices.Lending', null=True),
),
migrations.AddField(
model_name='device',
name='department',
field=models.ForeignKey(related_name='devices', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='users.Department', null=True),
),
migrations.AddField(
model_name='device',
name='devicetype',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='devicetypes.Type', null=True),
),
migrations.AddField(
model_name='device',
name='group',
field=models.ForeignKey(related_name='devices', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='devicegroups.Devicegroup', null=True),
),
migrations.AddField(
model_name='device',
name='manufacturer',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='devices.Manufacturer', null=True),
),
migrations.AddField(
model_name='device',
name='room',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='devices.Room', null=True),
),
migrations.AddField(
model_name='bookmark',
name='device',
field=models.ForeignKey(to='devices.Device', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='bookmark',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
),
]
| bsd-3-clause | 7,381,653,905,418,718,000 | 41.577236 | 160 | 0.596525 | false | 4.209807 | false | false | false |
akshaybabloo/Car-ND | Project_5/laneline.py | 1 | 21371 | import cv2
import glob
import pickle
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from sklearn.metrics import mean_squared_error
x_cor = 9 #Number of corners to find
y_cor = 6
# Prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((y_cor*x_cor,3), np.float32)
objp[:,:2] = np.mgrid[0:x_cor, 0:y_cor].T.reshape(-1,2)
def camera_cal():
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
images = glob.glob('camera_cal/calibration*.jpg') # Make a list of paths to calibration images
# Step through the list and search for chessboard corners
corners_not_found = [] #Calibration images in which opencv failed to find corners
for idx, fname in enumerate(images):
img = cv2.imread(fname)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # Convert to grayscale
ret, corners = cv2.findChessboardCorners(gray, (x_cor,y_cor), None) # Find the chessboard corners
# If found, add object points, image points
if ret == True:
objpoints.append(objp)
imgpoints.append(corners)
else:
corners_not_found.append(fname)
print 'Corners were found on', str(len(imgpoints)), 'out of', str(len(images)), 'it is', str(len(imgpoints)*100.0/len(images)),'% of calibration images'
img_size = (img.shape[1], img.shape[0])
# Do camera calibration given object points and image points
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, img_size,None,None)
return mtx, dist
mtx, dist = camera_cal()
def undistort(img):
return cv2.undistort(img, mtx, dist, None, mtx)
def eq_Hist(img): # Histogram normalization
img[:, :, 0] = cv2.equalizeHist(img[:, :, 0])
img[:, :, 1] = cv2.equalizeHist(img[:, :, 1])
img[:, :, 2] = cv2.equalizeHist(img[:, :, 2])
return img
# Sobel
def sobel_img(img, thresh_min = 25, thresh_max = 255, sobel_kernel = 11):
sobelx = np.absolute(cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=sobel_kernel))
sobely = np.absolute(cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=sobel_kernel))
scaled_sobelx = np.uint16(255*sobelx/np.max(sobelx))
scaled_sobely = np.uint16(255*sobely/np.max(sobely))
sobel_sum = scaled_sobelx+0.2*scaled_sobely
scaled_sobel_sum = np.uint8(255*sobel_sum/np.max(sobel_sum))
sum_binary = np.zeros_like(scaled_sobel_sum)
sum_binary[(scaled_sobel_sum >= thresh_min) & (scaled_sobel_sum <= thresh_max)] = 1
return sum_binary
# Sobel magnitude
def sobel_mag_img(img, thresh_min = 25, thresh_max = 255, sobel_kernel = 11):
sobelx = np.absolute(cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=sobel_kernel))
sobely = np.absolute(cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=sobel_kernel))
gradmag = np.sqrt(sobelx**2 + sobely**2)
scaled_gradmag = np.uint8(255*gradmag/np.max(gradmag))
gradmag_binary = np.zeros_like(scaled_gradmag)
gradmag_binary[(scaled_gradmag >= thresh_min) & (scaled_gradmag <= thresh_max)] = 1
return gradmag_binary
# Sobel direction
def sobel_dir_img(img, thresh_min = 0.0, thresh_max = 1.5, sobel_kernel = 11):
sobelx = np.absolute(cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=sobel_kernel))
sobely = np.absolute(cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=sobel_kernel))
graddir = np.arctan2(sobely, sobelx)
graddir_binary = np.zeros_like(graddir)
graddir_binary[(graddir >= thresh_min) & (graddir <= thresh_max)] = 1
return graddir_binary
# Binary red channel threshold
def red_thres(img, thresh_min = 25, thresh_max = 255):
red = img[:,:,2]
red_binary = np.zeros_like(red)
red_binary[(red >= thresh_min) & (red <= thresh_max)] = 1
return red_binary
# Binary saturation channel threshold
def s_thres(img, thresh_min = 25, thresh_max = 255):
hls = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)
s_channel = hls[:,:,2]
s_binary = np.zeros_like(s_channel)
s_binary[(s_channel > thresh_min) & (s_channel <= thresh_max)] = 1
return s_binary
# Return saturation channel
def s_hls(img):
hls = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)
return hls[:,:,2]
IMAGE_H = 223
IMAGE_W = 1280
# Sharpen image
def sharpen_img(img):
gb = cv2.GaussianBlur(img, (5,5), 20.0)
return cv2.addWeighted(img, 2, gb, -1, 0)
# Compute linear image transformation img*s+m
def lin_img(img,s=1.0,m=0.0):
img2=cv2.multiply(img, np.array([s]))
return cv2.add(img2, np.array([m]))
# Change image contrast; s>1 - increase
def contr_img(img, s=1.0):
m=127.0*(1.0-s)
return lin_img(img, s, m)
# Create perspective image transformation matrices
def create_M():
src = np.float32([[0, 673], [1207, 673], [0, 450], [1280, 450]])
dst = np.float32([[569, 223], [711, 223], [0, 0], [1280, 0]])
M = cv2.getPerspectiveTransform(src, dst)
Minv = cv2.getPerspectiveTransform(dst, src)
return M, Minv
# Main image transformation routine to get a warped image
def transform(img, M):
undist = undistort(img)
img_size = (1280, 223)
warped = cv2.warpPerspective(undist, M, img_size)
warped = sharpen_img(warped)
warped = contr_img(warped, 1.1)
return warped
# Show original and warped image side by side
def show_warped(img, M):
f, (plot1, plot2) = plt.subplots(1, 2, figsize=(9, 3))
plot1.imshow(cv2.cvtColor(undistort(img), cv2.COLOR_BGR2RGB))
plot1.set_title('Undistorted', fontsize=20)
plot2.imshow(cv2.cvtColor(transform(img, M), cv2.COLOR_BGR2RGB))
plot2.set_title('Warped', fontsize=20)
# Show one image
def show_img(img):
if len(img.shape)==3:
plt.figure()
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
else:
plt.figure()
plt.imshow(img, cmap='gray')
M, Minv = create_M()
#Calculate coefficients of polynom in y+h coordinates, i.e. f(y) -> f(y+h)
def pol_shift(pol, h):
    pol_ord = len(pol)-1 # Determine the degree of the polynomial
if pol_ord == 3:
pol0 = pol[0]
pol1 = pol[1] + 3.0*pol[0]*h
pol2 = pol[2] + 3.0*pol[0]*h*h + 2.0*pol[1]*h
pol3 = pol[3] + pol[0]*h*h*h + pol[1]*h*h + pol[2]*h
return(np.array([pol0, pol1, pol2, pol3]))
if pol_ord == 2:
pol0 = pol[0]
pol1 = pol[1] + 2.0*pol[0]*h
pol2 = pol[2] + pol[0]*h*h+pol[1]*h
return(np.array([pol0, pol1, pol2]))
if pol_ord == 1:
pol0 = pol[0]
pol1 = pol[1] + pol[0]*h
return(np.array([pol0, pol1]))
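# Illustrative check (added, not part of the original project): shifting a
# quadratic by h and evaluating at y should match the original evaluated at y+h.
def _example_pol_shift_check():
    pol = np.array([1.0, -2.0, 0.5])  # f(y) = y**2 - 2*y + 0.5
    shifted = pol_shift(pol, 0.1)
    assert np.isclose(np.polyval(shifted, 0.3), np.polyval(pol, 0.4))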
# Calculate derivative for a polynom pol in a point x
def pol_d(pol, x):
pol_ord = len(pol)-1
if pol_ord == 3:
return 3.0*pol[0]*x*x+2.0*pol[1]*x+pol[2]
if pol_ord == 2:
return 2.0*pol[0]*x+pol[1]
if pol_ord == 1:
return pol[0]#*np.ones(len(np.array(x)))
# Calculate the second derivative for a polynom pol in a point x
def pol_dd(pol, x):
pol_ord = len(pol)-1
if pol_ord == 3:
return 6.0*pol[0]*x+2.0*pol[1]
if pol_ord == 2:
return 2.0*pol[0]
if pol_ord == 1:
return 0.0
# Calculate a polinomial value in a point x
def pol_calc(pol, x):
pol_f = np.poly1d(pol)
return(pol_f(x))
xm_in_px = 3.675 / 85 # Lane width (12 ft in m) is ~85 px on image
ym_in_px = 3.048 / 24 # Dashed line length (10 ft in m) is ~24 px on image
def px_to_m(px):
return xm_in_px*px
# Calculate offset from the lane center
def lane_offset(left, right):
offset = 1280/2.0-(pol_calc(left, 1.0)+ pol_calc(right, 1.0))/2.0
return px_to_m(offset)
# Calculate radius of curvature
MAX_RADIUS = 10000
def r_curv(pol, y):
if len(pol) == 2: # If the polinomial is a linear function
return MAX_RADIUS
else:
y_pol = np.linspace(0, 1, num=EQUID_POINTS)
x_pol = pol_calc(pol, y_pol)*xm_in_px
y_pol = y_pol*IMAGE_H*ym_in_px
pol = np.polyfit(y_pol, x_pol, len(pol)-1)
d_y = pol_d(pol, y)
dd_y = pol_dd(pol, y)
r = ((np.sqrt(1+d_y**2))**3)/abs(dd_y)
if r > MAX_RADIUS:
r = MAX_RADIUS
return r
def lane_curv(left, right):
l = r_curv(left, 1.0)
r = r_curv(right, 1.0)
if l < MAX_RADIUS and r < MAX_RADIUS:
return (r_curv(left, 1.0)+r_curv(right, 1.0))/2.0
else:
if l < MAX_RADIUS:
return l
if r < MAX_RADIUS:
return r
return MAX_RADIUS
#Calculate approximated equidistant to a parabola
EQUID_POINTS = 25 # Number of points to use for the equidistant approximation
def equidistant(pol, d, max_l = 1, plot = False):
y_pol = np.linspace(0, max_l, num=EQUID_POINTS)
x_pol = pol_calc(pol, y_pol)
    y_pol *= IMAGE_H # Convert y coordinates back to [0..223] scale
x_m = []
y_m = []
k_m = []
for i in range(len(x_pol)-1):
        x_m.append((x_pol[i+1]-x_pol[i])/2.0+x_pol[i]) # Calculate point positions between given points
y_m.append((y_pol[i+1]-y_pol[i])/2.0+y_pol[i])
if x_pol[i+1] == x_pol[i]:
            k_m.append(1e8) # A very big number
else:
k_m.append(-(y_pol[i+1]-y_pol[i])/(x_pol[i+1]-x_pol[i])) # Slope of perpendicular lines
x_m = np.array(x_m)
y_m = np.array(y_m)
k_m = np.array(k_m)
#Calculate equidistant points
y_eq = d*np.sqrt(1.0/(1+k_m**2))
x_eq = np.zeros_like(y_eq)
if d >= 0:
for i in range(len(x_m)):
if k_m[i] < 0:
y_eq[i] = y_m[i]-abs(y_eq[i])
else:
y_eq[i] = y_m[i]+abs(y_eq[i])
x_eq[i] = (x_m[i]-k_m[i]*y_m[i])+k_m[i]*y_eq[i]
else:
for i in range(len(x_m)):
if k_m[i] < 0:
y_eq[i] = y_m[i]+abs(y_eq[i])
else:
y_eq[i] = y_m[i]-abs(y_eq[i])
x_eq[i] = (x_m[i]-k_m[i]*y_m[i])+k_m[i]*y_eq[i]
y_eq /= IMAGE_H # Convert y coordinates back to [0..1] scale
y_pol /= IMAGE_H
y_m /= IMAGE_H
    pol_eq = np.polyfit(y_eq, x_eq, len(pol)-1) # Fit the equidistant with a polynomial
if plot:
plt.plot(x_pol, y_pol, color='red', linewidth=1, label = 'Original line') #Original line
plt.plot(x_eq, y_eq, color='green', linewidth=1, label = 'Equidistant') #Equidistant
plt.plot(pol_calc(pol_eq, y_pol), y_pol, color='blue',
linewidth=1, label = 'Approximation') #Approximation
plt.legend()
for i in range(len(x_m)):
plt.plot([x_m[i],x_eq[i]], [y_m[i],y_eq[i]], color='black', linewidth=1) #Draw connection lines
plt.savefig('readme_img/equid.jpg')
return pol_eq
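# Illustrative usage (added, not part of the original project): approximate a
# parallel line ~85 px (one lane width) to the right of a left-lane polynomial.
# The coefficients below are arbitrary assumptions in warped-image pixels.
def _example_equidistant():
    left_pol = np.array([10.0, -20.0, 640.0])  # x(y) for y in [0..1]
    right_pol = equidistant(left_pol, 85.0, max_l=1.0, plot=False)
    return right_pol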
DEV_POL = 2 # Max mean squared error of the approximation
MSE_DEV = 1.1 # Minimum mean squared error ratio to consider higher order of the polynomial
def best_pol_ord(x, y):
pol1 = np.polyfit(y,x,1)
pred1 = pol_calc(pol1, y)
mse1 = mean_squared_error(x, pred1)
if mse1 < DEV_POL:
return pol1, mse1
pol2 = np.polyfit(y,x,2)
pred2 = pol_calc(pol2, y)
mse2 = mean_squared_error(x, pred2)
if mse2 < DEV_POL or mse1/mse2 < MSE_DEV:
return pol2, mse2
else:
pol3 = np.polyfit(y,x,3)
pred3 = pol_calc(pol3, y)
mse3 = mean_squared_error(x, pred3)
if mse2/mse3 < MSE_DEV:
return pol2, mse2
else:
return pol3, mse3
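# Illustrative check (added, not part of the original project): points lying on
# a straight line should be fitted with a first-order polynomial and a tiny MSE.
def _example_best_pol_ord():
    y = np.linspace(0.0, 1.0, 20)
    x = 3.0 * y + 5.0
    pol, mse = best_pol_ord(x, y)
    assert len(pol) == 2 and mse < DEV_POL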
# Smooth polynomial functions of different degrees
def smooth_dif_ord(pol_p, x, y, new_ord):
x_p = pol_calc(pol_p, y)
x_new = (x+x_p)/2.0
return np.polyfit(y, x_new, new_ord)
# Calculate threshold for the left line
def thres_l_calc(sens):
thres = -0.0045*sens**2+1.7581*sens-115.0
if thres < 25*(382.0-sens)/382.0+5:
thres = 25*(382.0-sens)/382.0+5
return thres
# Calculate threshold for the right line
def thres_r_calc(sens):
thres = -0.0411*sens**2+9.1708*sens-430.0
if sens<210:
if thres < sens/6:
thres = sens/6
else:
if thres < 20:
thres = 20
return thres
WINDOW_SIZE = 15 # Half of the sensor span
DEV = 7 # Maximum of the point deviation from the sensor center
SPEED = 2 / IMAGE_H # Pixels shift per frame
POL_ORD = 2 # Default polynomial order
RANGE = 0.0 # Fraction of the image to skip
def find(img, left=True, p_ord=POL_ORD, pol = np.zeros(POL_ORD+1), max_n = 0):
x_pos = []
y_pos = []
max_l = img.shape[0] #number of lines in the img
for i in range(max_l-int(max_l*RANGE)):
y = max_l-i #Line number
y_01 = y / float(max_l) #y in [0..1] scale
        if abs(pol[-1]) > 0: #If it is not a still image or the first video frame
if y_01 >= max_n + SPEED: # If we can use pol to find center of the virtual sensor from the previous frame
cent = int(pol_calc(pol, y_01-SPEED))
if y == max_l:
if left:
cent = 605
else:
cent = 690
else: # Prolong the pol tangentially
k = pol_d(pol, max_n)
b = pol_calc(pol, max_n)-k*max_n
cent = int(k*y_01+b)
if cent > 1280-WINDOW_SIZE:
cent = 1280-WINDOW_SIZE
if cent < WINDOW_SIZE:
cent = WINDOW_SIZE
else: #If it is a still image
if len(x_pos) > 0: # If there are some points detected
                cent = x_pos[-1] # Use the previous point as the sensor center
else: #Initial guess on line position
if left:
cent = 605
else:
cent = 690
if left: #Subsample image
sens = 0.5*s_hls(img[max_l-1-i:max_l-i,cent-WINDOW_SIZE:cent+WINDOW_SIZE,:])\
+img[max_l-1-i:max_l-i,cent-WINDOW_SIZE:cent+WINDOW_SIZE,2]
else:
sens = img[max_l-1-i:max_l-i,cent-WINDOW_SIZE:cent+WINDOW_SIZE,2]
if len(sens[0,:]) < WINDOW_SIZE: #If we out of the image
break
x_max = max(sens[0,:]) #Find maximal value on the sensor
sens_mean = np.mean(sens[0,:])
# Get threshold
if left:
loc_thres = thres_l_calc(sens_mean)
loc_dev = DEV
else:
loc_thres = thres_r_calc(sens_mean)
loc_dev = DEV
if len(x_pos) == 0:
loc_dev = WINDOW_SIZE
if (x_max-sens_mean) > loc_thres and (x_max>100 or left):
if left:
x = list(reversed(sens[0,:])).index(x_max)
x = cent+WINDOW_SIZE-x
else:
x = list(sens[0,:]).index(x_max)
x = cent-WINDOW_SIZE+x
            if x-1 < 569.0*y_01 or x+1 > 569.0*y_01+711 or np.nonzero(sens[0,:]) < WINDOW_SIZE: #if the sensor touches the black triangle
break # We are done
            if abs(pol[-1]) < 1e-4: # If no polynomial was provided
x_pos.append(x)
y_pos.append(y_01)
else:
if abs(x-cent) < loc_dev:#*14.206*r_curv(pol, max_l)**-0.2869:
x_pos.append(x)
y_pos.append(y_01)
if len(x_pos) > 1:
return x_pos, y_pos
else:
return [0], [0.0]
RANGE = 0.0
def get_lane(img, plot=False):
warp = transform(img, M)
img = undistort(img)
ploty = np.linspace(0, 1, num=warp.shape[0])
x2, y2 = find(warp)
x, y = find(warp, False)
right_fitx = pol_calc(best_pol_ord(x,y)[0], ploty)
left_fitx = pol_calc(best_pol_ord(x2,y2)[0], ploty)
y2 = np.int16(np.array(y2)*223.0) # Convert into [0..223] scale
y = np.int16(np.array(y)*223.0)
if plot:
for i in range(len(x)): # Plot points
cv2.circle(warp, (x[i], y[i]), 1, (255,50,255))
for i in range(len(x2)):
cv2.circle(warp, (x2[i], y2[i]), 1, (255,50,250))
show_img(warp)
plt.axis('off')
plt.plot(left_fitx, ploty*IMAGE_H, color='green', linewidth=1)
plt.plot(right_fitx, ploty*IMAGE_H, color='green', linewidth=1)
cv2.imwrite('img.jpg', warp)
return img, left_fitx, right_fitx, ploty*IMAGE_H
def draw_lane_img_p(img_path):
return cv2.imread(img_path)
def draw_lane(img, video=False):
if video:
img, left_fitx, right_fitx, ploty, left, right = get_lane_video(img)
else:
img, left_fitx, right_fitx, ploty = get_lane(img, False)
warp_zero = np.zeros((IMAGE_H,IMAGE_W)).astype(np.uint8)
color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
pts = np.hstack((pts_left, pts_right))
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, Minv, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(img, 1.0, newwarp, 0.6, 0)
if video:
# Add text information on the frame
font = cv2.FONT_HERSHEY_SIMPLEX
text_pos = 'Pos of the car: '+str(np.round(lane_offset(left, right),2))+ ' m'
radius = np.round(lane_curv(left, right),2)
if radius >= MAX_RADIUS:
radius = 'Inf'
else:
radius = str(radius)
text_rad = 'Radius: '+radius+ ' m'
cv2.putText(result,text_pos,(10,25), font, 1,(255,255,255),2)
cv2.putText(result,text_rad,(10,75), font, 1,(255,255,255),2)
return(result)
right_fit_p = np.zeros(POL_ORD+1)
left_fit_p = np.zeros(POL_ORD+1)
r_len = 0
l_len = 0
lane_w_p = 90
MIN = 60 # Minimal line separation (in px)
MAX = 95 # Maximal line separation (in px)
MIN_POINTS = 10 #Minimal points to consider a line
MAX_N = 5 # Maximal frames without line detected to use previous frame
n_count = 0 # Frame counter
r_n = 0 # Number of frames with unsuccessful line detection
l_n = 0
def get_lane_video(img):
global right_fit_p, left_fit_p, r_len, l_len, n_count, r_n, l_n
sw = False
warp = transform(img, M)
img = undistort(img)
if l_n < MAX_N and n_count > 0:
x, y = find(warp, pol = left_fit_p, max_n = l_len)
else:
x, y = find(warp)
if len(x) > MIN_POINTS:
left_fit, mse_l = best_pol_ord(x,y)
if mse_l > DEV_POL*9 and n_count > 0:
left_fit = left_fit_p
l_n += 1
else:
l_n /= 2
else:
left_fit = left_fit_p
l_n += 1
if r_n < MAX_N and n_count > 0:
x2, y2 = find(warp, False, pol = right_fit_p, max_n = r_len)
else:
x2, y2 = find(warp, False)
if len(x2) > MIN_POINTS:
right_fit, mse_r = best_pol_ord(x2, y2)
if mse_r > DEV_POL*9 and n_count > 0:
right_fit = right_fit_p
r_n += 1
else:
r_n /= 2
else:
right_fit = right_fit_p
r_n += 1
if n_count > 0: # if not the first video frame
# Apply filter
        if len(left_fit_p) == len(left_fit): # If the new and previous polynomials have the same order
left_fit = pol_shift(left_fit_p, -SPEED)*(1.0-len(x)/((1.0-RANGE)*IMAGE_H))+left_fit*(len(x)/((1.0-RANGE)*IMAGE_H))
else:
left_fit = smooth_dif_ord(left_fit_p, x, y, len(left_fit)-1)
l_len = y[-1]
if len(right_fit_p) == len(right_fit):
right_fit = pol_shift(right_fit_p, -SPEED)*(1.0-len(x2)/((1.0-RANGE)*IMAGE_H))+right_fit*(len(x2)/((1.0-RANGE)*IMAGE_H))
else:
right_fit = smooth_dif_ord(right_fit_p, x2, y2, len(right_fit)-1)
r_len = y2[-1]
if len(x) > MIN_POINTS and len(x2) <= MIN_POINTS: # If we have only left line
lane_w = pol_calc(right_fit_p, 1.0)-pol_calc(left_fit_p, 1.0)
right_fit = smooth_dif_ord(right_fit_p, pol_calc(equidistant(left_fit, lane_w, max_l=l_len), y),
y, len(left_fit)-1)
r_len = l_len
r_n /=2
if len(x2) > MIN_POINTS and len(x) <= MIN_POINTS: # If we have only right line
lane_w = pol_calc(right_fit_p, 1.0)-pol_calc(left_fit_p, 1.0)
#print(lane_w)
left_fit = smooth_dif_ord(left_fit_p, pol_calc(equidistant(right_fit, -lane_w, max_l=r_len), y2),
y2, len(right_fit)-1)
l_len = r_len
l_n /=2
if (l_n < MAX_N and r_n < MAX_N):
max_y = max(RANGE, l_len, r_len)
else:
max_y = 1.0#max(RANGE, l_len, r_len)
sw = True
d1 = pol_calc(right_fit, 1.0)-pol_calc(left_fit, 1.0)
dm = pol_calc(right_fit, max_y)-pol_calc(left_fit, max_y)
if (d1 > MAX or d1 < 60 or dm < 0):
left_fit = left_fit_p
right_fit = right_fit_p
l_n += 1
r_n += 1
ploty = np.linspace(max_y, 1, num=IMAGE_H)
left_fitx = pol_calc(left_fit, ploty)
right_fitx = pol_calc(right_fit, ploty)
right_fit_p = np.copy(right_fit)
left_fit_p = np.copy(left_fit)
n_count += 1
return img, left_fitx, right_fitx, ploty*223.0, left_fit, right_fit
def init_params(ran):
global right_fit_p, left_fit_p, n_count, RANGE, MIN_POINTS
right_fit_p = np.zeros(POL_ORD+1)
left_fit_p = np.zeros(POL_ORD+1)
n_count = 0
RANGE = ran
MIN_POINTS = 25-15*ran
| mit | 4,058,978,250,439,783,000 | 36.42732 | 156 | 0.575406 | false | 2.704847 | false | false | false |
edx/course-discovery | course_discovery/apps/course_metadata/migrations/0068_auto_20171108_1614.py | 1 | 1536 | # Generated by Django 1.11.3 on 2017-11-08 16:14
import django.db.models.deletion
import django_extensions.db.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20171004_1133'),
('course_metadata', '0067_auto_20171108_1432'),
]
operations = [
migrations.CreateModel(
name='CourseEntitlement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('price', models.DecimalField(decimal_places=2, default=0.0, max_digits=10)),
('sku', models.CharField(blank=True, max_length=128, null=True)),
('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entitlements', to='course_metadata.Course')),
('currency', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Currency')),
('mode', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='course_metadata.SeatType')),
],
),
migrations.AlterUniqueTogether(
name='courseentitlement',
unique_together={('course', 'mode')},
),
]
| agpl-3.0 | 411,203,014,936,770,100 | 44.176471 | 149 | 0.63151 | false | 4.020942 | false | false | false |
AfricaChess/lichesshub | grandprix/migrations/0002_auto_20171110_0640.py | 1 | 1439 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-10 06:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('grandprix', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Season',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('start_date', models.DateField()),
('end_date', models.DateField()),
],
),
migrations.CreateModel(
name='TournamentType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.AddField(
model_name='tournament',
name='kind',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='grandprix.TournamentType'),
),
migrations.AddField(
model_name='tournament',
name='season',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='grandprix.Season'),
),
]
| mit | 9,022,895,200,339,313,000 | 33.261905 | 123 | 0.565671 | false | 4.232353 | false | false | false |
satyrius/cmsplugin-articles | cmsplugin_articles/migrations/0001_initial.py | 1 | 1615 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0003_auto_20140926_2347'),
]
operations = [
migrations.CreateModel(
name='ArticlesPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('limit', models.PositiveIntegerField(verbose_name='Articles per page')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='TeaserExtension',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=255, null=True, verbose_name='Title', blank=True)),
('image', models.ImageField(upload_to=b'teaser', null=True, verbose_name='Image', blank=True)),
('description', models.TextField(null=True, verbose_name='Description', blank=True)),
('extended_object', models.OneToOneField(editable=False, to='cms.Page')),
('public_extension', models.OneToOneField(related_name='draft_extension', null=True, editable=False, to='cmsplugin_articles.TeaserExtension')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
| mit | -328,105,361,709,691,460 | 39.375 | 159 | 0.567802 | false | 4.400545 | false | false | false |
minlexx/pyevemon | esi_client/models/get_characters_character_id_killmails_recent_200_ok.py | 1 | 4307 | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetCharactersCharacterIdKillmailsRecent200Ok(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, killmail_hash=None, killmail_id=None):
"""
GetCharactersCharacterIdKillmailsRecent200Ok - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'killmail_hash': 'str',
'killmail_id': 'int'
}
self.attribute_map = {
'killmail_hash': 'killmail_hash',
'killmail_id': 'killmail_id'
}
self._killmail_hash = killmail_hash
self._killmail_id = killmail_id
@property
def killmail_hash(self):
"""
Gets the killmail_hash of this GetCharactersCharacterIdKillmailsRecent200Ok.
A hash of this killmail
:return: The killmail_hash of this GetCharactersCharacterIdKillmailsRecent200Ok.
:rtype: str
"""
return self._killmail_hash
@killmail_hash.setter
def killmail_hash(self, killmail_hash):
"""
Sets the killmail_hash of this GetCharactersCharacterIdKillmailsRecent200Ok.
A hash of this killmail
:param killmail_hash: The killmail_hash of this GetCharactersCharacterIdKillmailsRecent200Ok.
:type: str
"""
if killmail_hash is None:
raise ValueError("Invalid value for `killmail_hash`, must not be `None`")
self._killmail_hash = killmail_hash
@property
def killmail_id(self):
"""
Gets the killmail_id of this GetCharactersCharacterIdKillmailsRecent200Ok.
ID of this killmail
:return: The killmail_id of this GetCharactersCharacterIdKillmailsRecent200Ok.
:rtype: int
"""
return self._killmail_id
@killmail_id.setter
def killmail_id(self, killmail_id):
"""
Sets the killmail_id of this GetCharactersCharacterIdKillmailsRecent200Ok.
ID of this killmail
:param killmail_id: The killmail_id of this GetCharactersCharacterIdKillmailsRecent200Ok.
:type: int
"""
if killmail_id is None:
raise ValueError("Invalid value for `killmail_id`, must not be `None`")
self._killmail_id = killmail_id
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, GetCharactersCharacterIdKillmailsRecent200Ok):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| gpl-3.0 | -4,276,356,936,466,362,400 | 28.29932 | 101 | 0.572788 | false | 4.226693 | false | false | false |
ychaim/explorer | addresses/views.py | 1 | 33766 | from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.clickjacking import xframe_options_exempt
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from annoying.functions import get_object_or_None
from blockexplorer.decorators import assert_valid_coin_symbol
from blockexplorer.settings import BLOCKCYPHER_PUBLIC_KEY, BLOCKCYPHER_API_KEY, WEBHOOK_SECRET_KEY, BASE_URL
from blockcypher.api import get_address_details, get_address_overview, subscribe_to_address_webhook, get_forwarding_address_details
from blockcypher.constants import COIN_SYMBOL_MAPPINGS
from users.models import AuthUser, LoggedLogin
from addresses.models import AddressSubscription, AddressForwarding
from transactions.models import OnChainTransaction
from services.models import WebHook
from emails.models import SentEmail
from addresses.forms import KnownUserAddressSubscriptionForm, NewUserAddressSubscriptionForm, AddressSearchForm, KnownUserAddressForwardingForm, NewUserAddressForwardingForm
from utils import get_max_pages, get_user_agent, get_client_ip, uri_to_url, simple_pw_generator
import json
from urllib.parse import urlencode
SMALL_PAYMENTS_MSG = '''
Please note that for very small payments of 100 bits or less,
the payment will not forward as the amount to forward is lower than the mining fee.
'''
@assert_valid_coin_symbol
@render_to('address_overview.html')
def address_overview(request, coin_symbol, address, wallet_name=None):
TXNS_PER_PAGE = 100
# 1 indexed page
current_page = request.GET.get('page')
if current_page:
current_page = int(current_page)
else:
current_page = 1
try:
address_details = get_address_details(
address=address,
coin_symbol=coin_symbol,
txn_limit=TXNS_PER_PAGE,
api_key=BLOCKCYPHER_API_KEY,
)
except AssertionError:
msg = _('Invalid Address')
messages.warning(request, msg)
redir_url = reverse('coin_overview', kwargs={'coin_symbol': coin_symbol})
return HttpResponseRedirect(redir_url)
#import pprint; pprint.pprint(address_details, width=1)
if 'error' in address_details:
msg = _('Sorry, that address was not found')
messages.warning(request, msg)
return HttpResponseRedirect(reverse('home'))
if request.user.is_authenticated():
# notify user on page of any forwarding or subscriptions they may have
for address_subscription in AddressSubscription.objects.filter(
auth_user=request.user,
b58_address=address,
coin_symbol=coin_symbol,
unsubscribed_at=None,
):
if address_subscription.auth_user.email_verified:
msg = _('Private Message: you are subscribed to this address and will receive email notifications at <b>%(user_email)s</b> (<a href="%(unsub_url)s">unsubscribe</a>)' % {
'user_email': request.user.email,
'unsub_url': reverse('user_unsubscribe_address', kwargs={
'address_subscription_id': address_subscription.id,
}),
})
messages.info(request, msg, extra_tags='safe')
else:
msg = _('Private Message: you are not subscribed to this address because you have not clicked the link sent to <b>%(user_email)s</b>' % {
'user_email': request.user.email,
})
messages.error(request, msg, extra_tags='safe')
print('ERROR')
# there can be only one
af_initial = get_object_or_None(AddressForwarding,
auth_user=request.user,
initial_address=address,
coin_symbol=coin_symbol,
)
if af_initial:
msg = _('''
Private Message: this address will automatically forward to <a href="%(destination_addr_uri)s">%(destination_address)s</a>
any time a payment is received.
<br /><br /> <i>%(small_payments_msg)s</i>
''' % {
'destination_address': af_initial.destination_address,
'destination_addr_uri': reverse('address_overview', kwargs={
'address': af_initial.destination_address,
'coin_symbol': coin_symbol,
}),
'small_payments_msg': SMALL_PAYMENTS_MSG,
})
messages.info(request, msg, extra_tags='safe')
# There could be many
for af_destination in AddressForwarding.objects.filter(
auth_user=request.user,
destination_address=address,
coin_symbol=coin_symbol,
):
msg = _('''
Private Message: this address will automatically receive forwarded transactions from
<a href="%(initial_addr_uri)s">%(initial_address)s</a>.
<br /><br /> <i>%(small_payments_msg)s</i>
''' % {
'initial_address': af_destination.initial_address,
'initial_addr_uri': reverse('address_overview', kwargs={
'address': af_destination.initial_address,
'coin_symbol': coin_symbol,
}),
'small_payments_msg': SMALL_PAYMENTS_MSG,
})
messages.info(request, msg, extra_tags='safe')
all_transactions = address_details.get('unconfirmed_txrefs', []) + address_details.get('txrefs', [])
# transaction pagination: 0-indexed and inclusive
tx_start_num = (current_page - 1) * TXNS_PER_PAGE
tx_end_num = current_page * TXNS_PER_PAGE - 1
# filter address details for pagination. HACK!
    all_transactions = all_transactions[tx_start_num:tx_end_num + 1]  # +1: bounds above are inclusive, slice end is exclusive
api_url = 'https://api.blockcypher.com/v1/%s/%s/addrs/%s' % (
COIN_SYMBOL_MAPPINGS[coin_symbol]['blockcypher_code'],
COIN_SYMBOL_MAPPINGS[coin_symbol]['blockcypher_network'],
address)
return {
'coin_symbol': coin_symbol,
'address': address,
'api_url': api_url,
'wallet_name': wallet_name,
'current_page': current_page,
'max_pages': get_max_pages(num_items=address_details['final_n_tx'], items_per_page=TXNS_PER_PAGE),
'total_sent_satoshis': address_details['total_sent'],
'total_received_satoshis': address_details['total_received'],
'unconfirmed_balance_satoshis': address_details['unconfirmed_balance'],
'confirmed_balance_satoshis': address_details['balance'],
'total_balance_satoshis': address_details['final_balance'],
'all_transactions': all_transactions,
'num_confirmed_txns': address_details['n_tx'],
'num_unconfirmed_txns': address_details['unconfirmed_n_tx'],
'num_all_txns': address_details['final_n_tx'],
'BLOCKCYPHER_PUBLIC_KEY': BLOCKCYPHER_PUBLIC_KEY,
}
def subscribe_forwarding(request):
kwargs = {'coin_symbol': 'btc'}
redir_url = reverse('subscribe_address', kwargs=kwargs)
return HttpResponseRedirect(redir_url)
@assert_valid_coin_symbol
@render_to('subscribe_address.html')
def subscribe_address(request, coin_symbol):
already_authenticated = request.user.is_authenticated()
# kind of tricky because we have to deal with both logged in and new users
initial = {'coin_symbol': coin_symbol}
if already_authenticated:
form = KnownUserAddressSubscriptionForm(initial=initial)
else:
form = NewUserAddressSubscriptionForm(initial=initial)
if request.method == 'POST':
if already_authenticated:
form = KnownUserAddressSubscriptionForm(data=request.POST)
else:
form = NewUserAddressSubscriptionForm(data=request.POST)
if form.is_valid():
coin_symbol = form.cleaned_data['coin_symbol']
coin_address = form.cleaned_data['coin_address']
if already_authenticated:
auth_user = request.user
else:
user_email = form.cleaned_data['email']
# Check for existing user with that email
existing_user = get_object_or_None(AuthUser, email=user_email)
if existing_user:
msg = _('Please first login to this account to create a notification')
messages.info(request, msg)
return HttpResponseRedirect(existing_user.get_login_uri())
else:
# Create user with unknown (random) password
auth_user = AuthUser.objects.create_user(
email=user_email,
password=None, # it will create a random pw
creation_ip=get_client_ip(request),
creation_user_agent=get_user_agent(request),
)
# Login the user
# http://stackoverflow.com/a/3807891/1754586
auth_user.backend = 'django.contrib.auth.backends.ModelBackend'
login(request, auth_user)
# Log the login
LoggedLogin.record_login(request)
existing_subscription_cnt = AddressSubscription.objects.filter(
auth_user=auth_user,
b58_address=coin_address).count()
if existing_subscription_cnt:
msg = _("You're already subscribed to that address. Please choose another address.")
messages.warning(request, msg)
else:
# TODO: this is inefficiently happening before email verification
# Hit blockcypher and return subscription id
callback_uri = reverse('address_webhook', kwargs={
'secret_key': WEBHOOK_SECRET_KEY,
# hack for rare case of two webhooks requested on same address:
'ignored_key': simple_pw_generator(num_chars=10),
})
callback_url = uri_to_url(callback_uri)
bcy_id = subscribe_to_address_webhook(
subscription_address=coin_address,
callback_url=callback_url,
coin_symbol=coin_symbol,
api_key=BLOCKCYPHER_API_KEY,
)
address_subscription = AddressSubscription.objects.create(
coin_symbol=coin_symbol,
b58_address=coin_address,
auth_user=auth_user,
blockcypher_id=bcy_id,
)
address_uri = reverse('address_overview', kwargs={
'coin_symbol': coin_symbol,
'address': coin_address,
})
if already_authenticated and auth_user.email_verified:
msg = _('You will now be emailed notifications for <a href="%(address_uri)s">%(coin_address)s</a>' % {
'coin_address': coin_address,
'address_uri': address_uri,
})
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
else:
address_subscription.send_notifications_welcome_email()
return HttpResponseRedirect(reverse('unconfirmed_email'))
elif request.method == 'GET':
coin_address = request.GET.get('a')
subscriber_email = request.GET.get('e')
if coin_address:
initial['coin_address'] = coin_address
if subscriber_email and not already_authenticated:
initial['email'] = subscriber_email
if coin_address or subscriber_email:
if already_authenticated:
form = KnownUserAddressSubscriptionForm(initial=initial)
else:
form = NewUserAddressSubscriptionForm(initial=initial)
return {
'form': form,
'coin_symbol': coin_symbol,
}
@login_required
def user_unsubscribe_address(request, address_subscription_id):
'''
For logged-in users to unsubscribe an address
'''
address_subscription = get_object_or_404(AddressSubscription, id=address_subscription_id)
assert address_subscription.auth_user == request.user
if address_subscription.unsubscribed_at:
msg = _("You've already unsubscribed from this alert")
messages.info(request, msg)
else:
address_subscription.unsubscribed_at = now()
address_subscription.save()
address_uri = reverse('address_overview', kwargs={
'coin_symbol': address_subscription.coin_symbol,
'address': address_subscription.b58_address,
})
msg = _('You have been unsubscribed from notifications on <a href="%(address_uri)s">%(b58_address)s</a>' % {
'b58_address': address_subscription.b58_address,
'address_uri': address_uri,
})
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
@login_required
def user_archive_forwarding_address(request, address_forwarding_id):
'''
For logged-in users to archive a forwarding address
For security, the address forwarding is never disabled and can't be changed.
We just stop displaying it in the UI.
For now we don't automatically stop sending email notices, though we may want to do that in the future.
'''
address_forwarding = get_object_or_404(AddressForwarding, id=address_forwarding_id)
assert address_forwarding.auth_user == request.user
if address_forwarding.archived_at:
msg = _("You've already archived this address")
messages.info(request, msg)
else:
address_forwarding.archived_at = now()
address_forwarding.save()
initial_addr_uri = reverse('address_overview', kwargs={
'coin_symbol': address_forwarding.coin_symbol,
'address': address_forwarding.initial_address,
})
destination_addr_uri = reverse('address_overview', kwargs={
'coin_symbol': address_forwarding.coin_symbol,
'address': address_forwarding.destination_address,
})
msg = _('''
You have archived the forwarding address <a href="%(initial_addr_uri)s">%(initial_address)s</a>.
For security, payments sent to <a href="%(destination_addr_uri)s">%(destination_address)s</a>
may continue to forward to <a href="%(initial_addr_uri)s">%(initial_address)s</a>.
''' % {
'initial_address': address_forwarding.initial_address,
'destination_address': address_forwarding.destination_address,
'initial_addr_uri': initial_addr_uri,
'destination_addr_uri': destination_addr_uri,
})
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
def unsubscribe_address(request, unsub_code):
'''
1-click unsubscribe an address via email
'''
sent_email = get_object_or_404(SentEmail, unsub_code=unsub_code)
auth_user = sent_email.auth_user
# Login the user
# http://stackoverflow.com/a/3807891/1754586
auth_user.backend = 'django.contrib.auth.backends.ModelBackend'
login(request, auth_user)
# Log the login
LoggedLogin.record_login(request)
if sent_email.unsubscribed_at:
msg = _("You've already unsubscribed from this alert")
messages.info(request, msg)
else:
address_subscription = sent_email.address_subscription
assert address_subscription
address_subscription.unsubscribed_at = now()
address_subscription.save()
addr_uri = reverse('address_overview', kwargs={
'coin_symbol': address_subscription.coin_symbol,
'address': address_subscription.b58_address,
})
msg = _('You have been unsubscribed from notifications on <a href="%(addr_uri)s">%(b58_address)s</a>' % {
'b58_address': address_subscription.b58_address,
'addr_uri': addr_uri,
})
messages.info(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
@csrf_exempt
def address_webhook(request, secret_key, ignored_key):
'''
Process an inbound webhook from blockcypher
'''
# Log webhook
webhook = WebHook.log_webhook(request, WebHook.BLOCKCYPHER_ADDRESS_NOTIFICATION)
assert secret_key == WEBHOOK_SECRET_KEY
assert request.method == 'POST', 'Request has no post'
blockcypher_id = request.META.get('HTTP_X_EVENTID')
assert 'tx-confirmation' == request.META.get('HTTP_X_EVENTTYPE')
payload = json.loads(request.body.decode())
address_subscription = AddressSubscription.objects.get(blockcypher_id=blockcypher_id)
tx_hash = payload['hash']
num_confs = payload['confirmations']
double_spend = payload['double_spend']
satoshis_sent = payload['total']
fee_in_satoshis = payload['fees']
tx_event = get_object_or_None(
OnChainTransaction,
tx_hash=tx_hash,
address_subscription=address_subscription,
)
if tx_event:
tx_is_new = False
tx_event.num_confs = num_confs
tx_event.double_spend = double_spend
tx_event.save()
else:
tx_is_new = True
input_addresses = set()
for input_entry in payload['inputs']:
for address in input_entry.get('addresses', []):
input_addresses.add(address)
if address_subscription.b58_address in input_addresses:
is_withdrawal = True
else:
is_withdrawal = False
output_addresses = set()
for output_entry in payload.get('outputs', []):
for address in output_entry['addresses']:
output_addresses.add(address)
if address_subscription.b58_address in output_addresses:
is_deposit = True
else:
is_deposit = False
tx_event = OnChainTransaction.objects.create(
tx_hash=tx_hash,
address_subscription=address_subscription,
num_confs=num_confs,
double_spend=double_spend,
satoshis_sent=satoshis_sent,
fee_in_satoshis=fee_in_satoshis,
is_deposit=is_deposit,
is_withdrawal=is_withdrawal,
)
# email sending logic
# TODO: add logic for notify on deposit vs withdrawal
# TODO: add safety check to prevent duplicate email sending
if tx_event.is_subscribed():
if double_spend and (tx_is_new or not tx_event.double_spend):
# We have the first reporting of a double-spend
tx_event.send_double_spend_tx_notification()
elif num_confs == 0 and tx_is_new:
# First broadcast
if tx_event.address_subscription.notify_on_broadcast:
if tx_event.is_deposit and tx_event.address_subscription.notify_on_deposit:
tx_event.send_unconfirmed_tx_email()
elif tx_event.is_withdrawal and tx_event.address_subscription.notify_on_withdrawal:
tx_event.send_unconfirmed_tx_email()
elif num_confs == 6 and (tx_is_new or not tx_event.num_confs == num_confs):
# Sixth confirm
if tx_event.address_subscription.notify_on_sixth_confirm:
if tx_event.is_deposit and tx_event.address_subscription.notify_on_deposit:
tx_event.send_confirmed_tx_email()
elif tx_event.is_withdrawal and tx_event.address_subscription.notify_on_withdrawal:
tx_event.send_confirmed_tx_email()
# Update logging
webhook.address_subscription = address_subscription
webhook.succeeded = True
webhook.save()
# Return something
return HttpResponse("*ok*")
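# Shape of the BlockCypher tx-confirmation callback consumed above, reduced to the
# fields this view actually reads; the real payload carries many more keys and the
# values below are placeholders. The X-EventId header identifies the stored
# AddressSubscription and X-EventType must be 'tx-confirmation'.
#
# {
#     "hash": "<tx hash>",
#     "confirmations": 6,
#     "double_spend": false,
#     "total": 1500000,   # satoshis sent
#     "fees": 10000,      # fee in satoshis
#     "inputs": [{"addresses": ["<sending address>"]}],
#     "outputs": [{"addresses": ["<receiving address>"]}]
# }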
@xframe_options_exempt
@render_to('balance_widget.html')
def render_balance_widget(request, coin_symbol, address):
address_overview = get_address_overview(address=address,
coin_symbol=coin_symbol, api_key=BLOCKCYPHER_API_KEY)
return {
'address_overview': address_overview,
'coin_symbol': coin_symbol,
'b58_address': address,
'BASE_URL': BASE_URL,
}
@xframe_options_exempt
@render_to('received_widget.html')
def render_received_widget(request, coin_symbol, address):
address_overview = get_address_overview(address=address,
coin_symbol=coin_symbol, api_key=BLOCKCYPHER_API_KEY)
return {
'address_overview': address_overview,
'coin_symbol': coin_symbol,
'b58_address': address,
'BASE_URL': BASE_URL,
}
@render_to('search_widgets.html')
def search_widgets(request, coin_symbol):
form = AddressSearchForm()
if request.method == 'POST':
form = AddressSearchForm(data=request.POST)
if form.is_valid():
kwargs = {
'coin_symbol': form.cleaned_data['coin_symbol'],
'address': form.cleaned_data['coin_address'],
}
redir_url = reverse('widgets_overview', kwargs=kwargs)
return HttpResponseRedirect(redir_url)
elif request.method == 'GET':
new_coin_symbol = request.GET.get('c')
if new_coin_symbol:
initial = {'coin_symbol': new_coin_symbol}
form = AddressSearchForm(initial=initial)
return {
'form': form,
'coin_symbol': coin_symbol,
}
@render_to('widgets.html')
def widgets_overview(request, coin_symbol, address):
return {
'coin_symbol': coin_symbol,
'b58_address': address,
'BASE_URL': BASE_URL,
}
def widget_forwarding(request):
kwargs = {'coin_symbol': 'btc'}
redir_url = reverse('search_widgets', kwargs=kwargs)
return HttpResponseRedirect(redir_url)
@assert_valid_coin_symbol
@render_to('setup_address_forwarding.html')
def setup_address_forwarding(request, coin_symbol):
# kind of tricky because we have to deal with both logged in and new users
already_authenticated = request.user.is_authenticated()
initial = {'coin_symbol': coin_symbol}
if already_authenticated:
form = KnownUserAddressForwardingForm(initial=initial)
else:
form = NewUserAddressForwardingForm(initial=initial)
if request.method == 'POST':
if already_authenticated:
form = KnownUserAddressForwardingForm(data=request.POST)
else:
form = NewUserAddressForwardingForm(data=request.POST)
if form.is_valid():
coin_symbol = form.cleaned_data['coin_symbol']
destination_address = form.cleaned_data['coin_address']
user_email = form.cleaned_data.get('email')
# optional. null in case of KnownUserAddressForwardingForm
if already_authenticated:
auth_user = request.user
else:
auth_user = None
if user_email:
# Check for existing user with that email
existing_user = get_object_or_None(AuthUser, email=user_email)
if existing_user:
msg = _('Please first login to this account to create a notification')
messages.info(request, msg)
return HttpResponseRedirect(existing_user.get_login_uri())
else:
# Create user with unknown (random) password
auth_user = AuthUser.objects.create_user(
email=user_email,
password=None, # it will create a random pw
creation_ip=get_client_ip(request),
creation_user_agent=get_user_agent(request),
)
# Login the user
# http://stackoverflow.com/a/3807891/1754586
auth_user.backend = 'django.contrib.auth.backends.ModelBackend'
login(request, auth_user)
# Log the login
LoggedLogin.record_login(request)
else:
# No user email given, proceed anonymously
# FIXME: confirm this
pass
# Setup Payment Forwarding
forwarding_address_details = get_forwarding_address_details(
destination_address=destination_address,
api_key=BLOCKCYPHER_API_KEY,
callback_url=None, # notifications happen separately (and not always)
coin_symbol=coin_symbol,
)
if 'error' in forwarding_address_details:
# Display error message back to user
messages.warning(request, forwarding_address_details['error'], extra_tags='safe')
else:
initial_address = forwarding_address_details['input_address']
# create forwarding object
address_forwarding_obj = AddressForwarding.objects.create(
coin_symbol=coin_symbol,
initial_address=initial_address,
destination_address=destination_address,
auth_user=auth_user,
blockcypher_id=forwarding_address_details['id'],
)
subscribe_uri = reverse('subscribe_address', kwargs={'coin_symbol': coin_symbol})
uri_qs = {'a': initial_address}
if user_email:
uri_qs['e'] = user_email
if already_authenticated:
uri_qs['e'] = auth_user.email
subscribe_uri = '%s?%s' % (subscribe_uri, urlencode(uri_qs))
initial_addr_uri = reverse('address_overview', kwargs={
'coin_symbol': coin_symbol,
'address': initial_address,
})
destination_addr_uri = reverse('address_overview', kwargs={
'coin_symbol': coin_symbol,
'address': destination_address,
})
msg_merge_dict = {
'initial_address': initial_address,
'initial_addr_uri': initial_addr_uri,
'destination_address': destination_address,
'destination_addr_uri': destination_addr_uri,
'subscribe_uri': subscribe_uri,
'small_payments_msg': SMALL_PAYMENTS_MSG,
}
if auth_user:
msg_merge_dict['user_email'] = auth_user.email
if user_email or (already_authenticated and form.cleaned_data['wants_email_notification']):
# Create an address subscription for all of these cases
# Hit blockcypher and return subscription id
callback_uri = reverse('address_webhook', kwargs={
'secret_key': WEBHOOK_SECRET_KEY,
# hack for rare case of two webhooks requested on same address:
'ignored_key': simple_pw_generator(num_chars=10),
})
callback_url = uri_to_url(callback_uri)
bcy_id = subscribe_to_address_webhook(
subscription_address=initial_address,
callback_url=callback_url,
coin_symbol=coin_symbol,
api_key=BLOCKCYPHER_API_KEY,
)
# only notify for deposits
AddressSubscription.objects.create(
coin_symbol=coin_symbol,
b58_address=initial_address,
auth_user=auth_user,
blockcypher_id=bcy_id,
notify_on_deposit=True,
notify_on_withdrawal=False,
address_forwarding_obj=address_forwarding_obj,
)
if user_email:
# New signup
msg = _('''
Transactions sent to <a href="%(initial_addr_uri)s">%(initial_address)s</a>
will now be automatically forwarded to <a href="%(destination_addr_uri)s">%(destination_address)s</a>,
but you must confirm your email to receive notifications.
<br /><br /> <i>%(small_payments_msg)s</i>
''' % msg_merge_dict)
messages.success(request, msg, extra_tags='safe')
address_forwarding_obj.send_forwarding_welcome_email()
return HttpResponseRedirect(reverse('unconfirmed_email'))
else:
if auth_user.email_verified:
msg = _('''
Transactions sent to <a href="%(initial_addr_uri)s">%(initial_address)s</a>
will now be automatically forwarded to <a href="%(destination_addr_uri)s">%(destination_address)s</a>,
and you will immediately receive an email notification at <b>%(user_email)s</b>.
<br /><br /> <i>%(small_payments_msg)s</i>
''' % msg_merge_dict)
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
else:
# existing unconfirmed user
msg = _('''
Transactions sent to <a href="%(initial_addr_uri)s">%(initial_address)s</a>
will now be automatically forwarded to <a href="%(destination_addr_uri)s">%(destination_address)s</a>,
but you must confirm your email to receive notifications.
<br /><br /> <i>%(small_payments_msg)s</i>
''' % msg_merge_dict)
messages.success(request, msg, extra_tags='safe')
address_forwarding_obj.send_forwarding_welcome_email()
return HttpResponseRedirect(reverse('unconfirmed_email'))
elif already_authenticated:
# already authenticated and doesn't want subscriptions
msg = _('''
Transactions sent to <a href="%(initial_addr_uri)s">%(initial_address)s</a>
will now be automatically forwarded to <a href="%(destination_addr_uri)s">%(destination_address)s</a>.
You will not receive email notifications (<a href="%(subscribe_uri)s">subscribe</a>).
<br /><br /> <i>%(small_payments_msg)s</i>
''' % msg_merge_dict)
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(reverse('dashboard'))
else:
# New signup sans email
msg = _('''
Transactions sent to <a href="%(initial_addr_uri)s">%(initial_address)s</a>
will now be automatically forwarded to <a href="%(destination_addr_uri)s">%(destination_address)s</a>.
You will not receive email notifications (<a href="%(subscribe_uri)s">subscribe</a>).
<br /><br /> <i>%(small_payments_msg)s</i>
''' % msg_merge_dict)
messages.success(request, msg, extra_tags='safe')
return HttpResponseRedirect(destination_addr_uri)
elif request.method == 'GET':
coin_address = request.GET.get('a')
subscriber_email = request.GET.get('e')
if coin_address:
initial['coin_address'] = coin_address
if subscriber_email and not already_authenticated:
initial['email'] = subscriber_email
if coin_address or subscriber_email:
if already_authenticated:
form = KnownUserAddressForwardingForm(initial=initial)
else:
form = NewUserAddressForwardingForm(initial=initial)
return {
'form': form,
'coin_symbol': coin_symbol,
}
def forward_forwarding(request):
kwargs = {'coin_symbol': 'btc'}
redir_url = reverse('setup_address_forwarding', kwargs=kwargs)
return HttpResponseRedirect(redir_url)
| apache-2.0 | 4,320,673,146,313,630,000 | 41.313283 | 185 | 0.572825 | false | 4.469358 | false | false | false |
CalebBell/ht | ht/conv_free_immersed.py | 1 | 58245 | # -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2016, Caleb Bell <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
from __future__ import division
from math import exp, log
__all__ = ['Nu_vertical_plate_Churchill',
'Nu_free_vertical_plate',
'Nu_free_vertical_plate_methods',
'Nu_horizontal_plate_McAdams',
'Nu_horizontal_plate_VDI',
'Nu_horizontal_plate_Rohsenow',
'Nu_free_horizontal_plate',
'Nu_free_horizontal_plate_methods',
'Nu_sphere_Churchill',
'Nu_vertical_cylinder_Griffiths_Davis_Morgan',
'Nu_vertical_cylinder_Jakob_Linke_Morgan',
'Nu_vertical_cylinder_Carne_Morgan',
'Nu_vertical_cylinder_Eigenson_Morgan',
'Nu_vertical_cylinder_Touloukian_Morgan',
'Nu_vertical_cylinder_McAdams_Weiss_Saunders',
'Nu_vertical_cylinder_Kreith_Eckert',
'Nu_vertical_cylinder_Hanesian_Kalish_Morgan',
'Nu_vertical_cylinder_Al_Arabi_Khamis',
'Nu_vertical_cylinder_Popiel_Churchill',
'Nu_vertical_cylinder',
'Nu_vertical_cylinder_methods',
'Nu_horizontal_cylinder_Churchill_Chu',
'Nu_horizontal_cylinder_Kuehn_Goldstein',
'Nu_horizontal_cylinder_Morgan',
'Nu_horizontal_cylinder',
'Nu_horizontal_cylinder_methods',
'Nu_coil_Xin_Ebadian']
def Nu_vertical_plate_Churchill(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a vertical
plate according to the Churchill-Chu [1]_ correlation, also presented in
[2]_. Plate must be isothermal; an alternate expression exists for constant
heat flux.
.. math::
Nu_{L}=\left[0.825+\frac{0.387Ra_{L}^{1/6}}
{[1+(0.492/Pr)^{9/16}]^{8/27}}\right]^2
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
Returns
-------
Nu : float
Nusselt number with respect to height, [-]
Notes
-----
Although transition from laminar to turbulent is discrete in reality, this
equation provides a smooth transition in value from laminar to turbulent.
Checked with the original source.
Can be applied to vertical cylinders as well, subject to the criteria below:
.. math::
\frac{D}{L}\ge \frac{35}{Gr_L^{1/4}}
Examples
--------
From [2]_, Example 9.2, matches:
>>> Nu_vertical_plate_Churchill(0.69, 2.63E9)
147.16185223770603
References
----------
.. [1] Churchill, Stuart W., and Humbert H. S. Chu. "Correlating Equations
for Laminar and Turbulent Free Convection from a Vertical Plate."
International Journal of Heat and Mass Transfer 18, no. 11
(November 1, 1975): 1323-29. doi:10.1016/0017-9310(75)90243-4.
.. [2] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera, and
David P. DeWitt. Introduction to Heat Transfer. 6E. Hoboken, NJ:
Wiley, 2011.
'''
Ra = Pr*Gr
term = (0.825 + (0.387*Ra**(1/6.)*(1.0 + (Pr/0.492)**(-0.5625))**(-8.0/27.0)))
return term*term
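# Added illustration (not part of the library's API): building Gr and Pr from fluid
# properties and converting the resulting Nusselt number into a heat transfer
# coefficient h. The property values are round numbers roughly representative of air
# near 300 K and are used only to show the plumbing around the correlation above.
def _example_Nu_vertical_plate_Churchill():
    g = 9.81 # m/s^2, gravitational acceleration
    beta = 1.0/300.0 # 1/K, ideal-gas expansion coefficient at the film temperature
    nu = 1.6E-5 # m^2/s, kinematic viscosity
    k = 0.026 # W/m/K, thermal conductivity
    Pr = 0.7 # Prandtl number
    H = 0.5 # m, plate height, used as the characteristic length
    dT = 30.0 # K, plate - fluid temperature difference
    Gr = g*beta*dT*H**3/nu**2
    Nu = Nu_vertical_plate_Churchill(Pr, Gr)
    return Nu*k/H # h, W/m^2/K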
Nu_free_vertical_plate_all_methods = ["Churchill"]
def Nu_free_vertical_plate_methods(Pr, Gr, H=None, W=None, check_ranges=True):
r'''This function returns a list of methods for calculating heat transfer
coefficient for external free convection from a verical plate.
Requires at a minimum a fluid's Prandtl number `Pr`, and the Grashof
number `Gr` for the system fluid (which require T and P to obtain).
    `H` and `W` are not used by any correlations presently, but are included
for future support.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
H : float, optional
Height of vertical plate, [m]
W : float, optional
Width of the vertical plate, [m]
check_ranges : bool, optional
Whether or not to return only correlations suitable for the provided
data, [-]
Returns
-------
methods : list[str]
List of methods which can be used to calculate `Nu` with the given
inputs, [-]
Examples
--------
>>> Nu_free_vertical_plate_methods(0.69, 2.63E9)
['Churchill']
'''
return Nu_free_vertical_plate_all_methods
def Nu_free_vertical_plate(Pr, Gr, buoyancy=None, H=None, W=None, Method=None):
r'''This function calculates the heat transfer coefficient for external
free convection from a verical plate.
Requires at a minimum a fluid's Prandtl number `Pr`, and the Grashof
number `Gr` for the system fluid (which require T and P to obtain).
    `H` and `W` are not used by any correlations presently, but are included
for future support.
If no correlation's name is provided as `Method`, the 'Churchill'
correlation is selected.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
H : float, optional
Height of vertical plate, [m]
W : float, optional
Width of the vertical plate, [m]
Returns
-------
Nu : float
Nusselt number with respect to plate height, [-]
Other Parameters
----------------
Method : string, optional
A string of the function name to use;
one of ('Churchill', ).
Examples
--------
Turbulent example
>>> Nu_free_vertical_plate(0.69, 2.63E9, False)
147.16185223770603
'''
if Method is None:
Method2 = 'Churchill'
else:
Method2 = Method
if Method2 == 'Churchill':
return Nu_vertical_plate_Churchill(Pr, Gr)
else:
raise ValueError("Correlation name not recognized; see the "
"documentation for the available options.")
def Nu_horizontal_plate_McAdams(Pr, Gr, buoyancy=True):
r'''Calculates the Nusselt number for natural convection above a horizontal
plate according to the McAdams [1]_ correlations. The plate must be
isothermal. Four different equations are used, two each for laminar and
turbulent; the two sets of correlations are required because if the plate
is hot, buoyancy lifts the fluid off the plate and enhances free convection
whereas if the plate is cold, the cold fluid above it settles on it and
decreases the free convection.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
Returns
-------
Nu : float
Nusselt number with respect to length, [-]
Notes
-----
Examples
--------
>>> Nu_horizontal_plate_McAdams(5.54, 3.21e8, buoyancy=True)
181.73121274384457
>>> Nu_horizontal_plate_McAdams(5.54, 3.21e8, buoyancy=False)
55.44564799362829
>>> Nu_horizontal_plate_McAdams(.01, 3.21e8, buoyancy=True)
22.857041558492334
>>> Nu_horizontal_plate_McAdams(.01, 3.21e8, buoyancy=False)
11.428520779246167
References
----------
.. [1] McAdams, William Henry. Heat Transmission. 3E. Malabar, Fla:
Krieger Pub Co, 1985.
'''
Ra = Pr*Gr
if buoyancy:
if Ra <= 1E7:
Nu = .54*Ra**0.25
else:
Nu = 0.15*Ra**(1.0/3.0)
else:
if Ra <= 1E10:
Nu = .27*Ra**0.25
else:
Nu = .15*Ra**(1.0/3.0)
return Nu
def Nu_horizontal_plate_VDI(Pr, Gr, buoyancy=True):
r'''Calculates the Nusselt number for natural convection above a horizontal
plate according to the VDI [1]_ correlations. The plate must be
    isothermal. Three different equations are used: one each for the laminar
    and turbulent regimes when heat transfer occurs at the upper surface, and
    one for heat transfer at the lower surface. The lower-surface correlation
    is recommended for the laminar flow regime.
The two different sets of correlations are required because if the plate
is hot, buoyancy lifts the fluid off the plate and enhances free convection
whereas if the plate is cold, the cold fluid above it settles on it and
decreases the free convection.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
Returns
-------
Nu : float
Nusselt number with respect to length, [-]
Notes
-----
The characteristic length suggested for use is as follows, with `a` and
`b` being the length and width of the plate.
.. math::
L = \frac{ab}{2(a+b)}
The buoyancy enhanced cases are from [2]_; the other is said to be from
    [3]_, although the equations there are not quite the same and do not include
the Prandtl number correction.
Examples
--------
>>> Nu_horizontal_plate_VDI(5.54, 3.21e8, buoyancy=True)
203.89681224927565
>>> Nu_horizontal_plate_VDI(5.54, 3.21e8, buoyancy=False)
39.16864971535617
References
----------
.. [1] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd ed. 2010 edition.
Berlin ; New York: Springer, 2010.
.. [2] Stewartson, Keith. "On the Free Convection from a Horizontal Plate."
Zeitschrift Für Angewandte Mathematik Und Physik ZAMP 9, no. 3
(September 1, 1958): 276-82. https://doi.org/10.1007/BF02033031.
.. [3] Schlunder, Ernst U, and International Center for Heat and Mass
Transfer. Heat Exchanger Design Handbook. Washington:
Hemisphere Pub. Corp., 1987.
'''
Ra = Pr*Gr
if buoyancy:
f2 = (1.0 + (0.322/Pr)**(0.55))**(20.0/11.0)
if Ra*f2 < 7e4:
return 0.766*(Ra*f2)**0.2
else:
return 0.15*(Ra*f2)**(1.0/3.0)
else:
f1 = (1.0 + (0.492/Pr)**(9.0/16.0))**(-16.0/9.0)
return 0.6*(Ra*f1)**0.2
def Nu_horizontal_plate_Rohsenow(Pr, Gr, buoyancy=True):
r'''Calculates the Nusselt number for natural convection above a horizontal
plate according to the Rohsenow, Hartnett, and Cho (1998) [1]_ correlations.
    The plate must be isothermal. Three different equations are used: one each
    for the laminar and turbulent regimes when heat transfer occurs at the upper
    surface, and one for heat transfer at the lower surface. The lower-surface
    correlation is recommended for the laminar flow regime.
The two different sets of correlations are required because if the plate
is hot, buoyancy lifts the fluid off the plate and enhances free convection
whereas if the plate is cold, the cold fluid above it settles on it and
decreases the free convection.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
Returns
-------
Nu : float
Nusselt number with respect to length, [-]
Notes
-----
The characteristic length suggested for use is as follows, with `a` and
`b` being the length and width of the plate.
.. math::
L = \frac{ab}{2(a+b)}
Examples
--------
>>> Nu_horizontal_plate_Rohsenow(5.54, 3.21e8, buoyancy=True)
175.91054716322836
>>> Nu_horizontal_plate_Rohsenow(5.54, 3.21e8, buoyancy=False)
35.95799244863986
References
----------
.. [1] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of Heat
Transfer, 3E. New York: McGraw-Hill, 1998.
'''
Ra = Pr*Gr
if buoyancy:
C_tU = 0.14*((1.0 + 0.01707*Pr)/(1.0 + 0.01*Pr))
C_tV = 0.13*Pr**0.22/(1.0 + 0.61*Pr**0.81)**0.42
t1 = 1.0 # Ah/A # Heated to non heated area ratio
t2 = 0.0 # Lf*P/A # Lf vertical distance between lowest and highest point in body
        # P is perimeter, A is area
Cl = (0.0972 - (0.0157 + 0.462*C_tV)*t1
+ (0.615*C_tV - 0.0548 - 6e-6*Pr)*t2)
Nu_T = 0.835*Cl*Ra**0.25 # average Cl
Nu_l = 1.4/(log(1.0 + 1.4/Nu_T))
Nu_t = C_tU*Ra**(1.0/3.0)
m = 10.0
Nu = ((Nu_l)**m + Nu_t**m)**(1.0/m)
return Nu
else:
# No friction/C term
Nu_T = 0.527*Ra**0.2/(1.0 + (1.9/Pr)**0.9)**(2.0/9.0)
Nu_l = 2.5/(log(1.0 + 2.5/Nu_T))
return Nu_l
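# Added illustration (not part of the library's API): the characteristic length
# suggested in the VDI and Rohsenow docstrings above for a rectangular horizontal
# plate of length a and width b, L = a*b/(2*(a + b)), together with a call. In a
# full calculation Gr would itself be evaluated with this L; the doctest values are
# reused here only to show the call.
def _example_Nu_horizontal_plate_characteristic_length():
    a, b = 1.5, 1.0 # m, plate length and width
    L = a*b/(2.0*(a + b)) # characteristic length from the docstrings above
    Pr, Gr = 5.54, 3.21E8 # values from the doctests above
    Nu = Nu_horizontal_plate_VDI(Pr, Gr, buoyancy=True)
    k = 0.6 # W/m/K, roughly water; illustrative only
    return Nu*k/L # h, W/m^2/K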
conv_free_horizontal_plate_all_methods = {
'McAdams': (Nu_horizontal_plate_McAdams, ('Pr', 'Gr', 'buoyancy')),
'VDI': (Nu_horizontal_plate_VDI, ('Pr', 'Gr', 'buoyancy')),
'Rohsenow': (Nu_horizontal_plate_Rohsenow, ('Pr', 'Gr', 'buoyancy')),
}
Nu_free_horizontal_plate_all_methods = ["VDI", "McAdams", "Rohsenow"]
def Nu_free_horizontal_plate_methods(Pr, Gr, buoyancy, L=None, W=None,
check_ranges=True):
r'''This function returns a list of methods for calculating heat transfer
coefficient for external free convection from a verical plate.
Requires at a minimum a fluid's Prandtl number `Pr`, and the Grashof
number `Gr` for the system fluid, temperatures, and geometry.
`L` and `W` are not used by any correlations presently, but are included
for future support.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
L : float, optional
Length of horizontal plate, [m]
W : float, optional
Width of the horizontal plate, [m]
check_ranges : bool, optional
Whether or not to return only correlations suitable for the provided
data, [-]
Returns
-------
methods : list[str]
List of methods which can be used to calculate `Nu` with the given
inputs, [-]
Examples
--------
>>> Nu_free_horizontal_plate_methods(0.69, 2.63E9, True)
['VDI', 'McAdams', 'Rohsenow']
'''
return Nu_free_horizontal_plate_all_methods
def Nu_free_horizontal_plate(Pr, Gr, buoyancy, L=None, W=None,
Method=None):
r'''This function calculates the heat transfer coefficient for external
free convection from a horizontal plate.
Requires at a minimum a fluid's Prandtl number `Pr`, and the Grashof
number `Gr` for the system fluid, temperatures, and geometry.
`L` and `W` are not used by any correlations presently, but are included
for future support.
If no correlation's name is provided as `Method`, the 'VDI' correlation is
selected.
Parameters
----------
Pr : float
Prandtl number with respect to fluid properties [-]
Gr : float
Grashof number with respect to fluid properties and plate - fluid
temperature difference [-]
buoyancy : bool, optional
Whether or not the plate's free convection is buoyancy assisted (hot
plate) or not, [-]
L : float, optional
Length of horizontal plate, [m]
W : float, optional
Width of the horizontal plate, [m]
Returns
-------
Nu : float
Nusselt number with respect to plate length, [-]
Other Parameters
----------------
Method : string, optional
A string of the function name to use, as in the dictionary
conv_free_horizontal_plate_methods
Examples
--------
Turbulent example
>>> Nu_free_horizontal_plate(5.54, 3.21e8, buoyancy=True)
203.89681224927565
>>> Nu_free_horizontal_plate(5.54, 3.21e8, buoyancy=True, Method='McAdams')
181.73121274384457
'''
if Method is None:
Method2 = "VDI"
else:
Method2 = Method
if Method2 == 'VDI':
return Nu_horizontal_plate_VDI(Pr=Pr, Gr=Gr, buoyancy=buoyancy)
if Method2 == 'McAdams':
return Nu_horizontal_plate_McAdams(Pr=Pr, Gr=Gr, buoyancy=buoyancy)
if Method2 == 'Rohsenow':
return Nu_horizontal_plate_Rohsenow(Pr=Pr, Gr=Gr, buoyancy=buoyancy)
else:
raise ValueError("Correlation name not recognized; see the "
"documentation for the available options.")
def Nu_sphere_Churchill(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a sphere
according to the Churchill [1]_ correlation. Sphere must be isothermal.
.. math::
Nu_D=2+\frac{0.589Ra_D^{1/4}} {\left[1+(0.469/Pr)^{9/16}\right]^{4/9}}
\cdot\left\{1 + \frac{7.44\times 10^{-8}Ra}
{[1+(0.469/Pr)^{9/16}]^{16/9}}\right\}^{1/12}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Although transition from laminar to turbulent is discrete in reality, this
equation provides a smooth transition in value from laminar to turbulent.
Checked with the original source.
Good for Ra < 1E13. Limit of Nu is 2 at low Grashof numbers.
Examples
--------
>>> Nu_sphere_Churchill(.7, 1E7)
25.670869440317578
References
----------
.. [1] Schlunder, Ernst U, and International Center for Heat and Mass
Transfer. Heat Exchanger Design Handbook. Washington:
Hemisphere Pub. Corp., 1987.
'''
Ra = Pr*Gr
Nu = 2 + (0.589*Ra**0.25/(1 + (0.469/Pr)**(9/16.))**(4/9.)*(
1 + 7.44E-8*Ra/(1 + (0.469/Pr)**(9/16.))**(16/9.))**(1/12.))
return Nu
### Vertical cylinders
def Nu_vertical_cylinder_Griffiths_Davis_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_, as
presented in [3]_ and [4]_.
.. math::
Nu_H = 0.67 Ra_H^{0.25},\; 10^{7} < Ra < 10^{9}
.. math::
Nu_H = 0.0782 Ra_H^{0.357}, \; 10^{9} < Ra < 10^{11}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Cylinder of diameter 17.43 cm, length from 4.65 to 263.5 cm. Air as fluid.
Transition between ranges is not smooth.
If outside of range, no warning is given.
Examples
--------
>>> Nu_vertical_cylinder_Griffiths_Davis_Morgan(.7, 2E10)
327.6230596100138
References
----------
.. [1] Griffiths, Ezer, A. H. Davis, and Great Britain. The Transmission of
Heat by Radiation and Convection. London: H. M. Stationery off., 1922.
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 1E9 and turbulent is None):
Nu = 0.0782*Ra**0.357
else:
Nu = 0.67*Ra**0.25
return Nu
def Nu_vertical_cylinder_Jakob_Linke_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_, as
presented in [3]_ and [4]_.
.. math::
Nu_H = 0.555 Ra_H^{0.25},\; 10^{4} < Ra < 10^{8}
.. math::
Nu_H = 0.129 Ra_H^{1/3},\; 10^{8} < Ra < 10^{12}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Cylinder of diameter 3.5 cm, length from L/D = 4.3. Air as fluid.
Transition between ranges is not smooth.
If outside of range, no warning is given. Results are presented rounded in
[4]_, and the second range is not shown in [3]_.
Examples
--------
>>> Nu_vertical_cylinder_Jakob_Linke_Morgan(.7, 2E10)
310.90835207860454
References
----------
.. [1] Jakob, M., and Linke, W., Warmeubergang beim Verdampfen von
Flussigkeiten an senkrechten und waagerechten Flaschen, Phys. Z.,
vol. 36, pp. 267-280, 1935.
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 1E8 and turbulent is None):
Nu = 0.129*Ra**(1/3.)
else:
Nu = 0.555*Ra**0.25
return Nu
def Nu_vertical_cylinder_Carne_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_, as
presented in [3]_ and [4]_.
.. math::
Nu_H = 1.07 Ra_H^{0.28},\; 2\times 10^{6} < Ra < 2\times 10^{8}
.. math::
Nu_H = 0.152 Ra_H^{0.38},\; 2\times 10^{8} < Ra < 2\times 10^{11}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Cylinder of diameters 0.475 cm to 7.62 cm, L/D from 8 to 127. Isothermal
boundary condition was assumed, but not verified. Transition between ranges
is not smooth. If outside of range, no warning is given. The higher range
of [1]_ is not shown in [3]_, and the formula for the first is actually for
the second in [3]_.
Examples
--------
>>> Nu_vertical_cylinder_Carne_Morgan(.7, 2E8)
204.31470629065677
References
----------
.. [1] J. B. Carne. "LIX. Heat Loss by Natural Convection from Vertical
Cylinders." The London, Edinburgh, and Dublin Philosophical Magazine and
Journal of Science 24, no. 162 (October 1, 1937): 634-53.
doi:10.1080/14786443708565140.
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 2E8 and turbulent is None):
return 0.152*Ra**0.38
else:
return 1.07*Ra**0.28
def Nu_vertical_cylinder_Eigenson_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_,
presented in [3]_ and in more detail in [4]_.
.. math::
        Nu_H = 0.48 Ra_H^{0.25},\; Ra < 10^{9}
.. math::
Nu_H = 51.5 + 0.0000726 Ra_H^{0.63},\; 10^{9} < Ra < 1.69 \times 10^{10}
.. math::
Nu_H = 0.148 Ra_H^{1/3} - 127.6 ,\; 1.69 \times 10^{10} < Ra
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Author presents results as appropriate for both flat plates and cylinders.
Height of 2.5 m with diameters of 2.4, 7.55, 15, 35, and 50 mm. Another
experiment of diameter 58 mm and length of 6.5 m was considered.
    Cylinder of diameters 0.475 cm to 7.62 cm, L/D from 8 to 127. Transition
between ranges is not smooth. If outside of range, no warning is given.
Formulas are presented similarly in [3]_ and [4]_, but only [4]_ shows
the transition formula.
Examples
--------
>>> Nu_vertical_cylinder_Eigenson_Morgan(0.7, 2E10)
230.55946525499715
References
----------
.. [1] Eigenson L (1940). Les lois gouvernant la transmission de la chaleur
aux gaz biatomiques par les parois des cylindres verticaux dans le cas
de convection naturelle. Dokl Akad Nauk SSSR 26:440-444
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 1.69E10 and turbulent is None):
return 0.148*Ra**(1/3.) - 127.6
elif 1E9 < Ra < 1.69E10 and turbulent is not False:
return 51.5 + 0.0000726*Ra**0.63
else:
return 0.48*Ra**0.25
def Nu_vertical_cylinder_Touloukian_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_, as
presented in [3]_ and [4]_.
.. math::
Nu_H = 0.726 Ra_H^{0.25},\; 2\times 10^{8} < Ra < 4\times 10^{10}
.. math::
Nu_H = 0.0674 (Gr_H Pr^{1.29})^{1/3},\; 4\times 10^{10} < Ra < 9\times 10^{11}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Cylinder of diameters 2.75 inch, with heights of 6, 18, and 36.25 inch.
Temperature was controlled via multiple separately controlled heating
sections. Fluids were water and ethylene-glycol. Transition between ranges
is not smooth. If outside of range, no warning is given. [2]_, [3]_, and
[4]_ are in complete agreement about this formulation.
Examples
--------
>>> Nu_vertical_cylinder_Touloukian_Morgan(.7, 2E10)
249.72879961097854
References
----------
.. [1] Touloukian, Y. S, George A Hawkins, and Max Jakob. Heat Transfer by
Free Convection from Heated Vertical Surfaces to Liquids.
Trans. ASME 70, 13-18 (1948).
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 4E10 and turbulent is None):
return 0.0674*(Gr*Pr**1.29)**(1/3.)
else:
return 0.726*Ra**0.25
def Nu_vertical_cylinder_McAdams_Weiss_Saunders(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ and [2]_ correlated by
[3]_, as presented in [4]_, [5]_, and [6]_.
.. math::
Nu_H = 0.59 Ra_H^{0.25},\; 10^{4} < Ra < 10^{9}
.. math::
Nu_H = 0.13 Ra_H^{1/3},\; 10^{9} < Ra < 10^{12}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Transition between ranges is not smooth. If outside of range, no warning is
given. For ranges under 10^4, a graph is provided, not included here.
Examples
--------
>>> Nu_vertical_cylinder_McAdams_Weiss_Saunders(.7, 2E10)
313.31849434277973
References
----------
.. [1] Weise, Rudolf. "Warmeubergang durch freie Konvektion an
quadratischen Platten." Forschung auf dem Gebiet des Ingenieurwesens
A 6, no. 6 (November 1935): 281-92. doi:10.1007/BF02592565.
.. [2] Saunders, O. A. "The Effect of Pressure Upon Natural Convection in
Air." Proceedings of the Royal Society of London A: Mathematical,
Physical and Engineering Sciences 157, no. 891 (November 2, 1936):
278-91. doi:10.1098/rspa.1936.0194.
.. [3] McAdams, William Henry. Heat Transmission. 3E. Malabar, Fla:
Krieger Pub Co, 1985.
.. [4] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [5] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [6] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 1E9 and turbulent is None):
return 0.13*Ra**(1/3.)
else:
return 0.59*Ra**0.25
def Nu_vertical_cylinder_Kreith_Eckert(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by
[2]_, also as presented in [3]_, [4]_, and [5]_.
.. math::
Nu_H = 0.555 Ra_H^{0.25},\; 10^{5} < Ra < 10^{9}
.. math::
Nu_H = 0.021 Ra_H^{0.4},\; 10^{9} < Ra < 10^{12}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
Transition between ranges is not smooth. If outside of range, no warning is
given.
Examples
--------
>>> Nu_vertical_cylinder_Kreith_Eckert(.7, 2E10)
240.25393473033196
References
----------
.. [1] Eckert, E. R. G., Thomas W. Jackson, and United States. Analysis of
Turbulent Free-Convection Boundary Layer on Flat Plate. National
Advisory Committee for Aeronautics, no. 2207. Washington, D.C.: National
Advisory Committee for Aeronautics, 1950.
.. [2] Kreith, Frank, Raj Manglik, and Mark Bohn. Principles of Heat
Transfer. Cengage, 2010.
.. [3] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [4] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [5] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if turbulent or (Ra > 1E9 and turbulent is None):
return 0.021*Ra**0.4
else:
return 0.555*Ra**0.25
def Nu_vertical_cylinder_Hanesian_Kalish_Morgan(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by
[2]_, also as presented in [3]_ and [4]_.
.. math::
Nu_H = 0.48 Ra_H^{0.23},\; 10^{6} < Ra < 10^{8}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number [-]
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
For air and fluoro-carbons. If outside of range, no warning is given.
Laminar range only!
Examples
--------
>>> Nu_vertical_cylinder_Hanesian_Kalish_Morgan(.7, 1E7)
18.014150492696604
References
----------
.. [1] Hanesian, D. and Kalish, R. "Heat Transfer by Natural Convection
with Fluorocarbon Gases." IEEE Transactions on Parts, Materials and
Packaging 6, no. 4 (December 1970): 147-148.
doi:10.1109/TPMP.1970.1136270.
.. [2] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [3] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [4] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
return 0.48*Ra**0.23
### Vertical cylinders, more complex correlations
def Nu_vertical_cylinder_Al_Arabi_Khamis(Pr, Gr, L, D, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to [1]_, also as presented in [2]_ and [3]_.
.. math::
Nu_H = 2.9Ra_H^{0.25}/Gr_D^{1/12},\; 9.88 \times 10^7 \le Ra_H \le 2.7\times10^{9}
.. math::
Nu_H = 0.47 Ra_H^{0.333}/Gr_D^{1/12},\; 2.7 \times 10^9 \le Ra_H \le 2.95\times10^{10}
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number with respect to cylinder height [-]
L : float
Length of vertical cylinder, [m]
D : float
Diameter of cylinder, [m]
turbulent : bool or None, optional
Whether or not to force the correlation to return the turbulent
result; will return the laminar regime if False; leave as None for
automatic selection, [-]
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
For air. Local Nusselt number results also given in [1]_. D from 12.75 to
51 mm; H from 300 to 2000 mm. Temperature kept constant by steam condensing.
If outside of range, no warning is given. Applies for range of:
.. math::
1.08 \times 10^4 \le Gr_D \le 6.9 \times 10^5
Examples
--------
>>> Nu_vertical_cylinder_Al_Arabi_Khamis(.71, 2E10, 10, 1)
280.39793209114765
References
----------
.. [1] Al-Arabi, M., and M. Khamis. "Natural Convection Heat Transfer from
Inclined Cylinders." International Journal of Heat and Mass Transfer 25,
no. 1 (January 1982): 3-15. doi:10.1016/0017-9310(82)90229-0.
.. [2] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [3] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Gr_D = Gr/L**3*D**3
Ra = Pr*Gr
if turbulent or (Ra > 2.6E9 and turbulent is None):
return 0.47*Ra**(1/3.)*Gr_D**(-1/12.)
else:
return 2.9*Ra**0.25*Gr_D**(-1/12.)
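# Illustrative sketch: this correlation also has a validity window on the
# diameter-based Grashof number, Gr_D = Gr_H*(D/L)^3 (1.08E4 <= Gr_D <= 6.9E5).
# A minimal range check before applying the correlation; inputs are arbitrary.
def _example_al_arabi_khamis_range_check(Pr=0.71, Gr=2E10, L=10.0, D=1.0):
    Gr_D = Gr*(D/L)**3
    in_range = 1.08E4 <= Gr_D <= 6.9E5
    Nu = Nu_vertical_cylinder_Al_Arabi_Khamis(Pr, Gr, L, D)
    return Nu, Gr_D, in_range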
def Nu_vertical_cylinder_Popiel_Churchill(Pr, Gr, L, D):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to [1]_, also presented in [2]_.
.. math::
\frac{Nu}{Nu_{L,fp}} = 1 + B\left[32^{0.5}Gr_L^{-0.25}\frac{L}{D}\right]^C
.. math::
B = 0.0571322 + 0.20305 Pr^{-0.43}
.. math::
C = 0.9165 - 0.0043Pr^{0.5} + 0.01333\ln Pr + 0.0004809/Pr
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number with respect to cylinder height [-]
L : float
Length of vertical cylinder, [m]
D : float
Diameter of cylinder, [m]
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
For 0.01 < Pr < 100. Requires a vertical flat plate correlation.
Both [2]_ and [3]_ present a power of 2 instead of 0.5 on the 32 in the
equation, but the original source [1]_ has the correct form.
Examples
--------
>>> Nu_vertical_cylinder_Popiel_Churchill(0.7, 1E10, 2.5, 1)
228.89790055149896
References
----------
.. [1] Popiel, C. O., J. Wojtkowiak, and K. Bober. "Laminar Free Convective
Heat Transfer from Isothermal Vertical Slender Cylinder." Experimental
Thermal and Fluid Science 32, no. 2 (November 2007): 607-613.
doi:10.1016/j.expthermflusci.2007.07.003.
.. [2] Popiel, Czeslaw O. "Free Convection Heat Transfer from Vertical
Slender Cylinders: A Review." Heat Transfer Engineering 29, no. 6
(June 1, 2008): 521-36. doi:10.1080/01457630801891557.
.. [3] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
B = 0.0571322 + 0.20305*Pr**-0.43
C = 0.9165 - 0.0043*Pr**0.5 + 0.01333*log(Pr) + 0.0004809/Pr
Nu_fp = Nu_vertical_plate_Churchill(Pr, Gr)
return Nu_fp*(1 + B*(32**0.5*Gr**-0.25*L/D)**C)
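# Illustrative sketch: the Popiel/Churchill form is a slenderness correction on
# the vertical flat plate result, so Nu rises above the plate value as L/D grows.
# Inputs are arbitrary sample values.
def _example_popiel_churchill_slenderness(Pr=0.7, Gr=1E10, L=2.5):
    Nu_plate = Nu_vertical_plate_Churchill(Pr, Gr)
    Nu_stout = Nu_vertical_cylinder_Popiel_Churchill(Pr, Gr, L, D=1.0)     # small L/D
    Nu_slender = Nu_vertical_cylinder_Popiel_Churchill(Pr, Gr, L, D=0.05)  # large L/D
    return Nu_plate, Nu_stout, Nu_slender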
# Nice Name : (function_call, does_turbulent, does_laminar, transition_Ra, is_only_Pr_Gr)
vertical_cylinder_correlations = {
'Churchill Vertical Plate': (Nu_vertical_plate_Churchill, True, True, None, True),
'Griffiths, Davis, & Morgan': (Nu_vertical_cylinder_Griffiths_Davis_Morgan, True, True, 1.00E+009, True),
'Jakob, Linke, & Morgan': (Nu_vertical_cylinder_Jakob_Linke_Morgan, True, True, 1.00E+008, True),
'Carne & Morgan': (Nu_vertical_cylinder_Carne_Morgan, True, True, 2.00E+008, True),
'Eigenson & Morgan': (Nu_vertical_cylinder_Eigenson_Morgan, True, True, 6.90E+011, True),
'Touloukian & Morgan': (Nu_vertical_cylinder_Touloukian_Morgan, True, True, 4.00E+010, True),
'McAdams, Weiss & Saunders': (Nu_vertical_cylinder_McAdams_Weiss_Saunders, True, True, 1.00E+009, True),
'Kreith & Eckert': (Nu_vertical_cylinder_Kreith_Eckert, True, True, 1.00E+009, True),
'Hanesian, Kalish & Morgan': (Nu_vertical_cylinder_Hanesian_Kalish_Morgan, False, True, 1.00E+008, True),
'Al-Arabi & Khamis': (Nu_vertical_cylinder_Al_Arabi_Khamis, True, True, 2.60E+009, False),
'Popiel & Churchill': (Nu_vertical_cylinder_Popiel_Churchill, False, True, 1.00E+009, False),
}
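# Illustrative sketch: each registry entry above is
# (function, does_turbulent, does_laminar, transition_Ra, is_only_Pr_Gr).
# A minimal use of the registry - evaluate every correlation that needs only
# Pr and Gr; entries that also need L and D are skipped. Inputs are arbitrary.
def _example_vertical_registry(Pr=0.72, Gr=1E9):
    results = {}
    for name, entry in vertical_cylinder_correlations.items():
        func, does_turbulent, does_laminar, transition_Ra, only_Pr_Gr = entry
        if only_Pr_Gr:
            results[name] = func(Pr, Gr)
    return results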
def Nu_vertical_cylinder_methods(Pr, Gr, L=None, D=None, check_ranges=True):
r'''This function returns a list of correlation names for free convection
to a vertical cylinder.
The preferred correlations, returned first, are 'Popiel & Churchill' when
the geometry (`L` and `D`) is fully defined, and 'McAdams, Weiss & Saunders'
otherwise.
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number with respect to cylinder height [-]
L : float, optional
Length of vertical cylinder, [m]
D : float, optional
Diameter of cylinder, [m]
check_ranges : bool, optional
Whether or not to return only correlations suitable for the provided
data, [-]
Returns
-------
methods : list[str]
List of methods which can be used to calculate `Nu` with the given
inputs
Examples
--------
>>> Nu_vertical_cylinder_methods(0.72, 1E7)[0]
'McAdams, Weiss & Saunders'
'''
if L is None or D is None:
return ['McAdams, Weiss & Saunders', 'Churchill Vertical Plate',
'Griffiths, Davis, & Morgan', 'Jakob, Linke, & Morgan', 'Carne & Morgan',
'Eigenson & Morgan', 'Touloukian & Morgan', 'Kreith & Eckert', 'Hanesian, Kalish & Morgan']
else:
return ['Popiel & Churchill', 'Churchill Vertical Plate', 'Griffiths, Davis, & Morgan',
'Jakob, Linke, & Morgan', 'Carne & Morgan', 'Eigenson & Morgan', 'Touloukian & Morgan',
'McAdams, Weiss & Saunders', 'Kreith & Eckert', 'Hanesian, Kalish & Morgan',
'Al-Arabi & Khamis']
def Nu_vertical_cylinder(Pr, Gr, L=None, D=None, Method=None):
r'''This function handles choosing which vertical cylinder free convection
correlation is used. Generally this is used by a helper class, but can be
used directly. Will automatically select the correlation to use if none is
provided; if `L` or `D` is not given, a correlation requiring only `Pr` and
`Gr` is selected.
Preferred functions are 'Popiel & Churchill' for fully defined geometries,
and 'McAdams, Weiss & Saunders' otherwise.
Examples
--------
>>> Nu_vertical_cylinder(0.72, 1E7)
30.562236756513943
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number with respect to cylinder height [-]
L : float, optional
Length of vertical cylinder, [m]
D : float, optional
Diameter of cylinder, [m]
Returns
-------
Nu : float
Nusselt number, [-]
Other Parameters
----------------
Method : string, optional
A string of the function name to use, as in the dictionary
vertical_cylinder_correlations
'''
if Method is None:
if L is None or D is None:
Method2 = 'McAdams, Weiss & Saunders'
else:
Method2 = 'Popiel & Churchill'
else:
Method2 = Method
if Method2 == 'Churchill Vertical Plate':
return Nu_vertical_plate_Churchill(Pr=Pr, Gr=Gr)
elif Method2 == 'Griffiths, Davis, & Morgan':
return Nu_vertical_cylinder_Griffiths_Davis_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'Jakob, Linke, & Morgan':
return Nu_vertical_cylinder_Jakob_Linke_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'Carne & Morgan':
return Nu_vertical_cylinder_Carne_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'Eigenson & Morgan':
return Nu_vertical_cylinder_Eigenson_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'Touloukian & Morgan':
return Nu_vertical_cylinder_Touloukian_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'McAdams, Weiss & Saunders':
return Nu_vertical_cylinder_McAdams_Weiss_Saunders(Pr=Pr, Gr=Gr)
elif Method2 == 'Kreith & Eckert':
return Nu_vertical_cylinder_Kreith_Eckert(Pr=Pr, Gr=Gr)
elif Method2 == 'Hanesian, Kalish & Morgan':
return Nu_vertical_cylinder_Hanesian_Kalish_Morgan(Pr=Pr, Gr=Gr)
elif Method2 == 'Al-Arabi & Khamis':
return Nu_vertical_cylinder_Al_Arabi_Khamis(Pr=Pr, Gr=Gr, L=L, D=D)
elif Method2 == 'Popiel & Churchill':
return Nu_vertical_cylinder_Popiel_Churchill(Pr=Pr, Gr=Gr, L=L, D=D)
else:
raise ValueError("Correlation name not recognized; see the "
"documentation for the available options.")
#import matplotlib.pyplot as plt
#import numpy as np
##L, D = 1.5, 0.1
#Pr, Gr = 0.72, 1E8
#methods = Nu_vertical_cylinder_methods(Pr, Gr)
#Grs = np.logspace(2, 12, 10000)
#
#for method in methods:
# Nus = [Nu_vertical_cylinder(Pr=Pr, Gr=i, Method=method) for i in Grs]
# plt.loglog(Grs, Nus, label=method)
#plt.legend()
#plt.show()
### Horizontal Cylinders
def Nu_horizontal_cylinder_Churchill_Chu(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a horizontal
cylinder according to the Churchill-Chu [1]_ correlation, also presented in
[2]_. Cylinder must be isothermal; an alternate expression exists for
constant heat flux.
.. math::
Nu_{D}=\left[0.60+\frac{0.387Ra_{D}^{1/6}}
{[1+(0.559/Pr)^{9/16}]^{8/27}}\right]^2
Parameters
----------
Pr : float
Prandtl number [-]
Gr : float
Grashof number with respect to cylinder diameter, [-]
Returns
-------
Nu : float
Nusselt number with respect to cylinder diameter, [-]
Notes
-----
Although transition from laminar to turbulent is discrete in reality, this
equation provides a smooth transition in value from laminar to turbulent.
Checked with the original source, which has its powers unsimplified but
is equivalent.
[1]_ recommends 1E-5 as the lower limit for Ra, but no upper limit. [2]_
suggests an upper limit of 1E12.
Examples
--------
From [2]_, Example 9.2, matches:
>>> Nu_horizontal_cylinder_Churchill_Chu(0.69, 2.63E9)
139.13493970073597
References
----------
.. [1] Churchill, Stuart W., and Humbert H. S. Chu. "Correlating Equations
for Laminar and Turbulent Free Convection from a Horizontal Cylinder."
International Journal of Heat and Mass Transfer 18, no. 9
(September 1975): 1049-53. doi:10.1016/0017-9310(75)90222-7.
.. [2] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera, and
David P. DeWitt. Introduction to Heat Transfer. 6E. Hoboken, NJ:
Wiley, 2011.
'''
Ra = Pr*Gr
return (0.6 + 0.387*Ra**(1/6.)/(1. + (0.559/Pr)**(9/16.))**(8/27.))**2
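# Illustrative sketch: because Churchill-Chu is a single expression, Nu varies
# smoothly over many decades of Ra rather than switching between branch
# formulas. The Gr values below are arbitrary samples at fixed Pr.
def _example_churchill_chu_sweep(Pr=0.69):
    return [Nu_horizontal_cylinder_Churchill_Chu(Pr, Gr)
            for Gr in (1E4, 1E6, 1E8, 1E10, 1E12)]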
def Nu_horizontal_cylinder_Kuehn_Goldstein(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a horizontal
cylinder according to the Kuehn-Goldstein [1]_ correlation, also shown in
[2]_. Cylinder must be isothermal.
.. math::
\frac{2}{Nu_D} = \ln\left[1 + \frac{2}{\left[\left\{0.518Ra_D^{0.25}
\left[1 + \left(\frac{0.559}{Pr}\right)^{3/5}\right]^{-5/12}
\right\}^{15} + (0.1Ra_D^{1/3})^{15}\right]^{1/15}}\right]
Parameters
----------
Pr : float
Prandtl number with respect to film temperature [-]
Gr : float
Grashof number with respect to cylinder diameter, [-]
Returns
-------
Nu : float
Nusselt number with respect to cylinder diameter, [-]
Notes
-----
[1]_ suggests this expression is valid for all cases except low-Pr fluids.
[2]_ suggests no restrictions.
Examples
--------
>>> Nu_horizontal_cylinder_Kuehn_Goldstein(0.69, 2.63E9)
122.99323525628186
References
----------
.. [1] Kuehn, T. H., and R. J. Goldstein. "Correlating Equations for
Natural Convection Heat Transfer between Horizontal Circular Cylinders."
International Journal of Heat and Mass Transfer 19, no. 10
(October 1976): 1127-34. doi:10.1016/0017-9310(76)90145-9
.. [2] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
return 2./log(1 + 2./((0.518*Ra**0.25*(1. + (0.559/Pr)**0.6)**(-5/12.))**15
+ (0.1*Ra**(1/3.))**15)**(1/15.))
def Nu_horizontal_cylinder_Morgan(Pr, Gr):
r'''Calculates Nusselt number for natural convection around a horizontal
cylinder according to the Morgan [1]_ correlations, a product of a very
large review of the literature. Sufficiently common as to be shown in [2]_.
Cylinder must be isothermal.
.. math::
Nu_D = C Ra_D^n
+----------+----------+-------+-------+
| Gr min | Gr max | C | n |
+==========+==========+=======+=======+
| 10E-10 | 10E-2 | 0.675 | 0.058 |
+----------+----------+-------+-------+
| 10E-2 | 10E2 | 1.02 | 0.148 |
+----------+----------+-------+-------+
| 10E2 | 10E4 | 0.850 | 0.188 |
+----------+----------+-------+-------+
| 10E4 | 10E7 | 0.480 | 0.250 |
+----------+----------+-------+-------+
| 10E7 | 10E12 | 0.125 | 0.333 |
+----------+----------+-------+-------+
Parameters
----------
Pr : float
Prandtl number with respect to film temperature [-]
Gr : float
Grashof number with respect to cylinder diameter, [-]
Returns
-------
Nu : float
Nusselt number with respect to cylinder diameter, [-]
Notes
-----
The most comprehensive literature review to date, with a newly proposed set
of constants. Discontinuous at the jumps between ranges. Blindly
extrapolates outside the upper and lower limits without warning.
Examples
--------
>>> Nu_horizontal_cylinder_Morgan(0.69, 2.63E9)
151.3881997228419
References
----------
.. [1] Morgan, V.T., The Overall Convective Heat Transfer from Smooth
Circular Cylinders, in Advances in Heat Transfer, eds. T.F. Irvin and
J.P. Hartnett, V 11, 199-264, 1975.
.. [2] Boetcher, Sandra K. S. "Natural Convection Heat Transfer From
Vertical Cylinders." In Natural Convection from Circular Cylinders,
23-42. Springer, 2014.
'''
Ra = Pr*Gr
if Ra < 1E-2:
C, n = 0.675, 0.058
elif Ra < 1E2:
C, n = 1.02, 0.148
elif Ra < 1E4:
C, n = 0.850, 0.188
elif Ra < 1E7:
C, n = 0.480, 0.250
else:
# up to 1E12
C, n = 0.125, 0.333
return C*Ra**n
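# Illustrative sketch: one evaluation inside each Ra band of the table above
# (with Pr = 1 so that Ra = Gr). The C, n pair changes between bands, so the
# fit is piecewise; values outside 1E-10 < Ra < 1E12 are extrapolated.
def _example_morgan_bands(Pr=1.0):
    return [Nu_horizontal_cylinder_Morgan(Pr, Gr)
            for Gr in (1E-4, 1E0, 1E3, 1E6, 1E9)]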
horizontal_cylinder_correlations = {
'Churchill-Chu': (Nu_horizontal_cylinder_Churchill_Chu),
'Kuehn & Goldstein': (Nu_horizontal_cylinder_Kuehn_Goldstein),
'Morgan': (Nu_horizontal_cylinder_Morgan)
}
def Nu_horizontal_cylinder_methods(Pr, Gr, check_ranges=True):
r'''This function returns a list of correlation names for free convection
to a horizontal cylinder.
Preferred functions are 'Morgan' when discontinuous results are acceptable
and 'Churchill-Chu' otherwise.
Parameters
----------
Pr : float
Prandtl number with respect to film temperature [-]
Gr : float
Grashof number with respect to cylinder diameter, [-]
check_ranges : bool, optional
Whether or not to return only correlations suitable for the provided
data, [-]
Returns
-------
methods : list[str]
List of methods which can be used to calculate `Nu` with the given
inputs
Examples
--------
>>> Nu_horizontal_cylinder_methods(0.72, 1E7)[0]
'Morgan'
'''
return ['Morgan', 'Churchill-Chu', 'Kuehn & Goldstein']
def Nu_horizontal_cylinder(Pr, Gr, Method=None):
r'''This function handles choosing which horizontal cylinder free convection
correlation is used. Generally this is used by a helper class, but can be
used directly. Will automatically select the correlation to use if none is
provided; all available methods require only `Pr` and `Gr`.
Preferred functions are 'Morgan' when discontinuous results are acceptable
and 'Churchill-Chu' otherwise.
Parameters
----------
Pr : float
Prandtl number with respect to film temperature [-]
Gr : float
Grashof number with respect to cylinder diameter, [-]
Returns
-------
Nu : float
Nusselt number with respect to cylinder diameter, [-]
Other Parameters
----------------
Method : string, optional
A string of the function name to use, as in the dictionary
horizontal_cylinder_correlations
Notes
-----
All fluid properties should be evaluated at the film temperature, the
average between the outer surface temperature of the solid, and the fluid
temperature far away from the heat transfer interface - normally the same
as the temperature before any cooling or heating occurs.
.. math::
T_f = (T_{\text{surface}} + T_\infty)/2
Examples
--------
>>> Nu_horizontal_cylinder(0.72, 1E7)
24.864192615468973
'''
if Method is None:
Method2 = 'Morgan'
else:
Method2 = Method
if Method2 == 'Churchill-Chu':
return Nu_horizontal_cylinder_Churchill_Chu(Pr=Pr, Gr=Gr)
elif Method2 == 'Kuehn & Goldstein':
return Nu_horizontal_cylinder_Kuehn_Goldstein(Pr=Pr, Gr=Gr)
elif Method2 == 'Morgan':
return Nu_horizontal_cylinder_Morgan(Pr=Pr, Gr=Gr)
else:
raise ValueError("Correlation name not recognized; see the "
"documentation for the available options.")
#import matplotlib.pyplot as plt
#import numpy as np
#Pr, Gr = 0.72, 1E8
#methods = Nu_horizontal_cylinder_methods(Pr, Gr)
#Grs = np.logspace(-2, 2.5, 10000)
#
#for method in methods:
# Nus = [Nu_horizontal_cylinder(Pr=Pr, Gr=i, Method=method) for i in Grs]
# plt.semilogx(Grs, Nus, label=method)
#plt.legend()
#plt.show()
def Nu_coil_Xin_Ebadian(Pr, Gr, horizontal=False):
r'''Calculates Nusselt number for natural convection around a vertical
or horizontal helical coil suspended in a fluid without
forced convection.
For horizontal cases:
.. math::
Nu_D = 0.318 Ra_D^{0.293},\; 5 \times {10}^{3} < Ra < 1 \times {10}^5
For vertical cases:
.. math::
Nu_D = 0.290 Ra_D^{0.293},\; 5 \times {10}^{3} < Ra < 1 \times {10}^5
Parameters
----------
Pr : float
Prandtl number calculated with the film temperature -
wall and temperature very far from the coil average, [-]
Gr : float
Grashof number calculated with the film temperature -
wall and temperature very far from the coil average,
and using the outer diameter of the coil [-]
horizontal : bool, optional
Whether the coil is horizontal or vertical, [-]
Returns
-------
Nu : float
Nusselt number using the outer diameter of the coil
and the film temperature, [-]
Notes
-----
This correlation is also reviewed in [2]_.
Examples
--------
>>> Nu_coil_Xin_Ebadian(0.7, 2E4, horizontal=False)
4.755689726250451
>>> Nu_coil_Xin_Ebadian(0.7, 2E4, horizontal=True)
5.2148597687849785
References
----------
.. [1] Xin, R. C., and M. A. Ebadian. "Natural Convection Heat Transfer
from Helicoidal Pipes." Journal of Thermophysics and Heat Transfer 10,
no. 2 (1996): 297-302.
.. [2] Prabhanjan, Devanahalli G., Timothy J. Rennie, and G. S. Vijaya
Raghavan. "Natural Convection Heat Transfer from Helical Coiled Tubes."
International Journal of Thermal Sciences 43, no. 4 (April 1, 2004):
359-65.
'''
Ra = Pr*Gr
if horizontal:
return 0.318*Ra**0.293
else:
return 0.290*Ra**0.293
| mit | 6,765,372,046,632,319,000 | 33.813509 | 107 | 0.621733 | false | 3.161374 | false | false | false |
thijsmie/imp_flask | imp_flask/extensions.py | 1 | 1038 |
"""Flask and other extensions instantiated here.
To avoid circular imports with views and create_app(), extensions are instantiated here. They will be initialized
(calling init_app()) in application.py.
"""
from logging import getLogger
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from flask_wtf.csrf import CsrfProtect
from sqlalchemy.event import listens_for
from sqlalchemy.pool import Pool
LOG = getLogger(__name__)
@listens_for(Pool, 'connect', named=True)
def _on_connect(dbapi_connection, **_):
"""Set MySQL mode to TRADITIONAL on databases that don't set this automatically.
Without this, MySQL will silently insert invalid values in the database, causing very long debugging sessions in the
long run.
http://www.enricozini.org/2012/tips/sa-sqlmode-traditional/
"""
LOG.debug('Setting SQL Mode to TRADITIONAL.')
dbapi_connection.cursor().execute("SET SESSION sql_mode='TRADITIONAL'")
db = SQLAlchemy()
mail = Mail()
csrf = CsrfProtect()
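# Illustrative sketch of the init_app() wiring mentioned in the module
# docstring. The create_app() structure below is an assumption for
# demonstration; the real factory lives in application.py and may differ.
def _example_create_app(config_object=None):
    from flask import Flask
    app = Flask(__name__)
    if config_object is not None:
        app.config.from_object(config_object)
    db.init_app(app)
    mail.init_app(app)
    csrf.init_app(app)
    return app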
| mit | 1,401,830,091,470,726,700 | 30.4375 | 120 | 0.728324 | false | 3.844444 | false | false | false |
leonid-shevtsov/ClickableUrls_SublimeText | clickable_urls.py | 1 | 5787 |
import sublime
import sublime_plugin
import webbrowser
import threading
class UrlHighlighter(sublime_plugin.EventListener):
# Thanks Jeff Atwood http://www.codinghorror.com/blog/2008/10/the-problem-with-urls.html
# ^ that up here is a URL that should be matched
URL_REGEX = "\\bhttps?://[-A-Za-z0-9+&@#/%?=~_()|!:,.;']*[-A-Za-z0-9+&@#/%=~_(|]"
DEFAULT_MAX_URLS = 200
SETTINGS_FILENAME = 'ClickableUrls.sublime-settings'
urls_for_view = {}
scopes_for_view = {}
ignored_views = []
browser = None
highlight_semaphore = threading.Semaphore()
def on_activated(self, view):
self.update_url_highlights(view)
# Blocking handlers for ST2
def on_load(self, view):
if sublime.version() < '3000':
self.update_url_highlights(view)
def on_modified(self, view):
if sublime.version() < '3000':
self.update_url_highlights(view)
# Async listeners for ST3
def on_load_async(self, view):
self.update_url_highlights_async(view)
def on_modified_async(self, view):
self.update_url_highlights_async(view)
def on_close(self, view):
for map in [self.urls_for_view, self.scopes_for_view, self.ignored_views]:
if view.id() in map:
del map[view.id()]
"""The logic entry point. Find all URLs in view, store and highlight them"""
def update_url_highlights(self, view):
settings = sublime.load_settings(UrlHighlighter.SETTINGS_FILENAME)
should_highlight_urls = settings.get('highlight_urls', True)
max_url_limit = settings.get('max_url_limit', UrlHighlighter.DEFAULT_MAX_URLS)
if view.id() in UrlHighlighter.ignored_views:
return
urls = view.find_all(UrlHighlighter.URL_REGEX)
# Avoid slowdowns for views with too many URLs
if len(urls) > max_url_limit:
print("UrlHighlighter: ignoring view with %u URLs" % len(urls))
UrlHighlighter.ignored_views.append(view.id())
return
UrlHighlighter.urls_for_view[view.id()] = urls
should_highlight_urls = sublime.load_settings(UrlHighlighter.SETTINGS_FILENAME).get('highlight_urls', True)
if (should_highlight_urls):
self.highlight_urls(view, urls)
"""Same as update_url_highlights, but avoids race conditions with a
semaphore."""
def update_url_highlights_async(self, view):
UrlHighlighter.highlight_semaphore.acquire()
try:
self.update_url_highlights(view)
finally:
UrlHighlighter.highlight_semaphore.release()
"""Creates a set of regions from the intersection of urls and scopes,
underlines all of them."""
def highlight_urls(self, view, urls):
# We need separate regions for each lexical scope for ST to use a proper color for the underline
scope_map = {}
for url in urls:
scope_name = view.scope_name(url.a)
scope_map.setdefault(scope_name, []).append(url)
for scope_name in scope_map:
self.underline_regions(view, scope_name, scope_map[scope_name])
self.update_view_scopes(view, scope_map.keys())
"""Apply underlining with provided scope name to provided regions.
Uses the empty region underline hack for Sublime Text 2 and native
underlining for Sublime Text 3."""
def underline_regions(self, view, scope_name, regions):
if sublime.version() >= '3019':
# in Sublime Text 3, the regions are just underlined
view.add_regions(
u'clickable-urls ' + scope_name,
regions,
scope_name,
flags=sublime.DRAW_NO_FILL|sublime.DRAW_NO_OUTLINE|sublime.DRAW_SOLID_UNDERLINE)
else:
# in Sublime Text 2, the 'empty region underline' hack is used
char_regions = [sublime.Region(pos, pos) for region in regions for pos in range(region.a, region.b)]
view.add_regions(
u'clickable-urls ' + scope_name,
char_regions,
scope_name,
sublime.DRAW_EMPTY_AS_OVERWRITE)
"""Store new set of underlined scopes for view. Erase underlining from
scopes that were used but are not anymore."""
def update_view_scopes(self, view, new_scopes):
old_scopes = UrlHighlighter.scopes_for_view.get(view.id(), None)
if old_scopes:
unused_scopes = set(old_scopes) - set(new_scopes)
for unused_scope_name in unused_scopes:
view.erase_regions(u'clickable-urls ' + unused_scope_name)
UrlHighlighter.scopes_for_view[view.id()] = new_scopes
def open_url(url):
browser = sublime.load_settings(UrlHighlighter.SETTINGS_FILENAME).get('clickable_urls_browser')
try:
webbrowser.get(browser).open(url, autoraise=True)
except(webbrowser.Error):
sublime.error_message('Failed to open browser. See "Customizing the browser" in the README.')
class OpenUrlUnderCursorCommand(sublime_plugin.TextCommand):
def run(self, edit):
if self.view.id() in UrlHighlighter.urls_for_view:
selection = self.view.sel()[0]
if selection.empty():
selection = next((url for url in UrlHighlighter.urls_for_view[self.view.id()] if url.contains(selection)), None)
if not selection:
return
url = self.view.substr(selection)
open_url(url)
class OpenAllUrlsCommand(sublime_plugin.TextCommand):
def run(self, edit):
if self.view.id() in UrlHighlighter.urls_for_view:
for url in set([self.view.substr(url_region) for url_region in UrlHighlighter.urls_for_view[self.view.id()]]):
open_url(url)
| mit | -7,902,812,575,243,236,000 | 38.636986 | 128 | 0.632625 | false | 3.671954 | false | false | false |
iniweb/deployCD | app/views.py | 1 | 4840 |
from flask import render_template, flash, url_for, request, redirect, session
from flask_login import login_user, logout_user, current_user, login_required
from app import app, db, gitlab, login_manager
from forms import ProjectForm
from models import User, Project, ROLE_USER, ROLE_ADMIN
import copy
import ansible.runner
import ansible.inventory
import ansible.callbacks
import ansible.utils
@app.route('/')
@app.route('/projects')
@app.route('/projects/<int:page>')
def index(page=1):
if current_user.is_authenticated():
projects = Project.query.order_by(Project.deploy_at.desc(), Project.updated_at.desc()).paginate(page, 10, False)
return render_template('index.html', projects=projects)
return redirect(url_for('login'))
@app.route('/project/create', methods=["GET", "POST"])
@login_required
def project_create():
form = ProjectForm()
if form.validate_on_submit():
new_project = Project(
title=form.title.data,
branch=form.branch.data,
user_id=current_user.get_id(),
repo_url=form.repo_url.data
)
db.session.add(new_project)
db.session.commit()
flash('Project has been created successfully.', 'success')
return redirect(url_for('project', project_id=new_project.id))
return render_template('project/form.html', form=form, action_url=url_for('project_create'))
@app.route('/project/<int:project_id>/edit', methods=["GET", "POST"])
@login_required
def project_edit(project_id):
project = Project.query.filter_by(id=project_id).first_or_404()
form = ProjectForm(obj=project)
if request.method == 'POST' and form.validate():
form.populate_obj(project)
db.session.commit()
flash('Project has been updated successfully.', 'success')
return redirect(url_for('project', project_id=project.id))
return render_template('project/form.html', form=form, action_url=url_for('project_edit', project_id=project.id))
@app.route('/project/<int:project_id>')
@login_required
def project(project_id):
project = Project.query.filter_by(id=project_id).first_or_404()
return render_template('project/show.html', project=project)
@app.route('/project/<int:project_id>/servers')
@login_required
def project_servers(project_id):
project = Project.query.filter_by(id=project_id).first_or_404()
return render_template('servers/list.html', project=project)
@app.route('/project/<int:project_id>/deploy')
@login_required
def project_deploy(project_id):
project = Project.query.filter_by(id=project_id).first_or_404()
hosts = ["localhost"]
ansible.utils.VERBOSITY = 1
inventory = ansible.inventory.Inventory(hosts)
base_runner = ansible.runner.Runner(
pattern='all',
transport='local',
inventory=inventory,
# callbacks=runner_cb,
check=False,
background=1
)
runner = copy.copy(base_runner)
runner.module_name = 'git'
runner.module_args = '[email protected]:iniweb/ansible-vagrant-sf2.git'
result = runner.run()
print result
return render_template('project/deploy.html', project=project)
@app.route('/login')
def login():
return render_template('login.html')
@app.route('/login/gitlab')
def login_gitlab():
if current_user.is_authenticated():
return redirect(url_for('index'))
return gitlab.authorize(callback=url_for('authorized', _external=True))
@app.route('/logout')
def logout():
logout_user()
session.pop('gitlab_token', None)
return redirect(url_for('index'))
@app.route('/oauth-authorized')
def authorized():
if current_user.is_authenticated():
return redirect(url_for('index'))
resp = gitlab.authorized_response()
if resp is None:
return 'Access denied: reason=%s error=%s' % (
request.args['error'],
request.args['error_description']
)
session['gitlab_token'] = (resp['access_token'], '')
me = gitlab.get('user')
user = User.query.filter_by(email=me.data['email']).first()
if not user:
role = ROLE_ADMIN if me.data['is_admin'] else ROLE_USER
user = User(
role=role,
email=me.data['email'],
avatar_url=me.data['avatar_url'],
enabled=True
)
db.session.add(user)
db.session.commit()
login_user(user, True)
return redirect(url_for('index'))
@app.errorhandler(404)
def not_found_error(error):
return render_template('404.html'), 404
@app.errorhandler(500)
def internal_error(error):
db.session.rollback()
return render_template('500.html'), 500
@login_manager.user_loader
def load_user(user_id):
return User.query.get(user_id)
@gitlab.tokengetter
def get_gitlab_token():
return session.get('gitlab_token')
| mit | 3,487,567,261,366,403,000 | 26.816092 | 120 | 0.66157 | false | 3.556209 | false | false | false |
alexpeattie/wethepeopletoolkit | wethepeopletoolkit/clusterer.py | 1 | 2828 |
import pandas
import numpy as np
import click
from bitstring import BitArray
from base58 import b58encode_int, b58decode_int
class Clusterer:
def __init__(self):
pass
def cluster(self, n, state_processor, pca = False, model_type = 'kmeans', z_score_exclude = 0.0, seed = None, quiet = False):
from sklearn.cluster import FeatureAgglomeration, KMeans, SpectralClustering
from scipy import stats
model_types = {
'feature-agglomeration': FeatureAgglomeration,
'kmeans': KMeans,
'spectral': SpectralClustering,
}
states = state_processor.states(two_d = pca)
excluded_states, labels = [], []
if z_score_exclude > 0:
if not model_type == 'kmeans':
raise click.UsageError("--z-score-exclude can only be used when --model-type is 'kmeans'")
states_2d = state_processor.states(two_d = True)
excluded_states = states[-(np.abs(stats.zscore(states_2d)) < z_score_exclude).all(axis=1)]
states = states[(np.abs(stats.zscore(states_2d)) < z_score_exclude).all(axis=1)]
seed = seed or np.random.randint(0, 10 ** 6)
np.random.seed(seed)
if not quiet:
click.echo("Clustering with seed %d..." % seed)
self.model = model_types[model_type](n_clusters = n)
self.data = states.as_matrix()
self.model.fit(self.data)
labels = self.model.labels_
self.results = pandas.DataFrame([states.index, self.model.labels_]).T.sort_values(by=0)
if any(excluded_states):
excluded_results = pandas.DataFrame([excluded_states.index, self.model.predict(excluded_states)]).T
self.results = pandas.DataFrame(np.concatenate([self.results, excluded_results]))
def cluster_ids(self):
labels = self.results[1]
sorted_labels = sorted(labels.unique())
ids = map(lambda l: b58encode_int(BitArray((labels == l).astype(int).tolist()).uint), sorted_labels)
return zip(sorted_labels, ids)
def cluster_id_to_states(self, cluster_id):
states = np.array(['AL', 'AK', 'AZ', 'AR', 'CA', 'CO', 'CT', 'DE', 'FL', 'GA', 'HI', 'ID', 'IL', 'IN', 'IA', 'KS', 'KY', 'LA', 'ME', 'MD', 'MA', 'MI', 'MN', 'MS', 'MO', 'MT', 'NE', 'NV', 'NH', 'NJ', 'NM', 'NY', 'NC', 'ND', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VT', 'VA', 'WA', 'WV', 'WI', 'WY'])
return states[list(BitArray(uint = b58decode_int(cluster_id), length = 50))]
def evaluate(self, metric, distance = None):
from sklearn.metrics import silhouette_score, calinski_harabaz_score
if metric == 'silhouette':
return silhouette_score(self.data, self.model.labels_, metric = distance)
if metric == 'calinski_harabaz':
return calinski_harabaz_score(self.data, self.model.labels_)
if metric == 'inertia':
return self.model.inertia_
def results_dict(self):
return self.results.set_index(0)[1].to_dict()
| mit | -3,378,057,671,160,845,000 | 41.223881 | 323 | 0.642857 | false | 3.114537 | false | false | false |
bkidwell/wshare | wshare.py | 1 | 4231 |
# See https://github.com/bkidwell/wshare
import argparse
import subprocess
import re
import sys
from wshare_config import config
def getConnDict():
txt = subprocess.getoutput('net use')
if re.search(r'There are no entries in the list\.', txt):
return dict()
match = re.search(r'--------.*\n([\w\W]*?)The command completed', txt)
if match is None:
print("Can't parse 'net use' output.")
sys.exit()
data = match.group(1).split('\n')
data = [row for row in data if not re.match('^ ', row)]
data = [re.split(r' +', row) for row in data]
result = dict()
for row in data:
if len(row) < 2: continue
if re.match(r'\w:', row[1]):
result[(row[1] + row[2]).lower()] = {
'drive_letter': row[1],
'path': row[2],
'username': None,
'password': None,
'status': row[0],
'in_conf': False,
}
else:
result[row[1].lower()] = {
'drive_letter': None,
'path': row[1],
'username': None,
'password': None,
'status': row[0],
'in_conf': False,
}
return result
def getAll():
conns = getConnDict()
for key, value in config.items():
if value['drive_letter']:
value['drive_letter'] = value['drive_letter'][0].upper() + ':'
path = value['path'].replace('/', '\\')
skey = (value['drive_letter'] + path if value['drive_letter'] else path).lower()
value['username'] = value['username'].replace('/', '\\')
if skey in conns:
conn = conns[skey]
conn['username'] = value['username']
conn['password'] = value['password'] if 'password' in value else ''
conn['drive_letter'] = conn['drive_letter'] or value['drive_letter']
conn['in_conf'] = key
else:
value['path'] = path
value['in_conf'] = key
value['status'] = 'Not connected'
conns[path.lower()] = value
conns = [conns[key] for key in sorted(conns.keys())]
return conns
def printStatus(connList):
i = 0
for conn in connList:
i += 1
if conn['in_conf']:
print(str(i) + ' [' + conn['in_conf'] + ']:: ' + (conn['drive_letter'] or '') + ' ' + conn['path'])
else:
print(':: ' + (conn['drive_letter'] or '') + ' ' + conn['path'] + ' (not in config)')
print(' ' + str(conn['status']))
def main(sel):
conns = getAll()
if sel is None:
print('\nNetwork shares:')
print('')
printStatus(conns)
print('')
num = input('Reconnect which share number or name? (ENTER to quit) ')
print('')
else:
num = sel
if num == '' or num == '0': return False
conn = None
for value in conns:
if value['in_conf'] and value['in_conf'] == num:
conn = value
if conn is None:
try:
num = int(num)
conn = conns[num - 1]
except:
print('Bad number or name.')
if sel: return False
else: return True
if sel:
print('Reconnecting ' + sel + '...')
if conn['drive_letter']:
subprocess.getoutput('net use ' + conn['drive_letter'] + ' /delete')
subprocess.getoutput('net use ' + conn['path'] + ' /delete')
if not 'password' in conn: conn['password'] = ''
p = ' "' + conn['password'] + '"' if conn['password'] else ''
subprocess.call(
'net use ' +
(conn['drive_letter'] if conn['drive_letter'] else '') + ' ' +
conn['path'] + ' ' +
'/user:' + conn['username'] + p
)
if not sel is None:
input('Press ENTER to continue.')
return False
else:
return True
parser = argparse.ArgumentParser(description='List Windows File Sharing shares and reconnect bookmarks.')
parser.add_argument(
'selection', metavar='NAME', type=str, nargs='?',
help='The name of the bookmark from wshare_config.py to reconnect'
)
args = parser.parse_args()
while True:
if not main(args.selection): break
| mit | 739,811,618,023,276,800 | 30.81203 | 111 | 0.509572 | false | 3.794619 | false | false | false |
wasim21k/pihome | cron/port_scanner.py | 1 | 1392 |
#!/usr/bin/env python
#TheZero
#This code is under Public Domain
#ref: https://gist.github.com/TheZ3ro/7255052
from threading import Thread
import socket
import os, re, time, sys, subprocess
class bc:
HEADER = '\033[0;36;40m'
ENDC = '\033[0m'
SUB = '\033[3;30;45m'
WARN = '\033[0;31;40m'
GREEN = '\033[0;32;40m'
org = '\033[91m'
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('google.com', 0))
ip = s.getsockname()[0]
#Get the Local IP
end = re.search('^[\d]{1,3}.[\d]{1,3}.[\d]{1,3}.[\d]{1,3}', ip)
#Chop down the last IP Digits
create_ip = re.search('^[\d]{1,3}.[\d]{1,3}.[\d]{1,3}.', ip)
print "PiHome IP Address: "+bc.GREEN+str(end.group(0))+bc.ENDC
host = str(end.group(0))
host = '192.168.99.5'
from_port = 5000
to_port = 5005
#host = raw_input('host > ')
#from_port = input('start scan from port > ')
#to_port = input('finish scan to port > ')
counting_open = []
counting_close = []
threads = []
def scan(port):
s = socket.socket()
result = s.connect_ex((host,port))
print('working on port > '+(str(port)))
if result == 0:
counting_open.append(port)
print((str(port))+' -> open')
s.close()
else:
counting_close.append(port)
#print((str(port))+' -> close')
s.close()
for i in range(from_port, to_port+1):
t = Thread(target=scan, args=(i,))
threads.append(t)
t.start()
[x.join() for x in threads]  # wait for all port scans to finish before reporting
print(counting_open)
| gpl-3.0 | -1,480,537,210,011,121,200 | 21.819672 | 63 | 0.623563 | false | 2.433566 | false | false | false |
datawire/quark | quarkc/backend.py | 1 | 34267 |
# Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os, types, tempfile, logging, inspect
import json
from collections import OrderedDict
from . import java, python, javascript, ruby, shell
from .ast import (
AST, Method, Class, Function, Package, File, Dependency, Interface, Primitive,
Macro, Field, Type, TypeParam, Import, Local, ExprStmt,
Assign, If, Return, While, Break, Continue, Var, Call, String, Number,
Bool, List, Map, Name, Null, Native, NativeCase, Fixed, Attr, Cast,
Param, Declaration, Super, Expression,
)
from .compiler import texpr, TypeExpr
from .constants import (BUILTIN, BUILTIN_FILE, REFLECT)
from .dispatch import overload
from .helpers import (
is_meta, has_super, compiled_quark, is_newer, namever, mdroot, readme,
base_type, get_defaulted_methods, is_abstract, base_constructors, doc,
get_field, constructors, get_defaulted_statics
)
from quarkc import reflection
class FakeExpr(object): pass
class Backend(object):
PRETTY_INSTALL = "TBD"
ext = None
gen = None
def __init__(self, include_stdlib=False):
self.include_stdlib = include_stdlib
self.files = OrderedDict()
self._imports = OrderedDict()
self.current_file = None
self.current_package = None
self.packages = []
self.definitions = []
self.names = []
self.bindings = None
self.entry = None
self.dist = None
self.root = None
self.roots = None
self.dependencies = OrderedDict()
self.log = logging.getLogger("quark.compile")
def install(self, offline):
cls = self.__class__.__name__
pkg = self.packages[0].name
target = self.install_target()
if os.path.exists(self.root.url):
deps = (compiled_quark(self.root.url),)
else:
deps = ()
modified = getattr(self.root, "_modified", False)
newer = is_newer(target,
__file__, inspect.getsourcefile(self.gen), *deps)
uptodate = not modified and bool(newer)
# F F T T
# F F T F
# T T F T
# F T F F
self.log.debug("Uptodate: %s, Modified %s, Newer: %s",
uptodate, modified, newer)
if uptodate:
self.log.debug("Skipping %s for %s[%s]", cls, pkg, target)
return
self.log.debug("Emitting generated %s for %s", cls, pkg)
dir = tempfile.mkdtemp(suffix="-%s" % cls,
prefix="%s-" % pkg)
self.write(dir)
quark_pkg = "quark"
if offline:
# XXX: while `use` of external packages is private...
if str(pkg) == quark_pkg:
offline = False
mode = "online, automatic"
else:
mode = "offline"
else:
mode = "online, selected by user"
self.log.info("Installing %s %s with %s (%s)",
cls, repr(pkg), self.PRETTY_INSTALL, mode)
self.install_command(dir, offline)
self.root._modified = False
def visit_Root(self, r):
self.root = r
def visit_DistUnit(self, du):
self.dist = du
def visit_Dependency(self, dep):
self.dependencies["%s:%s.%s-%s" % (dep.lang, dep.group, dep.artifact, dep.version)] = dep
def visit_Use(self, use):
# XXX This is *probably* not a bug, but fact self.roots starts as None
# isn't great...
entry = self.roots[use.qualified].files[0] # pylint: disable=unsubscriptable-object
name, ver = namever(entry)
self.dependencies[name] = entry
def visit_File(self, file):
if not self.entry and not is_meta(file):
self.entry = file
def visit_Class(self, cls):
self.definitions.append(cls)
def visit_Primitive(self, p):
pass
def visit_Function(self, f):
if not isinstance(f, Method):
self.definitions.append(f)
def visit_Package(self, p):
self.packages.append(p)
self.definitions.append(p)
def add_native_includes(self, code):
if self.entry.name.endswith(BUILTIN_FILE):
return code
du_name, _ = namever(self.entry)
includes = []
for path, content in self.entry.root.included.items():
if path.endswith(self.ext):
includes.append(self.gen.native_include(path, du_name))
code.head += "".join(includes)
return code
def setfile(self, fname, maker):
self.current_file = fname
if fname not in self._imports:
self._imports[fname] = OrderedDict()
if fname not in self.files:
self.files[fname] = self.add_native_includes(maker())
return False
return True
def leave_Root(self, r):
if self.dist:
self.entry = self.dist.file
self.mdpkg, cleanup = reflection.reflect(r, self)
self.main = None
for d in self.definitions:
fname = self.file(d)
if fname is None:
continue
self.current_package = d.package
if self.setfile(fname, lambda _d=d: self.make_file(_d)):
self.files[fname] += "\n"
dfn_code = self.definition(d)
if dfn_code and d.package is None and d.file.name.endswith(BUILTIN_FILE):
self.files[fname] += self.gen.comment("BEGIN_BUILTIN") + "\n"
self.files[fname] += dfn_code
self.files[fname] += "\n" + self.gen.comment("END_BUILTIN")
else:
self.files[fname] += dfn_code
cleanup()
if self.main:
self.genmain()
for name in self.files:
code = self.files[name]
# XXX: this is a hack to avoid circularly dependent
# imports for generated metadata. To fix this properly, we
# really need to change the import model for python and js
# to import classes on demand at the point of use rather
# than into the module/package level scope.
raw_imports = self._imports[name].keys()
refimps = filter(lambda x: x[0] == (BUILTIN, REFLECT), raw_imports)
imports = filter(lambda x: x[0] != (BUILTIN, REFLECT), raw_imports)
mdimps = filter(lambda x: x[0][0].endswith("_md"), imports)
imports = filter(lambda x: not x[0][0].endswith("_md"), imports)
if name.split("/")[0].endswith("_md"):
headimps = self.genimps(refimps)
tailimps = self.genimps(imports + mdimps, lazy=True)
else:
headimps = self.genimps(refimps + imports)
tailimps = self.genimps(mdimps, lazy=True)
if headimps: code.head += headimps + "\n\n"
if tailimps: code.tail = "\n\n" + tailimps + "\n\n" + code.tail
content = str(code)
if content[-1:] != "\n": content += "\n"
self.files[name] = content
def genmain(self):
self.current_package = None
name, ver = namever(self.entry)
fname = self.gen.main_file(self.gen.name(name))
self.setfile(fname, lambda: self.gen.make_main_file(self.gen.name(name)))
path = self.add_import(self.main)
self.files[fname] += self.gen.main(path, self.name(self.main.name))
def genimps(self, imps, lazy=False):
seen = set()
imps = [self.gen.import_(pkg, org, dep, seen=seen, lazy=lazy) for (pkg, org, dep) in imps]
return "\n".join(filter(lambda x: x is not None, imps))
@overload(AST)
def add_import(self, obj):
return self.add_import(tuple(self.package(obj)), obj.root, obj.file)
@overload(list)
def add_import(self, pkg, root, file):
return self.add_import(tuple(pkg), root, file)
@overload(tuple)
def add_import(self, pkg, root, file):
imports = self._imports[self.current_file]
if self.current_package:
org = tuple(self.package(self.current_package))
else:
org = ()
if pkg != org:
if root != self.root:
dep, ver = namever(file or root)
else:
dep = None
imports[(pkg, org, dep)] = True
return list(self.qualify(pkg, org))
@overload(Class)
def file(self, cls):
return self.gen.class_file(self.package(cls), self.name(cls.name), self.fname(cls))
@overload(Function)
def file(self, fun):
return self.gen.function_file(self.package(fun), self.name(fun.name), self.fname(fun))
@overload(Package)
def file(self, pkg):
return self.gen.package_file(self.package(pkg.package), self.name(pkg.name), self.fname(pkg))
def fname(self, obj):
return os.path.splitext(os.path.basename(obj.file.name))[0]
@property
def rtloc(self):
if self.include_stdlib:
rtloc, _ = namever(self.entry)
else:
rtloc = BUILTIN
return rtloc
@overload(Class)
def make_file(self, cls):
return self.gen.make_class_file(self.package(cls), self.name(cls.name), rtloc=self.rtloc)
@overload(Function)
def make_file(self, fun):
return self.gen.make_function_file(self.package(fun), self.name(fun.name), mdroot(self.entry))
@overload(Package)
def make_file(self, pkg):
return self.gen.make_package_file(self.package(pkg.package), self.name(pkg.name), rtloc = self.rtloc)
def write(self, target):
if not os.path.exists(target):
os.makedirs(target)
name, version = namever(self.entry)
packages = OrderedDict()
for pkg in self.packages:
lines = []
readme(pkg, lines)
packages[tuple(self.package(pkg))] = "\n".join(lines)
packages[tuple(self.mdpkg)] = "## Root\n"
files_to_emit = OrderedDict(self.files)
for path, content in self.entry.root.included.items():
if path.endswith(self.ext):
files_to_emit[path] = content
deps = [] # List of (group, artifact, version)
for dep in self.dependencies.values():
if isinstance(dep, File):
dep_name, dep_ver = namever(dep)
deps.append((dep_name, dep_name, dep_ver))
elif isinstance(dep, Dependency):
if dep.lang.text == self.ext:
deps.append((dep.group, dep.artifact, dep.version))
else:
assert False, (dep, type(dep))
files = self.gen.package(name, version, packages, files_to_emit, deps)
for name, content in files.items():
path = os.path.join(target, name)
dir = os.path.dirname(path)
if not os.path.exists(dir):
os.makedirs(dir)
with open(path, "wb") as fd:
fd.write(content)
self.log.debug(" wrote %s", path)
@overload(Package)
def definition(self, pkg):
self.current_package = pkg
for d in pkg.definitions:
if isinstance(d, Package):
self.add_import(d)
return "" # self.doc(pkg)
def is_entry_package(self, pkg):
name, ver = namever(pkg)
return pkg.name.text == name
@overload(Function)
def definition(self, fun):
if fun.body is None: return ""
prolog = ""
if fun.name.text == "main" and len(fun.params) == 1 and \
fun.params[0].resolved.type.name.text == "List":
if fun.file == self.root.files[0] and \
self.is_entry_package(fun.package):
self.main = fun
prolog = self.gen.main_prolog()
return prolog + self.gen.function(self.doc(fun),
self.type(fun.type),
self.name(fun.name),
[self.param(p) for p in fun.params],
self.block(fun.body))
@overload(Class)
def definition(self, cls):
clazz = self.name(cls.name)
parameters = [self.name(p.name) for p in cls.parameters]
base = self.type(base_type(cls))
interfaces = [self.type(t) for t in cls.bases
if isinstance(t.resolved.type, (Interface, Primitive))]
static_fields = []
fields = []
methods = []
constructors = []
defaulted, self.bindings = get_defaulted_methods(cls)
defaulted_statics = get_defaulted_statics(cls)
for d in cls.definitions + [None] + defaulted.values() + defaulted_statics.values():
if isinstance(d, Macro): continue
if d is None:
extra_methods = getattr(cls, "_extra_methods", None)
if extra_methods:
methods.extend(extra_methods())
del cls._extra_methods
extra_statics = getattr(cls, "_extra_statics", None)
if extra_statics:
static_fields.extend(extra_statics())
del cls._extra_statics
continue
doc = self.doc(d)
if isinstance(d, Field):
fun = self.gen.static_field if d.static else self.gen.field
holder = static_fields if d.static else fields
holder.append(fun(doc,
clazz,
self.type(d.type),
self.name(d.name),
self.expr(d.value)))
elif d.type:
if d.body:
fun = self.gen.static_method if d.static else self.gen.method
methods.append(fun(doc,
clazz,
self.type(d.type),
self.name(d.name),
[self.param(p) for p in d.params],
self.block(d.body)))
else:
methods.append(self.gen.abstract_method(doc,
clazz,
self.type(d.type),
self.name(d.name),
[self.param(p) for p in d.params]))
else:
if base and not has_super(d):
header = [self.gen.expr_stmt(self.gen.invoke_super(clazz,
self.name(base_type(cls).resolved.type.name),
[]))]
elif not base:
finit = self.gen.field_init()
if finit:
header = [finit]
else:
header = None
else:
header = None
constructors.append(self.gen.constructor(doc,
clazz,
[self.param(p) for p in d.params],
self.block(d.body, header)))
if not constructors:
constructors = self.default_constructors(cls)
return self.gen.clazz(self.doc(cls), is_abstract(cls), clazz, parameters, base,
interfaces, static_fields, fields, constructors, methods)
@overload(Interface)
def definition(self, iface):
name = self.name(iface.name)
parameters = [self.name(p.name) for p in iface.parameters]
bases = [self.type(t) for t in iface.bases]
methods = []
static_fields = []
for d in iface.definitions + [None]:
if d is None:
extra_methods = getattr(iface, "_extra_methods", None)
if extra_methods:
methods.extend(extra_methods())
del iface._extra_methods
extra_statics = getattr(iface, "_extra_statics", None)
if extra_statics:
static_fields.extend(extra_statics())
del iface._extra_statics
if isinstance(d, Field) and d.static:
static_fields.append(self.gen.static_field(self.doc(d),
name,
self.type(d.type),
self.name(d.name),
self.expr(d.value)))
elif isinstance(d, Method):
methods.append(self.gen.interface_method(self.doc(d),
name,
self.type(d.type),
self.name(d.name),
[self.param(p) for p in d.params],
self.block(d.body)))
return self.gen.interface(self.doc(iface), name, parameters, bases, static_fields,
methods)
def default_constructors(self, cls):
name = self.name(cls.name)
btype = base_type(cls)
base = btype.resolved.type if btype else None
cons = base_constructors(cls)
result = []
for con in cons:
params = [self.param(p) for p in con.params]
args = [self.name(p.name) for p in con.params]
stmt = self.gen.expr_stmt(self.gen.invoke_super(name, self.name(base.name), args))
result.append(self.gen.constructor("", name, params, self.gen.block([stmt])))
if result:
return result
elif base:
body = self.gen.block([self.gen.expr_stmt(self.gen.invoke_super(name, self.name(base.name), []))])
result.append(self.gen.constructor("", name, [], body))
else:
result.append(self.gen.default_constructor(name))
return result
def doc(self, obj):
return self.gen.doc(doc(obj))
def push(self, env):
self.names.append(env)
def pop(self):
self.names.pop()
@overload(Name)
def name(self, n):
return self.name(n.text)
@overload(basestring)
def name(self, n):
if self.names:
env = self.names[-1]
if n in env:
return env[n]
return self.gen.name(n)
@overload(AST)
def package(self, node):
if isinstance(node, Package):
me = self.name(node.name)
if node.package:
return self.package(node.package) + [me]
else:
return [me]
elif node.package:
return self.package(node.package)
else:
assert False
@overload(types.NoneType)
def package(self, node):
return []
@overload(list)
def package(self, path):
return path
@overload(Type)
def type(self, t):
return self.type(t.resolved)
@overload(TypeExpr)
def type(self, texpr):
return self.type(texpr.type, texpr.bindings)
@overload(Class, dict)
def type(self, cls, bindings):
mapping = None
for a in cls.annotations:
if a.name.text == "mapping":
mapping = a
break
if mapping:
path = []
name = self.expr(mapping.arguments[0])
else:
path = self.add_import(cls)
name = self.name(cls.name)
if cls.parameters:
params = [self.type(texpr(bindings[p].type, bindings, bindings[p].bindings)) for p in cls.parameters]
else:
params = []
return self.gen.type(path, name, params)
def qualify(self, package, origin):
return self.gen.qualify(package, origin)
@overload(TypeParam)
def type(self, tparam, bindings):
if tparam in bindings:
return self.type(bindings[tparam])
elif self.bindings and tparam in self.bindings:
return self.type(self.bindings[tparam])
else:
return self.name(tparam.name)
@overload(types.NoneType)
def type(self, n):
return None
def param(self, p):
return self.gen.param(self.type(p.type),
self.name(p.name),
self.expr(p.value))
def block(self, b, header=None):
if b is None:
return header
else:
return self.gen.block((header or []) + [self.statement(s) for s in b.statements])
@overload(Import)
def statement(self, imp):
return self.gen.comment(imp.code())
@overload(Local)
def statement(self, s):
return self.gen.local(self.type(s.declaration.type),
self.name(s.declaration.name),
self.maybe_cast(s.declaration.type, s.declaration.value))
@overload(ExprStmt)
def statement(self, s):
return self.gen.expr_stmt(self.expr(s.expr))
@overload(Assign)
def statement(self, ass):
return self.gen.assign(self.expr(ass.lhs), self.maybe_cast(ass.lhs, ass.rhs))
@overload(Return)
def statement(self, ret):
return self.gen.return_(self.maybe_cast(ret.callable.type, ret.expr))
@overload(If)
def statement(self, iff):
return self.gen.if_(self.expr(iff.predicate),
self.block(iff.consequence),
self.block(iff.alternative))
@overload(While)
def statement(self, wh):
return self.gen.while_(self.expr(wh.condition), self.block(wh.body))
@overload(Break)
def statement(self, brk):
return self.gen.break_()
@overload(Continue)
def statement(self, cnt):
return self.gen.continue_()
@overload(str)
def expr(self, s):
return s
@overload(Var)
def expr(self, v):
return self.var(v.definition, v)
@overload(Call)
def expr(self, c):
type = c.expr.resolved.type
return self.invoke(type, c.expr, [self.coerce(a) for a in c.args])
@overload(String)
def expr(self, s):
return self.gen.string(s)
@overload(Number)
def expr(self, n):
return self.gen.number(n)
@overload(Bool)
def expr(self, b):
return self.gen.bool_(b)
@overload(List)
def expr(self, l):
return self.gen.list_([self.expr(e) for e in l.elements])
@overload(Map)
def expr(self, m):
return self.gen.map([(self.expr(e.key), self.expr(e.value)) for e in m.entries])
@overload(Null)
def expr(self, n):
return self.gen.null()
@overload(Native)
def expr(self, n):
return "".join([self.expr(c) for c in n.cases])
@overload(NativeCase)
def expr(self, nc):
if nc.name in (None, self.ext):
return "".join([self.expr(c) for c in nc.children])
else:
return ""
@overload(Fixed)
def expr(self, f):
return f.text
@overload(Attr)
def expr(self, a):
type = a.expr.resolved.type
return self.get(type, a.resolved.type, a.expr, a.attr)
@overload(Type)
def expr(self, t):
return self.type(t)
@overload(Cast)
def expr(self, c):
return self.maybe_cast(c, c.expr)
@overload(types.NoneType)
def expr(self, n):
return None
@overload(Param)
def var(self, _, v):
return self.gen.local_ref(self.name(v.name))
@overload(Declaration)
def var(self, _, v):
return self.gen.local_ref(self.name(v.name))
@overload(Class)
def var(self, _, v):
return self.gen.class_ref(self.name(v.name))
@overload(Method)
def var(self, _, v):
return self.gen.method_ref(self.name(v.name))
@overload(Field)
def var(self, f, v):
if f.static:
path = self.add_import(f.clazz)
return self.gen.get_static_field(path, self.name(f.clazz.name), self.name(v.name))
else:
return self.gen.field_ref(self.name(v.name))
@overload(Class, Class)
def get(self, cls, type, expr, attr):
f = get_field(cls, attr)
if f.static:
path = self.add_import(f.clazz)
return self.gen.get_static_field(path, self.name(f.clazz.name), self.name(attr))
else:
return self.gen.get_field(self.expr(expr), self.name(attr))
@overload(Class, TypeParam)
def get(self, cls, type, expr, attr):
return self.gen.get_field(self.expr(expr), self.name(attr))
@overload(Class, Method)
def get(self, cls, type, expr, attr):
return self.gen.get_method(self.expr(expr), self.name(attr))
@overload(Package, Package)
def get(self, pkg, type, expr, attr):
return self.gen.get_package(self.expr(expr), self.name(attr))
@overload(Package, Function)
def get(self, pkg, type, expr, attr):
return self.gen.get_function(self.expr(expr), self.name(attr))
@overload(Function)
def invoke(self, func, expr, args):
path = self.add_import(func)
return self.gen.invoke_function(path, self.name(func.name), args)
@overload(Method, Attr)
def invoke(self, method, expr, args):
if isinstance(expr.expr, Super):
return self.gen.invoke_super_method(self.name(expr.clazz.name),
self.name(expr.expr.resolved.type.name),
self.name(method.name),
args)
else:
if method.static:
path = self.add_import(method.clazz)
return self.gen.invoke_static_method(path, self.name(method.clazz.name), self.name(method.name), args)
else:
return self.gen.invoke_method(self.expr(expr.expr), self.name(method.name), args)
@overload(Method, Var)
def invoke(self, method, var, args):
if method.static:
path = self.add_import(method.clazz)
return self.gen.invoke_static_method(path, self.name(method.clazz.name), self.name(method.name), args)
else:
return self.gen.invoke_method_implicit(self.name(method.name), args)
@overload(Class)
def invoke(self, cls, expr, args):
cons = constructors(cls)
con = cons[0] if cons else None
if isinstance(con, Macro):
return self.apply_macro(con, expr, args)
else:
return self.gen.construct(self.type(expr.resolved), args)
@overload(Class, Super)
def invoke(self, cls, sup, args):
return self.gen.invoke_super(self.name(sup.clazz.name), self.name(cls.name), args)
@overload(Macro)
def invoke(self, macro, expr, args):
return self.apply_macro(macro, expr, args)
@overload(Expression)
def coerce(self, expr):
if expr.coersion:
if isinstance(expr.coersion, Macro):
fake = FakeExpr()
fake.expr = expr
fake.resolved = expr.coersion.resolved
return self.apply_macro(expr.coersion, fake, ())
else:
return self.gen.invoke_method(self.expr(expr), self.name(expr.coersion.name), [])
else:
return self.expr(expr)
def apply_macro(self, macro, expr, args):
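        # Macro expansion: bind the macro's type parameters and formal
        # parameters to already-rendered target-language expressions, push that
        # mapping as a naming environment so name() resolves through it, render
        # the macro body, then pop the environment again.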
env = {}
if macro.clazz and macro.type:
bindings = expr.resolved.bindings
for tparam in bindings:
env[tparam.name.text] = self.type(bindings[tparam])
# for method macros we use expr to access self
env["self"] = self.expr(expr.expr)
idx = 0
for p in macro.params:
env[p.name.text] = args[idx]
idx += 1
self.push(env)
try:
result = self.expr(macro.body)
return result
finally:
self.pop()
@overload(AST, object)
def maybe_cast(self, type, expr):
return self.maybe_cast(type.resolved, expr)
@overload(TypeExpr, object)
def maybe_cast(self, texpr, expr):
if expr is None: return None
if expr.coersion:
return self.coerce(expr)
if texpr.assignableFrom(expr.resolved):
return self.expr(expr)
else:
return self.gen.cast(self.type(texpr), self.expr(expr))
def fake(self, type, expr):
fake = FakeExpr()
fake.resolved = type
fake.coersion = None
fake.expr = expr
return fake
@overload(FakeExpr)
def expr(self, fake):
return fake.expr
def is_virtual(python_command):
output = shell.call(python_command, "-c", 'import sys; print(hasattr(sys, "real_prefix"))')
return output.strip() == "True"
def is_root():
return os.geteuid() == 0
def is_user(python_command):
return not is_virtual(python_command) and not is_root()
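# Together these helpers decide how generated packages get installed: the pip
# install below adds --user only when running outside a virtualenv and not as
# root (see Python.install_command).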
class Java(Backend):
PRETTY_INSTALL = "Maven"
argswitch = "--java"
ext = "java"
gen = java
def install_target(self):
name, ver = namever(self.entry)
return self._install_target(name, ver)
def _install_target(self, name, ver):
jar = os.path.join(os.environ["HOME"], ".m2/repository", name, name, ver, "%s-%s.jar" % (name, ver))
if os.path.exists(jar):
return jar
return None
def install_command(self, dir, offline):
cmd = ["mvn", "install", "-DskipTests"]
if offline: cmd += ["--offline"]
shell.call(*cmd, cwd=dir, stage="install")
def run(self, name, version, args):
jar = os.path.join(os.environ["HOME"], ".m2", "repository", name, name, version,
"%s-%s.jar" % (name, version))
os.execlp("java", "java", "-jar", jar, name, *args)
class Python(Backend):
PRETTY_INSTALL = "PIP"
argswitch = "--python"
ext = "py"
gen = python
python_command = "python2"
pip_command = "pip2"
def install_target(self):
name, ver = namever(self.entry)
return self._install_target(name, ver)
def _install_target(self, name, ver):
return shell.get_pip_pkg(name, stage="install", command=self.pip_command)
def install_command(self, dir, offline):
shell.call(self.python_command, "setup.py", "-q", "bdist_wheel", cwd=dir, stage="install")
wheels = [name for name in os.listdir(os.path.join(dir, "dist")) if name.endswith(".whl")]
for wheel in wheels:
cmd = [self.pip_command, "install",]
if offline: cmd += ["--no-index"]
if is_user(self.python_command): cmd += ["--user"]
cmd += ["--upgrade", "dist/%s" % wheel]
shell.call(*cmd, cwd=dir, stage="install")
def run(self, name, version, args):
main = self.gen.name(name)
python = shell.user_override((self.python_command,))[0]
os.execlp(python, python, "-c",
"import %s; %s.call_main()" % (main, main), name, *args)
class Python3(Python):
argswitch = "--python3"
python_command = "python3"
pip_command = "pip3"
class JavaScript(Backend):
PRETTY_INSTALL = "NPM"
argswitch = "--javascript"
ext = "js"
gen = javascript
def install_target(self):
name, ver = namever(self.entry)
return self._install_target(name, ver)
def _install_target(self, name, ver):
try:
output = shell.call("npm", "ll", "--depth", "0", "--json", name, errok=True)
return json.loads(output).get("dependencies",{}).get(name,{}).get("path")
except ValueError:
pass
except shell.ShellError:
pass
return None
def install_command(self, dir, offline):
cmd = ["npm", "install"]
if offline: cmd += ["--cache-min", "9999999"]
cmd += [dir]
shell.call(*cmd, stage="install")
def run(self, name, version, args):
main = self.gen.name(name)
os.execlp("node", "node", "-e", 'require("%s").%s.call_main()' % (name, main), name, *args)
class Ruby(Backend):
PRETTY_INSTALL = "GEM"
argswitch = "--ruby"
ext = "rb"
gen = ruby
def install_target(self):
name, ver = namever(self.entry)
return self._install_target(name, ver)
def _install_target(self, name, ver):
try:
output = shell.call("gem", "which", name, stage="install", errok=True)
return output.strip()
except shell.ShellError:
pass
return None
def install_command(self, dir, offline):
name, ver = namever(self.entry)
cmd = ["gem", "build", "-q", "%s.gemspec" % name]
shell.call(*cmd, cwd=dir, stage="install")
cmd = ["gem", "install"]
if offline: cmd += ["--local"]
cmd += ["%s/%s-%s.gem" % (dir, name, ver)]
shell.call(*cmd, stage="install")
def run(self, name, version, args):
main = self.gen.name(name)
os.execlp("ruby", "ruby", "-e", "require('%s'); ::Quark.%s.call_main()" % (name, main), name, *args)
| apache-2.0 | 6,072,187,217,217,607,000 | 34.181725 | 118 | 0.538857 | false | 3.809137 | false | false | false |
heromod/migrid | mig/simulation/user.py | 1 | 2151 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# user - simulated user that randomly submits jobs to a MiG server
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
#
# User class used for simulating users that randomly submit jobs
#
import random
class User:
id = ''
submit_prob = None
logger = None
server = None
maxprice = None
length = 2
jobs = 0
def __init__(
self,
id,
logger,
prob,
price,
server,
vgrid,
):
self.id = id
self.logger = logger
self.submit_prob = prob
self.maxprice = price
self.server = server
self.vgrid = vgrid
def submit_job(self, step):
self.logger.info('%s submitting job with maxprice %s to %s in step %d'
, self.id, self.maxprice, self.server.id, step)
name = '%s' % self.id
self.server.submit(name, self.length, self.maxprice, self.vgrid)
self.jobs += 1
def sleep(self):
self.logger.debug('%s sleeping', self.id)
def simulate(self, timestep):
# Randomly submit a job during given timestep
rand = random.random()
qlen = self.server.job_queue.queue_length()
if rand <= self.submit_prob and qlen < 200:
self.submit_job(timestep)
else:
self.sleep()
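# Usage sketch (illustrative, not part of the original module): with a MiG
# logger, a server exposing submit()/job_queue and a vgrid name, a driver loop
# would simply call simulate() once per timestep, e.g.
#
#   user = User('user-0', logger, prob=0.3, price=5, server=server, vgrid='Generic')
#   for step in range(100):
#       user.simulate(step)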
| gpl-2.0 | -7,407,505,360,815,817,000 | 24.607143 | 81 | 0.621571 | false | 3.753927 | false | false | false |
googleapis/python-pubsublite | google/cloud/pubsublite_v1/services/subscriber_service/transports/grpc_asyncio.py | 1 | 11672 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.pubsublite_v1.types import subscriber
from .base import SubscriberServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import SubscriberServiceGrpcTransport
class SubscriberServiceGrpcAsyncIOTransport(SubscriberServiceTransport):
"""gRPC AsyncIO backend transport for SubscriberService.
The service that a subscriber client application uses to
receive messages from subscriptions.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "pubsublite.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "pubsublite.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def subscribe(
self,
) -> Callable[
[subscriber.SubscribeRequest], Awaitable[subscriber.SubscribeResponse]
]:
r"""Return a callable for the subscribe method over gRPC.
Establishes a stream with the server for receiving
messages.
Returns:
Callable[[~.SubscribeRequest],
Awaitable[~.SubscribeResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "subscribe" not in self._stubs:
self._stubs["subscribe"] = self.grpc_channel.stream_stream(
"/google.cloud.pubsublite.v1.SubscriberService/Subscribe",
request_serializer=subscriber.SubscribeRequest.serialize,
response_deserializer=subscriber.SubscribeResponse.deserialize,
)
return self._stubs["subscribe"]
__all__ = ("SubscriberServiceGrpcAsyncIOTransport",)
| apache-2.0 | -8,276,490,674,875,871,000 | 43.549618 | 87 | 0.620459 | false | 4.729335 | false | false | false |
pbvarga1/qimage2ndarray | qimage2ndarray/__init__.py | 1 | 14973 | import sys as _sys
import numpy as _np
from .dynqt import QtGui as _qt
from .dynqt import qt as _qt_driver
if _qt_driver.name() == 'PythonQt':
from .qimageview import QImage2ndarray as _temp
_qimageview = _temp.qimageview
else:
from .qimageview_python import qimageview as _qimageview
__version__ = "1.5"
if _sys.byteorder == 'little':
_bgra = (0, 1, 2, 3)
else:
_bgra = (3, 2, 1, 0)
_bgra_fields = {'b': (_np.uint8, _bgra[0], 'blue'),
'g': (_np.uint8, _bgra[1], 'green'),
'r': (_np.uint8, _bgra[2], 'red'),
'a': (_np.uint8, _bgra[3], 'alpha')}
bgra_dtype = _np.dtype(_bgra_fields)
"""Complex dtype offering the named fields 'r','g','b', and 'a' and
corresponding long names, conforming to QImage_'s 32-bit memory layout."""
try:
_basestring = basestring
except NameError:
# 'basestring' undefined, must be Python 3
_basestring = str
def _qimage_or_filename_view(qimage):
if isinstance(qimage, _basestring):
qimage = _qt.QImage(qimage)
return _qimageview(qimage)
def raw_view(qimage):
"""Returns raw 2D view of the given QImage_'s memory. The result
will be a 2-dimensional numpy.ndarray with an appropriately sized
    integral dtype. (This function is not intended to be used
directly, but used internally by the other -- more convenient --
view creation functions.)
:param qimage: image whose memory shall be accessed via NumPy
:type qimage: QImage_
:rtype: numpy.ndarray_ with shape (height, width)"""
return _qimage_or_filename_view(qimage)
def byte_view(qimage, byteorder = 'little'):
"""Returns raw 3D view of the given QImage_'s memory. This will
always be a 3-dimensional numpy.ndarray with dtype numpy.uint8.
Note that for 32-bit images, the last dimension will be in the
[B,G,R,A] order (if little endian) due to QImage_'s memory layout
(the alpha channel will be present for Format_RGB32 images, too).
For 8-bit (indexed) images, the array will still be 3-dimensional,
i.e. shape will be (height, width, 1).
The order of channels in the last axis depends on the `byteorder`,
which defaults to 'little', i.e. BGRA order. You may set the
argument `byteorder` to 'big' to get ARGB, or use None which means
sys.byteorder here, i.e. return native order for the machine the
code is running on.
For your convenience, `qimage` may also be a filename, see
`Loading and Saving Images`_ in the documentation.
:param qimage: image whose memory shall be accessed via NumPy
:type qimage: QImage_
:param byteorder: specify order of channels in last axis
:rtype: numpy.ndarray_ with shape (height, width, 1 or 4) and dtype uint8"""
raw = _qimage_or_filename_view(qimage)
result = raw.view(_np.uint8).reshape(raw.shape + (-1, ))
if byteorder and byteorder != _sys.byteorder:
result = result[...,::-1]
return result
def rgb_view(qimage, byteorder = 'big'):
"""Returns RGB view of a given 32-bit color QImage_'s memory.
Similarly to byte_view(), the result is a 3D numpy.uint8 array,
but reduced to the rgb dimensions (without alpha), and reordered
(using negative strides in the last dimension) to have the usual
[R,G,B] order. The image must have 32 bit pixel size, i.e. be
RGB32, ARGB32, or ARGB32_Premultiplied. (Note that in the latter
case, the values are of course premultiplied with alpha.)
The order of channels in the last axis depends on the `byteorder`,
which defaults to 'big', i.e. RGB order. You may set the argument
`byteorder` to 'little' to get BGR, or use None which means
sys.byteorder here, i.e. return native order for the machine the
code is running on.
For your convenience, `qimage` may also be a filename, see
`Loading and Saving Images`_ in the documentation.
:param qimage: image whose memory shall be accessed via NumPy
:type qimage: QImage_ with 32-bit pixel type
:param byteorder: specify order of channels in last axis
:rtype: numpy.ndarray_ with shape (height, width, 3) and dtype uint8"""
if byteorder is None:
byteorder = _sys.byteorder
bytes = byte_view(qimage, byteorder)
if bytes.shape[2] != 4:
raise ValueError("For rgb_view, the image must have 32 bit pixel size (use RGB32, ARGB32, or ARGB32_Premultiplied)")
if byteorder == 'little':
return bytes[...,:3] # strip A off BGRA
else:
return bytes[...,1:] # strip A off ARGB
def alpha_view(qimage):
"""Returns alpha view of a given 32-bit color QImage_'s memory.
The result is a 2D numpy.uint8 array, equivalent to
byte_view(qimage)[...,3]. The image must have 32 bit pixel size,
i.e. be RGB32, ARGB32, or ARGB32_Premultiplied. Note that it is
not enforced that the given qimage has a format that actually
*uses* the alpha channel -- for Format_RGB32, the alpha channel
usually contains 255 everywhere.
For your convenience, `qimage` may also be a filename, see
`Loading and Saving Images`_ in the documentation.
:param qimage: image whose memory shall be accessed via NumPy
:type qimage: QImage_ with 32-bit pixel type
:rtype: numpy.ndarray_ with shape (height, width) and dtype uint8"""
bytes = byte_view(qimage, byteorder = None)
if bytes.shape[2] != 4:
raise ValueError("For alpha_view, the image must have 32 bit pixel size (use RGB32, ARGB32, or ARGB32_Premultiplied)")
return bytes[...,_bgra[3]]
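# Illustrative sketch (added comment, not in the original module): typical use
# of the channel views on a 32-bit image; a QImage(320, 240, ...) yields
# (height, width)-ordered arrays.
#
#   >>> qimg = _qt.QImage(320, 240, _qt.QImage.Format_ARGB32)
#   >>> rgb_view(qimg).shape     # (240, 320, 3), channels in R,G,B order
#   >>> alpha_view(qimg).shape   # (240, 320)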
def recarray_view(qimage):
"""Returns recarray_ view of a given 32-bit color QImage_'s
memory.
The result is a 2D array with a complex record dtype, offering the
named fields 'r','g','b', and 'a' and corresponding long names.
Thus, each color components can be accessed either via string
indexing or via attribute lookup (through numpy.recarray_):
For your convenience, `qimage` may also be a filename, see
`Loading and Saving Images`_ in the documentation.
>>> from PyQt4.QtGui import QImage, qRgb
>>> qimg = QImage(320, 240, QImage.Format_ARGB32)
>>> qimg.fill(qRgb(12,34,56))
>>>
>>> import qimage2ndarray
>>> v = qimage2ndarray.recarray_view(qimg)
>>>
>>> red = v["r"]
>>> red[10,10]
12
>>> pixel = v[10,10]
>>> pixel["r"]
12
>>> (v.g == v["g"]).all()
True
>>> (v.alpha == 255).all()
True
:param qimage: image whose memory shall be accessed via NumPy
:type qimage: QImage_ with 32-bit pixel type
:rtype: numpy.ndarray_ with shape (height, width) and dtype :data:`bgra_dtype`"""
raw = _qimage_or_filename_view(qimage)
if raw.itemsize != 4:
raise ValueError("For rgb_view, the image must have 32 bit pixel size (use RGB32, ARGB32, or ARGB32_Premultiplied)")
return raw.view(bgra_dtype, _np.recarray)
def _normalize255(array, normalize, clip = (0, 255)):
if normalize:
if normalize is True:
normalize = array.min(), array.max()
if clip == (0, 255):
clip = None
elif _np.isscalar(normalize):
normalize = (0, normalize)
nmin, nmax = normalize
if nmin:
array = array - nmin
if nmax != nmin:
scale = 255. / (nmax - nmin)
if scale != 1.0:
array = array * scale
if clip:
low, high = clip
_np.clip(array, low, high, array)
return array
def gray2qimage(gray, normalize = False):
"""Convert the 2D numpy array `gray` into a 8-bit, indexed QImage_
with a gray colormap. The first dimension represents the vertical
image axis.
The parameter `normalize` can be used to normalize an image's
value range to 0..255:
`normalize` = (nmin, nmax):
scale & clip image values from nmin..nmax to 0..255
`normalize` = nmax:
lets nmin default to zero, i.e. scale & clip the range 0..nmax
to 0..255
`normalize` = True:
scale image values to 0..255 (same as passing (gray.min(),
gray.max()))
If the source array `gray` contains masked values, the result will
have only 255 shades of gray, and one color map entry will be used
to make the corresponding pixels transparent.
A full alpha channel cannot be supported with indexed images;
instead, use `array2qimage` to convert into a 32-bit QImage.
:param gray: image data which should be converted (copied) into a QImage_
:type gray: 2D or 3D numpy.ndarray_ or `numpy.ma.array <masked arrays>`_
:param normalize: normalization parameter (see above, default: no value changing)
:type normalize: bool, scalar, or pair
:rtype: QImage_ with RGB32 or ARGB32 format"""
if _np.ndim(gray) != 2:
raise ValueError("gray2QImage can only convert 2D arrays" +
" (try using array2qimage)" if _np.ndim(gray) == 3 else "")
h, w = gray.shape
result = _qt.QImage(w, h, _qt.QImage.Format_Indexed8)
if not _np.ma.is_masked(gray):
for i in range(256):
result.setColor(i, _qt.qRgb(i,i,i))
_qimageview(result)[:] = _normalize255(gray, normalize)
else:
# map gray value 1 to gray value 0, in order to make room for
# transparent colormap entry:
result.setColor(0, _qt.qRgb(0,0,0))
for i in range(2, 256):
result.setColor(i-1, _qt.qRgb(i,i,i))
_qimageview(result)[:] = _normalize255(gray, normalize, clip = (1, 255)) - 1
result.setColor(255, 0)
_qimageview(result)[gray.mask] = 255
return result
def array2qimage(array, normalize = False):
"""Convert a 2D or 3D numpy array into a 32-bit QImage_. The
first dimension represents the vertical image axis; the optional
third dimension is supposed to contain 1-4 channels:
========= ===================
#channels interpretation
========= ===================
1 scalar/gray
2 scalar/gray + alpha
3 RGB
4 RGB + alpha
========= ===================
Scalar data will be converted into corresponding gray RGB triples;
if you want to convert to an (indexed) 8-bit image instead, use
`gray2qimage` (which cannot support an alpha channel though).
The parameter `normalize` can be used to normalize an image's
value range to 0..255:
`normalize` = (nmin, nmax):
scale & clip image values from nmin..nmax to 0..255
`normalize` = nmax:
lets nmin default to zero, i.e. scale & clip the range 0..nmax
to 0..255
`normalize` = True:
scale image values to 0..255 (same as passing (array.min(),
array.max()))
If `array` contains masked values, the corresponding pixels will
be transparent in the result. Thus, the result will be of
QImage.Format_ARGB32 if the input already contains an alpha
channel (i.e. has shape (H,W,4)) or if there are masked pixels,
and QImage.Format_RGB32 otherwise.
:param array: image data which should be converted (copied) into a QImage_
:type array: 2D or 3D numpy.ndarray_ or `numpy.ma.array <masked arrays>`_
:param normalize: normalization parameter (see above, default: no value changing)
:type normalize: bool, scalar, or pair
:rtype: QImage_ with RGB32 or ARGB32 format"""
if _np.ndim(array) == 2:
array = array[...,None]
elif _np.ndim(array) != 3:
raise ValueError("array2qimage can only convert 2D or 3D arrays (got %d dimensions)" % _np.ndim(array))
if array.shape[2] not in (1, 2, 3, 4):
raise ValueError("array2qimage expects the last dimension to contain exactly one (scalar/gray), two (gray+alpha), three (R,G,B), or four (R,G,B,A) channels")
h, w, channels = array.shape
hasAlpha = _np.ma.is_masked(array) or channels in (2, 4)
fmt = _qt.QImage.Format_ARGB32 if hasAlpha else _qt.QImage.Format_RGB32
result = _qt.QImage(w, h, fmt)
array = _normalize255(array, normalize)
if channels >= 3:
rgb_view(result)[:] = array[...,:3]
else:
rgb_view(result)[:] = array[...,:1] # scalar data
alpha = alpha_view(result)
if channels in (2, 4):
alpha[:] = array[...,-1]
else:
alpha[:] = 255
if _np.ma.is_masked(array):
alpha[:] *= _np.logical_not(_np.any(array.mask, axis = -1))
return result
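# Illustrative sketch (added comment): converting a float array with range
# normalization and reading the RGB channels back.
#
#   >>> a = _np.random.rand(240, 320, 3)
#   >>> qimg = array2qimage(a, normalize=True)  # scales a.min()..a.max() to 0..255
#   >>> rgb_view(qimg).shape
#   (240, 320, 3)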
def imread(filename, masked = False):
"""Convenience function that uses the QImage_ constructor to read an
image from the given file and return an `rgb_view` of the result.
This is intentionally similar to scipy.ndimage.imread (which uses
PIL), scipy.misc.imread, or matplotlib.pyplot.imread (using PIL
for non-PNGs).
For grayscale images, return 2D array (even if it comes from a 32-bit
representation; this is a consequence of the QImage API).
For images with an alpha channel, the resulting number of channels
will be 2 (grayscale+alpha) or 4 (RGB+alpha). Alternatively, one may
pass `masked = True' in order to get `numpy.ma.array <masked
arrays>`_ back. Note that only fully transparent pixels are masked
(and that masked arrays only support binary masks). The value of
`masked` is ignored when the loaded image has no alpha channel
(i.e., one would not get a masked array in that case).
This function has been added in version 1.3.
"""
qImage = _qt.QImage(filename)
isGray = qImage.isGrayscale()
if isGray and qImage.depth() == 8:
return byte_view(qImage)[...,0]
hasAlpha = qImage.hasAlphaChannel()
if hasAlpha:
targetFormat = _qt.QImage.Format_ARGB32
else:
targetFormat = _qt.QImage.Format_RGB32
if qImage.format() != targetFormat:
qImage = qImage.convertToFormat(targetFormat)
result = rgb_view(qImage)
if isGray:
result = result[...,0]
if hasAlpha:
if masked:
mask = (alpha_view(qImage) == 0)
if _np.ndim(result) == 3:
mask = _np.repeat(mask[...,None], 3, axis = 2)
result = _np.ma.masked_array(result, mask)
else:
result = _np.dstack((result, alpha_view(qImage)))
return result
def imsave(filename, image, normalize = False, format = None, quality = -1):
"""Convenience function that uses QImage.save to save an image to the
given file. This is intentionally similar to scipy.misc.imsave.
However, it supports different optional arguments:
:param normalize: see :func:`array2qimage` (which is used internally)
:param format: image filetype (e.g. 'PNG'), (default: check filename's suffix)
:param quality: see QImage.save (0 = small .. 100 = uncompressed, -1 = default compression)
:returns: boolean success, see QImage.save
This function has been added in version 1.4.
"""
qImage = array2qimage(image, normalize = normalize)
return qImage.save(filename, format, quality)
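# Illustrative sketch (added comment): a save/load round trip through the two
# convenience wrappers above; the path is arbitrary.
#
#   >>> imsave('/tmp/gradient.png', a, normalize=True)
#   True
#   >>> imread('/tmp/gradient.png').shape
#   (240, 320, 3)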
| bsd-3-clause | 2,436,135,751,722,920,000 | 36.153846 | 165 | 0.645896 | false | 3.620164 | false | false | false |
Backflipz/plugin.video.excubed | addon.py | 1 | 81609 | from whoosh.index import create_in
# from xbmcswift2 import Plugin
from kodiswift import Plugin
import os
import sys
import re
import json
import xbmc
import xbmcaddon
import xbmcplugin
import xbmcgui
import threading
import glob
import shlex
from BeautifulSoup import BeautifulSoup as BS
from whoosh.filedb.filestore import FileStorage
from whoosh.fields import *
from whoosh.qparser import QueryParser
import hurry.filesize as hf
import datetime
# import xbmcswift2_playlists
# import socket
plugin = Plugin()
# lists = xbmcswift2_playlists.Playlists(plugin)
# lib = os.path.join(plugin._addon_id, 'resources', 'lib' )
# print lib
olib = 'special://home' + '/addons/' + plugin._addon_id
lib = xbmc.translatePath(olib)
cache_dir = 'special://home' + '/userdata/addon_data/' \
+ plugin._addon_id
cache_dir += '/cache/'
cache_dir = xbmc.translatePath(cache_dir)
print lib
lib = os.path.join(lib, 'resources', 'lib')
print lib
sys.path.append(lib)
sys.path.append(xbmc.translatePath(os.path.join(os.getcwd(), 'resources'
, 'lib')))
import requests
# from xbmcswift2 import actions
from kodiswift import actions
import cfscrape
from pprint import pformat as pp
# from xdcc import XDCC
import xbot
import dataset
import copy
# from m2g import magnet2torrent as m2t
# from autonomotorrent.BTManager import BTManager
# from autonomotorrent.BTApp import BTApp,BTConfig
# plugin.log.info(cache_dir)
nick = plugin.get_setting('nickname')
db = dataset.connect('sqlite:///' + cache_dir + 'Meta.db')
table = db['meta']
scraper = cfscrape.create_scraper()
# Borrowed from metahandlers
import thetvdbapi
api = thetvdbapi.TheTVDB()
# s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
# s.bind((plugin.get_setting('host'),plugin.get_setting('listen_port',int)))
api_key = plugin.get_setting('api_key', str)
api_key = api_key.replace(' ', '')
headers = {'Authorization': api_key}
api_url = 'http://%s:%s/api/1.0/' % (plugin.get_setting('host', str),
plugin.get_setting('port', str))
tmp_path = plugin.get_setting('tmp_path', str)
tmp_path += '*.*'
dl_path = plugin.get_setting('xg_dl_path', str)
dl_path += '*.*'
log = plugin.log.info
whoosh_path = plugin.get_setting('whoosh_path', str)
class SEP(object):
def __init__(self, **entries):
self.__dict__.update(entries)
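# SEP is a minimal record type: show/episode dicts cached in plugin storage are
# rehydrated with SEP(**d) so later code (see file_meta) can use attribute
# access on them like the thetvdbapi episode objects.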
FA_api = 'a9494e131f434a23f1c130ec6cb8a2a3'
@plugin.cached_route('/')
def index():
items = [{'label': 'Search XG...', 'path': plugin.url_for('search',
search_term='first_page', page='1'),
'is_playable': False}, {'label': 'Enter Custom Message',
'path': plugin.url_for('play_local_file')},
{'label': 'Webpage Parsers',
'path': plugin.url_for('parsers')}]
# {'label': 'Enter Magnet Link',
# 'path': plugin.url_for('torrent')}] # ,
# {
# 'label' : 'Enter Custom File Request',
# 'path' : plugin.url_for('enter_custom')}]
return items
# @plugin.route('/torrent/')
# def torrent():
# labs = {'title': 'Test'}
# app = BTApp(save_dir=plugin.get_setting('xg_dl_path'),
# listen_port=plugin.get_setting('listen_port', int),
# enable_DHT=True)
# try:
# labs = get_meta()
# except:
# pass
# mag = plugin.keyboard(heading='Enter Magnet Link')
# try:
# Torrent().stop_all_torrents()
# except:
# pass
# app.save_dir = plugin.get_setting('xg_dl_path')
# config = BTConfig(m2t(mag, plugin.get_setting('tmp_path')))
# biggest = 0
# for f in config.metainfo.files:
# if f['length'] > biggest:
# biggest = f['length']
# path = f['path']
# path = plugin.get_setting('xg_dl_path') + path
# plugin.log.info(path)
# app.add_torrent(config)
# manager = BTManager(app, config)
# dialog = xbmcgui.DialogProgress()
# dialog.create('Preparing File')
# threading.Thread(target=manager.app.start_reactor).start()
# while not os.path.exists(path):
# plugin.log.info(manager.get_speed())
# if dialog.iscanceled():
# break
# dialog.close()
# t.join()
# plugin.finish([{
# 'label': labs['title'],
# 'info': labs,
# 'path': path,
# 'context_menu': [('Stop All Torrents',
# actions.background(app.stop_all_torrents()))],
# 'is_playable': True,
# }])
@plugin.route('/search/<search_term>/<page>/')
def search(
search_term='first_page',
page='1',
id=None,
labs=None,
):
# packs = xdcc_search.get_packs('http://xdcc.horriblesubs.info','naruto')
# plugin.log.info('Packs' + str(packs))
# %s.%s?searchTerm=%s' % (port,type,format,searchTerm)
if search_term == 'first_page':
keyboard = xbmc.Keyboard('', 'Enter Search Term', False)
keyboard.doModal()
if keyboard.isConfirmed():
search_term = keyboard.getText()
search_packets = 'packets.json?searchTerm=%s&maxResults=20&page=%s' \
% (search_term, page)
request = requests.get(api_url + search_packets, headers=headers)
results = request.json()
# results = json.loads(results)
items = []
idx = 0
for option in results['Results']:
guid_url = api_url + 'packets/%s/enable.json' % option['Guid']
item = {
'label': option['Name'] + ' || Size: %s'
% hf.size(option['Size']),
'path': plugin.url_for('play_file', url=guid_url,
name=option['Name']),
'is_playable': True,
'context_menu': [
('Assign Metadata', actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
from_XG=True,
name=False,
bot=False,
cache=False,
))),
('Reapply Metadata', actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
from_XG=True,
name=False,
bot=False,
cache=True,
))),
('Just Download',
actions.background(plugin.url_for('just_download',
url=guid_url, data=False))),
('Delete File',
actions.background(plugin.url_for('delete_file',
name=option['Name'], all_files=False))),
('Delete All Files',
actions.background(plugin.url_for('delete_file',
name=option['Name'], all_files=True))),
],
}
try:
if str(idx) == str(id):
item['info'] = labs
item['thumbnail'] = labs['cover_url']
item['properties'] = \
{'Fanart_Image': labs['backdrop_url']}
except:
pass
idx += 1
items.append(item.copy())
items.append({'label': 'Next Page >>',
'path': plugin.url_for('search',
search_term=search_term, page=str(int(page) + 1))})
return plugin.finish(items)
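# Illustrative sketch (added comment, not part of the addon): the XG search
# request used above, stripped of the Kodi plumbing; `term` is a placeholder.
#
#   r = requests.get(api_url + 'packets.json?searchTerm=%s&maxResults=20&page=1' % term,
#                    headers=headers)
#   names = [p['Name'] for p in r.json()['Results']]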
# noinspection PyArgumentList
@plugin.route('/play/<name>/<url>/')
def play_file(name, url, data=None):
if data is None:
data = {}
plugin.log.info('Url is: %s' % url)
# Check to see if file already exists
tmp_files = glob.glob(tmp_path)
tmpName = re.sub(r'[\W_]+', '', name)
tmpName = tmpName.lower()
dl_file = False
local_url = ''
plugin.log.info('Temp Name is' + tmpName)
dl_files = glob.glob(dl_path)
for filename in dl_files:
plugin.log.info('Filepath is ' + re.sub(r'[\W_]+', '',
filename).lower())
if tmpName in re.sub(r'[\W_]+', '', filename).lower():
local_url = filename
dl_file = True
break
if local_url == '':
for filename in tmp_files:
plugin.log.info('Filepath is ' + filename)
if tmpName in filename:
local_url = filename
break
if len(local_url) > 0:
plugin.set_resolved_url(local_url)
else:
# if data:
# headers['Content-Type'] = 'application/json'
# r = requests.put(url,headers = headers, data = json.dumps(data))
# plugin.log.info('Url is %s \n Data is %s \n Status is %s \n Text is %s' % (r.url,data,r.status_code,r.text))
    # else: r = requests.post(url, headers=headers)
if data:
stream(
server=data['server'],
channel=data['channel'],
bot=data['bot'],
packetId=data['packetId'],
filename=data['packetName'],
download=True,
)
# if manual_meta: infoLabels = get_meta()
# else: infoLabels = {'title' : name,'cover_url':''}
tmp_files = glob.glob(tmp_path)
tmpName = re.sub(r'[\W_]+', '', name)
tmpName = tmpName.lower()
local_url = ''
plugin.log.info('Temp Name is' + tmpName)
for filename in tmp_files:
plugin.log.info('Filepath is ' + filename)
if tmpName in filename:
local_url = filename
break
plugin.log.info('Playing url: %s' % local_url)
# item = {'info':infoLabels, 'path' : local_url , 'thumbnail' : infoLabels['cover_url']}
plugin.set_resolved_url(local_url)
@plugin.route('/play_local_file/')
def play_local_file():
# tmp_files = glob.glob(tmp_path)
# keyboard = xbmc.Keyboard('','Enter File Name',False)
# keyboard.doModal()
# if keyboard.isConfirmed(): name = keyboard.getText()
# names = name.strip()
# local_url = ''
# for filename in tmp_files:
# plugin.log.info('Filepath is ' + filename)
# for term in names:
# if term in filename:
# allTerms = True
# break
# else:
# allTerms = False
# break
    # if allTerms: local_url = filename
# if local_url == '':
# dialog = xbmcgui.Dialog()
# dialog.notification(message = 'Could Not find file')
# plugin.log.info('Playing url: %s' % local_url)
# item = {'path':local_url,'label':name}
# plugin.set_resolved_url(local_url)
s = plugin.get_storage('message')
dialog = xbmcgui.Dialog()
options = ['Manual', 'Storage']
storageopt = []
# try:
for i in s:
plugin.log.info(i)
storageopt.append(i)
# except: pass
plugin.log.info(options)
index = dialog.select('Choose', options)
if index == 0:
server = \
plugin.keyboard(heading='Enter server (Ex: irc.server.net)')
channel = plugin.keyboard(heading='Enter channel (Ex: #channel)'
)
s[channel] = {'server': server, 'channel': channel}
else:
index = dialog.select('Stored', storageopt)
server = s[storageopt[index]]['server']
channel = storageopt[index]
plugin.log.info(channel + server)
filename = \
plugin.keyboard(heading='Enter filename (Ex: A.Movie.mkv)')
if '#' not in channel:
channel = '#' + channel
message = \
plugin.keyboard(heading='Enter message (Ex: /msg bot xdcc send #packetid)'
)
parts = shlex.split(message)
bot = parts[1]
id = parts[4].replace('#', '')
labs = get_meta()
return [{
'label': labs['title'],
'info': labs,
'path': plugin.url_for(
'stream',
download=False,
server=server,
channel=channel,
bot=bot,
packetId=id,
filename=filename,
),
'is_playable': True,
}]
@plugin.route('/webpages/')
def parsers():
items = [{'label': 'Add a Channel...',
'path': plugin.url_for('add_server')},
{'label': 'Search ixIRC...',
'path': plugin.url_for('search_ix', query='**just_search**'
, page='0')}, {'label': 'Search Haruhichan...',
'path': plugin.url_for('haruhichan', key='None')},
{'label': 'Search xweasel...', 'path': plugin.url_for('xweasel', query='lala', page='1')},
{'label': 'Ginpachi-Sensei', 'path': plugin.url_for('gin_sensei', search='blah')},
{'label': 'Hi10', 'path': plugin.url_for('cloud10')}]
for storage in plugin.list_storage():
if storage == 'meta_cache' or storage == 'showcache' or storage \
== 'message':
continue
try:
storage = plugin.get_storage(storage)
except:
continue
# plugin.log.info('Storage %s' % storage)
try:
items.append({'label': storage['name'],
'path': plugin.url_for('channel',
name=storage['name']),
'context_menu': [('Refresh Packlist',
actions.background(plugin.url_for('refresh',
name=storage['name']))), ('Refresh Local Packlist',
actions.background(plugin.url_for('refresh',
name=storage['name']+".Local"))),('Refresh AniDB',
actions.background(
plugin.url_for(
'refresh',
name='animetitles')))]})
except:
pass
return items
@plugin.route('/add_server/')
def add_server():
global name, server, url
keyboard = xbmc.Keyboard('',
'Enter Host Server (Ex: irc.server.net)',
False)
keyboard.doModal()
if keyboard.isConfirmed():
server = keyboard.getText()
keyboard = xbmc.Keyboard('', 'Enter Channel Name', False)
keyboard.doModal()
if keyboard.isConfirmed():
name = keyboard.getText()
channel = plugin.get_storage('%s' % name, ttl=60 * 24 * 5)
channel['name'] = name
keyboard = xbmc.Keyboard('',
'Enter Webpage Url (Ex: http://xdcc.channel.com/'
, False)
keyboard.doModal()
if keyboard.isConfirmed():
url = keyboard.getText()
packlist = get_packlist(url)
channel['url'] = url
channel['server'] = server
channel['packlist'] = packlist
channel['bots'] = []
@plugin.cached_route('/webpages/<name>/')
def channel(name):
items = [{'label': 'Search Packlist...',
'path': plugin.url_for('search_channel', name=name,
bot='list_all')}, {'label': 'List All Packlist',
'path': plugin.url_for('list_packlist', name=name,
search_term='list_all', bot='list_all',
page='1')},
{'label': 'List Bots', 'path': plugin.url_for('list_bots',
channel=name)}]
return items
def file_meta(name):
wstorage = FileStorage(whoosh_path)
ix = wstorage.open_index()
google = ix.searcher()
try:
show, ep = name.split(']')[1].split('[')[0].lstrip().rstrip().replace(' - ', ' ').rpartition(
re.search('\d{1,3}', name).group(0))[:2]
except:
show = name.split('_-_')[0].rpartition(')_')[2].replace('_', ' ')
ep = name.split('_-_')[1].split('_')[0]
plugin.log.info('ShowEp %s %s' % (show, ep))
if int(ep) == 0: return {}
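    # Metadata pipeline (added comment): the parsed show name is looked up in a
    # local Whoosh index of AniDB titles to get an AniDB id, the AniDB HTTP API
    # supplies the episode air date, and that air date (or the absolute /
    # episode number as a fallback) is matched against TheTVDB episodes via
    # thetvdbapi; everything is cached per show in plugin storage.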
info = plugin.get_storage('%s' % show)
infoLabels = {}
plugin.log.info('SHOW STORAGE %s' % pp([x for x in info.items() if len(repr(x[1])) < 20]))
if len(info.keys()) == 0 or info is None or (
'last' in info.keys() and datetime.datetime.today().toordinal() - info['last'] >= 5):
info['last'] = datetime.datetime.today().toordinal()
query = QueryParser("title", ix.schema).parse(show)
results = google.search(query)
plugin.log.info('SEARCH %s' % pp([(x['title'], x['content']) for x in results[:5]]))
info['noresults'] = 0 if len(results) else 1
v = []
ot = None
if len(results):
aid = results[0]['aid']
info['aid'] = aid
log('REQUESTING ANIDB DATA')
r = requests.get(
'http://api.anidb.net:9001/httpapi?request=anime&client=anidbtvdbmeta&clientver=1&protover=1&aid=%s' % aid)
log("Status %s\n" % r.status_code)
soup = BS(r.text)
v = [x for x in soup.findAll('epno') if x.text == str(int(ep))]
info['aniupdate'] = 0 if len(v) else 1
plugin.log.info('V %s' % v)
'''try:
log('CHANGING SHOW SEARCH FROM %s to %s' %(show,results[0]['content'][0]))
show = results[0]['content'][0]
except:
pass'''
ot = results[0]['content']
ot = [ot[-1]] + ot[:-1]
log('OT %s' % ot)
google.close()
id = None
theaders = {'Content-Type': 'application/json',
'trakt-api-version': '2',
'trakt-api-key': '05bcd2c0baf2685b8c196162d099e539033c21f7aa9fe1f87b234c2d62c2c1e4'}
results = \
requests.get('https://api-v2launch.trakt.tv/search?query=%s&type=show'
% show, headers=theaders)
log('STATUS %s' % results)
results = results.json()
# results = api.get_matching_shows(title)
search_meta = []
for item in results:
option = {
'tvdb_id': item['show']['ids']['tvdb'],
'title': item['show']['title'],
'imdb_id': item['show']['ids']['imdb'],
'trakt_id': item['show']['ids']['trakt'],
}
search_meta.append(option)
log('Search Meta %s' % pp(search_meta))
if len(search_meta):
id = str(search_meta[0]['tvdb_id'])
info['id'] = id
log('ID %s' % id)
else:
shws = api.get_matching_shows(show)
log('Matching Shows %s' % pp(shws))
try:
id = shws[0][0] if show != 'Drifters' else shws[1][0]
except:
if ot is not None:
for x in ot:
try:
id = api.get_matching_shows(x)[0][0] if show != 'Drifters' else \
api.get_matching_shows(x)[1][0]
if len(id) > 0: break
except:
pass
info['noid'] = 0 if id is not None else 1
if id is None: return {}
info['id'] = id
if info['noid'] == 0: info['aniupdate'] = 0
e = api.get_show_and_episodes(id)
info['shownep'] = [e[0].__dict__, [i.__dict__ for i in e[1]]]
log(pp(info))
if len(v):
info['anidb'] = repr(v[0].parent.parent)
try:
info['EP%sairdate' % ep] = v[0].parent.airdate.text
log('AIRDATE %s' % v[0].parent.airdate.text)
airdate = api.convert_date(v[0].parent.airdate.text)
episode = [i for i in e[1] if 2 >= (lambda x: x.days)(
airdate - api.convert_date(
i.first_aired) if i.first_aired else airdate - airdate) >= -2] # Was a -9 after else
except Exception, ed:
#log(e.__dict__)
log('ERROR %s LINE: %s' % (ed, sys.exc_info()[2].tb_lineno))
log('AIRDATE DIDNT WORK ON EPISODE %s' % ep)
try:
episode = [i for i in e[1] if int(i.absolute_number) == int(ep)]
except:
episode = [i for i in e[1] if int(i.episode_number) == int(ep)]
info['tvupdate'] = 0 if len(episode) else 1
try:
infoLabels = transform_ep_object(episode[0])
except Exception, excptn:
log('ERROR %s LINE: %s' % (excptn, sys.exc_info()[2].tb_lineno))
infoLabels['TVShowTitle'] = e[0].name
infoLabels['backdrop_url'] = e[0].fanart_url
info['EP%s' % ep] = infoLabels
plugin.log.info('INFO %s' % info.keys())
return infoLabels
elif id:
episode = [x for x in e[1] if
(lambda i: int(i.absolute_number) if i.absolute_number != '' else int(i.episode_number))(
x) == int(ep)]
info['tvupdate'] = 0 if len(episode) else 1
try:
infoLabels = transform_ep_object(episode[0])
except Exception, excptn:
log('ERROR %s LINE: %s' % (excptn, sys.exc_info()[2].tb_lineno))
infoLabels['TVShowTitle'] = e[0].name
infoLabels['backdrop_url'] = e[0].fanart_url
info['EP%s' % ep] = infoLabels
plugin.log.info('INFO %s' % info.keys())
return infoLabels
else:
if 'EP%s' % ep in info.keys():
infoLabels = info['EP%s' % ep]
return infoLabels
if info['noid']: return {}
if info['aniupdate']:
query = QueryParser("title", ix.schema).parse(show)
results = google.search(query)
aid = results[0]['aid']
google.close()
info['aid'] = aid
r = requests.get(
'http://api.anidb.net:9001/httpapi?request=anime&client=anidbtvdbmeta&clientver=1&protover=1&aid=%s' % aid)
log("Status %s\n" % r.status_code)
log("HTML CODE: %s" % r.text)
soup = BS(r.text)
v = [x for x in soup.findAll('epno') if x.text == str(int(ep))]
info['anidb'] = repr(v[0].parent.parent)
info['EP%sairdate' % ep] = v[0].parent.airdate.text
info['aniupdate'] = 0 if len(v) else 1
if info['tvupdate']:
e = api.get_show_and_episodes(info['id'])
info['shownep'] = [e[0].__dict__, [i.__dict__ for i in e[1]]]
try:
airdate = api.convert_date(info['EP%sairdate' % ep])
episode = [i for i in e[1] if 2 >= (lambda x: x.days)(airdate - api.convert_date(i.first_aired)) >= -2]
except Exception, excptn:
log('ERROR %s LINE: %s' % (excptn, sys.exc_info()[2].tb_lineno))
try:
episode = [i for i in e[1] if int(i.absolute_number) == int(ep)]
except:
episode = [i for i in e[1] if int(i.episode_number) == int(ep)]
info['tvupdate'] = 0 if len(episode) else 1
try:
infoLabels = transform_ep_object(episode[0])
except Exception, excptn:
log('ERROR %s LINE: %s' % (excptn, sys.exc_info()[2].tb_lineno))
infoLabels['TVShowTitle'] = e[0].name
infoLabels['backdrop_url'] = e[0].fanart_url
info['EP%s' % ep] = infoLabels
return infoLabels
if 'EP%s' % ep not in info.keys():
e = [SEP(**info['shownep'][0]), [SEP(**i) for i in info['shownep'][1]]]
try:
soup = BS(info['anidb'])
v = [x for x in soup.findAll('epno') if x.text == str(int(ep))]
info['EP%sairdate' % ep] = v[0].parent.airdate.text
airdate = api.convert_date(v[0].parent.airdate.text)
episode = [i for i in e[1] if 2 >= (lambda x: x.days)(airdate - api.convert_date(i.first_aired)) >= -2]
info['tvupdate'] = 0 if len(episode) else 1
try:
infoLabels = transform_ep_object(episode[0])
except Exception, excptn:
log(excptn)
infoLabels['TVShowTitle'] = e[0].name
infoLabels['backdrop_url'] = e[0].fanart_url
info['EP%s' % ep] = infoLabels
plugin.log.info('INFO %s' % info.keys())
return infoLabels
except Exception, er:
plugin.log.info('EP ERROR %s' % er)
try:
episode = [x for x in e[1] if x.absolute_number != '' and int(x.absolute_number) == int(ep)]
except:
episode = [x for x in e[1] if x.episode_number != '' and int(x.episode_number) == int(ep)]
info['tvupdate'] = 0 if len(episode) else 1
infoLabels = transform_ep_object(episode[0])
infoLabels['TVShowTitle'] = e[0].name
infoLabels['backdrop_url'] = e[0].fanart_url
info['EP%s' % ep] = infoLabels
plugin.log.info('INFO %s' % info.keys())
return infoLabels
else:
return {}
@plugin.route('/webpages/<name>/list_packlist/<bot>/<search_term>/<page>')
def list_packlist(
name,
search_term='',
bot='',
page='1',
labs=None,
id='',
cache='nope'
):
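    """List the cached packlist for channel <name>, optionally filtered by bot and by
    search terms (every term must appear in the filename), as playable items with
    metadata and context-menu actions; <labs> overrides the item at index <id>."""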
global all_Terms
if labs is None:
labs = {}
page = int(page)
cache = plugin.get_storage('%s' % name)
log(cache.keys())
packlist = copy.copy(cache['packlist'])
items = []
prev = (page - 1) * 20
curr = page * 20
if bot != 'list_all':
bot_packlist = []
for item in packlist:
if bot == item['bot']:
bot_packlist.append(item)
packlist = bot_packlist
if search_term != 'list_all':
search_packlist = []
search_terms = search_term.split()
plugin.log.info('Search Terms %s' % search_terms)
for i in packlist:
for term in search_terms:
if term.lower() in i['filename'].lower():
all_Terms = True
else:
all_Terms = False
break
if all_Terms:
search_packlist.append(i)
packlist = search_packlist
idx = 0
for item in packlist: # [prev:curr]:
flabs = {'title':'','plot':'','season':'','episode':'','premiered':''}
try:
flabs.update(file_meta(item['filename']))
flabs['plot'] = item['filename'] + ' || Size: ' + str(item['size']) + ' MB || Bot : ' + item[
'bot'] + '\n\n' + flabs['plot']
log(flabs['premiered'])
try:
flabs['Size'] = api.convert_date(flabs['premiered']).toordinal()
except Exception, e:
log(e)
flabs['Size'] = flabs['premiered']
except Exception, ed:
log('ERROR %s LINE: %s' % (ed, sys.exc_info()[2].tb_lineno))
flabs = {}
log(pp(flabs))
items.append({
'label': item['filename'] + ' || Size: '
+ str(item['size']) + ' MB || Bot : ' + item['bot'
],
'path': plugin.url_for(
'stream',
download=item['size'],
server=cache['server'],
channel=name,
bot=item['bot'],
packetId=item['packetId'],
filename=item['filename'],
),
'is_playable': True,
'context_menu': [('Assign Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
name=name,
bot=bot,
from_XG=False,
cache=False,
))), ('Reapply Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
name=name,
bot=bot,
from_XG=False,
cache='reapply',
))), ('Next Episode',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
name=name,
bot=bot,
from_XG=False,
cache='next',
))), ('Previous Episode',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
name=name,
bot=bot,
from_XG=False,
cache='prev',
))), ('File Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=search_term,
page=page,
name=name,
bot=bot,
from_XG=False,
cache=item['filename']
))), ('Just Download',
actions.background(plugin.url_for(
'stream',
download=True,
server=cache['server'],
channel=name,
bot=item['bot'],
packetId=item['packetId'],
filename=item['filename'],
))), ('Delete File',
actions.background(plugin.url_for('delete_file'
, name=item[
'filename'],
all_files=False))),
('Delete All Files',
actions.background(plugin.url_for('delete_file'
, name=item['filename'], all_files=True)))],
'info': flabs if flabs else '',
'thumbnail': flabs['cover_url'] if 'cover_url' in flabs.keys() else '',
'properties': {'Fanart_Image': flabs['backdrop_url']} if 'backdrop_url' in flabs.keys() else '',
'info_type': 'video'
})
try:
if str(idx) == str(id):
items[idx]['info'] = labs
items[idx]['thumbnail'] = labs['cover_url']
items[idx]['properties'] = \
{'Fanart_Image': labs['backdrop_url']}
except:
pass
idx += 1
# if curr <= len(packlist):
# items.append({'label': 'Next Page >>',
# 'path': plugin.url_for('list_packlist', name=name,
# search_term=search_term, bot=bot, page=str(page
# + 1))})
# if page > 1:
# items.insert(0, {'label': '<< Previous Page',
# 'path': plugin.url_for('list_packlist', name=name,
# search_term=search_term, bot=bot, page=str(page
# - 1))})
plugin.finish(items=items, sort_methods=['Size'])
@plugin.route('/webpages/<name>/search/<bot>/')
def search_channel(name, bot='all_bots'):
lastsearch = plugin.get_storage('lastsearch')
if 'last' not in lastsearch.keys():
lastsearch['last'] = ''
search_term = plugin.keyboard(default=lastsearch['last'], heading='Enter Search Term')
lastsearch['last'] = search_term
return list_packlist(name=name, search_term=search_term, page='1', bot=bot)
# plugin.finish(items=[{'label': 'Results',
# 'path': plugin.url_for('list_packlist', name=name, search_term=search_term, page='1',
# bot=bot)}])
@plugin.route('/webpages/<channel>/bots/')
def list_bots(channel):
cache = plugin.get_storage(channel)
packlist = cache['packlist']
log(cache.keys())
if not cache['bots']:
for item in packlist:
log('KEYS %s' % item.keys())
if item['bot'] not in str(cache['bots']):
cache['bots'].append({'label': item['bot'],
'path': plugin.url_for('bots', channel=channel,
bot=item['bot'])})
return cache['bots']
@plugin.cached_route('/webpages/<channel>/bots/<bot>/')
def bots(channel, bot):
return [{'label': 'Search Bot Packlist',
'path': plugin.url_for('search_channel', name=channel,
bot=bot)}, {'label': 'List All Packs for %s' % bot,
'path': plugin.url_for('list_packlist', name=channel,
search_term='list_all', bot=bot, page='1')}]
@plugin.route('/update_packlist/<name>/')
def refresh(name):
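    """Rebuild the whoosh title index from the local anime-titles.xml when called with
    'animetitles'; otherwise re-read or re-download the channel's packlist and rebuild
    its bot list while showing a progress dialog."""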
if name == 'animetitles':
# t = requests.get('http://anidb.net/api/anime-titles.xml.gz')
# log('ANITITLES STATUS %s' % t.status_code)
anilist = xbmc.translatePath(olib) + '\\anime-titles.xml'
import shutil
with open(anilist, 'rb') as ani:
soup = BS(ani)
log('FINISHED PARSING BS ANITITLES')
shutil.rmtree(whoosh_path)
os.mkdir(whoosh_path)
log('REMOVED ORIGINAL WHOOSH PATH')
wstorage = FileStorage(whoosh_path)
# ix = wstorage.open_index()
log('OPENING WHOOSH INDEX')
schema = Schema(title=TEXT(stored=True), aid=NUMERIC(stored=True), content=NGRAMWORDS(stored=True))
ix = create_in(whoosh_path, schema)
writer = ix.writer()
log('BEGINNING WRITING PROCESS')
for x in soup.findAll('title', type='main'):
c = [unicode(i.text) for i in x.parent.findAll('title', attrs={'xml:lang': 'en'})]
c.append(unicode(x.text))
writer.add_document(title=x.text, aid=x.parent['aid'], content=c)
writer.commit()
log('FINISHED WRITING PROCESS')
local = 0
if '.Local' in name:
local = 1
name=name.split('.Local')[0]
storage = plugin.get_storage(name)
if local:
if 'local' not in storage.keys():
storage['local'] = plugin.keyboard(heading='Enter local Packlist location')
if 'packlist' not in storage.keys():
storage['packlist'] = ''
storage['packlist'] = get_packlist(storage['local'],local)
else:
storage['packlist'] = get_packlist(storage['url'],local)
y = len(storage['packlist'])
dlg = xbmcgui.DialogProgress()
x = 0
dlg.create("Refreshing...")
for item in storage['packlist']:
if item['bot'] not in str(storage['bots']):
storage['bots'].append({'label': item['bot'],
'path': plugin.url_for('bots',
channel=name, bot=item['bot'])})
x += 1
        dlg.update(int(float(x) / y * 100), item['bot'])
def get_packlist(url,local=0):
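    """Fetch a packlist page (or read a local file when local != 0) and parse its
    javascript rows into dicts with 'bot', 'packetId', 'filename' and 'size' keys."""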
if local==0:
url += 'search.php'
specific = xbmcgui.Dialog().yesno('Select Specific Bot',"Add a Specific Bot Nickname?")
if specific:
url+= '?nick=' + plugin.keyboard()
try:
r = scraper.get(url)
except:
r = requests.get(url)
plugin.log.info('Packlist Status %s' % r)
if str(r.status_code) != '200':
xbmcgui.Dialog().ok(line1 = "Failed to get Packlist status %s" % r.status_code, heading = '')
text = r.text
else:
text = open(url, 'rb').read()
m = re.findall('= (.+?);\n', text)
items = []
for item in m:
item = item.replace('b:', "'bot':").replace('n:', "'packetId':"
).replace('f:', "'filename':").replace('s:', "'size':")
try:
dict = eval(item)
items.append(dict.copy())
except:
pass
return items
@plugin.cached(ttl=60 * 24 * 3)
def get_gin():
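    """Download and parse Gin.txt into pack entries and store them as the
    'Ginpachi-Sensei' packlist (result cached for three days)."""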
plugin.log.info('Getting Text')
with open(cache_dir + 'Gin.txt', 'wb') as gtxt:
gtxt.write(scraper.get('https://gin.sadaharu.eu/Gin.txt').text)
with open(cache_dir + 'Gin.txt', 'rb') as gtxt:
items = []
for x in gtxt.readlines():
if x[0] == '#' and x[:3] != '#1 ':
num = x.find(' ')
num = x[1:num]
s = x.find('[') + 1
f = x.find(']') - 1
size = x[s:f]
size = int(size) if '.' not in size else float(size)
if size < 100 and x[f] == 'M': size *= 10
if x[f] == 'G': size = int(hf.size(size * 1073741824, [(1048576, '')]))
if x[f] == 'K': size = int(hf.size(size * 1024, [(1048576, '')]))
name = x[f + 3:-1]
items.append({'packetId': num, 'filename': name, 'bot': 'Gintoki', 'size': size})
g = plugin.get_storage('Ginpachi-Sensei')
g.update({'packlist': items, 'server': 'irc.rizon.net'})
@plugin.route('/gin_sensei/<search>')
def gin_sensei(search):
get_gin()
if search != 'none':
lastsearch = plugin.get_storage('lastsearch')
search = plugin.keyboard(default=lastsearch['last'], heading='Enter Search Term')
lastsearch['last'] = search
return [{'label': 'Results',
'path': plugin.url_for(list_packlist, name='Ginpachi-Sensei', search_term=search, page='1',
bot='Gintoki')}]
@plugin.route('/stream/<download>/<server>/<channel>/<bot>/<packetId>/<filename>')
def stream(
server,
channel,
bot,
packetId,
filename,
download=False,
):
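    """Start an IRC pack download for the requested file if it is not already on disk;
    when <download> carries the expected size, show a buffering dialog and resolve the
    local path for playback once enough of the file has arrived."""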
if '#' not in channel:
channel = '#' + channel
data = {
'server': server,
'channel': channel,
'bot': bot,
'packetId': int(packetId),
'packetName': filename,
}
# dl_path = plugin.get_setting('xg_dl_path',str)
# plugin.log.info(dl_path)
# from data import Networks
# networks = Networks()
# import socket
    # server = socket.gethostbyname(server)

fstring = plugin.get_setting('xg_dl_path', str) + filename.replace("'","_")
log(fstring)
log('EXISTS %s' % os.path.exists(fstring))
if bot == 'Ginpachi-Sensei': bot = 'Gintoki'
plugin.log.info(channel)
# if str(download) == 'True':
# pass
# else:
# return play_file(filename, url='', data=data)
if download == 'True' or not os.path.exists(fstring):
log('IRC DOWNLOAD')
        sc = '#mg-chat' if channel == '#moviegods' else None
        sc = '#zw-chat' if channel == '#Zombie-Warez' else sc
c = xbot.Download(channel=channel, server=server,
numPaquet=int(packetId), nomRobot=bot, secondChannel=channel,
nickname=nick)
if channel == '#moviegods':
c.secondChannel = '#mg-chat'
if channel == '#Zombie-Warez':
c.secondChannel = '#zw-chat'
if channel == '#Ginpachi-Sensei':
c.secondChannel = ''
d = xbot.Grabator(
channel=channel,
secondChannel='',
server=server,
numPaquet=int(packetId),
nomRobot=bot,
nickname=nick,
objetDL=c
)
if channel == '#moviegods':
d.secondChannel = '#mg-chat'
if channel == '#Ginpachi-Sensei':
d.secondChannel = ''
t = threading.Thread(target=d.start)
t.start()
# x.start()
# t = threading.Thread(target=d.start)
# t.start()
# t.join()
streamlink = 'http://localhost:9085/vfs/%s' % fstring
if download.isdigit():
log('Start play process')
dialog = xbmcgui.DialogProgress()
size = float(download)
status = lambda x: (float(x) / size) * 100
dialog.create('Downloading File', 'Checking if it Exists...')
cancel = 0
tsys = copy.copy(hf.traditional)
tsys = [(tsys[-3][0], '')]
b = plugin.get_setting('bf_time', int)
up = dialog.update
log('Checking existence')
while not os.path.exists(fstring):
up(0)
if dialog.iscanceled():
cancel = 1
break
log('Found')
up(0, 'File Found')
xsize = os.path.getsize(fstring)
import timeit
start = timeit.default_timer()
wait = 0
ysize = 0
from VideoParser import VideoParser as VP
while wait <= 5:
up(int(status(hf.size(os.path.getsize(fstring), tsys))),
'Downloading File', '{} of {}'.format(hf.size(os.path.getsize(fstring),
hf.traditional),
size))
ysize = os.path.getsize(fstring) - xsize
wait = timeit.default_timer() - start
spd = (ysize / wait) / float(hf.alternative[3][0])
log('SPEED %.2f M/s' % spd)
# lngth = 0
# from multiprocessing.pool import ThreadPool
# p = ThreadPool(1)
# l = p.apply_async(VP().getVideoLength,(fstring,))
# while lngth == 0:
# lngth = l.get()
# log('VP Length %s' % lngth)
factor = b * (((size / 1420) * 2) / spd) if ysize != 0 else b
log('FACTOR %s' % factor)
factor = factor if factor <= 100 else 90
while status(hf.size(os.path.getsize(fstring),
tsys)) <= factor: # ((float(size)/5)/size)*.6*100:# while status(hf.size(os.path.getsize(fstring), tsys)) <= b:
up(int(status(hf.size(os.path.getsize(fstring), tsys))),
'Downloading File', '{} of {}'.format(hf.size(os.path.getsize(fstring),
hf.traditional),
size))
if dialog.iscanceled():
cancel = 1
break
log('Cancel: %s' % cancel)
if not cancel:
dialog.close()
plugin.set_resolved_url(fstring)
def get_meta():
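    """Interactively look up show or movie metadata (Trakt search, TVDB episodes or a
    previously stored show), remember it as the latest selection in the table and
    return the resulting infoLabels dict."""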
dialog = xbmcgui.Dialog()
showcache = plugin.get_storage('showcache')
optionlist = ['tvshow', 'movie', 'Storage', 'none']
storagelist = []
try:
for show in showcache:
plugin.log.info(showcache)
storagelist = [x for x in showcache if x != 'last']
plugin.log.info(storagelist)
except Exception, e:
plugin.log.info('ERROR %s' % e)
imdb = ''
tvdb = ''
tmdb = ''
headers = {'Content-Type': 'application/json',
'trakt-api-version': '2',
'trakt-api-key': '05bcd2c0baf2685b8c196162d099e539033c21f7aa9fe1f87b234c2d62c2c1e4'}
index = dialog.select('Choose Video Type', optionlist)
stype = optionlist[index]
search_meta = []
option_list = []
if index == 3: return {}
plugin.log.info('INDEX: %s' % index)
if index == 0 or index == 2:
if stype == 'tvshow':
keyboard = xbmc.Keyboard('', 'Enter a Title', False)
keyboard.doModal()
if keyboard.isConfirmed():
title = keyboard.getText()
results = \
requests.get('https://api-v2launch.trakt.tv/search?query=%s&type=show'
% title, headers=headers).json()
# results = api.get_matching_shows(title)
for item in results:
option = {
'tvdb_id': item['show']['ids']['tvdb'],
'title': item['show']['title'],
'imdb_id': item['show']['ids']['imdb'],
'trakt_id': item['show']['ids']['trakt'],
'year': item['show']['year']
}
search_meta.append(option)
for option in search_meta:
disptitle = option['title'] + ' (' + str(option['year']) + ')'
option_list.append(disptitle)
index = dialog.select('Choose', option_list)
Show = search_meta[index]
shownep = api.get_show_and_episodes(Show['tvdb_id'])
showcache[str(Show['title'])] = {'title': Show['title'],
'data': [shownep[0].__dict__, [x.__dict__ for x in
shownep[1]]],
'day': datetime.datetime.today().toordinal()}
showcache['last'] = showcache[str(Show['title'])]
elif stype == 'Storage':
# xbmc.sleep(200)
# showcache.sync()
today = datetime.datetime.today().toordinal()
index = dialog.select('Stored Meta', storagelist)
sdata = showcache[storagelist[index]]
showcache['last'] = sdata
data = sdata['data']
if today - sdata['day'] <= 5:
shownep = [SEP(**data[0]), [SEP(**x) for x in data[1]]]
else:
shownep = api.get_show_and_episodes(data[0]['id'])
showcache[storagelist[index]]['data'] = [shownep[0].__dict__, [x.__dict__ for x in shownep[1]]]
plugin.log.info('STORAGE FOUND')
stype = 'tvshow'
Show = {'title': shownep[0].name, 'tvdb_id': shownep[0].id,
'imdb_id': shownep[0].imdb_id}
option2 = '-1'
season_list = []
for item in shownep[1]:
if option2 != item.season_number:
option2 = item.season_number
ep_list = []
for item2 in shownep[1]:
if item2.season_number == option2:
ep_list.append(item2)
start_ep = ep_list[0].absolute_number
end_ep = ep_list[-1].absolute_number
season_list.append('Season %s Episodes (%s - %s)'
% (option2, start_ep, end_ep))
index = dialog.select('Choose Season', season_list)
season = re.search('Season (.+?) Episodes',
season_list[index]).group(1)
episode_list = [[], []]
plugin.log.info('SEASON' + season)
for item in shownep[1]:
if item.season_number == season:
disptitle = '%sx%s (%s) %s' % (item.season_number,
item.episode_number, item.absolute_number,
item.name)
episode_list[0].append(disptitle)
episode_list[1].append(item)
index = dialog.select('Choose Episode', episode_list[0])
episode = episode_list[1][index]
showcache['last']['index'] = showcache['last']['data'][1].index(episode.__dict__)
# keyboard = xbmc.Keyboard('','Enter a Season',False)
# keyboard.doModal()
# if keyboard.isConfirmed(): season = keyboard.getText()
# keyboard = xbmc.Keyboard('','Enter an Episode',False)
# keyboard.doModal()
# if keyboard.isConfirmed(): episode = keyboard.getText()
# episode = shownep[1][episode]api.get_episode_by_season_ep(Show['tvdb_id'],season,episode)
try:
infoLabels = transform_ep_object(episode)
except Exception, e:
log(e)
infoLabels['TVShowTitle'] = Show['title']
imdb = Show['imdb_id']
tvdb = Show['tvdb_id']
img = infoLabels['cover_url']
infoLabels['backdrop_url'] = shownep[0].fanart_url
plugin.log.info('INFO Labels \t %s' % infoLabels)
elif stype == 'movie':
title = plugin.keyboard(heading='Enter a Title')
results = \
requests.get('https://api-v2launch.trakt.tv/search?query=%s&type=movie'
% title, headers=headers).json()
plugin.log.info('Results %s' % results)
for option in results:
disptitle = '%s (%s)' % (option['movie']['title'],
option['movie']['year'])
option_list.append(disptitle)
dialog = xbmcgui.Dialog()
index = dialog.select('Choose', option_list)
Movie = results[index]['movie']
plugin.log.info('Movie: %s' % Movie)
infoLabels = {'cover_url': Movie['images']['poster']['medium'], 'plot': Movie['overview'],
'backdrop_url': Movie['images']['fanart']['full'], 'year': Movie['year'], 'title': Movie['title']}
# if stype == 'tvshow':
# api_url = 'https://api-v2launch.trakt.tv/search?id_type=trakt-show&id=%s' % (Show['trakt_id'])
# request = requests.get(api_url, headers=headers)
# plugin.log.info('TRAKT JSON %s' % request.json())
# trakt_meta = request.json()[0]['show']
# plugin.log.info("Trakt_meta %s" % trakt_meta)
# infoLabels['TVShowTitle'] = trakt_meta['title']
# infoLabels['backdrop_url'] = trakt_meta['images']['fanart']['full']
plugin.log.info('infoLabels: %s' % infoLabels)
latest = infoLabels
latest['latest'] = 'latest'
table.delete(latest='latest')
table.upsert(latest, ['latest'])
return infoLabels
def transform_ep_object(episode):
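    """Map a TVDB episode object onto a Kodi infoLabels dict."""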
meta = {'episode_id': episode.id, 'plot': api.check(episode.overview)}
if episode.guest_stars:
guest_stars = episode.guest_stars
if guest_stars.startswith('|'):
guest_stars = guest_stars[1:-1]
guest_stars = guest_stars.replace('|', ', ')
meta['plot'] = meta['plot'] + 'Guest Starring: ' \
+ guest_stars
meta['rating'] = float(api.check(episode.rating, 0))
meta['premiered'] = api.check(episode.first_aired)
meta['title'] = api.check(episode.name)
meta['poster'] = api.check(episode.image)
meta['director'] = api.check(episode.director)
meta['writer'] = api.check(episode.writer)
meta['season'] = int(api.check(episode.season_number, 0))
meta['episode'] = int(api.check(episode.episode_number, 0))
meta['cover_url'] = api.check(episode.image)
return meta
@plugin.route('/delete_file/<name>/<all_files>')
def delete_file(name, all_files=False):
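    """Delete downloaded/temporary files matching <name>, or everything in the download
    and temp directories when all_files is True."""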
plugin.log.info('NAME ' + name)
tmp_files = glob.glob(tmp_path)
dl_files = glob.glob(dl_path)
import shutil
if str(all_files) == 'True':
try:
for file in dl_files:
log('Deleting %s ...' % file)
try:
shutil.rmtree(file)
except Exception, e:
os.remove(file)
log('DELETE ALL FILES ERROR: %s' % e)
continue
except Exception, e:
log('DELETE ALL FILES ERROR: %s' % e)
pass
try:
for file in tmp_files:
shutil.rmtree(file)
except:
pass
tmpName = re.sub(r'[\W_]+', '', name)
tmpName = tmpName.lower()
plugin.log.info('Temp Name is' + tmpName)
try:
for filename in tmp_files:
plugin.log.info('Filepath is ' + filename)
if tmpName in filename.lower():
os.remove(filename)
except:
pass
try:
for filename in dl_files:
if tmpName in re.sub(r'[\W_]+', '', filename.lower()):
os.remove(filename)
except:
pass
@plugin.route('/webpages/search_ix/<query>/<page>')
def search_ix(
query='**just_search**',
page='0',
id=-1,
labs=None,
):
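    """Search the ixIRC API for <query> and build playable items with metadata
    context-menu actions, paging through the results; <labs> overrides item <id>."""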
if labs is None:
labs = {}
page = int(page)
items = []
ix_url = 'http://ixirc.com/api/'
if query == '**just_search**':
query = plugin.keyboard()
results = requests.get(ix_url + '?q=%s&pn=%s' % (query,
page)).json()
total_pages = results['pc']
plugin.log.info('RESULTS %s', results)
results = results['results']
idx = 0
tsys = copy.copy(hf.traditional)
tsys = [(tsys[-3][0], '')]
for item in results:
try:
size = item['szf']
rsize = [float(size[:-3]) * x[0] for x in hf.alternative if x[1] == size[-3:]][0]
log('Size %s' % rsize)
items.append({
'label': item['name'] + ' || Size : %s' % item['szf'],
'info': {'title': item['name'],
'plot': 'Size: %s Network: %s Channel: %s Bot: %s' % (
item['szf'], item['nname'], item['cname'], item['uname'])},
'path': plugin.url_for(
'stream',
download=hf.size(rsize, tsys).replace(' MB', ''),
server=item['naddr'],
channel=item['cname'],
bot=item['uname'],
packetId=item['n'],
filename=item['name'],
),
'is_playable': True,
'context_menu': [('Assign Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=query,
page=page,
from_XG='IX',
name=False,
bot=False,
cache=False,
))), ('Just Download',
actions.background(plugin.url_for(
'stream',
download=True,
server=item['naddr'],
channel=item['cname'],
bot=item['uname'],
packetId=item['n'],
filename=item['name'],
))), ('Delete File',
actions.background(plugin.url_for('delete_file'
, name=item['name'], all_files=False))),
('Delete All Files',
actions.background(plugin.url_for('delete_file'
, name=item['name'], all_files=True)))],
})
except:
continue
try:
if str(idx) == str(id):
plugin.log.info('SUCCESS')
items[idx]['info'] = labs
items[idx]['thumbnail'] = labs['cover_url']
items[idx]['properties'] = \
{'Fanart_Image': labs['backdrop_url']}
except:
pass
plugin.log.info('IDX INFO %s' % items[idx]['info'])
idx += 1
if page < total_pages:
items.append({'label': 'Next Page >>',
'path': plugin.url_for('search_ix', query=query,
page=str(page + 1))})
return items
@plugin.route('/just_download/<url>/<data>')
def just_download(url, data=None):
if data is None:
data = {}
if str(data) != 'False':
headers['Content-Type'] = 'application/json'
r = requests.put(url, headers=headers, data=json.dumps(data))
else:
r = requests.post(url, headers=headers, data=data)
plugin.log.info('''URL %s
DATA %s
STATUS CODE %s
TEXT %s'''
% (r.url, data, r.status_code, r.text))
@plugin.route('/assign_metadata/<id>/<search_term>/<page>/<name>/<bot>/<from_XG>/<cache>'
)
def assign_metadata(
id,
search_term,
page,
name=False,
bot=False,
from_XG=False,
cache=False,
):
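    """Resolve metadata for one listed item (interactive lookup, cached latest,
    next/previous episode from the show cache, or filename parsing) and re-render the
    originating listing with that metadata applied."""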
plugin.log.info('NAME %s \n BOT %s CACHE: %s' % (name, bot,
str(cache)))
if cache != 'nope':
meta_cache = plugin.get_storage('meta_cache')
if str(cache) == 'False':
labs = get_meta()
meta_cache = labs
# plugin.log.info('META_CACHE %s' % meta_cache)
elif cache == 'reapply':
labs = table.find_one(latest='latest')
log('META_CACHE: %s' % pp(labs))
elif cache == 'next' or cache == 'prev':
showcache = plugin.get_storage('showcache')
index = showcache['last']['index']
log('CURRENT EP INDEX %s' % index)
index = index + 1 if cache == 'next' else index - 1
episode = SEP(**showcache['last']['data'][1][index])
showcache['last']['index'] = index
try:
labs = transform_ep_object(episode)
except Exception, e:
log(e)
labs['TVShowTitle'] = showcache['last']['title']
labs['backdrop_url'] = showcache['last']['data'][0]['fanart_url']
elif cache != name:
labs = file_meta(cache)
if str(from_XG) == 'HI':
return hi10eps(show=search_term, url=name, labs=labs, id=id)
elif str(from_XG) == 'True':
plugin.log.info('GOING THROUGH XG')
return search(search_term=search_term, page=page, id=id,
labs=labs)
elif str(from_XG) == 'IX':
plugin.log.info('GOING THROUGH IX')
return search_ix(query=search_term, page=page, id=id, labs=labs)
elif str(name) != 'False':
plugin.log.info('GOING THROUGH LIST_PACKLIST')
return list_packlist(
name=name,
search_term=search_term,
bot=bot,
page=page,
labs=labs,
id=id,
cache='nope'
)
# @plugin.route('/enter_custom/')
# def enter_custom():
# server = plugin.keyboard(heading='Enter server (Ex: irc.server.net)')
# channel = plugin.keyboard(heading = 'Enter channel (Ex: #channel)')
# bot = plugin.keyboard(heading = 'Enter bot name')
# packetId = plugin.keyboard(heading = 'Enter Packet Number')
# filename = plugin.keyboard(heading = 'Enter file name (Ex: Movie.mkv)')
# return stream(server=server,channel=channel,bot=bot,packetId=packetId,filename=filename)
@plugin.route('/haruhichan/<key>/<doMeta>/<filename>', name='haru')
@plugin.route('/haruhichan/<key>/')
def haruhichan(key='None', filename='', doMeta='F'):
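    """Scrape intel.haruhichan.com for packs matching the search term and list them as
    items playable from the intel channel on irc.rizon.net; when doMeta is 'T', attach
    user-selected metadata to the item whose name matches <filename>."""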
url = 'http://intel.haruhichan.com/?s='
server = 'irc.rizon.net'
channel = 'intel'
items = []
if key == 'None':
key = plugin.keyboard(heading='Enter Search Term')
if doMeta == 'T':
labs = get_meta()
soup = BS(scraper.get(url + key).text)
results = soup.findAll(attrs={'class': re.compile('noselect')})
for pack in results:
p = pack.findAll('td')
bot = p[0].text
id = p[1].text
name = p[4].string
size = p[3].text
item = {'label': '%s || %s || %s' % (name, size, bot),
'path': plugin.url_for(
'stream',
download=False,
server=server,
channel=channel,
bot=bot,
packetId=id,
filename=name,
), 'context_menu': [('Assign Metadata',
actions.update_view(plugin.url_for('haru'
, doMeta='T', filename=name,
key=key))), ('Just Download',
actions.background(plugin.url_for(
'stream',
download=True,
server=server,
channel=channel,
bot=bot,
packetId=id,
filename=name,
))), ('Delete File',
actions.background(
plugin.url_for(
'delete_file',
name=name,
all_files=False))),
('Delete All Files',
actions.background(plugin.url_for('delete_file',
name=name, all_files=True)))]}
if name == filename:
item['info'] = labs
item['thumbnail'] = labs['cover_url']
item['properties'] = {'Fanart_Image': labs['backdrop_url']}
items.append(item)
return items
@plugin.route('/webpages/xweasel/<query>/<page>')
def xweasel(query='lala', page='1'):
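    """Search xweasel.org, parse each result row into filename/network/channel/bot/pack,
    attach movie metadata guessed from the filename and finish the playable listing
    (items cached per query and page)."""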
# log('Table %s'% pp(list(table.all())))
# return
global network
lastsearch = plugin.get_storage('lastsearch')
log('PAGE %s QUERY %s' % (page, query))
page = int(page)
if query == 'lala':
query = plugin.keyboard(heading='Search', default=lastsearch['last'])
lastsearch['last'] = query
xrequest = plugin.get_storage('%s_%s' % (query, page), ttl=60)
if len(xrequest.keys()) == 0:
r1 = requests.get('http://www.xweasel.org/Search.php?Description=%s&Page=%s' % (query, page))
log("Request %s" % r1.status_code)
soup = BS(r1.text)
pages = len(soup.findAll('center')[-1].findChildren()) - 2
xrequest['pages'] = pages
results = soup.findAll('tr', attrs={'class': re.compile('row')})
log('RESULTS %s' % len(results))
if len(results) == 0: return
mtitle = (lambda x: re.findall(re.compile(r'(.*?[ .]\d{4})[ .a-zA-Z]*'),
re.sub(r'(\w*)([\\()\\](\b\w*)\S)', '', x))[0])
items = []
idx = 0
for item in results:
try:
i = list(eval(item['onmouseover'].replace('ShowToolTip', '')))
i = [x for x in i if x != '' and x != ' (Ready)' and x != ' (Full)' and x != ' (0/50)']
i = i[:-1]
filename, network, channel, bot, pack = i
except Exception, e:
log('ERROR: %s %s' % (e, list(eval(item['onmouseover'].replace('ShowToolTip', '')))))
try:
title = mtitle(filename)
title = title.replace('.', ' ')
except:
title = filename
network = 'irc.{}.net'.format(network)
log('NETWORK %s' % network)
log('Movie Item Title: %s' % title)
size = item.findAll('td')[1].text.replace(r' ', ' ')
speed = item.findAll('td')[4].text.replace(r' ', ' ')
log('Item Stats: Speed %s, Size %s' % (speed, size))
realsize = [float(size[:-3]) * x[0] for x in hf.alternative if x[1] == size[-3:]][0]
tsys = copy.copy(hf.traditional)
tsys = [(tsys[-3][0], '')]
mlabs = {}
if title != filename:
mlabs['Size'] = realsize
mlabs['Album'] = speed
mlabs['Artist'] = [bot]
mlabs['Genre'] = str(channel)
# mlabs['plot'] = '\n FILENAME {} \n CHANNEL {} \n BOT {} \n SPEED {} \n SIZE {}'.format(filename,channel,bot,speed,size)
# mlabs['Plot'] = str(filename + ' || Size: ' + size +' || Bot : ' + bot + ' || Speed: '+speed)
c = copy.copy(movie_meta(title))
c['plot'] += '\n {} \n CHANNEL {} \n BOT {} \n SPEED {} \n SIZE {}'.format(filename, channel, bot,
speed, size)
mlabs.update(c)
item = {
'label': str(filename + ' || Size: ' + size + ' || Bot : ' + bot + ' || Speed: ' + speed),
'path': plugin.url_for(
'stream',
download=hf.size(realsize, tsys).replace(' MB', ''),
server=network,
channel=channel,
bot=bot,
packetId=pack,
filename=filename,
),
'is_playable': True,
'context_menu': [('Assign Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=query,
page=page,
name=filename,
bot=bot,
from_XG=False,
cache=False,
))), ('Reapply Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=query,
page=page,
name=filename,
bot=bot,
from_XG=False,
cache=True,
))), ('Just Download',
actions.background(plugin.url_for(
'stream',
download=True,
server=network,
channel=channel,
bot=bot,
packetId=pack,
filename=filename,
))), ('Delete File',
actions.background(plugin.url_for('delete_file'
, name=filename,
all_files=False))),
('Delete All Files',
actions.background(plugin.url_for('delete_file'
, name=filename, all_files=True)))],
'info': mlabs if mlabs else '',
'thumbnail': mlabs['thumb'] if mlabs else '',
                'properties': {'Fanart_Image': mlabs['backdrop_url']} if mlabs else ''
}
items.append(item)
try:
if str(idx) == str(id):
items[idx]['info'] = labs
items[idx]['thumbnail'] = labs['cover_url']
items[idx]['properties'] = \
{'Fanart_Image': labs['backdrop_url']}
except:
pass
log('ITEMS %s' % len(items))
idx += 1
xrequest['data'] = items
if page < xrequest['pages']:
xrequest['data'].append({'label': 'Next Page >>',
'path': plugin.url_for('xweasel', query=query,
page=str(page + 1))})
log('ITEMS %s' % len(xrequest['data']))
plugin.finish(items=xrequest['data'], sort_methods=['Size', 'Album', 'Genre', 'Artist'])
# @plugin.cached()
def movie_meta(title):
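    """Return movie infoLabels for <title>: use the local table if already cached,
    otherwise query Trakt and fanart.tv, upsert the result keyed by 'stitle' and
    return it."""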
# cacheMovie = plugin.get_storage(title)
# if len(cacheMovie.keys()): return cacheMovie['labs']
sqtitle = table.find_one(stitle=title)
if sqtitle:
log('FUCK YEAH')
return sqtitle
headers = {'Content-Type': 'application/json',
'trakt-api-version': '2',
'trakt-api-key': '05bcd2c0baf2685b8c196162d099e539033c21f7aa9fe1f87b234c2d62c2c1e4'}
results = \
requests.get('https://api-v2launch.trakt.tv/search?query=%s&type=movie'
% title[:-5], headers=headers).json()
yr = title[-4:]
plugin.log.info('Results %s' % pp(results))
if len(results) == 0: return
Movie = results[0]['movie']
img_url = 'http://webservice.fanart.tv/v3/movies/%s?api_key=%s' % (Movie['ids']['imdb'], FA_api)
plugin.log.info('Movie: %s' % pp(Movie))
infoLabels = {}
img_dat = requests.get(img_url).json()
log('IMAGE DATA: %s' % pp(img_dat))
try:
infoLabels['poster'] = img_dat['movieposter'][0]['url']
except:
infoLabels['poster'] = ''
try:
infoLabels['cover_url'] = img_dat['movieposter'][0]['url']
except:
infoLabels['cover_url'] = ''
try:
infoLabels['plot'] = Movie['overview']
except:
infoLabels['plot'] = ''
try:
infoLabels['backdrop_url'] = img_dat['moviebackground'][0]['url']
except:
infoLabels['backdrop_url'] = ''
try:
infoLabels['year'] = Movie['year']
except:
infoLabels['year'] = ''
try:
infoLabels['title'] = Movie['title']
except:
infoLabels['title'] = ''
try:
infoLabels['thumb'] = img_dat['moviethumb'][0]['url']
except:
infoLabels['thumb'] = ''
try:
infoLabels['banner'] = img_dat['moviebanner'][0]['url']
except:
infoLabels['banner'] = ''
try:
infoLabels['fanart'] = img_dat['moviebackground'][0]['url']
except:
infoLabels['fanart'] = ''
try:
infoLabels['clearart'] = img_dat['hdmovieclearart'][0]['url']
except:
infoLabels['clearart'] = ''
try:
infoLabels['clearlogo'] = img_dat['hdmovieclearlogo'][0]['url']
except:
infoLabels['clearlogo'] = ''
# cacheMovie['labs'] = infoLabels
infoLabels['stitle'] = title
table.upsert(infoLabels, ['stitle'])
return infoLabels
@plugin.route('/hi10/', name='cloud10', options={'term': ''})
def hi10(term):
last = plugin.get_storage('lastsearch')
if not term:
term = plugin.keyboard(heading='Search', default=last['last'])
items = []
url = 'http://hi10anime.com/?s=%s' % term
u = requests.get(url)
log(u.status_code)
soup = BS(u.text)
results = soup.findAll(attrs={'class': 'entry-title'})
for r in results:
show = r.parent.find('a').text
link = r.a['href']
title = r.a.text
item = {
'label': title,
'path': plugin.url_for('hi10eps', url=link, show=show),
'info': {'TVShowTitle': show}
}
items.append(item)
return items
# @plugin.cached()
def hi_login(url):
log_url = 'https://hi10anime.com/wp-login.php'
hiuser = plugin.get_setting('hiusr', str)
hipwd = plugin.get_setting('hipwd', str)
data = {
'log': hiuser,
'pwd': hipwd
}
sess = scraper
s = sess.post(log_url, data=data)
log("Status: %s" % s.status_code)
return sess.get(url).text
@plugin.route('/hi10eps/<show>/<url>')
def hi10eps(show, url, id=None, labs=None):
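    """Log in to hi10anime.com, scrape the episode links on a show page and list them
    with per-episode metadata (AniDB/TVDB lookup or filename parsing)."""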
soup = BS(hi_login(url))
bc = soup.findAll(attrs={'class': 'showLinksTable'})#soup.findAll(attrs={'class': 'postMakerTABLE'})
typ = 'column'
try:
eptest = bc[2].findAll(attrs={'class': 'postMakerTR'})[2:]
except Exception, ed:
# log(e.__dict__)
log('ERROR %s LINE: %s' % (ed, sys.exc_info()[2].tb_lineno))
eptest = soup.findAll('a', href=re.compile('mkv'))
typ = 'single'
try:
aid = soup.find('a', attrs={'title': 'AniDB'})
aid = aid['href'].split('aid=')[1]
except Exception, ed:
# log(e.__dict__)
log('ERROR %s LINE: %s' % (ed, sys.exc_info()[2].tb_lineno))
aid = ''
items = []
img = soup.find('p').img['src']
idx = 0
prev_link = ''
for e in eptest:
if typ == 'column':
link = e.find('a')['href']
link = 'https://' + link[link.find('hi10'):]
c = [x for x in e.contents if x != '\n']
episode = c[1].text.split('v')[0]
else:
link = e['href']
link = 'https://' + link[link.find('hi10'):]
episode = e.previous.previous
if link == prev_link:
continue
prev_link = link
try:
episode = int(episode)
info = gethimeta(episode, show, aid)
label = info['title']
except Exception, e:
log('ERROR %s LINE: %s' % (e, sys.exc_info()[2].tb_lineno))
try:
fname = link.rsplit('/')[-1][:-4]
log(fname)
info = file_meta(fname)
label = info['title']
except Exception, f:
log('ERROR %s LINE: %s' % (f, sys.exc_info()[2].tb_lineno))
label = link.rsplit('/')[-1][:-4]
info = {'TVShowTitle': show, 'cover_url': img, 'backdrop_url': img}
try:
if str(idx) == str(id) and labs:
info = labs
except Exception, e:
log('ERROR %s LINE: %s' % (e, sys.exc_info()[2].tb_lineno))
item = {
'label': label,
'path': link,
'info': info,
'thumbnail': info['cover_url'],
'context_menu': [('Assign Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=show,
page=False,
name=url,
bot=False,
from_XG='HI',
cache=False,
))), ('Reapply Metadata',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=show,
page=False,
name=url,
bot=False,
from_XG='HI',
cache='reapply',
))), ('Next Episode',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=show,
page=False,
name=url,
bot=False,
from_XG='HI',
cache='next',
))), ('Previous Episode',
actions.update_view(plugin.url_for(
'assign_metadata',
id=idx,
search_term=show,
page=False,
name=url,
bot=False,
from_XG='HI',
cache='prev',
)))],
'properties': {'Fanart_Image': info['backdrop_url']},
'info_type': 'video',
'is_playable': True}
idx += 1
log(pp(item))
items.append(item)
for i in items:
log(i['path'])
return items
def gethimeta(episode, show, aid=''):
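    """Match an episode's AniDB air date against TVDB episodes aired within two days
    and return the transformed infoLabels (AniDB and TVDB results cached per show)."""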
shw = plugin.get_storage(show)
if 'anidb' not in shw.keys() and aid:
log('REQUESTING ANIDB DATA')
r = requests.get(
'http://api.anidb.net:9001/httpapi?request=anime&client=anidbtvdbmeta&clientver=1&protover=1&aid=%s' % aid)
log("Status %s\n" % r.status_code)
anitext = r.text
shw['anidb'] = anitext
else:
anitext = shw['anidb']
soup = BS(anitext)
year = soup.find('startdate').text[:4]
v = [x for x in soup.findAll('epno') if x.text == str(episode)][0]
if 'shownep' not in shw.keys():
title = ' '.join([show, year])
log(title)
id = api.get_matching_shows(show)
log(id)
shw['id'] = id[0][0]
e = api.get_show_and_episodes(shw['id'])
shw['shownep'] = [e[0].__dict__, [i.__dict__ for i in e[1]]]
else:
e = [SEP(**shw['shownep'][0]), [SEP(**i) for i in shw['shownep'][1]]]
airdate = api.convert_date(v.parent.airdate.text)
ep = [i for i in e[1] if
2 >= (lambda x: x.days)(
(airdate - api.convert_date(i.first_aired if i.first_aired else '1963-01-01'))) >= -2][0]
try:
info = transform_ep_object(ep)
except Exception, e:
log(e)
info['TVShowTitle'] = e[0].name
info['backdrop_url'] = e[0].fanart_url
return info
if __name__ == '__main__':
plugin.run()
| gpl-2.0 | 2,370,805,098,750,441,500 | 39.784108 | 141 | 0.451372 | false | 4.065003 | false | false | false |
alkurbatov/squirrel | src/convert.py | 1 | 1073 | # This file is a part of Squirrel project
#
# Copyright (C) 2014, Alexander Kurbatov <[email protected]>
#
# Squirrel is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Squirrel is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
def to_seconds(time):
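    """Convert a duration string such as '2d3h15m' into seconds; missing parts simply
    contribute nothing."""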
s = 0
d = re.search(r"(?i)([0-9]+)d", time)
if d:
s += int(d.group(1)) * 24 * 60 * 60
h = re.search(r"(?i)([0-9]+)h", time)
if h:
s += int(h.group(1)) * 60 * 60
m = re.search(r"(?i)([0-9]+)m", time)
if m:
s += int(m.group(1)) * 60
return s
| gpl-3.0 | 9,016,646,689,706,981,000 | 28.805556 | 70 | 0.657036 | false | 3.241692 | false | false | false |
dyermd/legos | scripts/QC/merger.py | 1 | 14082 | #! /usr/bin/env python
# Goal: Merge and run coverage analysis on the two Samples generated.
# Output: A merged bam file, and coverage analysis on the merged bam file.
from optparse import OptionParser
import os
import os.path
import sys
import re
import datetime
import json
from QC_Run import QC_Run
from tools import *
class Merger:
# @param bams_to_merge a list of the bam files to merge together
# @param merged_dir the directory in which to place the merged bam file
# @param sample_name the name of the sample. Used for the SM tag
# @param cleanup Flag to delete the temporary files or not. Default: false
def __init__(self, sample_json=None):
if sample_json:
self.sample_json = json.load(open(sample_json))
self.merge_dir = ''
self.bams_to_merge = []
self.runs_to_merge = []
self.QC_Run = QC_Run(self.sample_json)
# merge the following runs
def merge(self):
# this could be just a temporary fix
if os.path.isfile(self.path_to_merged_bam):
print "%s already exists. Not making it again."%self.path_to_merged_bam
else:
print "Sample %s is merging the following runs: %s"%(self.sample_name, self.bams_to_merge)
merge_command = "java -jar /opt/picard/picard-tools-current/MergeSamFiles.jar "
# Add each run's bam file to mergeJob.sh
for bam in self.bams_to_merge:
if not os.path.isfile(bam) or bam[-4:] != ".bam":
print "ERROR: the bam file '%s' does not exist!"%bam
sys.exit(4)
merge_command += "INPUT=%s "%bam
# make sure the merged_dir exists, or make it.
runCommandLine("mkdir -p %s"%self.merged_dir)
#if not os.path.isdir(merged_dir):
#print "ERROR: the output dir '%s' does not exist!"%bam
#sys.exit(4)
# Now set the output file, and then run the merge command
merge_command += " OUTPUT=%s/merged_badHeader.bam "%self.merged_dir
if runCommandLine(merge_command) != 0:
print "ERROR: %s something went wrong with merging!"%self.sample_name
sys.exit(1)
#echo "fixing header for %s/merged_badHeader.bam"
correct_header_command = "samtools view -H %s/merged_badHeader.bam > %s/merged.header.sam "%(self.merged_dir, self.merged_dir)
if runCommandLine(correct_header_command) != 0:
print "ERROR: samtools view -H failed!"
sys.exit(1)
# A better way would be to check to see if the SM tags already match. Then we would be able to use ionstats and such.
            SM_check_command = "grep -Eo 'SM:[a-zA-Z0-9_&/-]*'"
# NEED TO TEST THIS COMMAND. Is there anything that comes before the next : that is important?
# Change the SM: tag so that it matches for every run merged. (There should be one SM tag for each run merged)
# This was the old command. We will keep using this, and then if there are problems, we can manually correct them.
sed_command = 'sed "s/SM:[a-zA-Z0-9_&/-]*/SM:%s/" %s/merged.header.sam > %s/merged.headerCorrected.sam'%(self.sample_name, self.merged_dir, self.merged_dir)
# this updated command will change the SM tag to match everything up to the next : after the SM tag.
# this command deletes the KS: tag!! not good! I don't know why but some headers are tab delimited, and some are not it seems.
#sed_command = 'sed -E "s/SM:[^:]*:/SM:%s:/" %s/merged.header.sam > %s/merged.headerCorrected.sam'%(self.sample_name, self.merged_dir, self.merged_dir)
if runCommandLine(sed_command) != 0:
print "ERROR: sed command failed!"
sys.exit(1)
# write the new header to merged.bam
reheader_command = "samtools reheader %s/merged.headerCorrected.sam %s/merged_badHeader.bam > %s "%(self.merged_dir, self.merged_dir, self.path_to_merged_bam)
if runCommandLine(reheader_command) != 0:
print "ERROR: sed command failed!"
sys.exit(1)
# set some extra variables for the JSON file.
self.merged_json = "%s/merged.json"%self.merged_dir
# if there is already an index file from a previous merge try, delete it.
if os.path.isfile(self.path_to_merged_bam + ".bai"):
os.remove(self.path_to_merged_bam + ".bai")
# IF specified, cleanup the temporary files
#if self.cleanup:
            # Need to clean up here in order for TVC to work. There can only be one bam file in the merged dir.
os.remove("%s/merged_badHeader.bam"%self.merged_dir)
os.remove("%s/merged.headerCorrected.sam"%self.merged_dir)
os.remove("%s/merged.header.sam"%self.merged_dir)
print "%s finished merging "%self.merged_dir
# Update the final merged run status
def update_merged_run_status(self, run, merged_perc_avail_bases=0):
pass_fail_merged_status = 'pass'
run_json = json.load(open(run))
if run_json['run_type'] == 'germline':
            merged_perc_avail_bases = run_json['run_data']['amp_cov']
print merged_perc_avail_bases, self.sample_json['analysis']['settings']['cutoffs']['merged_amp_cov']
# check to see if >90% of the bases are shared between the tumor normal comparison
if 'merged_amp_cov' in self.sample_json['analysis']['settings']['cutoffs'] and merged_perc_avail_bases != '':
if merged_perc_avail_bases < self.sample_json['analysis']['settings']['cutoffs']['merged_amp_cov']:
pass_fail_merged_status = 'REQUEUE'
# write the final statuses here
run_json['pass_fail_merged_status'] = pass_fail_merged_status
run_json['merged_perc_avail_bases'] = merged_perc_avail_bases
write_json(run, run_json)
# @param runs the runs of a sample
# @param run_name either '', 'Normal/' or 'Tumor/'
# @param pref the prefix of this type of merge. either 'normal_' 'tumor_' or ''
# @returns a list of the passing bam files to merge, and the path to the merged dir.
def check_merge(self, runs, run_name='', pref=''):
# vars to return
merge = False
self.bams_to_merge = []
self.runs_to_merge = []
# Use this count so that we won't have to write over past merges if there are multiple merges.
if 'merged_%scount'%pref not in self.sample_json:
self.sample_json['merged_%scount'%pref] = 0
# first check to see if all of the runs pass.
# Get all of the passing bam files for this sample.
pending_runs, passing_runs = self.QC_Run.get_runs_status(runs)
if len(pending_runs) != 0:
print "Not merging. After QC_runs, runs should either be 'pass' or 'fail', not 'pending'. Pending runs: ", pending_runs
elif len(passing_runs) < 1:
# if none of the runs are passing, then don't do anything.
pass
elif self.sample_json['sample_status'] != "pending_merge" and self.sample_json['sample_status'] != "merged":
# If any runs of the sample are not ready to be merged either because of 3x3 table error rate questions or other reasons, don't merge this sample.
print "%s the 'sample_status' is '%s'. Needs to be 'pending_merge' to merge the runs."%(self.sample_json['sample_name'], self.sample_json['sample_status'])
elif self.sample_json['sample_status'] == 'pending_merge':
# Merge these runs.
# First get the passing bams from the passing runs.
for run in passing_runs:
run_json = json.load(open(run))
self.bams_to_merge.append("%s/%s"%(run_json['run_folder'], run_json['analysis']['files'][0]))
self.runs_to_merge.append(run_json['run_name'])
# sort the run names
self.runs_to_merge.sort()
# If this sample has already been merged: If the runs to generate the merged bam don't match the current list:
# then delete the last created bam file and merge these runs
# else don't remerge these files
if len(self.bams_to_merge) == 1:
# There is only one run, so don't merge it. Set the "final_%sjson"%pref flag to show what the final run is
self.sample_json["final_%sjson"%pref] = run
# use the 'merged_json' flag rather than the 'final_json' flag because 'final_json' can be set by a single non-merged run.
elif 'merged_%sjson'%pref in self.sample_json and os.path.isfile(self.sample_json['merged_%sjson'%pref]):
merged_json_data = json.load(open(self.sample_json['merged_%sjson'%pref]))
# If the runs used to generate the current merged.bam file dont match the current bams_to_merge, then merge them. Otherwise don't
if merged_json_data['json_type'] == 'merged' and set(self.bams_to_merge) != set(merged_json_data['bams_used_to_merge']):
# in order to manage space, delete the last merged folder that was created.
if self.sample_json['analysis']['settings']['cleanup'] == True:
# IDEA delete the entire folder? Or just the bam file?
merged_bam = "%s/%s"%(merged_json_data['run_folder'], merged_json_data['analysis']['files'][0])
print " Deleting the old merged bam file: %s"%merged_bam
os.remove(merged_bam)
# Add one to the merged_count
self.sample_json['merged_%scount'%pref] += 1
# set new path to the merged_json
self.merged_dir = "%s/%sMerged_%d"%(self.sample_json['sample_folder'], run_name, self.sample_json['merged_%scount'%pref])
merge = True
else:
# Don't merge these runs because they've already been merged.
print "%s the runs: '%s' have already been merged"%(self.sample_json['sample_name'], self.bams_to_merge)
else:
# Merge these runs
self.merged_dir = "%s/%sMerged"%(self.sample_json['sample_folder'], run_name)
# Add one to the merged_count
self.sample_json['merged_%scount'%pref] += 1
merge = True
return merge
# merge the runs of a sample
# @param runs the bam files to merge
# @param merged_dir the ouptut_dir in which to place the merged bam file
# @param pref the prefix (either '', 'normal_', or 'tumor')
# @param run_type either germline, normal, or tumor.
# @param run_name either Merged, Normal_Merged or Tumor_Merged. Used for the titles of the 3x3 tables.
def merge_runs(self, run_type, run_name='', pref=''):
# if the file already exists, then merging must have finished, and don't merge again.
self.merged_json = "%s/merged.json"%self.merged_dir
if os.path.isfile(self.merged_json):
print "%s already exists so not merging the bam files again"%self.merged_json
else:
self.sample_name = self.sample_json['sample_name']
# get today's date to format the mreged.bam file name
curr_date = datetime.date.today()
# the name follows this format: A_227_Tumor_Merged_02152015
run_name = "%s_%sMerged_%02d%02d%s"%(self.sample_name, run_name, curr_date.month, curr_date.day, curr_date.year)
merged_bam = "%s.bam"%(run_name)
self.path_to_merged_bam = "%s/%s"%(self.merged_dir, merged_bam)
self.merge()
# now set the json files
# create the merged_bam's json file here so that the merger.py script can run on its own if necessary.
merged_json = {
'analysis': {
'files': [merged_bam]
},
'bams_used_to_merge':self.bams_to_merge,
'sample_name': self.sample_name,
'merged_bam': self.path_to_merged_bam,
'json_file': self.merged_json,
"json_type": "merged",
"pass_fail_status": "pending",
"project": self.sample_json['project'],
"run_folder": self.merged_dir,
"run_name": run_name,
"run_num": self.sample_json['merged_%scount'%pref],
"run_type": run_type,
"runs_used_to_merge": ', '.join(self.runs_to_merge),
"sample": self.sample_json['sample_name'],
"sample_folder": self.sample_json['sample_folder'],
"sample_json": self.sample_json['json_file']
}
#write new json file
write_json(self.merged_json, merged_json)
# QC the merged run.
self.QC_Run.runTVC_COV(self.merged_json, pref)
self.QC_Run.getRunInfo(self.merged_json, pref)
# Update the merge pass/fail status based on the metrics gathered by QC_getRunInfo.sh
self.QC_Run.update_run_status(self.merged_json, 1)
# Also store the path to this merged bam file in the sample's json file. Not really necessary, but it seems like a good idea.
#if 'merged' not in self.sample_json['analysis']['files']:
# self.sample_json['analysis']['files']['merged'] = {}
#self.sample_json['analysis']['files']['merged']['%sbam'%pref] = merger.path_to_merged_bam
# store the path to this merged bam folder in the sample's json file.
#self.sample_json['merged_%sjson'%pref] = merged_dir
# If the merge_json passes the cutoffs, set it as the final_json
merge_json = json.load(open(self.merged_json))
# add the path to this merge even if it doesn't pass
self.sample_json["merged_%sjson"%pref] = self.merged_json
if merge_json['pass_fail_status'] == 'pass':
# Add a path to the final merged_json
self.sample_json["final_%sjson"%pref] = self.merged_json
# write the modified sample_json file
write_json(self.sample_json['json_file'], self.sample_json)
# If we need this script to run on its own, update it when it is needed
#if __name__ == '__main__':
#
# # set up the option parser
# parser = OptionParser()
#
# # add the options to parse
# parser.add_option('-j', '--json', dest='json', help='The samples json file. Will be used to get the passing bams.')
# parser.add_option('-o', '--merged_dir', dest='output', help='The output file. If no output file is specified, output will be written to the screen')
# parser.add_option('-s', '--sample_name', dest='sample', help='The name of the sample. Will be used to fix the SM tag of the merged BAM file')
# parser.add_option('-b', '--bams', dest='bams', action='append', help='Use a -b for for each bam to include in merging')
# parser.add_option('-c', '--cleanup', dest='cleanup', action='store_true', help='option to cleanup the temporary files used in merging and such.')
#
# (options, args) = parser.parse_args()
#
# if options.json and (not options.output and not options.sample and not options.bams):
# Merger(options.json)
# # I don't have time to implement these other options yet...
# #elif not options.json and (options.output and options.sample and options.bams):
## merger = Merger()
## merger.merge()
## Merger(options.bams, options.output, options.sample)
# else:
# print "USAGE_ERROR: -j or (-o, -s and -b) are required. If the json file is provided, do not provide the other options. If the other options are provided, do not provide a json file."
# print "only -j is implemented so far..."
# parser.print_help()
# sys.exit(1)
#
| gpl-2.0 | 7,565,517,798,160,435,000 | 48.066202 | 186 | 0.684704 | false | 3.105182 | false | false | false |
taoistly/dec | dec.py | 1 | 11798 | import shutil
import os
import time
import suffixtree
def scan_kmer(read):
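    """Emit (kmer, (read offset, position)) pairs for every K-mer of both mates; positions
    are 1-based and negative for the second mate. Reads containing a base outside ACGTN
    yield nothing and K-mers containing 'N' are dropped."""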
global K
offset, seq1, seq2 = read.split("\t")
ret = []
for base in seq1:
if base not in "ACGTN": return ret
for base in seq2:
if base not in "ACGTN": return ret
for idx in range(len(seq1) - K + 1):
ret += [(seq1[idx:idx + K], (offset, idx + 1))]
for idx in range(len(seq2) - K + 1):
ret += [(seq2[idx:idx + K], (offset, -idx - 1))]
return filter(lambda x: "N" not in x[0], ret)
def align_by_anchor(kv):
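    """For one k-mer and its anchored reads: load each read pair from the reference files,
    group the pairs into alignments by suffix-tree voting on both mates, and report bases
    that disagree with a column consensus together with the consensus counts."""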
global ref_file_path, K, MATCH_RATE, JUDGE_THRESHOLD
class anchored_read(object):
__slots__ = ('id', 'kmeridx', 'reversed', 'offset1', 'offset2', 'seq1', 'seq2')
def __init__(self, id, kmeridx, readFile1, readFile2):
self.id = int(id)
self.kmeridx = kmeridx
self.reversed = False
readFile1.seek(self.id)
readFile2.seek(self.id)
self.seq1 = readFile1.readline().strip()
self.seq2 = readFile2.readline().strip()
def reverse(self):
self.reversed = not self.reversed
self.kmeridx = -self.kmeridx
self.seq1, self.seq2 = self.seq2, self.seq1
def match_one_seq(self, seq1, seq2, offset, heuristic):
""" to compute match score for same end with given offset
seq1 and seq2 are one end of reads to compare,
offset==3 means seq1[0] match seq2[3] (r2[offset])
0123---
0123456
"""
if offset < 0:
seq1, seq2 = seq2, seq1
offset = -offset
overlap = min(len(seq1), len(seq2) - offset)
match_count = 0
for i in range(overlap):
match_count += (seq1[i] == seq2[i + offset])
if heuristic and i == 4 and match_count < 4: return
if float(match_count) / overlap < MATCH_RATE or match_count < K: return
return match_count - (overlap - match_count) # * (overlap - match_count)
def match(self, read):
""" "read" is the r1 on top, kmer locates at "a";
"self" is the r2(anchor) on bottom, kmer locates at "b";
"""
x = self.kmeridx - read.kmeridx
anchored_score = self.match_one_seq(read.seq1, self.seq1, x, False)
if not anchored_score: return
best_match = None
for y in range(-len(read.seq2) + 1, len(self.seq2)):
match_result = self.match_one_seq(read.seq2, self.seq2, y, True)
if not match_result: continue
score = anchored_score + match_result # - 0.05 * (x - y) * (x - y)
if not best_match or best_match[0] < score:
best_match = (score, x, y)
return best_match
class anchor_aligment(list):
def __init__(self, read):
super(anchor_aligment, self).__init__()
self.anchor(read, 0, 0)
def anchor(self, read, left_offset, right_offset):
self += [read]
read.offset1, read.offset2 = (left_offset, right_offset)
def match(self, read):
return self[0].match(read)
def report_doubt(self, kmer, col_id):
doubts = []
seq1_left, seq1_right = 0, len(self[0].seq1)
seq2_left, seq2_right = 0, len(self[0].seq2)
for read in self:
seq1_left = min(seq1_left, read.offset1)
seq1_right = max(seq1_right, read.offset1 + len(read.seq1))
seq2_left = min(seq2_left, read.offset2)
seq2_right = max(seq2_right, read.offset2 + len(read.seq2))
for col_idx in range(seq1_left, seq1_right):
basecount = {i: 0 for i in "ACGTN"}
for read in self:
if 0 <= col_idx - read.offset1 < len(read.seq1):
basecount[read.seq1[col_idx - read.offset1]] += 1
if basecount.values().count(0) == 4: continue
for read in self:
if 0 <= col_idx - read.offset1 < len(read.seq1) and basecount[read.seq1[col_idx - read.offset1]] <= JUDGE_THRESHOLD:
doubts += [(kmer + str(col_id),
(read.seq1[col_idx - read.offset1],
(read.id + col_idx - read.offset1) * [1, -1][read.reversed],
)
)]
for i in basecount:
if basecount[i] > JUDGE_THRESHOLD:
doubts += [(kmer + str(col_id), (i.lower(), basecount[i]))]
col_id += 1
for col_idx in range(seq2_left, seq2_right):
basecount = {i: 0 for i in "ACGTN"}
for read in self:
if 0 <= col_idx - read.offset2 < len(read.seq2):
basecount[read.seq2[col_idx - read.offset2]] += 1
if basecount.values().count(0) == 4: continue
for read in self:
if 0 <= col_idx - read.offset2 < len(read.seq2) and basecount[read.seq2[col_idx - read.offset2]] <= JUDGE_THRESHOLD:
doubts += [(kmer + str(col_id),
(read.seq2[col_idx - read.offset2],
(read.id + col_idx - read.offset2) * [-1, 1][read.reversed],
)
)]
for i in basecount:
if basecount[i] > JUDGE_THRESHOLD:
doubts += [(kmer + str(col_id), (i.lower(), basecount[i]))]
col_id += 1
return doubts, col_id
def print_pileup(self):
pileup = ""
seq1_left, seq1_right = 0, len(self[0].seq1)
seq2_left, seq2_right = 0, len(self[0].seq2)
for read in self:
seq1_left = min(seq1_left, read.offset1)
seq1_right = max(seq1_right, read.offset1 + len(read.seq1))
seq2_left = min(seq2_left, read.offset2)
seq2_right = max(seq2_right, read.offset2 + len(read.seq2))
for read in self:
pileup += str(read.id).center(10) + "." * (read.offset1 - seq1_left)
pileup += read.seq1 + "." * (seq1_right - read.offset1 - len(read.seq1))
pileup += "..." + "." * (read.offset2 - seq2_left) + read.seq2
pileup += "." * (seq2_right - read.offset2 - len(read.seq2)) + "\n"
return pileup
# load reads from disk
readFile1 = open(ref_file_path[0])
readFile2 = open(ref_file_path[1])
Ast = suffixtree.SuffixTree()
Ust = suffixtree.SuffixTree()
alignment_group = []
for value in kv[1]:
read = anchored_read(value[0], value[1], readFile1, readFile2)
best, bestidx = None, -1
if read.kmeridx < 0: read.reverse()
Avote = Ast.tid_vote(read.seq1)
Uvote = Ust.tid_vote(read.seq2)
vote = [(tid, Avote[tid], Uvote[tid]) for tid in range(len(alignment_group))]
if vote:
bestidx, Abest, Ubest = max(vote, key=lambda x: x[1][1] + x[2][1])
if read.match_one_seq(read.seq1, alignment_group[bestidx][0].seq1, Abest[0], False) and \
read.match_one_seq(read.seq2, alignment_group[bestidx][0].seq2, Ubest[0], False):
best = (bestidx, Abest[0], Ubest[0], read.reversed)
# for i, alignment in enumerate(alignment_group):
# match_result = alignment.match(read)
# if match_result and (not best or best[0] < match_result[0]):
# best, bestidx = match_result, i
if not best:
Ast.append(read.seq1, len(alignment_group))
Ust.append(read.seq2, len(alignment_group))
alignment_group += [anchor_aligment(read)]
else:
alignment_group[bestidx].anchor(read, best[1], best[2])
report, col = [], 0
log = "===%s===\n" % (kv[0])
for alignment in alignment_group:
doubts, col = alignment.report_doubt(kv[0], col)
report += doubts
log += alignment.print_pileup()
# logfile = open("/temp/refread/log" + kv[0], "w")
# logfile.write(log)
# logfile.close()
return report
def merge_in_partition(kv_iter):
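    # Merge doubt records inside one partition: group ids that share the same
    # read-position key are united via a union-find, lowercase keys accumulate
    # the per-group well-supported base counts, and surviving doubts are re-emitted.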
ufset = {}
posdict = {}
basecount = {}
def find(k):
if ufset[k] != k: ufset[k] = find(ufset[k])
return ufset[k]
for v, k in kv_iter:
if v not in ufset:
ufset[v] = v
basecount[v] = [0, 0, 0, 0]
else:
v = find(v)
if k[0] == "a": basecount[v][0] += k[1]
elif k[0] == "c": basecount[v][1] += k[1]
elif k[0] == "g": basecount[v][2] += k[1]
elif k[0] == "t": basecount[v][3] += k[1]
elif k not in posdict:
posdict[k] = v
if k[0] == "A": basecount[v][0] += 1
if k[0] == "C": basecount[v][1] += 1
if k[0] == "G": basecount[v][2] += 1
if k[0] == "T": basecount[v][3] += 1
else:
u = find(posdict[k])
ufset[v] = u
basecount[u] = [basecount[u][i] + basecount[v][i] for i in range(4)]
for k in posdict:
u = find(posdict[k])
if k[0] == "A" and basecount[u][0] > JUDGE_THRESHOLD: continue
if k[0] == "C" and basecount[u][1] > JUDGE_THRESHOLD: continue
if k[0] == "G" and basecount[u][2] > JUDGE_THRESHOLD: continue
if k[0] == "T" and basecount[u][3] > JUDGE_THRESHOLD: continue
yield (u, k)
for v in ufset:
if ufset[v] == v:
count = basecount[v]
if count[0] > JUDGE_THRESHOLD: yield (v, ('a', count[0]))
if count[1] > JUDGE_THRESHOLD: yield (v, ('c', count[1]))
if count[2] > JUDGE_THRESHOLD: yield (v, ('g', count[2]))
if count[3] > JUDGE_THRESHOLD: yield (v, ('t', count[3]))
def judge(key_values):
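    # Pick the best-supported base of a group as the consensus and emit a
    # "position<TAB>consensus_base" correction for every poorly supported base.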
values = list(key_values[1])
ACGT = {i: 0 for i in "ACGT"}
for value in values:
if value[0] != "N":
if value[0] in "ACGT": ACGT[value[0]] += 1
if value[0] in "acgt": ACGT[value[0].upper()] += value[1]
ref, refcnt = max(ACGT.items(), key=lambda x: x[1])
if refcnt <= JUDGE_THRESHOLD: return
for value in values:
        if value[0] in "acgt" or (value[0] != 'N' and ACGT[value[0]] > JUDGE_THRESHOLD): continue
yield str(value[1]) + "\t" + ref
def run(sc, inputfile="/home/x/xieluyu/reads/lineno_seq1_seq2.txt", outputdir="/home/x/xieluyu/output"):
global P1, P2, P3, P4
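    # Pipeline: P1 keys reads by k-mer, P2 aligns reads sharing a k-mer and
    # reports doubtful pileup columns, P3 repeatedly merges doubts across
    # partitions until one partition remains, P4 emits the final corrections.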
if os.path.exists(outputdir): shutil.rmtree(outputdir)
start = time.time()
reads_file = sc.textFile(inputfile, 64)
P1 = reads_file.flatMap(scan_kmer)
P2 = P1.groupByKey(1024).filter(lambda kv: len(kv[1]) > 1).flatMap(align_by_anchor)
P3 = P2.mapPartitions(merge_in_partition)
num_partitions = P2.getNumPartitions()
while num_partitions != 1:
num_partitions = (num_partitions - 1) / 4 + 1
P3 = P3.partitionBy(num_partitions).mapPartitions(merge_in_partition)
P4 = P3.groupByKey().flatMap(judge)
P4.saveAsTextFile("file://" + outputdir)
end = time.time()
print "elapse:", end - start, "seconds"
# ref_file_path = ["/temp/refread/Phix1.fq", "/temp/refread/Phix2.fq"]
ref_file_path = ["/temp/refread/Ecoli1.fq", "/temp/refread/Ecoli2.fq"]
K = 14
MATCH_RATE = 0.9
JUDGE_THRESHOLD = 2
if __name__ == '__main__':
from pyspark import SparkContext, SparkConf
conf = SparkConf().setAppName("dec") # .setMaster("local[4]")
sc = SparkContext(conf=conf)
run(sc)
| mit | -735,112,777,300,059,300 | 41.746377 | 136 | 0.515765 | false | 3.221737 | false | false | false |
guaq/paikkis | model.py | 1 | 3427 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
0  paikannimi                        - place name
1  nimen kielikoodi                  - language code of the name
2  kielen nimi                       - name of the language
3  paikkatyypin koodi                - place type code
4  paikkatyypin selite               - place type description
5  kkj/pkj pohjoinen                 - KKJ/PKJ northing
6  kkj/pkj itä                       - KKJ/PKJ easting
7  kkj/ykj pohjoinen                 - KKJ/YKJ northing
8  kkj/ykj itä                       - KKJ/YKJ easting
9  etrs/tm35fin pohjoinen            - ETRS-TM35FIN northing
10 etrs/tm35fin itä                  - ETRS-TM35FIN easting
11 kuntakoodi                        - municipality code
12 kunnan nimi                       - municipality name
13 seutukuntakoodi                   - sub-region code
14 seutukunnan nimi                  - sub-region name
15 maakunnan koodi                   - region (NUTS3) code
16 maakunnan nimi                    - region (NUTS3) name
17 suuraluekoodi                     - major region (NUTS2) code
18 suuralueen nimi                   - major region (NUTS2) name
19 läänikoodi                        - province code
20 läänin nimi                       - province name
21 lehtijaon 5x5 tunnus              - 5x5 map sheet division identifier
22 pelastuslehtijaon tunnus          - rescue map sheet division identifier
23 etrs-tm35 -tunnus                 - ETRS-TM35 sheet identifier
24 nimen kielen virallisuuskoodi     - officiality code of the name's language
25 nimen kielen virallisuusselite    - officiality description of the name's language
26 nimen kielen enemmistöasemakoodi  - majority-status code of the name's language
27 nimen kielen enemmistöselitys     - majority-status description of the name's language
28 paikannimenlähdekoodi             - place name source code
29 paikannimen lähdeselitys          - place name source description
30 paikka-id                         - place ID
31 paikannimen id                    - place name ID
Finnish-English glossary:
http://www.google.fi/url?sa=t&rct=j&q=&esrc=s&source=web&cd=18&ved=0CEUQFjAHOAo&url=http%3A%2F%2Fwww.pohjois-karjala.fi%2Fdman%2FDocument.phx%2F~maakuntaliitto%2FJulkiset%2FEUFUND%2FHankesanasto%3FfolderId%3D~maakuntaliitto%252FJulkiset%252FEUFUND%26cmd%3Ddownload&ei=-RKIUISCGMKA4gS9roHYCg&usg=AFQjCNEqVl4XU868FwPn8C-_qlnozH81Vw&cad=rja
"""
from __future__ import print_function
import sys
import codecs
import sqlite3
from coordinates import Translate, COORD_TYPE_WGS84, COORD_TYPE_ETRSTM35FIN
o8 = codecs.getwriter('utf-8')(sys.stdout)
e8 = codecs.getwriter('utf-8')(sys.stderr)
# Translate() (from the coordinates module) takes:
#   Input:  a dictionary where ['type'] is the coordinate system identifier,
#           ['N'] is the northing / latitude and ['E'] is the easting / longitude,
#           plus the identifier of the coordinate system to transform the input
#           coordinates into.
#   Output: a dictionary of the same shape, with ['type'], ['N'] and ['E']
#           expressed in the requested coordinate system.
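# Example (illustrative values only; assumes the imports above):
#   Translate({'type': COORD_TYPE_ETRSTM35FIN, 'N': 6715706.0, 'E': 385544.0},
#             COORD_TYPE_WGS84)
#   -> {'type': COORD_TYPE_WGS84, 'N': <latitude>, 'E': <longitude>}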
class Place(object):
def __init__(self, lst):
self.name = lst[0]
wgs84_coords = Translate({'type': COORD_TYPE_ETRSTM35FIN,
'N': float(lst[9]), 'E': float(lst[10])}, COORD_TYPE_WGS84)
self.lat = wgs84_coords['N']
self.lon = wgs84_coords['E']
self.type_id = lst[3]
self.municipality_id = lst[11]
self.sub_region_id = lst[13]
self.NUTS3_region_id = lst[15]
self.NUTS2_region_id = lst[17]
self.id = lst[30]
def __repr__(self):
return "<Place %s %s>" % (self.id, str(self))
def __str__(self):
return unicode(self).encode('ASCII', 'backslashreplace')
def __unicode__(self):
return u"{0}, {1}; {2}, {3}".format(self.name, self.municipality_id, self.lat, self.lon)
def insert_stmt(self):
return (u"INSERT INTO places (name, municipality_id, id, lat, lon, type_id, sub_region_id, NUTS2_region_id, NUTS3_region_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
(self.name,
self.municipality_id,
self.id,
self.lat,
self.lon,
self.type_id,
self.sub_region_id,
self.NUTS2_region_id,
self.NUTS3_region_id))
def insert_fts_stmt(self):
return (u"INSERT INTO places_fts (id, name) VALUES (?, ?)",
(self.id,
self.name))
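
# Minimal usage sketch (illustrative; assumes a tab-separated source file and a
# sqlite3 database whose schema contains the tables referenced above; the file
# and database names are placeholders):
#
#   conn = sqlite3.connect('paikat.db')
#   with codecs.open('paikannimet.txt', encoding='utf-8') as src:
#       for line in src:
#           place = Place(line.rstrip('\n').split('\t'))
#           conn.execute(*place.insert_stmt())
#           conn.execute(*place.insert_fts_stmt())
#   conn.commit()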
| mit | -5,161,143,840,188,040,000 | 32.821782 | 338 | 0.605679 | false | 2.743775 | false | false | false |
yusihao51/Minecraft | views.py | 1 | 24491 | # -*- coding: utf-8 -*-
# Imports, sorted alphabetically.
# Python packages
import os
import socket
import subprocess
import sys
import datetime
from math import sin, pi
# Third-party packages
import pyglet
from pyglet.text import Label
from pyglet.gl import *
# Modules from this project
import globals as G
from gui import frame_image, Rectangle, backdrop, Button, button_image, \
button_highlighted, ToggleButton, TextWidget, ScrollbarWidget, \
button_disabled, resize_button_image
from textures import TexturePackList
from utils import image_sprite, load_image
__all__ = (
'View', 'MainMenuView', 'OptionsView', 'ControlsView', 'TexturesView', 'MultiplayerView'
)
class Layout(object):
def __init__(self, x, y):
self.components = []
self._position = x, y
self.width, self.height = 0, 0
def add(self, component):
self.components.append(component)
def _set_component_position(self, component, x, y):
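        # Widgets expose different positioning APIs: try the `position` property
        # first, then `resize`, and finally fall back to setting raw x/y attributes.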
try:
component.position = x, y
except AttributeError:
try:
component.resize(x, y, component.width, component.height)
except AttributeError:
component.x, component.y = x, y
@property
def position(self):
        return self._position
@position.setter
def position(self, value):
self._position = value
class VerticalLayout(Layout):
def add(self, component):
self.components.append(component)
self.height += component.height + 10
self.width = max(component.width, self.width)
self._put_components()
def _put_components(self):
c_x, c_y = self._position[0], self._position[-1] + self.height
for component in self.components:
self._set_component_position(component, c_x, c_y)
c_y -= component.height + 10
@property
def position(self):
        return self._position
@position.setter
def position(self, value):
self._position = value
self._put_components()
class HorizontalLayout(Layout):
def add(self, component):
self.components.append(component)
self.width += component.width + 10
self.height = max(component.height, self.height)
self._put_components()
def _put_components(self):
c_x, c_y = self._position[0], self._position[-1]
for component in self.components:
self._set_component_position(component, c_x, c_y)
c_x += component.width + 10
@property
def position(self):
        return self._position
@position.setter
def position(self, value):
self._position = value
self._put_components()
class View(pyglet.event.EventDispatcher):
def __init__(self, controller):
super(View, self).__init__()
self.controller = controller
self.batch = pyglet.graphics.Batch()
self.buttons = []
def setup(self):
pass
def add_handlers(self):
self.setup()
self.controller.window.push_handlers(self)
def pop_handlers(self):
self.controller.window.set_mouse_cursor(None)
self.controller.window.pop_handlers()
def update(self, dt):
pass
def clear(self):
glClearColor(0.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
def on_mouse_press(self, x, y, button, modifiers):
self.dispatch_event('on_mouse_click', x, y, button, modifiers)
def on_mouse_motion(self, x, y, dx, dy):
cursor = None
for button in self.buttons:
if button.enabled:
if button.highlighted:
button.highlighted = False
button.draw()
if button.hit_test(x, y):
button.highlighted = True
button.draw()
cursor = self.controller.window.get_system_mouse_cursor(pyglet.window.Window.CURSOR_HAND)
self.controller.window.set_mouse_cursor(cursor)
def on_draw(self):
self.clear()
glColor3d(1, 1, 1)
self.controller.set_2d()
self.batch.draw()
View.register_event_type('on_mouse_click')
class MenuView(View):
def setup(self):
self.group = pyglet.graphics.OrderedGroup(3)
self.labels_group = pyglet.graphics.OrderedGroup(4)
self.layout = Layout(0, 0)
image = frame_image
self.frame_rect = Rectangle(0, 0, image.width, image.height)
self.background = G.texture_pack_list.selected_texture_pack.load_texture(['gui', 'background.png'])
self.background = self.background.get_texture()
self.background.height = 64
self.background.width = 64
self.frame = Rectangle(0, 0, image.width, image.height)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT)
def Button(self, x=0, y=0, width=400, height=40, image=button_image, image_highlighted=button_highlighted, caption="Unlabeled", batch=None, group=None, label_group=None, font_name='ChunkFive Roman', on_click=None, enabled=True):
button = Button(self, x=x, y=y, width=width, height=height, image=resize_button_image(image, 400, width), image_highlighted=resize_button_image(image_highlighted, 400, width), caption=caption, batch=(batch or self.batch), group=(group or self.group), label_group=(label_group or self.labels_group), font_name=font_name, enabled=enabled)
if on_click:
button.push_handlers(on_click=on_click)
return button
def ToggleButton(self, x=0, y=0, width=400, height=40, image=button_image, image_highlighted=button_highlighted, caption="Unlabeled", batch=None, group=None, label_group=None, font_name='ChunkFive Roman', on_click=None, on_toggle=None, enabled=True):
button = ToggleButton(self, x=x, y=y, width=width, height=height, image=resize_button_image(image, 400, width), image_highlighted=resize_button_image(image_highlighted, 400, width), caption=caption, batch=(batch or self.batch), group=(group or self.group), label_group=(label_group or self.labels_group), font_name=font_name, enabled=enabled)
if on_click:
button.push_handlers(on_click=on_click)
if on_toggle:
button.push_handlers(on_toggle=on_toggle)
return button
def Scrollbar(self, x=0, y=0, width=400, height=40, sb_width=40, sb_height=40, style=1, background_image=button_disabled, scrollbar_image=button_image, caption="Test", font_size=12, font_name=G.DEFAULT_FONT, batch=None, group=None, label_group=None, pos=0, on_pos_change=None):
sb = ScrollbarWidget(self.controller.window, x=x, y=y, width=width, height=height,
sb_width=sb_width, sb_height=sb_height,
style=style,
background_image=resize_button_image(background_image, 400, width),
scrollbar_image=resize_button_image(scrollbar_image, 400, sb_width),
caption=caption, font_size=font_size, font_name=font_name,
batch=(batch or self.batch), group=(group or self.group), label_group=(label_group or self.labels_group),
pos=pos, on_pos_change=on_pos_change)
return sb
def draw_background(self):
glBindTexture(self.background.target, self.background.id)
glEnable(self.background.target)
glColor4f(0.3, 0.3, 0.3, 1.0)
width = float(self.controller.window.get_size()[0])
height = float(self.controller.window.get_size()[1])
bg_width = self.background.width
bg_height = self.background.height
vert_list = [0.0, 0.0, 0.0, width, 0.0, 0.0, width, height, 0.0, 0.0, height, 0.0]
uv_list = [0.0, 0.0, width / bg_width, 0.0, width / bg_width, height / bg_height, 0.0, height / bg_height]
l = pyglet.graphics.vertex_list(4,
('v3f/static', vert_list),
('t2f/static', uv_list),
)
l.draw(GL_QUADS)
glDisable(self.background.target)
def on_draw(self):
self.clear()
glColor3d(1, 1, 1)
self.draw_background()
self.controller.set_2d()
self.batch.draw()
def on_resize(self, width, height):
self.frame.x, self.frame.y = (width - self.frame.width) / 2, (height - self.frame.height) / 2
self.layout.position = (width - self.layout.width) / 2, self.frame.y
class MainMenuView(MenuView):
def setup(self):
self.group = pyglet.graphics.OrderedGroup(3)
self.labels_group = pyglet.graphics.OrderedGroup(4)
image = frame_image
self.layout = VerticalLayout(0, 0)
# Custom background
self.background = None
self.frame_rect = Rectangle(0, 0, self.controller.window.get_size()[0], image.height)
self.frame = Rectangle(0, 0, self.controller.window.get_size()[0], image.height)
width, height = self.controller.window.width, self.controller.window.height
self.label = Label(G.APP_NAME, font_name='ChunkFive Roman', font_size=50, x=width/2, y=self.frame.y + self.frame.height,
anchor_x='center', anchor_y='top', color=(255, 255, 255, 255), batch=self.batch,
group=self.labels_group)
self.label.height = self.label.content_height
self.layout.add(self.label)
button = self.Button(caption=G._("Singleplayer"),on_click=self.controller.start_singleplayer_game)
self.layout.add(button)
self.buttons.append(button)
button = self.Button(caption=G._("Multiplayer"),on_click=self.controller.multiplayer)
self.layout.add(button)
self.buttons.append(button)
button = self.Button(caption=G._("Options..."),on_click=self.controller.game_options)
self.layout.add(button)
self.buttons.append(button)
button = self.Button(caption=G._("Exit game"),on_click=self.controller.exit_game)
self.layout.add(button)
self.buttons.append(button)
# Splash text
self.splash_text = 'Hello!'
now = datetime.datetime.now()
if now.month == 1 and now.day == 1:
self.splash_text = 'Happy new year!'
self.splash_text_label = Label(self.splash_text, font_name='Arial', font_size=30, x=self.label.x, y=self.label.y,
anchor_x='center', anchor_y='top', color=(255, 255, 0, 255),
group=self.labels_group)
self.on_resize(width, height)
# Panorama
self.panorama = [G.texture_pack_list.selected_texture_pack.load_texture(['title', 'bg', 'panorama' + str(x) + '.png']) for x in range(6)]
self.panorama_timer = 0
pyglet.clock.schedule_interval(self.update_panorama_timer, .05)
self.blur_texture = pyglet.image.Texture.create(256, 256)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
def update_panorama_timer(self, dt):
self.panorama_timer += 1
def draw_panorama(self):
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
gluPerspective(120.0, 1.0, 0.05, 10.0)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glLoadIdentity()
glColor4f(1.0, 1.0, 1.0, 1.0)
glRotatef(180.0, 1.0, 0.0, 0.0)
glEnable(GL_BLEND)
glDisable(GL_ALPHA_TEST)
glDisable(GL_CULL_FACE)
glDepthMask(False)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glPushMatrix()
glRotatef(sin(float(self.panorama_timer) / 400.0) * 25.0 + 20.0, 1.0, 0.0, 0.0)
glRotatef(-float(self.panorama_timer) * 0.1, 0.0, -1.0, 0.0)
# 6 faces
for i in range(6):
glPushMatrix()
if i == 1:
glRotatef(90.0, 0.0, 1.0, 0.0)
elif i == 2:
glRotatef(180.0, 0.0, 1.0, 0.0)
elif i == 3:
glRotatef(-90.0, 0.0, 1.0, 0.0)
elif i == 4:
glRotatef(90.0, 1.0, 0.0, 0.0)
elif i == 5:
glRotatef(-90.0, 1.0, 0.0, 0.0)
glBindTexture(self.panorama[i].texture.target, self.panorama[i].texture.id)
glEnable(self.panorama[i].texture.target)
vert_list = [-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0]
uv_list = [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0]
l = pyglet.graphics.vertex_list(4,
('v3f/static', vert_list),
('t2f/static', uv_list),
)
l.draw(GL_QUADS)
glDisable(self.panorama[i].texture.target)
glPopMatrix()
glPopMatrix()
glColorMask(True, True, True, False)
glColorMask(True, True, True, True)
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
glPopMatrix()
glDepthMask(True)
glEnable(GL_CULL_FACE)
glEnable(GL_ALPHA_TEST)
glEnable(GL_DEPTH_TEST)
def render_to_texture(self):
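        # Draw the panorama at 256x256, copy the framebuffer into the blur
        # texture, then clear and restore the full window viewport.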
glViewport(0, 0, 256, 256)
self.draw_panorama()
glBindTexture(GL_TEXTURE_2D, self.blur_texture.id)
glCopyTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, 0, 0, 256, 256, 0)
glClearColor(0.0, 0.0, 0.5, 0.5)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glViewport(0, 0, self.controller.window.get_size()[0], self.controller.window.get_size()[1])
def draw_blur(self, times=5):
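        # Overlay the blur texture `times` times with decreasing alpha to fake
        # a cheap full-screen blur of the panorama.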
alpha = 0.5
glDisable(GL_TEXTURE_GEN_S)
glDisable(GL_TEXTURE_GEN_T)
glEnable(GL_TEXTURE_2D)
glDisable(GL_DEPTH_TEST)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE)
glBindTexture(GL_TEXTURE_2D, self.blur_texture.id)
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
glOrtho(0, self.controller.window.get_size()[0] , self.controller.window.get_size()[1] , 0, -1, 1 )
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glLoadIdentity()
alphainc = alpha / float(times)
spost = 0
width = self.controller.window.get_size()[0]
height = self.controller.window.get_size()[1]
glBegin(GL_QUADS)
for _ in range(times):
glColor4f(1.0, 1.0, 1.0, alpha)
glTexCoord2f(0, 1)
glVertex2f(0, 0)
glTexCoord2f(0, 0)
glVertex2f(0, height)
glTexCoord2f(1, 0)
glVertex2f(width, height)
glTexCoord2f(1, 1)
glVertex2f(width, 0)
alpha = alpha - alphainc
if alpha < 0:
alpha = 0
glEnd()
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
glPopMatrix()
glEnable(GL_DEPTH_TEST)
glDisable(GL_TEXTURE_2D)
glDisable(GL_BLEND)
glBindTexture(GL_TEXTURE_2D, 0)
def draw_splash_text(self):
glPushMatrix()
glTranslatef(float(self.controller.window.get_size()[0] / 2 - self.label.content_width / 2), -float(self.controller.window.get_size()[1] / 3), 0.0)
glRotatef(20.0, 0.0, 0.0, 1.0)
self.splash_text_label.draw()
glPopMatrix()
def on_resize(self, width, height):
MenuView.on_resize(self, width, height)
self.label.y = self.frame.y + self.frame.height - 15
self.label.x = width / 2
self.splash_text_label.x = self.label.x
self.splash_text_label.y = self.label.y
def on_draw(self):
self.clear()
glColor3d(1, 1, 1)
#self.render_to_texture()
self.draw_panorama()
#self.draw_blur()
self.controller.set_2d()
self.batch.draw()
self.draw_splash_text()
class OptionsView(MenuView):
def setup(self):
MenuView.setup(self)
width, height = self.controller.window.width, self.controller.window.height
self.layout = VerticalLayout(0, 0)
textures_enabled = len(G.texture_pack_list.available_texture_packs) > 1
self.text_input = TextWidget(self.controller.window, G.USERNAME, 0, 0, width=160, height=20, font_name='Arial', batch=self.batch)
self.controller.window.push_handlers(self.text_input)
self.text_input.focus()
self.text_input.caret.mark = len(self.text_input.document.text) # Don't select the whole text
def text_input_callback(symbol, modifier):
G.USERNAME = self.text_input.text
self.text_input.push_handlers(key_released=text_input_callback)
hl = HorizontalLayout(0, 0)
sb = self.Scrollbar(x=0, y=0, width=300, height=40, sb_width=20, sb_height=40, caption="Music")
hl.add(sb)
def change_sound_volume(pos):
print G.EFFECT_VOLUME
G.EFFECT_VOLUME = float(float(pos) / 100)
sb = self.Scrollbar(x=0, y=0, width=300, height=40, sb_width=20, sb_height=40, caption="Sound", pos=int(G.EFFECT_VOLUME * 100), on_pos_change=change_sound_volume)
hl.add(sb)
self.layout.add(hl)
hl = HorizontalLayout(0, 0)
button = self.Button(width=300, caption=G._("Controls..."), on_click=self.controller.controls)
hl.add(button)
self.buttons.append(button)
button = self.Button(width=300, caption=G._("Textures"), on_click=self.controller.textures, enabled=textures_enabled)
hl.add(button)
self.buttons.append(button)
self.layout.add(hl)
button = self.Button(width=610, caption=G._("Done"), on_click=self.controller.main_menu)
self.layout.add(button)
self.buttons.append(button)
self.label = Label('Options', font_name='ChunkFive Roman', font_size=25, x=width/2, y=self.frame.y + self.frame.height,
anchor_x='center', anchor_y='top', color=(255, 255, 255, 255), batch=self.batch,
group=self.labels_group)
self.on_resize(width, height)
def on_resize(self, width, height):
MenuView.on_resize(self, width, height)
self.text_input.resize(x=self.frame.x + (self.frame.width - self.text_input.width) / 2 + 5, y=self.frame.y + (self.frame.height) / 2 + 75, width=150)
class ControlsView(MenuView):
def setup(self):
MenuView.setup(self)
width, height = self.controller.window.width, self.controller.window.height
self.layout = VerticalLayout(0, 0)
self.key_buttons = []
for identifier in ('move_backward', 'move_forward', 'move_left', 'move_right'):
button = self.ToggleButton(width=200, caption=pyglet.window.key.symbol_string(getattr(G, identifier.upper() + '_KEY')))
button.id = identifier
self.buttons.append(button)
self.key_buttons.append(button)
self.button_return = self.Button(caption=G._("Done"),on_click=self.controller.game_options)
self.buttons.append(self.button_return)
self.on_resize(width, height)
def on_resize(self, width, height):
self.background.scale = 1.0
self.background.scale = max(float(width) / self.background.width, float(height) / self.background.height)
self.background.x, self.background.y = 0, 0
self.frame.x, self.frame.y = (width - self.frame.width) / 2, (height - self.frame.height) / 2
default_button_x = button_x = self.frame.x + 30
button_y = self.frame.y + (self.frame.height) / 2 + 10
i = 0
for button in self.key_buttons:
button.position = button_x, button_y
if i%2 == 0:
button_x += button.width + 20
else:
button_x = default_button_x
button_y -= button.height + 20
i += 1
button_x = self.frame.x + (self.frame.width - self.button_return.width) / 2
self.button_return.position = button_x, button_y
def on_key_press(self, symbol, modifiers):
active_button = None
for button in self.buttons:
if isinstance(button, ToggleButton) and button.toggled:
active_button = button
break
if not active_button:
return
active_button.caption = pyglet.window.key.symbol_string(symbol)
active_button.toggled = False
G.config.set("Controls", active_button.id, pyglet.window.key.symbol_string(symbol))
G.save_config()
class TexturesView(MenuView):
def setup(self):
MenuView.setup(self)
width, height = self.controller.window.width, self.controller.window.height
self.layout = VerticalLayout(0, 0)
self.texture_buttons = []
self.current_toggled = None
texture_packs = G.texture_pack_list.available_texture_packs
for texture_pack in texture_packs:
button = self.ToggleButton(caption=texture_pack.texture_pack_file_name,on_toggle=self.on_button_toggle)
button.id = texture_pack.texture_pack_file_name
button.toggled = G.texture_pack_list.selected_texture_pack == texture_pack
if button.toggled:
self.current_toggled = button
self.buttons.append(button)
self.layout.add(button)
self.texture_buttons.append(button)
self.button_return = self.Button(caption="Done",on_click=self.controller.game_options)
self.buttons.append(self.button_return)
self.layout.add(self.button_return)
self.on_resize(width, height)
def on_button_toggle(self):
for button in self.texture_buttons:
if button != self.current_toggled and button.toggled:
self.current_toggled.toggled = False
self.current_toggled = button
G.config.set("Graphics", "texture_pack", button.id)
G.TEXTURE_PACK = button.id
for block in G.BLOCKS_DIR.values():
block.update_texture() #Reload textures
G.save_config()
def on_resize(self, width, height):
MenuView.on_resize(self, width, height)
self.background.scale = 1.0
self.background.scale = max(float(width) / self.background.width, float(height) / self.background.height)
self.background.x, self.background.y = 0, 0
self.frame.x, self.frame.y = (width - self.frame.width) / 2, (height - self.frame.height) / 2
class MultiplayerView(MenuView):
def setup(self):
MenuView.setup(self)
width, height = self.controller.window.width, self.controller.window.height
self.layout = VerticalLayout(0, 0)
self.text_input = TextWidget(self.controller.window, G.IP_ADDRESS, 0, 0, width=160, height=20, font_name='Arial', batch=self.batch)
self.controller.window.push_handlers(self.text_input)
self.text_input.focus()
def text_input_callback(symbol, modifier):
G.IP_ADDRESS = self.text_input.text
self.text_input.push_handlers(key_released=text_input_callback)
button = self.Button(caption=G._("Connect to server"), on_click=self.controller.start_multiplayer_game)
self.layout.add(button)
self.buttons.append(button)
button= self.Button(caption=G._("Launch server"), on_click=self.launch_server)
self.layout.add(button)
self.buttons.append(button)
button= self.Button(caption=G._("Done"), on_click=self.controller.main_menu)
self.layout.add(button)
self.buttons.append(button)
self.label = Label('Play Multiplayer', font_name='ChunkFive Roman', font_size=25, x=width/2, y=self.frame.y + self.frame.height,
anchor_x='center', anchor_y='top', color=(255, 255, 255, 255), batch=self.batch,
group=self.labels_group)
self.on_resize(width, height)
def launch_server(self):
if os.name == 'nt':
subprocess.Popen([sys.executable, "server.py"], creationflags=subprocess.CREATE_NEW_CONSOLE)
else:
subprocess.Popen([sys.executable, "server.py"])
localip = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][0]
self.text_input.text = localip
G.IP_ADDRESS = localip
def on_resize(self, width, height):
MenuView.on_resize(self, width, height)
self.text_input.resize(x=self.frame.x + (self.frame.width - self.text_input.width) / 2 + 5, y=self.frame.y + (self.frame.height) / 2 + 75, width=150)
| mit | 5,070,663,446,137,972,000 | 37.507862 | 350 | 0.613695 | false | 3.388351 | false | false | false |
danieljabailey/inkscape_experiments | share/extensions/fractalize.py | 1 | 3762 | #!/usr/bin/env python
'''
Copyright (C) 2005 Carsten Goetze [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
import random, math, inkex, simplepath
def calculateSubdivision(x1,y1,x2,y2,smoothness):
""" Calculate the vector from (x1,y1) to (x2,y2) """
x3 = x2 - x1
y3 = y2 - y1
""" Calculate the point half-way between the two points """
hx = x1 + x3/2
hy = y1 + y3/2
""" Calculate normalized vector perpendicular to the vector (x3,y3) """
length = math.sqrt(x3*x3 + y3*y3)
if length != 0:
nx = -y3/length
ny = x3/length
else:
nx = 1
ny = 0
""" Scale perpendicular vector by random factor """
r = random.uniform(-length/(1+smoothness),length/(1+smoothness))
nx = nx * r
ny = ny * r
""" add scaled perpendicular vector to the half-way point to get the final
displaced subdivision point """
x = hx + nx
y = hy + ny
return [x, y]
class PathFractalize(inkex.Effect):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("-s", "--subdivs",
action="store", type="int",
dest="subdivs", default="6",
help="Number of subdivisons")
self.OptionParser.add_option("-f", "--smooth",
action="store", type="float",
dest="smooth", default="4.0",
help="Smoothness of the subdivision")
def effect(self):
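        # For every selected SVG path, walk its segments and replace each one
        # with a recursively displaced (fractalized) polyline.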
for id, node in self.selected.iteritems():
if node.tag == inkex.addNS('path','svg'):
d = node.get('d')
p = simplepath.parsePath(d)
a = []
first = 1
for cmd,params in p:
if cmd != 'Z':
if first == 1:
x1 = params[-2]
y1 = params[-1]
a.append(['M',params[-2:]])
first = 2
else :
x2 = params[-2]
y2 = params[-1]
self.fractalize(a,x1,y1,x2,y2,self.options.subdivs,self.options.smooth)
x1 = x2
y1 = y2
a.append(['L',params[-2:]])
node.set('d', simplepath.formatPath(a))
def fractalize(self,a,x1,y1,x2,y2,s,f):
subdivPoint = calculateSubdivision(x1,y1,x2,y2,f)
if s > 0 :
""" recursively subdivide the segment left of the subdivision point """
self.fractalize(a,x1,y1,subdivPoint[-2],subdivPoint[-1],s-1,f)
a.append(['L',subdivPoint])
""" recursively subdivide the segment right of the subdivision point """
self.fractalize(a,subdivPoint[-2],subdivPoint[-1],x2,y2,s-1,f)
if __name__ == '__main__':
e = PathFractalize()
e.affect()
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 fileencoding=utf-8 textwidth=99
| gpl-2.0 | 2,135,665,781,079,177,200 | 37.783505 | 99 | 0.542265 | false | 3.743284 | false | false | false |
Octonius/boxv2 | boxv2/session.py | 1 | 18801 | from .request import BoxRestRequest
from .upload import MultipartUploadWrapper
from .exceptions import BoxError, BoxHttpResponseError
class BoxSession(object):
"""Manage files and folder from Box.
When you instanciate this class you have to provide at least the Refresh Token (found with :class:`BoxAuthenticateFlow`). If the Access Token is not provided a request will be made to Box to get a new one (and a new Refresh Token will be generated).
The Access Token expires every hour. When you use this class with an Access Token expired, a new one will be requested automatically.
Use the "tokens_changed" callback to backup the Access Token and the Refresh Token each time they change. If you do not backup them, you will have to follow the authenticate flow again (with :class:`BoxAuthenticateFlow`).
Usage:
>>> def tokens_changed(refresh_token, access_token):
... save_to_file(refresh_token, access_token)
...
>>> box = BoxSession('my_id', 'my_secret', refresh_token, access_token, tokens_changed)
>>> print box.get_folder_info(0)
"""
def __init__(self, client_id, client_secret,
last_refresh_token,
last_access_token=None,
tokens_changed=None,
timeout=None):
"""Constructor
Args:
client_id (str): Client ID provided by Box.
client_secret (str): Client Secret provided by Box.
last_refresh_token (str): Refresh Token found with the class :class:`BoxAuthenticateFlow`.
last_access_token (str): Access Token found with the class :class:`BoxAuthenticateFlow`. If None, a new Access Token will be requested to Box.
            tokens_changed (func): Function called each time the Refresh Token and the Access Token are refreshed (because of expiration). Use it to back up your Refresh Token and Access Token so that you can reuse this class later without going through :class:`BoxAuthenticateFlow` again.
            timeout (float): Stop waiting for a response after the given number of seconds. If None, wait forever. http://www.python-requests.org/en/latest/user/quickstart/#timeouts
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
self.box_request = BoxRestRequest(client_id, client_secret, timeout)
self.client_id = client_id
self.client_secret = client_secret
self.refresh_token = last_refresh_token
self.access_token = last_access_token
self.box_request.access_token = last_access_token
self.tokens_changed = tokens_changed
if self.access_token == None:
self.__refresh_access_token()
def __check_response(self, response, stream=False):
if stream:
log_debug('Response from box.com: %s. {Streamed content}' % (response,))
else:
log_debug('Response from box.com: %s. %s' %(response, response.text))
try:
if stream:
att = response
elif response.text is not None and len(response.text) > 0:
att = response.json()
else:
att = {}
except Exception, ex:
raise BoxHttpResponseError(ex)
if response.status_code >= 400:
raise BoxError(response.status_code, att)
else:
return att
def __refresh_access_token(self):
log_debug('Access token expired, refreshing it from refresh token')
resp = self.box_request.refresh_access_token(self.refresh_token)
self.__log_debug_request(resp)
att = self.__check_response(resp)
self.access_token = att['access_token']
self.refresh_token = att['refresh_token']
self.box_request.access_token = self.access_token
if self.tokens_changed:
self.tokens_changed(self.refresh_token, self.access_token)
def __request(self, method, command, data=None,
querystring=None, files=None, headers=None,
stream=None,
json_data=True,
raise_if_token_expired=False):
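        # Send the request; on a 401 response the access token is refreshed and
        # the request is retried once (unless raise_if_token_expired is set).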
resp = self.box_request.request(method, command,
data, querystring,
files, headers, stream, json_data)
self.__log_debug_request(resp)
try:
att = self.__check_response(resp, stream)
except BoxError, ex:
if ex.status != 401:
raise
self.__refresh_access_token()
if raise_if_token_expired:
raise
resp = self.box_request.request(method, command,
data, querystring,
files, headers, stream, json_data)
self.__log_debug_request(resp)
att = self.__check_response(resp, stream)
return att
def __log_debug_request(self, resp):
if hasattr(resp.request, 'data'):
data_req = resp.request.data
else:
data_req = ''
log_debug('Request made to box.com: %s %s\nHEADERS:\n%s\nDATA:\n%s\nBODY:\n%s' %
(resp.request.method,
resp.request.url,
resp.request.headers,
data_req,
resp.request.body))
def find_id_in_folder(self, name, parent_folder_id=0):
"""Find a folder or a file ID from its name, inside a given folder.
Args:
name (str): Name of the folder or the file to find.
parent_folder_id (int): ID of the folder where to search.
Returns:
int. ID of the file or folder found. None if not found.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
if name is None or len(name) == 0:
return parent_folder_id
offset = 0
resp = self.get_folder_items(parent_folder_id,
limit=1000, offset=offset,
fields_list=['name'])
total = int(resp['total_count'])
while offset < total:
found = self.__find_name(resp, name)
if found is not None:
return found
offset += int(len(resp['entries']))
resp = self.get_folder_items(parent_folder_id,
limit=1000, offset=offset,
fields_list=['name'])
return None
def __find_name(self, response, name_to_find):
for entry in response['entries']:
if entry['name'] == name_to_find:
return int(entry['id'])
return None
def get_folder_info(self, folder_id):
"""Get info on a folder
Args:
folder_id (int): ID of the folder.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("GET", "folders/%s" % (folder_id, ))
def create_folder(self, name, parent_folder_id=0):
"""Create a folder
If the folder exists, a BoxError will be raised.
Args:
folder_id (int): Name of the folder.
parent_folder_id (int): ID of the folder where to create the new one.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("POST", "folders",
data={ "name": name,
"parent": {"id": unicode(parent_folder_id)} })
def delete_folder(self, folder_id, recursive=True):
"""Delete an existing folder
Args:
folder_id (int): ID of the folder to delete.
recursive (bool): Delete all subfolder if True.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("DELETE", "folders/%s" % (folder_id, ),
querystring={'recursive': unicode(recursive).lower()})
def get_folder_items(self, folder_id,
limit=100, offset=0, fields_list=None):
"""Get files and folders inside a given folder
Args:
folder_id (int): Where to get files and folders info.
limit (int): The number of items to return.
offset (int): The item at which to begin the response.
fields_list (list): List of attributes to get. All attributes if None.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
qs = { "limit": limit,
"offset": offset }
if fields_list:
qs['fields'] = ','.join(fields_list)
return self.__request("GET", "folders/%s/items" % (folder_id, ),
querystring=qs)
def upload_file(self, name, folder_id, file_path):
"""Upload a file into a folder.
        Use this function for small files; for big files, use chunk_upload_file().
        Args:
name (str): Name of the file on your Box storage.
folder_id (int): ID of the folder where to upload the file.
file_path (str): Local path of the file to upload.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
try:
return self.__do_upload_file(name, folder_id, file_path)
except BoxError, ex:
if ex.status != 401:
raise
            # tokens have been refreshed, so restart the upload
return self.__do_upload_file(name, folder_id, file_path)
def __do_upload_file(self, name, folder_id, file_path):
file_obj = open(file_path, 'rb')
try:
return self.__request("POST", "files/content",
files = {'filename': (name, file_obj)},
data = {'parent_id': unicode(folder_id)},
json_data = False,
raise_if_token_expired=True)
finally:
file_obj.close()
def chunk_upload_file(self, name, folder_id, file_path,
progress_callback=None,
chunk_size=1024*1024*1):
"""Upload a file chunk by chunk.
The whole file is never loaded in memory.
        Use this function for big files.
        The callback(transferred, total) lets you track the upload progress.
        The upload can be cancelled if the callback raises an Exception.
>>> def progress_callback(transferred, total):
... print 'Uploaded %i bytes of %i' % (transferred, total, )
... if user_request_cancel:
... raise MyCustomCancelException()
Args:
name (str): Name of the file on your Box storage.
folder_id (int): ID of the folder where to upload the file.
file_path (str): Local path of the file to upload.
progress_callback (func): Function called each time a chunk is uploaded.
chunk_size (int): Size of chunks.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
try:
return self.__do_chunk_upload_file(name, folder_id, file_path,
progress_callback,
chunk_size)
except BoxError, ex:
if ex.status != 401:
raise
            # tokens have been refreshed, so restart the upload
return self.__do_chunk_upload_file(name, folder_id, file_path,
progress_callback,
chunk_size)
def __do_chunk_upload_file(self, name, folder_id, file_path,
progress_callback,
chunk_size):
file_obj = open(file_path, 'rb')
try:
muw = MultipartUploadWrapper({'parent_id': unicode(folder_id),
'filename': (name, file_obj)},
progress_callback=progress_callback,
chunk_size=chunk_size)
return self.__request("POST", "files/content",
data = muw,
headers = muw.content_type_header,
json_data = False,
raise_if_token_expired=True)
finally:
file_obj.close()
def get_file_info(self, file_id):
"""Get info on a file
Args:
file_id (int): ID of the folder.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("GET", "files/%s" % (file_id, ))
def download_file(self, file_id, dest_file_path,
progress_callback=None,
chunk_size=1024*1024*1):
"""Download a file.
The whole file is never loaded in memory.
        The callback(transferred, total) lets you track the download progress.
        The download can be cancelled if the callback raises an Exception.
>>> def progress_callback(transferred, total):
... print 'Downloaded %i bytes of %i' % (transferred, total, )
... if user_request_cancel:
... raise MyCustomCancelException()
Args:
file_id (int): ID of the file to download.
            dest_file_path (str): Local path where the downloaded file will be stored.
progress_callback (func): Function called each time a chunk is downloaded.
chunk_size (int): Size of chunks.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
with open(dest_file_path, 'wb') as fp:
req = self.__request("GET", "files/%s/content" % (file_id, ),
stream=True,
json_data=False)
total = -1
if hasattr(req, 'headers'):
lower_headers = {k.lower():v for k,v in req.headers.items()}
if 'content-length' in lower_headers:
total = lower_headers['content-length']
transferred = 0
for chunk in req.iter_content(chunk_size=chunk_size):
if chunk: # filter out keep-alive new chunks
if progress_callback:
progress_callback(transferred, total)
fp.write(chunk)
fp.flush()
transferred += len(chunk)
if progress_callback:
progress_callback(transferred, total)
def delete_file(self, file_id):
"""Delete an existing file
Args:
file_id (int): ID of the file to delete.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("DELETE", "files/%s" % (file_id, ))
def search(self, **kwargs):
"""Searches for files/folders
Args:
kwargs (dict): A dictionary containing necessary parameters (check
https://developers.box.com/docs/#search
for list of parameters)
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
query_string = {}
for key, value in kwargs.iteritems():
query_string[key] = value
return self.__request("GET","search",querystring=query_string)
def get_user_info(self):
"""Gets the user's information
Args:
Returns:
dict. Response from box
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
"""
return self.__request("GET","users/me")
show_debug_messages = False
def log_debug(message):
if show_debug_messages == False:
return
print '------------------------'
print message
| mit | -432,496,499,673,475,100 | 35.225434 | 294 | 0.55013 | false | 4.678029 | false | false | false |
GrognardsFromHell/TemplePlus | tpdatasrc/co8infra/scr/Spell307 - Meteor Swarm.py | 1 | 3572 | from toee import *
from utilities import *
def OnBeginSpellCast( spell ):
print "Meteor Swarm OnBeginSpellCast"
print "spell.target_list=", spell.target_list
print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
game.particles( "sp-evocation-conjure", spell.caster )
def OnSpellEffect( spell ):
print "Meteor Swarm OnSpellEffect"
def OnBeginProjectile( spell, projectile, index_of_target ):
print "Meteor Swarm OnBeginProjectile"
projectiles = 4
if index_of_target < projectiles:
projectile.obj_set_int( obj_f_projectile_part_sys_id, game.particles( 'sp-Spheres of Fire-proj', projectile ) )
def OnEndProjectile( spell, projectile, index_of_target ):
print "Meteor Swarm OnEndProjectile"
dam = dice_new( '2d6' )
dam2 = dice_new( '6d6' )
projectiles = 4
if index_of_target < projectiles:
spell.duration = 0
game.particles_end( projectile.obj_get_int( obj_f_projectile_part_sys_id ) )
target_item = spell.target_list[ index_of_target ]
return_val = spell.caster.perform_touch_attack( target_item.obj )
xx,yy = location_to_axis(target_item.obj.location)
if target_item.obj.map == 5067 and ( xx >= 521 and xx <= 555 ) and ( yy >= 560 and yy <= 610):
target_item.obj.float_mesfile_line( 'mes\\skill_ui.mes', 2000 , 1)
game.particles( 'swirled gas', target_item.obj )
game.sound(7581,1)
game.sound(7581,1)
else:
if (return_val & D20CAF_HIT):
# hit target
if index_of_target > 0:
return_val |= D20CAF_NO_PRECISION_DAMAGE
game.particles( 'sp-Spheres of Fire-hit', target_item.obj )
target_item.obj.spell_damage_weaponlike( spell.caster, D20DT_BLUDGEONING, dam, D20DAP_UNSPECIFIED, 100, D20A_CAST_SPELL, spell.id, return_val, index_of_target )
target_item.obj.spell_damage_weaponlike( spell.caster, D20DT_FIRE, dam2, D20DAP_UNSPECIFIED, 100, D20A_CAST_SPELL, spell.id, return_val, index_of_target )
else:
# miss target
target_item.obj.float_mesfile_line( 'mes\\spell.mes', 30007 )
game.particles( 'Fizzle', target_item.obj )
if target_item.obj.reflex_save_and_damage( spell.caster, spell.dc, D20_Save_Reduction_Half, D20STD_F_NONE, dam2, D20DT_FIRE, D20DAP_UNSPECIFIED, D20A_CAST_SPELL, spell.id ):
# saving throw successful
target_item.obj.float_mesfile_line( 'mes\\spell.mes', 30001 )
else:
# saving throw unsuccessful
target_item.obj.float_mesfile_line( 'mes\\spell.mes', 30002 )
game.particles( 'sp-Fireball-Hit', target_item.obj )
for critter in game.obj_list_cone( target_item.obj, OLC_CRITTERS, 40, -180, 360 ):
if (critter != target_item.obj) and (critter.d20_query(Q_Dead) == 0):
xx,yy = location_to_axis(critter.location)
if critter.map == 5067 and ( xx >= 521 and xx <= 555 ) and ( yy >= 560 and yy <= 610):
critter.float_mesfile_line( 'mes\\skill_ui.mes', 2000 , 1)
game.particles( 'swirled gas', critter )
game.sound(7581,1)
game.sound(7581,1)
else:
game.particles( 'hit-FIRE-burst', critter )
if critter.reflex_save_and_damage( spell.caster, spell.dc, D20_Save_Reduction_Half, D20STD_F_NONE, dam2, D20DT_FIRE, D20DAP_UNSPECIFIED, D20A_CAST_SPELL, spell.id ):
# saving throw successful
critter.float_mesfile_line( 'mes\\spell.mes', 30001 )
else:
# saving throw unsuccessful
critter.float_mesfile_line( 'mes\\spell.mes', 30002 )
spell.num_of_projectiles = spell.num_of_projectiles - 1
if ( spell.num_of_projectiles <= 0 ):
spell.spell_end( spell.id, 1 )
def OnEndSpellCast( spell ):
print "Meteor Swarm OnEndSpellCast" | mit | -3,126,905,346,155,246,600 | 40.068966 | 177 | 0.69037 | false | 2.571634 | false | false | false |
eriknyk/devcon | devcon/lib/orderColumn.py | 1 | 1133 | from tg import expose, flash, require, url, request, redirect, validate, response
from sqlalchemy import asc, desc
from tw.forms.datagrid import Column
import genshi
class SortableColumn(Column):
def __init__(self, title, name):
super(SortableColumn, self).__init__(name)
self._title_ = title
def set_title(self, title):
self._title_ = title
def get_title(self):
current_ordering = request.GET.get('ordercol')
if current_ordering and current_ordering[1:] == self.name:
#current_ordering = '-' if current_ordering[0] == '+' else '+'
if current_ordering[0] == '+':
current_ordering = '-'
else:
current_ordering = '+'
else:
current_ordering = '+'
current_ordering += self.name
new_params = dict(request.GET)
new_params['ordercol'] = current_ordering
new_url = url(request.path_url, params=new_params)
return genshi.Markup('<a href="%(page_url)s">%(title)s</a>' % dict(page_url=new_url, title=self._title_))
title = property(get_title, set_title)
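
# Sketch of intended use in a TurboGears controller (illustrative only; assumes
# tw.forms' DataGrid accepts Column instances in its field list, and the field
# names below are placeholders):
#
#   from tw.forms.datagrid import DataGrid
#   users_grid = DataGrid(fields=[
#       SortableColumn('User name', 'user_name'),
#       SortableColumn('E-mail', 'email_address'),
#   ])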
| lgpl-2.1 | -7,254,929,501,285,495,000 | 34.40625 | 113 | 0.595763 | false | 3.880137 | false | false | false |
computationalBiology/NPLB | NPLB/savefiles.py | 1 | 14566 |
##################### NPLB #####################
# No Promoter Left Behind (NPLB) is a tool to
# find the different promoter architectures within a set of promoter
# sequences. More information can be found in the README file.
# Copyright (C) 2015 Sneha Mitra and Leelavati Narlikar
# NPLB is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# NPLB is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################
import weblogoMod.weblogolib as wl
import plotExtras
import numpy
import os
import gc
import sys
import pickle
from config import *
import copy
def createLogo(sequences, filename, pos, features, eps): # Create logo using Weblogo 3.3
if sequences == []: return
seqs = wl.read_seq_data(sequences)
data = wl.LogoData.from_seqs(seqs)
options = wl.LogoOptions()
options.title = ""
options.size = "large"
options.color_scheme = wl.colorscheme.monochrome
options.stacks_per_line = 100
options.text_font = "Arial-BoldMT"
options.annotate = pos
formt = wl.LogoFormat(data, options)
fout = open(filename + ".png", "w")
wl.png_formatter(data, formt, fout)
fout.close()
if eps == 1:
fout = open(filename + ".eps", "w")
wl.eps_formatter(data, formt, fout)
fout.close()
def sampleOne(l, n, tu, pos, features): # Sample n sequences from the position-specific base probabilities in l
arr = ['A', 'C', 'G']
arr = arr + ['T'] if tu == 0 else arr + ['U']
l1 = map(list, zip(*map(lambda x: numpy.array(x).cumsum().searchsorted(numpy.random.sample(n)).tolist(), l)))
l1 = map(lambda x: "".join(map(lambda y: arr[y], l1[x])), range(n))
return l1
def makeImages(d, dirname, tu, tss, prefix, eps): # Create logo for each architecture of given model
lst = [map(lambda x: " ", range(1, d['features'] + 1)) for i in range(d['arch'])]
numpy.random.seed(5)
l = numpy.zeros(shape=(d['arch'], d['features'], d['featureValues']))
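    # Per-architecture position weight matrix: background (noise) probabilities
    # everywhere, overwritten at each architecture's important positions.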
for i in range(d['arch']):
for j in range(d['features']):
for k in range(d['featureValues']):
l[i][j][k] = float(d['fvNoise'][j][k] + d['alpha'])/(d['fnoise'][j] + d['featureValues']*d['alpha'])
for j in d['pos'][i]:
for k in range(d['featureValues']):
l[i][j][k] = float(d['count'][i][j][k] + d['alpha'])/(d['t'][i] + d['featureValues']*d['alpha'])
lst[i][j] = "*"
lst[i][tss] = "+1"
if tss in d['pos'][i]: lst[i][tss] = "+1*"
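        # Annotate every 25th column with its offset relative to the TSS (labelled +1).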
diffN = 25
c = -diffN
c1 = tss - diffN
while c1 >= 0:
lst[i][c1] = str(c) + lst[i][c1]
c = c - diffN
c1 = c1 - diffN
c = diffN
c1 = tss + diffN - 1
while c1 < d['features']:
lst[i][c1] = str(c) + lst[i][c1]
c = c + diffN
c1 = c1 + diffN
l = map(lambda x: sampleOne(l[x], d['t'][x], tu, d['pos'][x], d['features']), range(d['arch']))
for i in range(d['arch']):
createLogo(l[i], dirname + "/" + prefix + str(i), lst[i], d['features'], eps)
def makehtml(dirname, d, l): # Create HTML file containing logos for best model learned by NPLB
f = open(dirname + modelOutHTML, "w")
f.write("<!DOCTYPE html>\n<html>\n<body>\n<h1>MODEL</h1>\n")
f.write("<h3>Lambda: " + str(d['lambda']) + "</h3>\n")
f.write("<h3>Dataset structure: " + str(d['n']) + " sequences with " + str(d['features']) + " features</h3>\n")
f.write("<h3>Number of architectures in the best model: " + str(d['arch']) + "</h3>\n")
f.write("<h3>Likelihood of best model: " + str(l) + "</h3>\n")
for i in range(d['arch']):
f.write("<h4>Architecture " + str(i+1) + ": " + str(d['t'][i]) + " sequences with " + str(d['posCount'][i]) + " important features</h4>\n")
if d['t'][i] == 0:
f.write("<h5>No Sequences</h5>\n")
else:
f.write("<h5>Sequence logo for the important positions in architecture " + str(i+1) + "</h5>\n")
f.write("<img src=\"" + htmlFiles + "/" + str(i) + ".png\" style=\"border:thin solid black\">\n")
f.write("<p><i>NOTE: All important positions in the logos are followed by an asterisk symbol and are coloured blue</i></p>")
f.write("</body>\n</html>\n")
f.close()
def makehtmlOrig(dirname, d, l, dO): # Create HTML file containing logos for best model learned by NPLB along with the logo of raw data
f = open(dirname + modelOutHTML, "w")
f.write("<!DOCTYPE html>\n<html>\n<body>\n<h1>MODEL</h1>\n")
f.write("<h3>Lambda: " + str(d['lambda']) + "</h3>\n")
f.write("<h3>Dataset structure: " + str(d['n']) + " sequences with " + str(d['features']) + " features</h3>\n")
f.write("<h3>Number of architectures in the best model: " + str(d['arch']) + "</h3>\n")
f.write("<h3>Likelihood of best model: " + str(l) + "</h3>\n")
for i in range(d['arch']):
f.write("<h4>Architecture " + str(i+1) + ": " + str(d['t'][i]) + " sequences with " + str(d['posCount'][i]) + " important features</h4>\n")
if d['t'][i] == 0:
f.write("<h5>No Sequences</h5>\n")
else:
f.write("<h5>Sequence logo for the important positions in architecture " + str(i+1) + "</h5>\n")
f.write("<img src=\"" + htmlFiles + "/" + str(i) + ".png\" style=\"border:thin solid black\">\n")
f.write("<h5>Logo for the raw data</h5>\n")
f.write("<img src=\"" + htmlFiles + "/" + rawDataImgPref + "0.png\" style=\"border:thin solid black\">\n")
f.write("<p><i>NOTE: All important positions in the logos are followed by an asterisk symbol and are coloured blue</i></p>")
f.write("</body>\n</html>\n")
f.close()
def maketxt(dirname, d): # Create text file containing details about the best model
f = open(dirname + modelOutTxt, "w")
f.write("MODEL\n\n")
f.write("Lambda: " + str(d['m']['lambda']) + "\n\n")
f.write("Dataset structure: " + str(d['m']['n']) + " sequences with " + str(d['m']['features']) + " features\n")
f.write("Number of architectures in the best model: " + str(d['m']['arch']) + "\n\n")
for i in range(d['m']['arch']):
f.write("Architecture " + str(i+1) + ": " + str(d['m']['t'][i]) + " sequences with " + str(d['m']['posCount'][i]) + " important features\n")
for j in range(d['m']['posCount'][i]):
f.write(str(d['m']['pos'][i][j]+1) + " (");
f.write(str(float(d['m']['count'][i][d['m']['pos'][i][j]][0] + d['m']['alpha'])/(d['m']['t'][i] + d['m']['featureValues']*d['m']['alpha'])) + " {" + str(d['m']['count'][i][d['m']['pos'][i][j]][0]) + "/" + str(d['m']['t'][i]) + "}")
for k in range(1, d['m']['featureValues']):
f.write(", " + str(float(d['m']['count'][i][d['m']['pos'][i][j]][k] + d['m']['alpha'])/(d['m']['t'][i] + d['m']['featureValues']*d['m']['alpha'])) + " {" + str(d['m']['count'][i][d['m']['pos'][i][j]][k]) + "/" + str(d['m']['t'][i]) + "}")
f.write(")\n")
f.write("\n")
f.close()
f = open(dirname + tempLabelsModelFile, "w")
for i in d['lp']: f.write(str(i) + "\n")
f.close()
os.system("paste" + " " + dirname + tempLabelsModelFile + " " + dirname + tempLabelsFile + " " + ">" + " " + dirname + clusterDetailsFile)
os.system("rm" + " " + dirname + tempLabelsModelFile + " " + dirname + tempLabelsFile)
def makeImage(dirname, model, rfile, tss, imgfile, imgfileeps, inpfile): # Create image matrix of input model
os.system("cut" + " " + "-f1" + " " + dirname + inpfile + " " + ">" + " " + dirname + hiddenLabels)
os.system("rev" + " " + dirname + inpfile + " " + "|" + " " + "cut" + " " + "-f1" + " | rev " + ">" + " " + dirname + hiddenOldData)
indices = [[] for i in range(model['arch'])]
j = 0
with open(dirname + hiddenLabels) as infile:
for line in infile:
tmp = int(line)
indices[model['arch'] - tmp] = indices[model['arch'] - tmp] + [j]
j = j + 1
f = open(dirname + hiddenData, "w")
for i in indices:
j = 0
k = 0
with open(dirname + hiddenOldData) as infile:
for line in infile:
try:
if k == i[j]:
f.write(line)
j = j + 1
k = k + 1
except: pass
f.close()
if sys.platform == "darwin":
os.system("sed" + " " + "-i" + " '' " + "'s/A/0\t/g;s/a/0\t/g;s/C/1\t/g;s/c/1\t/g;s/G/2\t/g;s/g/2\t/g;s/T/3\t/g;s/t/3\t/g;'" + " " + dirname + hiddenData) # Modify input Fasta file to replace A, C, G, and T with 0, 1, 2 and 3 respectively on OS X.
else:
os.system("sed" + " " + "-i" + " " + "'s/A/0\t/g;s/a/0\t/g;s/C/1\t/g;s/c/1\t/g;s/G/2\t/g;s/g/2\t/g;s/T/3\t/g;s/t/3\t/g;'" + " " + dirname + hiddenData) # Modify input Fasta file to replace A, C, G, and T with 0, 1, 2 and 3 respectively on Linux.
f = open(dirname + hiddenDrawLines, "w") # Save lines to be drawn on image matrix
# Save labels for both axes of image matrix
f1 = open(dirname + hiddenDrawLabels1, "w")
c = 0
for i in (model['t'][::-1])[:-1]:
c = c + i
f.write("-0.5\t" + str(c) + "\n" + str(model['features']-0.5) + "\t" + str(c) + "\n\n")
f1.write(str(c) + "\n")
f.close()
f1.close()
f = open(dirname + hiddenDrawLabels2, "w")
c = 0
for i in reversed(range(model['arch'])):
f.write("A" + str(i + 1) + "\t" + str((c + c + model['t'][i])/2) + "\n\n")
c = c + model['t'][i]
f.close()
lst = []
gap = max(int(round(imgMatrixNumGap*model['features'])), 1)
c = -gap
c1 = tss - gap
while c1 >= 0:
lst = [(str(c1), str(c))] + lst
c = c - gap
c1 = c1 - gap
lst = lst + [(str(tss), "+1")]
c = gap
c1 = tss + gap - 1
while c1 < model['features']:
lst = lst + [(str(c1), "+" + str(c))]
c = c + gap
c1 = c1 + gap
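    # Write x-axis tic labels relative to the TSS column: upstream positions get
    # negative offsets, the TSS itself is labelled "+1", and downstream positions
    # get positive offsets.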
f = open(dirname + hiddenDrawXTics, "w")
for (i1, i2) in lst:
f.write(i1 + "\t" + i2 + "\n")
f.close()
os.system("gnuplot" + " " + "-e" + " " + "'filename=\"" + dirname + hiddenData + "\"; var=\"" + dirname + imgfile + "\"; var1=\"" + dirname + hiddenDrawLines + "\"; var2=\"" + dirname + hiddenDrawLabels1 + "\"; var3=\"" + dirname + hiddenDrawLabels2 + "\"; var4=\"" + dirname + hiddenDrawXTics + "\"'" + " " + rfile[0] + " 2> /dev/null")
if imgfileeps != "": os.system("gnuplot" + " " + "-e" + " " + "'filename=\"" + dirname + hiddenData + "\"; var=\"" + dirname + imgfileeps + "\"; var1=\"" + dirname + hiddenDrawLines + "\"; var2=\"" + dirname + hiddenDrawLabels1 + "\"; var3=\"" + dirname + hiddenDrawLabels2 + "\"; var4=\"" + dirname + hiddenDrawXTics + "\"'" + " " + rfile[1] + " 2> /dev/null")
os.system("rm" + " " + "-f" + " " + dirname + "/.??*")
def savecvls(dirname, cvals): # Save cross validation likelihood of the models learned
if cvals == []: return
maxArch = len(cvals)
f = open(dirname + cvLikelihoods, "w")
f.write("Cross validation likelihood of the best models\n\n")
for i in range(maxArch):
f.write(str(cvals[i][0]) + " architectures: ")
if cvals[i][1] == 0: f.write("Not calculated\n")
else: f.write(str(cvals[i][1]) + "\n")
f.close()
def saveDetails(d, dirname, rfile, cvals, tss, flag, pEx, pCol, sBy, eps):
dirname = dirname + "/"
tmp_d_m_pos = d['m']['pos'][0]
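    # If 'pos' is stored as a 0/1 indicator vector over features (heuristic:
    # its first three entries are 0 or 1), convert it into a list of
    # important-feature indices for every architecture.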
if ((tmp_d_m_pos[0] == 0 or tmp_d_m_pos[0] == 1) and (tmp_d_m_pos[1] == 0 or tmp_d_m_pos[1] == 1) and (tmp_d_m_pos[2] == 0 or tmp_d_m_pos[2] == 1)):
for i in range(d['m']['arch']):
d['m']['pos'][i] = filter(lambda x: d['m']['pos'][i][x] == 1, range(d['m']['features']))
if flag == 0: pickle.dump(d['m'], open(dirname + bestModelFile, "wb"))
os.system("rm" + " " + "-rf" + " " + dirname + htmlFiles)
try:
os.mkdir(dirname + htmlFiles)
except OSError:
print "ERROR: Cannot create directory", dirname + htmlFiles
exit(2)
if pEx != '':
if pEx[0] != '' and pCol != 0 and sBy != 0 and flag == 0:
cv = plotExtras.checkValid(pEx[0], sBy, d['m']['n'])
if cv == -1 or cv == 2:
print "Could not sort by values in column", sBy
print "Please check -plotExtra file and/or -sortBy column number"
else:
d = plotExtras.rearrange(d, pEx, sBy)
savecvls(dirname, cvals)
makeImages(d['m'], dirname + htmlFiles, 0, tss, "", eps)
if flag != 0: makehtml(dirname, d['m'], d['l'])
if flag == 0:
# Save information about the raw data
dOrig = {}
dOrig['features'] = d['m']['features']
dOrig['arch'] = 1
dOrig['featureValues'] = d['m']['featureValues']
dOrig['fvNoise'] = map(lambda z: map(lambda y: sum(map(lambda x: d['m']['count'][x][z][y], range(d['m']['arch']))), range(d['m']['featureValues'])), range(d['m']['features']))
dOrig['pos'] = [[]]
dOrig['alpha'] = d['m']['alpha']
dOrig['fnoise'] = [d['m']['n'] for i in range(d['m']['features'])]
dOrig['t'] = [d['m']['n']]
makeImages(dOrig, dirname + htmlFiles, 0, tss, rawDataImgPref, eps)
makehtmlOrig(dirname, d['m'], d['l'], dOrig)
if rfile != 0:
os.system("sed -e 's/^/1\t/' " + dirname + tempLabelsFile + " > " + dirname + rawClusterDetailsFile)
if eps == 0: makeImage(dirname, dOrig, rfile, tss, rawDataImage, "", rawClusterDetailsFile)
else: makeImage(dirname, dOrig, rfile, tss, rawDataImage, rawDataImageEPS, rawClusterDetailsFile)
maketxt(dirname, d)
if rfile != 0 and flag == 0:
if eps == 0: makeImage(dirname, d['m'], rfile, tss, imageMatrix, "", clusterDetailsFile)
else: makeImage(dirname, d['m'], rfile, tss, imageMatrix, imageMatrixEPS, clusterDetailsFile)
if pEx != '':
if pEx[0] != '' and pCol != 0 and flag == 0: plotExtras.plotExt(d, pEx, pCol, dirname)
collected = gc.collect()
| gpl-3.0 | -7,730,404,251,325,200,000 | 48.883562 | 365 | 0.532885 | false | 2.97144 | false | false | false |
mortonjt/American-Gut | scripts/mod2_pcoa.py | 1 | 14487 | #!/usr/bin/env python
import os
import click
from matplotlib import use
use('Agg') # noqa
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from skbio import read, DistanceMatrix
from skbio.stats import isubsample
from skbio.stats.ordination import OrdinationResults
from collections import defaultdict
from collections import OrderedDict
ALPHA = 1.0
LINE_WIDTH = 0.3
LINE_WIDTH_WHITE = 2.0
LINE_WIDTH_BLACK = 1.0
@click.group()
def mod2_pcoa():
pass
@mod2_pcoa.command()
@click.option('--coords', required=True, type=click.Path(
resolve_path=True, readable=True, exists=True),
help='Coordinates file')
@click.option('--mapping_file', required=True, type=click.Path(
resolve_path=True, readable=True, exists=True),
help='Mapping file')
@click.option('--output', required=True, type=click.Path(exists=True,
writable=True, resolve_path=True), help='Output directory')
@click.option('--prefix', required=True, type=str, help='Output file prefix')
@click.option('--samples', required=False, type=str,
help='Comma separated list of samples to print')
def body_site(coords, mapping_file, output, prefix, samples):
"""Generates as many figures as samples in the coordinates file"""
o = read(coords, into=OrdinationResults)
# coordinates
c_df = pd.DataFrame(o.site, o.site_ids)
# mapping file
mf = pd.read_csv(mapping_file, '\t', converters=defaultdict(str),
index_col='#SampleID')
mf = mf.loc[o.site_ids]
if samples is None:
samples = mf.index
else:
samples = set(samples.split(',')).intersection(set(o.site_ids))
samples = mf.loc[samples].index
color_hmp_fecal = sns.color_palette('Paired', 12)[10] # light brown
color_agp_fecal = sns.color_palette('Paired', 12)[11] # dark brown
color_hmp_oral = sns.color_palette('Paired', 12)[0] # light blue
color_agp_oral = sns.color_palette('Paired', 12)[1] # dark blue
color_hmp_skin = sns.color_palette('Paired', 12)[2] # light green
color_agp_skin = sns.color_palette('Paired', 12)[3] # dark green
grp_colors = {'AGP-FECAL': color_agp_fecal,
'AGP-ORAL': color_agp_oral,
'AGP-SKIN': color_agp_skin,
'HMP-FECAL': color_hmp_fecal,
'GG-FECAL': color_hmp_fecal,
'PGP-FECAL': color_hmp_fecal,
'HMP-ORAL': color_hmp_oral,
'PGP-ORAL': color_hmp_oral,
'HMP-SKIN': color_hmp_skin,
'PGP-SKIN': color_hmp_skin}
for sample in samples:
# plot categories as 50 slices with random zorder
for grp, color in grp_colors.iteritems():
sub_coords = c_df[mf.TITLE_BODY_SITE == grp].values
for i in np.array_split(sub_coords, 50):
plt.scatter(i[:, 0], i[:, 1], color=color,
edgecolor=np.asarray(color)*0.6, lw=LINE_WIDTH,
alpha=ALPHA, zorder=np.random.rand())
# plot participant's dot
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=grp_colors[mf.loc[sample]['TITLE_BODY_SITE']],
s=270, edgecolor='w', zorder=1, lw=LINE_WIDTH_WHITE)
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=grp_colors[mf.loc[sample]['TITLE_BODY_SITE']],
s=250, edgecolor=np.asarray(
grp_colors[mf.loc[sample]['TITLE_BODY_SITE']])*0.6,
zorder=2, lw=LINE_WIDTH_BLACK)
plt.axis('off')
my_dpi = 72
figsize = (1000 / my_dpi, 1000 / my_dpi)
out_file = os.path.join(output, '.'.join([prefix, sample, 'pdf']))
plt.savefig(out_file, figsize=figsize, dpi=my_dpi)
plt.close()
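# Illustrative invocation (file names are assumptions; depending on the
# installed click version the command is exposed as "body_site" or "body-site"):
#   mod2_pcoa.py body_site --coords unweighted_unifrac_pc.txt \
#       --mapping_file mapping.txt --output figures --prefix pcoa \
#       --samples 000001234,000005678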
@mod2_pcoa.command()
@click.option('--distmat', required=True, type=click.Path(resolve_path=True,
readable=True,
exists=True),
help='Input distance matrix to subsample nearest sample')
@click.option('--mapping_file', required=True, type=click.Path(
resolve_path=True, readable=True, exists=True),
help='Mapping file')
@click.option('--max', required=True, type=int,
help='Max number of samples per category value')
@click.option('--category', required=True, type=str,
help='The category to subsample in (likely COUNTRY)')
@click.option('--output', required=True, type=click.Path(exists=False,
writable=True, resolve_path=True), help='Output file')
def subsample_dm(distmat, mapping_file, max, category, output):
"""Subsample the distmat to max samples per category value"""
mf = pd.read_csv(mapping_file, '\t', converters=defaultdict(str),
index_col='#SampleID')
id_to_cat = dict(mf[category])
def bin_f(x):
return id_to_cat[x]
dm = read(distmat, into=DistanceMatrix)
dm = dm.filter([id for _, id in isubsample(dm.ids, max, bin_f=bin_f)])
dm.to_file(output)
@mod2_pcoa.command()
@click.option('--coords', required=True, type=click.Path(resolve_path=True,
readable=True, exists=True), help='Coordinates file')
@click.option('--mapping_file', required=True, type=click.Path(
resolve_path=True, readable=True, exists=True),
help='Mapping file')
@click.option('--output', required=True, type=click.Path(exists=True,
writable=True, resolve_path=True), help='Output directory')
@click.option('--prefix', required=True, type=str, help='Output file prefix')
@click.option('--samples', required=False, type=str,
help='Comma separated list of samples to print')
@click.option('--distmat', required=True, type=click.Path(resolve_path=True,
readable=True,
exists=True),
help=('Input distance matrix to find nearest sample (if not '
'present in the coordinates'))
def country(coords, mapping_file, output, prefix, samples, distmat):
"""Generates as many figures as samples in the coordinates file"""
o = read(coords, into=OrdinationResults)
o_id_lookup = set(o.site_ids)
dm = read(distmat, into=DistanceMatrix)
dm_id_lookup = {i: idx for idx, i in enumerate(dm.ids)}
coord_samples_in_dm = {idx for idx, i in enumerate(dm.ids)
if i in o_id_lookup}
# we'll be computing min values, so we need to avoid catching the diagonal
np.fill_diagonal(dm._data, np.inf)
x, y = o.site[:, 0], o.site[:, 1]
# coordinates
c_df = pd.DataFrame(o.site, o.site_ids)
# mapping file
mf = pd.read_csv(mapping_file, '\t', converters=defaultdict(str),
index_col='#SampleID')
# mf = mf.loc[o.site_ids]
if samples is None:
samples = dm.ids[:]
else:
samples = set(samples.split(',')).intersection(set(dm.ids))
samples = mf.loc[samples].index
color_Venezuela = sns.color_palette('Paired', 12)[10]
color_Malawi = sns.color_palette('Paired', 12)[1]
color_Western = sns.color_palette('Paired', 12)[4]
color_Highlight = sns.color_palette('Paired', 12)[5]
color_no_data = (0.5, 0.5, 0.5)
grp_colors = OrderedDict()
grp_colors['no_data'] = color_no_data
grp_colors['Australia'] = color_Western
grp_colors['Belgium'] = color_Western
grp_colors['Canada'] = color_Western
grp_colors['China'] = color_Western
grp_colors['Finland'] = color_Western
grp_colors['France'] = color_Western
grp_colors['Germany'] = color_Western
grp_colors['Great Britain'] = color_Western
grp_colors['Ireland'] = color_Western
grp_colors['Japan'] = color_Western
grp_colors['Netherlands'] = color_Western
grp_colors['New Zealand'] = color_Western
grp_colors['Norway'] = color_Western
grp_colors['Scotland'] = color_Western
grp_colors['Spain'] = color_Western
grp_colors['Switzerland'] = color_Western
grp_colors['Thailand'] = color_Western
grp_colors['United Arab Emirates'] = color_Western
grp_colors['United Kingdom'] = color_Western
grp_colors['United States of America'] = color_Western
grp_colors['Malawi'] = color_Malawi
grp_colors['Venezuela'] = color_Venezuela
for sample_to_plot in samples:
if sample_to_plot in o_id_lookup:
sample = sample_to_plot
else:
# find the closest sample in the distance matrix that is in the
# coordinates data
sample = None
for i in dm[dm_id_lookup[sample_to_plot]].argsort():
if i in coord_samples_in_dm:
sample = dm.ids[i]
break
# this should not ever happen
if sample is None:
raise ValueError("Unable to find a similar sample?")
        # contour plot superimposed
sns.kdeplot(x, y, cmap='bone')
sns.set_context(rc={"lines.linewidth": 0.75})
        # change participant's country's color to color_Highlight unless
# country is Venezuela or Malawi
if (mf.loc[sample_to_plot]['COUNTRY'] != 'Malawi') & (
mf.loc[sample_to_plot]['COUNTRY'] != 'Venezuela'):
grp_colors[mf.loc[sample_to_plot]['COUNTRY']] = color_Highlight
# plot each country except participant's according to colors above
for grp, color in grp_colors.iteritems():
if grp == mf.loc[sample_to_plot]['COUNTRY']:
continue
sub_coords = c_df[mf.COUNTRY == grp]
plt.scatter(sub_coords[0], sub_coords[1], color=color,
edgecolor=np.asarray(color)*0.6, lw=LINE_WIDTH,
alpha=ALPHA)
# now plot participant's country
grp = mf.loc[sample_to_plot]['COUNTRY']
color = grp_colors[grp]
sub_coords = c_df[mf.COUNTRY == grp]
plt.scatter(sub_coords[0], sub_coords[1], color=color,
edgecolor=np.asarray(color)*0.6, lw=LINE_WIDTH,
alpha=ALPHA)
# plot participant's dot
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=grp_colors[mf.loc[sample_to_plot]['COUNTRY']],
s=270, edgecolor='w', zorder=1, lw=LINE_WIDTH_WHITE)
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=grp_colors[mf.loc[sample_to_plot]['COUNTRY']],
s=250, edgecolor=np.asarray(grp_colors[
mf.loc[sample_to_plot]['COUNTRY']])*0.6,
zorder=2, lw=LINE_WIDTH_BLACK)
        # reset participant's country's color to color_Western unless country
# is Venezuela or Malawi
if (mf.loc[sample_to_plot]['COUNTRY'] != 'Malawi') & (
mf.loc[sample_to_plot]['COUNTRY'] != 'Venezuela'):
grp_colors[mf.loc[sample_to_plot]['COUNTRY']] = color_Western
plt.axis('off')
my_dpi = 72
figsize = (1000 / my_dpi, 1000 / my_dpi)
out_file = os.path.join(output, '.'.join([prefix, sample, 'pdf']))
plt.savefig(out_file, figsize=figsize, dpi=my_dpi)
plt.close()
@mod2_pcoa.command()
@click.option('--coords', required=True, type=click.Path(resolve_path=True,
readable=True, exists=True), help='Coordinates file')
@click.option('--mapping_file', required=True, type=click.Path(
resolve_path=True, readable=True, exists=True),
help='Mapping file')
@click.option('--color', required=True, type=str,
help='Metadata category to set color by')
@click.option('--output', required=True, type=click.Path(exists=True,
writable=True, resolve_path=True), help='Output directory')
@click.option('--prefix', required=True, type=str, help='Output file prefix')
@click.option('--samples', required=False, type=str,
help='Comma separated list of samples to print')
def gradient(coords, mapping_file, color, output, prefix, samples):
"""Generates as many figures as samples in the coordinates file"""
o = read(coords, into=OrdinationResults)
# coordinates
c_df = pd.DataFrame(o.site, o.site_ids)
# mapping file
mf = pd.read_csv(mapping_file, '\t', converters=defaultdict(str),
index_col='#SampleID')
mf = mf.loc[o.site_ids]
mf[color] = mf[color].convert_objects(convert_numeric=True)
if samples is None:
samples = mf.index
else:
samples = set(samples.split(',')).intersection(set(o.site_ids))
samples = mf.loc[samples].index
numeric = mf[~pd.isnull(mf[color])]
non_numeric = mf[pd.isnull(mf[color])]
color_array = plt.cm.RdBu(numeric[color]/max(numeric[color]))
for sample in samples:
# plot numeric metadata as colored gradient
ids = numeric.index
x, y = c_df.loc[ids][0], c_df.loc[ids][1]
plt.scatter(x, y, c=numeric[color], cmap=plt.get_cmap('RdBu'),
alpha=ALPHA, lw=LINE_WIDTH, edgecolor=color_array*0.6)
# plt.colorbar()
# plot non-numeric metadata as gray
ids = non_numeric.index
x, y = c_df.loc[ids][0], c_df.loc[ids][1]
plt.scatter(x, y, c='0.5', alpha=ALPHA, lw=LINE_WIDTH, edgecolor='0.3')
# plot individual's dot
try:
color_index = numeric.index.tolist().index(sample)
except ValueError:
color_index = None
if color_index is None:
_color = (0.5, 0.5, 0.5)
else:
_color = color_array[color_index]
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=_color, s=270, edgecolor='w', lw=LINE_WIDTH_WHITE)
plt.scatter(c_df.loc[sample][0], c_df.loc[sample][1],
color=_color, s=250, edgecolor=np.asarray(_color)*0.6,
lw=LINE_WIDTH_BLACK)
plt.axis('off')
my_dpi = 72
figsize = (1000 / my_dpi, 1000 / my_dpi)
out_file = os.path.join(output, '.'.join([prefix, sample, 'pdf']))
plt.savefig(out_file, figsize=figsize, dpi=my_dpi)
plt.close()
if __name__ == '__main__':
mod2_pcoa()
| bsd-3-clause | -6,790,581,772,316,809,000 | 40.03966 | 79 | 0.58908 | false | 3.449286 | false | false | false |
Skydes/Monitoring | src/capture.py | 1 | 4332 | #!/usr/bin/env python
'''
Copyright (c) 2016, Paul-Edouard Sarlin
All rights reserved.
Project: Autonomous Monitoring System
File: capture.py
Date: 2016-08-08
Author: Paul-Edouard Sarlin
Website: https://github.com/skydes/monitoring
'''
from multiprocessing import Process, Event, Lock, Queue
from Queue import Full
from time import sleep
import v4l2capture
import select
import cv2
import os
import logging
import numpy as np
FAIL = False
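# Sentinel used by tryOpenStream(): callers compare the return value with
# "is FAIL" / "is not FAIL" to decide whether the capture device was opened.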
class Capture(Process):
def __init__(self, out_queue, conf, conf_lock):
Process.__init__(self)
self._out_queue = out_queue
self._stop = Event()
self._stop.set()
self._new_conf = Event()
self._new_conf.clear()
self._conf_lock = conf_lock
self._conf = conf
self._stream = None
self._device_name = None
def setDevice(self, device):
self._device_name = device
def openStream(self):
logging.debug("Opening stream.")
try:
self._stream = v4l2capture.Video_device("/dev/"+self._device_name)
except IOError as err_pref:
logging.debug("Could not open default device.")
devices = [x for x in os.listdir("/dev/") if x.startswith("video")]
devices.sort()
for device_new in devices:
try:
self._stream = v4l2capture.Video_device("/dev/"+device_new)
except IOError as err_new:
pass
else:
logging.warning("Device {default} was not available but {new} could be opened.".format(default=self._device_name, new=device_new))
self._device_name = device_new
return
raise err_pref
else:
return
def setupStream(self):
with self._conf_lock:
self._stream.set_format(self._conf["capture-res"][0], self._conf["capture-res"][1], fourcc='MJPG')
self._stream.create_buffers(1)
self._stream.queue_all_buffers()
def newConf(self):
self._new_conf.set()
def run(self):
self._stop.clear()
with self._conf_lock:
conf = self._conf.copy() # Create thread-safe local copy
sleep(float(conf["capture-warmup"])) # Camera warm-up wait
while True :
if self._stop.is_set():
break
if self._new_conf.is_set():
with self._conf_lock:
conf = self._conf.copy()
self._new_conf.clear()
logging.debug("New configuration set: {conf}".format(conf=conf))
if conf["capture"]:
if self._stream is None:
if self.tryOpenStream() is FAIL:
continue
try:
select.select((self._stream,), (), ())
raw = self._stream.read_and_queue()
except IOError as err_first:
self._stream.close()
self.tryOpenStream()
continue
                if raw is None:
                    logging.warning("Grabbed frame is empty.")
                    continue
while True:
try:
self._out_queue.put(cv2.imdecode(np.fromstring(raw, dtype=np.byte), flags=cv2.IMREAD_COLOR), block=False)
except Full:
self._out_queue.get()
else:
break
else:
sleep(1) # Reduce CPU consumption
if self._stream is not None:
self._stream.close()
logging.info("Thread stopped.")
def tryOpenStream(self):
try:
self.openStream()
except IOError as err:
with self._conf_lock:
self._conf["capture"] = False
self._conf["error"]["capture"] = True
self._stream = None
self.newConf()
logging.error("Capture disabled: could not open stream, no device available.")
return FAIL
else:
self.setupStream()
self._stream.start()
return (not FAIL)
def stop(self):
self._stop.set()
| bsd-3-clause | 8,370,857,560,826,415,000 | 31.088889 | 150 | 0.511311 | false | 4.415902 | false | false | false |
mpercich/Calendarize | ios/dateparser/lib/python2.7/site-packages/convertdate/julianday.py | 1 | 1740 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of convertdate.
# http://github.com/fitnr/convertdate
# Licensed under the GPL-v3.0 license:
# http://opensource.org/licenses/MIT
# Copyright (c) 2016, fitnr <fitnr@fakeisthenewreal>
from datetime import datetime
from . import gregorian
from . import julian
from pytz import utc
def to_datetime(jdc):
'''Return a datetime for the input floating point Julian Day Count'''
year, month, day = gregorian.from_jd(jdc)
# in jdc: 0.0 = noon, 0.5 = midnight
# the 0.5 changes it to 0.0 = midnight, 0.5 = noon
frac = (jdc + 0.5) % 1
hours = int(24 * frac)
mfrac = frac * 24 - hours
mins = int(60 * round(mfrac, 6))
sfrac = mfrac * 60 - mins
secs = int(60 * round(sfrac, 6))
msfrac = sfrac * 60 - secs
# down to ms, which are 1/1000 of a second
ms = int(1000 * round(msfrac, 6))
return datetime(year, month, day, int(hours), int(mins), int(secs), int(ms), tzinfo=utc)
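# Illustrative check: JD 2451545.0 is the J2000.0 epoch, so to_datetime(2451545.0)
# should give datetime(2000, 1, 1, 12, 0, tzinfo=utc).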
def from_datetime(dt):
# take account of offset (if there isn't one, act like it's utc)
try:
dt = dt + dt.utcoffset()
except TypeError:
# Assuming UTC
pass
jdc = gregorian.to_jd(dt.year, dt.month, dt.day)
hfrac = dt.hour / 24.
mfrac = round(dt.minute / (24. * 60), 5)
sfrac = round(dt.second / (24. * 60 * 60), 5)
msfrac = dt.microsecond / (24. * 60 * 60 * 1000)
return jdc + hfrac + mfrac + sfrac + msfrac
def to_gregorian(jdc):
return gregorian.from_jd(jdc)
def from_gregorian(year, month, day):
return gregorian.to_jd(year, month, day)
def to_julian(jdc):
return julian.from_jd(jdc)
def from_julian(year, month, day):
return julian.to_jd(year, month, day)
| mit | -8,246,665,454,322,578,000 | 23.166667 | 92 | 0.624713 | false | 2.885572 | false | false | false |
zacharyvoase/zrpc | tests/server_test.py | 1 | 4208 | from __future__ import with_statement
from contextlib import contextmanager
from Queue import Queue
import threading
from bson import BSON
from nose.tools import assert_equal
import zmq
from zrpc.concurrency import Callback
from zrpc.server import Server
from zrpc.registry import Registry
REGISTRY = Registry()
class Unserializable(object):
def __repr__(self):
return '<unserializable>'
@REGISTRY.method
def add(x, y):
return x + y
@REGISTRY.method
def raises_error():
raise Exception("some error occurred")
@REGISTRY.method
def returns_bson_unserializable_obj():
return Unserializable()
@contextmanager
def server(addr, registry, connect=False, context=None):
context = context or zmq.Context.instance()
# Set up a server, tell it to run in a separate thread, and pass in a
# callback so that we can wait for the server to be bound before connecting
# our client. This avoids an issue we were having with inproc:// transport,
# wherein if the client connected before the server had bound, it would
# raise an error.
callback = Callback()
server = Server(addr, registry, connect=connect, context=context)
server_thread = threading.Thread(
target=server.run,
kwargs=dict(callback=callback))
server_thread.daemon = True
server_thread.start()
server_socket = callback.wait()
try:
yield
finally:
context.term()
@contextmanager
def get_client(addr, context=None):
context = context or zmq.Context.instance()
client = context.socket(zmq.REQ)
client.connect(addr)
try:
yield client
finally:
client.close()
@contextmanager
def server_and_client(addr, registry, connect=False, context=None):
context = context or zmq.Context.instance()
with server(addr, registry, connect=connect, context=context):
with get_client(addr, context=context) as client:
yield client
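# The fixtures above give every test a fresh in-process server (inproc://
# transport) and a REQ client; requests and replies travel as BSON-encoded
# dicts carrying "id", "method"/"params" and "result"/"error" fields.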
def test_server_responds_correctly():
with server_and_client('inproc://zrpc', REGISTRY) as client:
client.send(BSON.encode({
"id": "abc",
"method": "add",
"params": [3, 4]}))
assert_equal(BSON(client.recv()).decode(),
{"id": "abc", "result": 7, "error": None})
def test_missing_method_returns_an_error():
with server_and_client('inproc://zrpc', REGISTRY) as client:
client.send(BSON.encode({
"id": "abc",
"method": "doesnotexist",
"params": [3, 4]}))
assert_equal(BSON(client.recv()).decode(),
{"id": "abc",
"result": None,
"error": {
"type": "zrpc.exceptions.MissingMethod",
"args": ["doesnotexist"],
"message": "MissingMethod: doesnotexist"
}})
def test_errors_raised_in_method_are_returned():
with server_and_client('inproc://zrpc', REGISTRY) as client:
client.send(BSON.encode({
"id": "abc",
"method": "raises_error",
"params": []}))
assert_equal(BSON(client.recv()).decode(),
{"id": "abc",
"result": None,
"error": {
"type": "exceptions.Exception",
"args": ["some error occurred"],
"message": "Exception: some error occurred"
}})
def test_bson_unserializable_objects_returned_raise_an_error():
with server_and_client('inproc://zrpc', REGISTRY) as client:
client.send(BSON.encode({
"id": "abc",
"method": "returns_bson_unserializable_obj",
"params": []}))
assert_equal(BSON(client.recv()).decode(),
{"id": "abc",
"result": None,
"error": {
"type": "bson.errors.InvalidDocument",
"args": ["Cannot encode object: <unserializable>"],
"message": "InvalidDocument: Cannot encode object: <unserializable>"
}})
| unlicense | -380,070,639,397,219,400 | 29.492754 | 94 | 0.568679 | false | 4.113392 | false | false | false |
KarlTDebiec/Ramaplot | PDistDataset.py | 1 | 20943 | # -*- coding: utf-8 -*-
# ramaplot.PDistDataset.py
#
# Copyright (C) 2015 Karl T Debiec
# All rights reserved.
#
# This software may be modified and distributed under the terms of the
# BSD license. See the LICENSE file for details.
"""
Manages probability distribution datasets.
"""
################################### MODULES ###################################
from __future__ import absolute_import,division,print_function,unicode_literals
from .myplotspec.Dataset import Dataset
################################### CLASSES ###################################
class PDistDataset(Dataset):
"""
Manages probability distribution datasets.
Generates probability distribution from series of Φ/Ψ values,
representing either the probability of Φ/Ψ, or the expectation value
of a selected measurement (e.g. energy) at that Φ/Ψ.
Input data should be providied in a whitespace-delimited text file
including columns for Φ, Ψ, and any additional data, such as this
output from `cpptraj`'s `multidihedral` command::
#Frame phi:2 psi:2 chip:2 ...
1 -62.1431 144.6768 72.2964 ...
2 -63.2487 151.6551 71.9101 ...
... ... ... ...
"""
@classmethod
def get_cache_key(cls, infile, phikey="phi", psikey="psi",
zkey="free energy", mode="hist", bins=72, bandwidth=5, wrap=True,
mask_cutoff=None,
calc_populations=False, plot_populations=False,
*args, **kwargs):
"""
Generates tuple of arguments to be used as key for dataset
cache.
Arguments documented under :func:`__init__`.
"""
from os.path import expandvars
if zkey in ["free energy", "probability"]:
x_bins, y_bins = cls.process_bins_arg(bins, dim=2)
bins = (tuple(x_bins), tuple(y_bins))
else:
x_bins, y_bins, z_bins = cls.process_bins_arg(bins, dim=3)
bins = (tuple(x_bins), tuple(y_bins), tuple(z_bins))
if mode == "hist":
return (cls, expandvars(infile), phikey, psikey, zkey, mode, bins,
wrap, mask_cutoff, calc_populations, plot_populations)
elif mode == "kde":
return (cls, expandvars(infile), phikey, psikey, zkey, mode, bins,
bandwidth, wrap, mask_cutoff, calc_populations,
plot_populations)
@staticmethod
def process_bins_arg(bins, dim=2):
"""
Processes bin argument.
Arguments:
bins (int, list, ndarray): Bins to use for histogram or grid
to use for kernel density estimate; if int, number of bins
or gride points between -180° and 180° in Φ and Ψ, if list
or ndarray, bins or grid directly
Returns:
out_bins (tuple): Processed bins
"""
import numpy as np
if dim == 2:
if isinstance(bins, int):
x_bins = y_bins = np.linspace(-180, 180, bins + 1)
elif isinstance(bins, list):
if len(bins) == 2:
if isinstance(bins[0], int):
x_bins = np.linspace(-180, 180, bins[0] + 1)
elif isinstance(bins[0], list):
x_bins = np.array(bins[0])
if isinstance(bins[1], int):
y_bins = np.linspace(-180, 180, bins[1] + 1)
elif isinstance(bins[1], list):
y_bins = np.array(bins[1])
else:
x_bins = y_bins = np.array(bins)
elif isinstance(bins, np.ndarray):
x_bins = y_bins = bins
return x_bins, y_bins
elif dim == 3:
if isinstance(bins, int):
x_bins = y_bins = z_bins = np.linspace(-180, 180, bins + 1)
elif isinstance(bins, list):
if len(bins) == 2:
if isinstance(bins[0], int):
x_bins = y_bins = np.linspace(-180, 180, bins[0] + 1)
elif (isinstance(bins[0], list)
or isinstance(bins[0], np.ndarray)):
x_bins = y_bins = np.array(bins[0])
if isinstance(bins[1], int):
z_bins = np.linspace(-180, 180, bins[1] + 1)
elif (isinstance(bins[1], list)
or isinstance(bins[1], np.ndarray)):
z_bins = np.array(bins[1])
elif len(bins) == 3:
if isinstance(bins[0], int):
x_bins = np.linspace(-180, 180, bins[0] + 1)
elif (isinstance(bins[0], list)
or isinstance(bins[0], np.ndarray)):
x_bins = np.array(bins[0])
if isinstance(bins[1], int):
y_bins = np.linspace(-180, 180, bins[1] + 1)
elif (isinstance(bins[1], list)
or isinstance(bins[1], np.ndarray)):
y_bins = np.array(bins[1])
if isinstance(bins[2], int):
z_bins = np.linspace(-180, 180, bins[2] + 1)
elif (isinstance(bins[2], list)
or isinstance(bins[2], np.ndarray)):
z_bins = np.array(bins[2])
else:
x_bins = y_bins = z_bins = np.array(bins)
elif isinstance(bins, np.ndarray):
x_bins = y_bins = z_bins = bins
return x_bins, y_bins, z_bins
else:
raise TypeError()
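    # For example, process_bins_arg(72) returns two identical arrays of 73 bin
    # edges spanning -180 to 180 in 5-degree steps.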
def __init__(self, phikey="phi", psikey="psi", zkey="free energy",
mode="hist", bins=72, bandwidth=5, wrap=True, mask_cutoff=None,
calc_populations=False, plot_populations=False,
verbose=1, debug=0, **kwargs):
"""
Arguments:
infile (str): Path to text input file, may contain environment
variables
phikey (str): Key from which to load Φ
psikey (str): Key from which to load Ψ
zkey (str): Key from which to load distribution; if 'free
energy' or 'probability', the 2D probability density of Φ
and Ψ will be calculated and the selected representation
returned; for other values a third dimension will be loaded
from the `zkey` column of `infile`, the 3D probability
density of Φ, Ψ, and `zkey` will be calculated, and the
expectation value of `zkey` as a function of Φ and Ψ will be
returned
mode (str): Method of calculating probability distribution;
may be either 'hist', to use a histogram, or 'kde', to use a
kernel density estimate
bins (int, list, ndarray): Bins to use for histogram or grid
to use for kernel density estimate; if int, number of bins
or gride points between -180° and 180° in Φ and Ψ, if list
or ndarray, bins or grid directly
bandwidth (float, optional): Bandwidth to use for kernel
density estimate
wrap (bool): Wrap x and y coordinates between 180° and 360° to
between -180° and 0°
wrap_z (bool): Wrap z coordinates between -180° and 0 to between 180°
and 360°; probably only useful for plotting ω
mask_cutoff (float): Cutoff beyond which distribution is
masked, if `zkey` is 'free energy', this is a the maximum
free energy above which the mask will be set, and if `zkey`
is 'probability', this is the minimum probability below
which the mask will be set
hist_kw: Keyword arguments passed to numpy.histogram2d or
numpy.histogramdd
kde_kw: Keyword arguments passed to
sklearn.neighbors.KernelDensity
verbose (int): Level of verbose output
debug (int): Level of debug output
kwargs (dict): Additional keyword arguments
.. todo:
- Fix and validate 3D KDE
- Auto-detect phikey and psikey
- Support periodicic kernel density estimate
- Support variable bandwidth kernel density estimate
"""
import numpy as np
import pandas as pd
from .myplotspec import multi_get_copy
# Manage arguments
if str(mode.lower()) not in ["hist", "kde", "none"]:
raise ValueError("Argument 'mode' does not support provided " +
"value '{0}', may be 'hist', 'kde', or 'none'".format(mode))
read_csv_kw = dict(delim_whitespace=True, index_col=0)
read_csv_kw.update(kwargs.pop("read_csv_kw", {}))
# Load data
dataframe = self.load_dataset(verbose=verbose, debug=debug,
read_csv_kw=read_csv_kw, **kwargs).dataframe
if wrap:
dataframe[phikey][dataframe[phikey] > 180] -= 360
dataframe[psikey][dataframe[psikey] > 180] -= 360
# Option 0: Store Φ, Ψ
if mode == "none":
# Store data in instance variable
self.x = dataframe[phikey]
self.y = dataframe[psikey]
# Option 1: Calculate probability and free energy of Φ, Ψ
elif zkey in ["free energy", "probability"]:
x_bins, y_bins = self.process_bins_arg(bins, dim=2)
x_centers = (x_bins[:-1] + x_bins[1:]) / 2
y_centers = (y_bins[:-1] + y_bins[1:]) / 2
x_width = np.mean(x_centers[1:] - x_centers[:-1])
y_width = np.mean(y_centers[1:] - y_centers[:-1])
# Option 1a: Use a histogram (fast but noisy)
if mode == "hist":
if verbose >= 1:
print("calculating probability distribution of " +
"'{0}' and '{1}' using a ".format(phikey, psikey) +
"histogram")
hist_kw = dict(normed=False)
hist_kw.update(kwargs.get("hist_kw", {}))
hist_kw["bins"] = hist_kw.get("bins", [x_bins, y_bins])
probability, _, _ = np.histogram2d(
dataframe[phikey], dataframe[psikey], **hist_kw)
# Option 1b: Use a kernel density estimate (smooth but slow)
elif mode == "kde":
if verbose >= 1:
print("calculating probability distribution of " +
"'{0}' and '{1}' using a ".format(phikey, psikey) +
"kernel density estimate")
from sklearn.neighbors import KernelDensity
kde_kw = multi_get_copy("kde_kw", kwargs, {})
kde_kw["bandwidth"] = kde_kw.get("bandwidth", bandwidth)
xg, yg = np.meshgrid(x_centers, y_centers)
xyg = np.vstack([yg.ravel(), xg.ravel()]).T
samples = np.column_stack((dataframe[phikey],
dataframe[psikey]))
kde = KernelDensity(**kde_kw)
kde.fit(samples)
probability_series = np.exp(kde.score_samples(xyg))
probability = np.zeros((x_centers.size, y_centers.size))
for phi, psi, p in np.column_stack((xyg, probability_series)):
x_index = np.where(x_centers == phi)[0][0]
y_index = np.where(y_centers == psi)[0][0]
probability[x_index, y_index] = p
# Normalize and calculate free energy
probability /= np.nansum(probability)
free_energy = -1 * np.log(probability)
free_energy[np.isinf(free_energy)] = np.nan
free_energy -= np.nanmin(free_energy)
# Store data in instance variable
self.x_centers = x_centers
self.y_centers = y_centers
self.x_width = x_width
self.y_width = y_width
self.x_bins = x_bins
self.y_bins = y_bins
self.x = dataframe[phikey]
self.y = dataframe[psikey]
# Option 2: Calculate mean value of a third observable as a
# function of Φ, Ψ
else:
x_bins, y_bins, z_bins = self.process_bins_arg(bins, dim=3)
x_centers = (x_bins[:-1] + x_bins[1:]) / 2
y_centers = (y_bins[:-1] + y_bins[1:]) / 2
z_centers = (z_bins[:-1] + z_bins[1:]) / 2
x_width = np.mean(x_centers[1:] - x_centers[:-1])
y_width = np.mean(y_centers[1:] - y_centers[:-1])
if kwargs.get("wrap_z"):
dataframe[zkey][dataframe[zkey] < 0] += 360
# Option 2a: Use a histogram (fast but noisy)
if mode == "hist":
if verbose >= 1:
print("calculating mean value of '{0}'".format(zkey) +
"as a function of '{0}' and ".format(phikey) +
"'{0}' using a histogram".format(psikey))
hist_kw = dict(normed=True)
hist_kw.update(kwargs.get("hist_kw", {}))
hist_kw["bins"] = hist_kw.get("bins", [x_bins, y_bins, z_bins])
prob_xyz, _ = np.histogramdd(np.column_stack(
(dataframe[phikey], dataframe[psikey], dataframe[zkey])),
**hist_kw)
probability = np.sum(prob_xyz, axis=2)
prob_z_given_xy = prob_xyz / probability[:,:,np.newaxis]
weighted_z = prob_z_given_xy*z_centers[np.newaxis,np.newaxis,:]
mean_z = np.sum(weighted_z, axis=2)
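                # mean_z[i, j] is the conditional expectation of zkey in bin
                # (i, j): sum over k of P(z_k | phi_i, psi_j) * z_k.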
# Option 2b: Use a kernel density estimate (smooth but slow)
elif mode == "kde":
                raise NotImplementedError(
                    "3D KDE mode is not yet validated; see the TODO in the docstring")
from copy import copy
from sklearn.neighbors import KernelDensity
kde_kw = multi_get_copy("kde_kw", kwargs, {})
kde_kw["bandwidth"] = kde_kw.get("bandwidth", bandwidth)
# Only a single bandwidth is supported; scale z
# dimension to span range of 120-240
# scale_range = 340
z = copy(dataframe[zkey])
# z -= z.min() # shift bottom to 0
# z_range = z.max() # save max
# z *= (scale_range / z_range)
# z += (360 - scale_range) / 2 # Give buffer on top and bottom
xg, yg, zg = np.meshgrid(x_centers, y_centers, z_centers)
xyzg = np.vstack([xg.ravel(), yg.ravel(), zg.ravel()]).T
samples = np.column_stack((dataframe[phikey],
dataframe[psikey], z))
kde = KernelDensity(**kde_kw)
kde.fit(samples)
probability_series = np.exp(kde.score_samples(xyzg))
prob_xyz = np.zeros((x_centers.size, y_centers.size,
z_centers.size), np.float) * np.nan
for phi,psi,z,p in np.column_stack((xyzg, probability_series)):
x_index = np.where(x_centers == phi)[0][0]
y_index = np.where(y_centers == psi)[0][0]
z_index = np.where(z_centers == z)[0][0]
prob_xyz[x_index, y_index, z_index] = p
prob_xyz /= np.sum(prob_xyz)
probability = np.sum(prob_xyz, axis=2)
prob_z_given_xy = prob_xyz / probability[:,:,np.newaxis]
weighted_z = prob_z_given_xy*z_centers[np.newaxis,np.newaxis,:]
mean_z = np.sum(weighted_z, axis=2)
# mean_z -= (360 - scale_range) / 2 # Shift back down
# mean_z *= (z_range / scale_range) # Back from degrees to E
# free_energy *= 627.503 # Convert to kcal/mol
# Normalize and calculate free energy
probability /= np.nansum(probability)
free_energy = -1 * np.log(probability)
free_energy[np.isinf(free_energy)] = np.nan
free_energy -= np.nanmin(free_energy)
# Store data in instance variable
self.x_centers = x_centers
self.y_centers = y_centers
self.x_width = x_width
self.y_width = y_width
self.x_bins = x_bins
self.y_bins = y_bins
self.x = dataframe[phikey]
self.y = dataframe[psikey]
# Prepare mask
if mode == "none":
pass
elif zkey == "probability":
self.dist = probability
if mask_cutoff is not None:
self.mask = np.ma.masked_where(np.logical_and(
probability >= mask_cutoff,
np.logical_not(np.isnan(probability))),
np.ones_like(probability))
else:
self.mask = np.ma.masked_where(
np.logical_not(np.isnan(free_energy)),
np.ones_like(free_energy))
elif zkey == "free energy":
self.dist = free_energy
if mask_cutoff is not None:
self.mask = np.ma.masked_where(np.logical_and(
free_energy <= mask_cutoff,
np.logical_not(np.isnan(free_energy))),
np.ones_like(free_energy))
else:
self.mask = np.ma.masked_where(
np.logical_not(np.isnan(free_energy)),
np.ones_like(free_energy))
else:
self.dist = mean_z
if mask_cutoff is not None:
self.mask = np.ma.masked_where(np.logical_and(
free_energy <= mask_cutoff,
np.logical_not(np.isnan(free_energy))),
np.ones_like(free_energy))
else:
self.mask = np.ma.masked_where(
np.logical_not(np.isnan(free_energy)),
np.ones_like(free_energy))
# Calculate state populations
if calc_populations:
states = kwargs.get("states", [
("β", -151, 151),
("PPII", -66, 140),
("ξ", -145, 55),
("γ'", -81, 65),
("α", -70, -25),
("$L_α$", 55, 45),
("γ", 73, -35),
("PPII'", 56, -124),
("plateau", -100, -130)])
state_radius = kwargs.get("state_radius", 45)
distances = np.zeros((len(states), len(x_centers), len(y_centers)))
xs = []
ys = []
for i, (state, x, y) in enumerate(states):
xs += [x]
ys += [y]
# There must be a better way to do this, but this works
for j, xc in enumerate(x_centers):
for k, yc in enumerate(y_centers):
                        # Use the shortest angular difference on each periodic axis
                        dx = (xc - x)
                        if dx <= -180 or dx >= 180:
                            dx = 360 - abs(dx)
                        dy = (yc - y)
                        if dy <= -180 or dy >= 180:
                            dy = 360 - abs(dy)
distances[i,j,k] = np.sqrt(dx**2 + dy**2)
assignments = np.argmin(distances, axis=0)
assignments[np.min(distances, axis=0) >= state_radius] = \
len(states) + 1
index, state_populations = [], []
for i, (state, x, y) in enumerate(states):
index += [state]
state_populations += [(x, y,
np.nansum(probability[assignments==i]))]
state_populations = pd.DataFrame(state_populations, index=index,
columns=["Φ center", "Ψ center", "population"])
self.state_populations = state_populations
if verbose >= 1:
print(state_populations)
if plot_populations:
self.dist = assignments
self.mask = np.ma.masked_where(
np.logical_not(assignments == len(states) + 1),
np.ones_like(assignments))
self.x = np.array(xs)
self.y = np.array(ys)
label, label_kw = [], []
from .myplotspec import multi_get_copy
default_label_kw = multi_get_copy(["default_label_kw",
"label_kw"], kwargs, {})
for index, row in state_populations.iterrows():
label += ["{0}\n{1:2d}%".format(index,
int(row["population"]*100))]
label_kw += [default_label_kw.copy()]
label_kw[-1]["x"] = row["Φ center"]
label_kw[-1]["y"] = row["Ψ center"]
self.label = label
self.label_kw = label_kw
| bsd-3-clause | 1,198,580,034,747,305,000 | 43.743041 | 80 | 0.494664 | false | 4.005943 | false | false | false |
mattcieslak/DSI2 | dsi2/ui/volume_slicer.py | 1 | 10860 | #!/usr/bin/env python
import numpy as np
import nibabel as nib
# Traits stuff
from traits.api import ( HasTraits, Instance, Array,
Bool, Dict, on_trait_change, Range, Color, Any, Int,
DelegatesTo, CInt, Property, File )
from traitsui.api import View, Item, VGroup, \
HGroup, Group, RangeEditor, ColorEditor, VSplit
from mayavi import mlab
from mayavi.core.api import PipelineBase, Source
from mayavi.core.ui.api import SceneEditor
from mayavi.tools.mlab_scene_model import MlabSceneModel
from tvtk.pyface.scene import Scene
from tvtk.api import tvtk
from chaco.chaco_plot_editor import ChacoPlotItem
from chaco.api import Plot, ArrayPlotData, gray
from enable.component_editor import ComponentEditor
from ..streamlines.track_math import sphere_around_ijk
from ..volumes.scalar_volume import ScalarVolume
from .chaco_slice import Slices
import os
from ..volumes import get_MNI152
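# SlicerPanel shows a reference volume in a Mayavi scene through three
# image-plane widgets plus a movable, cube-rendered "search sphere"; listeners
# can watch the coordsupdated counter to react to new sphere_coords selections.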
class SlicerPanel(HasTraits):
# path to a nifti file that holds the data
reference_volume = File
scene3d_inited = Bool(False)
# MNI_152 objects. data holds the np array, data_src is for mayavi
data = Array(value=np.zeros((50,50,50)))
data_src = Instance(Source)
# --- Sphere configuration ---
# position of the cursor
# Radius of the sphere
radius = Range(low=0,high=14,value=1)
extent_x = Int(50)
extent_y = Int(50)
extent_z = Int(50)
sphere_x = Range(low=0, high='extent_x')
sphere_y = Range(low=0, high='extent_y')
sphere_z = Range(low=0, high='extent_z')
sphere_coords = Array
sphere_color = Color((255,0,0,255))
sphere_visible = Bool(True)
coordsupdated = Int(0)
# Spere's representation on the screen
sphere_viz = Instance(PipelineBase)
widgets_drawn = Bool(False)
x_slice_plane = Instance(PipelineBase)
y_slice_plane = Instance(PipelineBase)
z_slice_plane = Instance(PipelineBase)
# Slice plots
slice_plots = Instance(Slices)
x = DelegatesTo('slice_plots')
y = DelegatesTo('slice_plots')
z = DelegatesTo('slice_plots')
# 3d image plane widget
scene3d = Instance(MlabSceneModel, ())
camera_initialized = False
def __init__(self, **traits):
""" Creates a panel for viewing a 3d Volume.
Parameters:
===========
"""
super(SlicerPanel,self).__init__(**traits)
self.sphere_coords
self.scene3d
self.sphere_viz
@on_trait_change("reference_volume")
def render_volume(self):
if not os.path.exists(self.reference_volume):
print "No such file", self.reference_volume
return
print "Opening", self.reference_volume
try:
data = nib.load(self.reference_volume)
except Exception, e:
print "Unable to load data", e
return
# Remove imageplane widgets
self.scene3d.disable_render = True
if self.widgets_drawn:
self.x_slice_plane.remove()
self.y_slice_plane.remove()
self.z_slice_plane.remove()
# Set data and update the data_src
self.data = data.get_data()
# Change the extents to match the new volume
self.extent_x, self.extent_y, self.extent_z = self.data.shape
# Send to mayavi
self.data_src = mlab.pipeline.scalar_field(self.data,
figure=self.scene3d.mayavi_scene,
name='Data',colormap="gray")
# Send the new data to the slices
self.slice_plots.set_volume(self.data)
# Update the sphere to be in the middle of this volume
self.sphere_x = self.extent_x / 2
self.sphere_y = self.extent_y / 2
self.sphere_z = self.extent_z / 2
self.x_slice_plane = self.make_x_slice_plane()
self.x_slice_plane.ipw.sync_trait(
"slice_position", self, alias="x")
self.x_slice_plane.ipw.sync_trait(
"enabled", self.slice_plots, alias="x_slice_plane_visible")
self.y_slice_plane = self.make_y_slice_plane()
self.y_slice_plane.ipw.sync_trait(
"slice_position", self, alias="y")
self.y_slice_plane.ipw.sync_trait(
"enabled", self.slice_plots, alias="y_slice_plane_visible")
self.z_slice_plane = self.make_z_slice_plane()
self.z_slice_plane.ipw.sync_trait(
"slice_position", self, alias="z")
self.z_slice_plane.ipw.sync_trait(
"enabled", self.slice_plots, alias="z_slice_plane_visible")
self.scene3d.disable_render = False
self.widgets_drawn = True
def _slice_plots_default(self):
return Slices()
def _sphere_viz_default(self):
# different between wx and qt
try:
color_tuple = self.sphere_color.toTuple()
except:
color_tuple = self.sphere_color
try:
pts = mlab.points3d(
self.sphere_coords[:,0],
self.sphere_coords[:,1],
self.sphere_coords[:,2],
mode='cube',
scale_factor=1,
figure = self.scene3d.mayavi_scene,
color = (color_tuple[0]/255.,
color_tuple[1]/255.,
color_tuple[2]/255.)
)
except:
pts = mlab.points3d(
self.sphere_coords[:,0],
self.sphere_coords[:,1],
self.sphere_coords[:,2],
mode='cube',
scale_factor=1,
figure = self.scene3d.mayavi_scene,
color = (1.,0.,0.)
)
return pts
def _sphere_coords_default(self):
return np.array(sphere_around_ijk(
self.radius, np.array([self.x, self.y, self.z])))
def _sphere_visible_changed(self):
self.sphere_viz.visible = self.sphere_visible
def _sphere_color_changed(self):
print "changing sphere color to", self.sphere_color
# different between wx and qt
try:
color_tuple = self.sphere_color.toTuple()
except:
color_tuple = self.sphere_color
self.sphere_viz.actor.property.color = (
color_tuple[0]/255.,
color_tuple[1]/255.,
color_tuple[2]/255.)
def make_x_slice_plane(self):
ipw = mlab.pipeline.image_plane_widget(
self.data_src,
figure=self.scene3d.mayavi_scene,
plane_orientation='x_axes',
name='Cut x',colormap="gray"
)
ipw.ipw.slice_position=self.x
ipw.ipw.interaction = 0
return ipw
def make_y_slice_plane(self):
ipw = mlab.pipeline.image_plane_widget(
self.data_src, colormap='gray',
figure=self.scene3d.mayavi_scene,
plane_orientation='y_axes',
name='Cut y')
ipw.ipw.slice_position=self.y
ipw.ipw.interaction = 0
return ipw
def make_z_slice_plane(self):
ipw = mlab.pipeline.image_plane_widget(
self.data_src,colormap='gray',
figure=self.scene3d.mayavi_scene,
plane_orientation='z_axes',
name='Cut z')
ipw.ipw.slice_position=self.z
ipw.ipw.interaction = 0
return ipw
@on_trait_change('sphere_x,sphere_y,sphere_z,radius')
def _update_sphere(self):
self.disable_render = True
self.sphere_coords = np.array(sphere_around_ijk(
self.radius, np.array([self.sphere_x,
self.sphere_y,
self.sphere_z])))
self.sphere_viz.mlab_source.reset(
x=self.sphere_coords[:,0],
y=self.sphere_coords[:,1],
z=self.sphere_coords[:,2],
)
self.disable_render = False
self.coordsupdated += 1
def arbitrary_voxel_query(self,new_indices):
self.disable_render = True
self.sphere_coords = np.array(new_indices)
self.sphere_viz.mlab_source.reset(
x=self.sphere_coords[:,0],
y=self.sphere_coords[:,1],
z=self.sphere_coords[:,2],
)
self.disable_render = False
self.coordsupdated += 1
@on_trait_change('scene3d.activated')
def display_scene3d(self):
if self.scene3d_inited: return
self.scene3d.mlab.view(40, 50)
self.scene3d.scene.background = (0, 0, 0)
# Keep the view always pointing up
self.scene3d.scene.interactor.interactor_style = \
tvtk.InteractorStyleTerrain()
#self.scene3d.mayavi_scene.scene.light_manager.light_mode = "vtk"
self.scene3d_inited = True
@on_trait_change('x_slice_plane_visible,y_slice_plane_visible,z_slice_plane_visible')
def update_slice_opacity(self,obj,name,old,new):
if name=='x_slice_plane_visible':
self.x_slice_plane.ipw.texture_visibility = new
if name=="y_slice_plane_visible":
self.y_slice_plane.ipw.texture_visibility = new
if name=="z_slice_plane_visible":
self.z_slice_plane.ipw.texture_visibility = new
sphere_widgets = VGroup(
Item(name="sphere_x",
editor=RangeEditor(
auto_set=False,
mode="slider",
low=0,
high_name="extent_x",
format = "%i")),
Item(name="sphere_y",
editor=RangeEditor(
auto_set=False,
mode="slider",
low=0,
high_name='extent_y',
format = "%i")),
Item(name="sphere_z",
editor=RangeEditor(
auto_set=False,
mode="slider",
low=0,
high_name='extent_z',
format = "%i")),
Item(name="radius"),
Item(name="sphere_color"),
Item(name="sphere_visible"),
label="Search Sphere",
show_border=True
)
plot3d_group = Group(
Item('scene3d',
editor=SceneEditor(scene_class=Scene),
height=500, width=500),
show_labels=False)
slice_panel_group = HGroup(sphere_widgets,
Item('slice_plots',style="custom"),
show_labels=False)
# ----- Views -----
browser_view = View(
VSplit(
plot3d_group,
slice_panel_group
)
)
traits_view = View(
slice_panel_group
) | gpl-3.0 | -7,945,498,863,849,230,000 | 33.699681 | 89 | 0.550829 | false | 3.770833 | false | false | false |